| OLD | NEW |
| 1 /* | 1 /* |
| 2 * Copyright 2017 The WebRTC project authors. All Rights Reserved. | 2 * Copyright 2017 The WebRTC project authors. All Rights Reserved. |
| 3 * | 3 * |
| 4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
| 5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
| 6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
| 7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
| 8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
| 9 */ | 9 */ |
| 10 | 10 |
| 11 #include "webrtc/sdk/android/src/jni/videodecoderwrapper.h" | 11 #include "webrtc/sdk/android/src/jni/videodecoderwrapper.h" |
| 12 | 12 |
| 13 #include "webrtc/api/video/video_frame.h" | 13 #include "webrtc/api/video/video_frame.h" |
| 14 #include "webrtc/modules/video_coding/include/video_codec_interface.h" | 14 #include "webrtc/modules/video_coding/include/video_codec_interface.h" |
| 15 #include "webrtc/modules/video_coding/utility/vp8_header_parser.h" | 15 #include "webrtc/modules/video_coding/utility/vp8_header_parser.h" |
| 16 #include "webrtc/modules/video_coding/utility/vp9_uncompressed_header_parser.h" | 16 #include "webrtc/modules/video_coding/utility/vp9_uncompressed_header_parser.h" |
| 17 #include "webrtc/rtc_base/logging.h" | 17 #include "webrtc/rtc_base/logging.h" |
| 18 #include "webrtc/sdk/android/src/jni/classreferenceholder.h" | 18 #include "webrtc/sdk/android/src/jni/classreferenceholder.h" |
| 19 | 19 |
| 20 namespace webrtc_jni { | 20 namespace webrtc { |
| 21 namespace jni { |
| 21 | 22 |
| 22 VideoDecoderWrapper::VideoDecoderWrapper(JNIEnv* jni, jobject decoder) | 23 VideoDecoderWrapper::VideoDecoderWrapper(JNIEnv* jni, jobject decoder) |
| 23 : android_video_buffer_factory_(jni), | 24 : android_video_buffer_factory_(jni), |
| 24 decoder_(jni, decoder), | 25 decoder_(jni, decoder), |
| 25 encoded_image_class_(jni, FindClass(jni, "org/webrtc/EncodedImage")), | 26 encoded_image_class_(jni, FindClass(jni, "org/webrtc/EncodedImage")), |
| 26 frame_type_class_(jni, | 27 frame_type_class_(jni, |
| 27 FindClass(jni, "org/webrtc/EncodedImage$FrameType")), | 28 FindClass(jni, "org/webrtc/EncodedImage$FrameType")), |
| 28 settings_class_(jni, FindClass(jni, "org/webrtc/VideoDecoder$Settings")), | 29 settings_class_(jni, FindClass(jni, "org/webrtc/VideoDecoder$Settings")), |
| 29 video_frame_class_(jni, FindClass(jni, "org/webrtc/VideoFrame")), | 30 video_frame_class_(jni, FindClass(jni, "org/webrtc/VideoFrame")), |
| 30 video_codec_status_class_(jni, | 31 video_codec_status_class_(jni, |
| (...skipping 38 matching lines...) |
| 69 jni->GetMethodID(*video_codec_status_class_, "getNumber", "()I"); | 70 jni->GetMethodID(*video_codec_status_class_, "getNumber", "()I"); |
| 70 | 71 |
| 71 integer_constructor_ = jni->GetMethodID(*integer_class_, "<init>", "(I)V"); | 72 integer_constructor_ = jni->GetMethodID(*integer_class_, "<init>", "(I)V"); |
| 72 int_value_method_ = jni->GetMethodID(*integer_class_, "intValue", "()I"); | 73 int_value_method_ = jni->GetMethodID(*integer_class_, "intValue", "()I"); |
| 73 | 74 |
| 74 initialized_ = false; | 75 initialized_ = false; |
| 75 // QP parsing starts enabled and we disable it if the decoder provides QP values. | 76 // QP parsing starts enabled and we disable it if the decoder provides QP values. |
| 76 qp_parsing_enabled_ = true; | 77 qp_parsing_enabled_ = true; |
| 77 } | 78 } |
| 78 | 79 |
| 79 int32_t VideoDecoderWrapper::InitDecode( | 80 int32_t VideoDecoderWrapper::InitDecode(const VideoCodec* codec_settings, |
| 80 const webrtc::VideoCodec* codec_settings, | 81 int32_t number_of_cores) { |
| 81 int32_t number_of_cores) { | |
| 82 JNIEnv* jni = AttachCurrentThreadIfNeeded(); | 82 JNIEnv* jni = AttachCurrentThreadIfNeeded(); |
| 83 ScopedLocalRefFrame local_ref_frame(jni); | 83 ScopedLocalRefFrame local_ref_frame(jni); |
| 84 | 84 |
| 85 codec_settings_ = *codec_settings; | 85 codec_settings_ = *codec_settings; |
| 86 number_of_cores_ = number_of_cores; | 86 number_of_cores_ = number_of_cores; |
| 87 return InitDecodeInternal(jni); | 87 return InitDecodeInternal(jni); |
| 88 } | 88 } |
| 89 | 89 |
| 90 int32_t VideoDecoderWrapper::InitDecodeInternal(JNIEnv* jni) { | 90 int32_t VideoDecoderWrapper::InitDecodeInternal(JNIEnv* jni) { |
| 91 jobject settings = | 91 jobject settings = |
| (...skipping 14 matching lines...) |
| 106 } | 106 } |
| 107 | 107 |
| 108 // The decoder was reinitialized, so re-enable QP parsing in case it stops | 108 // The decoder was reinitialized, so re-enable QP parsing in case it stops |
| 109 // providing QP values. | 109 // providing QP values. |
| 110 qp_parsing_enabled_ = true; | 110 qp_parsing_enabled_ = true; |
| 111 | 111 |
| 112 return HandleReturnCode(jni, ret); | 112 return HandleReturnCode(jni, ret); |
| 113 } | 113 } |
| 114 | 114 |
| 115 int32_t VideoDecoderWrapper::Decode( | 115 int32_t VideoDecoderWrapper::Decode( |
| 116 const webrtc::EncodedImage& input_image, | 116 const EncodedImage& input_image, |
| 117 bool missing_frames, | 117 bool missing_frames, |
| 118 const webrtc::RTPFragmentationHeader* fragmentation, | 118 const RTPFragmentationHeader* fragmentation, |
| 119 const webrtc::CodecSpecificInfo* codec_specific_info, | 119 const CodecSpecificInfo* codec_specific_info, |
| 120 int64_t render_time_ms) { | 120 int64_t render_time_ms) { |
| 121 if (!initialized_) { | 121 if (!initialized_) { |
| 122 // Most likely initializing the codec failed. | 122 // Most likely initializing the codec failed. |
| 123 return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE; | 123 return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE; |
| 124 } | 124 } |
| 125 | 125 |
| 126 JNIEnv* jni = AttachCurrentThreadIfNeeded(); | 126 JNIEnv* jni = AttachCurrentThreadIfNeeded(); |
| 127 ScopedLocalRefFrame local_ref_frame(jni); | 127 ScopedLocalRefFrame local_ref_frame(jni); |
| 128 | 128 |
| 129 FrameExtraInfo frame_extra_info; | 129 FrameExtraInfo frame_extra_info; |
| 130 frame_extra_info.capture_time_ms = input_image.capture_time_ms_; | 130 frame_extra_info.capture_time_ms = input_image.capture_time_ms_; |
| 131 frame_extra_info.timestamp_rtp = input_image._timeStamp; | 131 frame_extra_info.timestamp_rtp = input_image._timeStamp; |
| 132 frame_extra_info.qp = | 132 frame_extra_info.qp = |
| 133 qp_parsing_enabled_ ? ParseQP(input_image) : rtc::Optional<uint8_t>(); | 133 qp_parsing_enabled_ ? ParseQP(input_image) : rtc::Optional<uint8_t>(); |
| 134 frame_extra_infos_.push_back(frame_extra_info); | 134 frame_extra_infos_.push_back(frame_extra_info); |
| 135 | 135 |
| 136 jobject jinput_image = | 136 jobject jinput_image = |
| 137 ConvertEncodedImageToJavaEncodedImage(jni, input_image); | 137 ConvertEncodedImageToJavaEncodedImage(jni, input_image); |
| 138 jobject ret = | 138 jobject ret = |
| 139 jni->CallObjectMethod(*decoder_, decode_method_, jinput_image, nullptr); | 139 jni->CallObjectMethod(*decoder_, decode_method_, jinput_image, nullptr); |
| 140 return HandleReturnCode(jni, ret); | 140 return HandleReturnCode(jni, ret); |
| 141 } | 141 } |
| 142 | 142 |
| 143 int32_t VideoDecoderWrapper::RegisterDecodeCompleteCallback( | 143 int32_t VideoDecoderWrapper::RegisterDecodeCompleteCallback( |
| 144 webrtc::DecodedImageCallback* callback) { | 144 DecodedImageCallback* callback) { |
| 145 callback_ = callback; | 145 callback_ = callback; |
| 146 return WEBRTC_VIDEO_CODEC_OK; | 146 return WEBRTC_VIDEO_CODEC_OK; |
| 147 } | 147 } |
| 148 | 148 |
| 149 int32_t VideoDecoderWrapper::Release() { | 149 int32_t VideoDecoderWrapper::Release() { |
| 150 JNIEnv* jni = AttachCurrentThreadIfNeeded(); | 150 JNIEnv* jni = AttachCurrentThreadIfNeeded(); |
| 151 ScopedLocalRefFrame local_ref_frame(jni); | 151 ScopedLocalRefFrame local_ref_frame(jni); |
| 152 jobject ret = jni->CallObjectMethod(*decoder_, release_method_); | 152 jobject ret = jni->CallObjectMethod(*decoder_, release_method_); |
| 153 frame_extra_infos_.clear(); | 153 frame_extra_infos_.clear(); |
| 154 initialized_ = false; | 154 initialized_ = false; |
| (...skipping 26 matching lines...) |
| 181 LOG(LS_WARNING) << "Java decoder produced an unexpected frame."; | 181 LOG(LS_WARNING) << "Java decoder produced an unexpected frame."; |
| 182 return; | 182 return; |
| 183 } | 183 } |
| 184 | 184 |
| 185 frame_extra_info = frame_extra_infos_.front(); | 185 frame_extra_info = frame_extra_infos_.front(); |
| 186 frame_extra_infos_.pop_front(); | 186 frame_extra_infos_.pop_front(); |
| 187 // The decoder might drop frames, so iterate through the queue until we | 187 // The decoder might drop frames, so iterate through the queue until we |
| 188 // find a matching timestamp. | 188 // find a matching timestamp. |
| 189 } while (frame_extra_info.capture_time_ms != capture_time_ms); | 189 } while (frame_extra_info.capture_time_ms != capture_time_ms); |
| 190 | 190 |
| 191 webrtc::VideoFrame frame = android_video_buffer_factory_.CreateFrame( | 191 VideoFrame frame = android_video_buffer_factory_.CreateFrame( |
| 192 jni, jframe, frame_extra_info.timestamp_rtp); | 192 jni, jframe, frame_extra_info.timestamp_rtp); |
| 193 | 193 |
| 194 rtc::Optional<int32_t> decoding_time_ms; | 194 rtc::Optional<int32_t> decoding_time_ms; |
| 195 if (jdecode_time_ms != nullptr) { | 195 if (jdecode_time_ms != nullptr) { |
| 196 decoding_time_ms = rtc::Optional<int32_t>( | 196 decoding_time_ms = rtc::Optional<int32_t>( |
| 197 jni->CallIntMethod(jdecode_time_ms, int_value_method_)); | 197 jni->CallIntMethod(jdecode_time_ms, int_value_method_)); |
| 198 } | 198 } |
| 199 | 199 |
| 200 rtc::Optional<uint8_t> qp; | 200 rtc::Optional<uint8_t> qp; |
| 201 if (jqp != nullptr) { | 201 if (jqp != nullptr) { |
| 202 qp = rtc::Optional<uint8_t>(jni->CallIntMethod(jqp, int_value_method_)); | 202 qp = rtc::Optional<uint8_t>(jni->CallIntMethod(jqp, int_value_method_)); |
| 203 // The decoder provides QP values itself, no need to parse the bitstream. | 203 // The decoder provides QP values itself, no need to parse the bitstream. |
| 204 qp_parsing_enabled_ = false; | 204 qp_parsing_enabled_ = false; |
| 205 } else { | 205 } else { |
| 206 qp = frame_extra_info.qp; | 206 qp = frame_extra_info.qp; |
| 207 // The decoder doesn't provide QP values, ensure bitstream parsing is | 207 // The decoder doesn't provide QP values, ensure bitstream parsing is |
| 208 // enabled. | 208 // enabled. |
| 209 qp_parsing_enabled_ = true; | 209 qp_parsing_enabled_ = true; |
| 210 } | 210 } |
| 211 | 211 |
| 212 callback_->Decoded(frame, decoding_time_ms, qp); | 212 callback_->Decoded(frame, decoding_time_ms, qp); |
| 213 } | 213 } |
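The frame matching above is split across two functions: Decode() pushes a FrameExtraInfo record onto frame_extra_infos_, and OnDecodedFrame() pops entries until the capture timestamps line up, skipping any frames the decoder dropped along the way. A minimal standalone sketch of that pattern follows; the names FrameInfo and FrameInfoQueue are illustrative and not part of this CL.

    // Minimal, self-contained sketch of the bookkeeping pattern used above:
    // the submit path records per-frame metadata in a FIFO, and the
    // asynchronous callback pops entries until the capture timestamps match,
    // which also absorbs frames the decoder silently dropped.
    // Names (FrameInfo, FrameInfoQueue) are illustrative, not from this CL.
    #include <cstdint>
    #include <deque>

    struct FrameInfo {
      int64_t capture_time_ms;
      uint32_t timestamp_rtp;
    };

    class FrameInfoQueue {
     public:
      // Called on the decode input path (cf. Decode() above).
      void OnFrameSubmitted(int64_t capture_time_ms, uint32_t timestamp_rtp) {
        queue_.push_back({capture_time_ms, timestamp_rtp});
      }

      // Called when the asynchronous decoder delivers a frame. Returns false
      // when no queued entry matches, mirroring the "unexpected frame"
      // warning above.
      bool OnFrameDecoded(int64_t capture_time_ms, FrameInfo* out) {
        while (!queue_.empty()) {
          FrameInfo info = queue_.front();
          queue_.pop_front();
          if (info.capture_time_ms == capture_time_ms) {
            *out = info;  // Entries for dropped frames were skipped above.
            return true;
          }
        }
        return false;  // The decoder produced a frame we never submitted.
      }

     private:
      std::deque<FrameInfo> queue_;
    };

A plain deque is enough here because frames are delivered in submission order; scanning forward past non-matching entries is what discards the metadata of dropped frames.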
| 214 | 214 |
| 215 jobject VideoDecoderWrapper::ConvertEncodedImageToJavaEncodedImage( | 215 jobject VideoDecoderWrapper::ConvertEncodedImageToJavaEncodedImage( |
| 216 JNIEnv* jni, | 216 JNIEnv* jni, |
| 217 const webrtc::EncodedImage& image) { | 217 const EncodedImage& image) { |
| 218 jobject buffer = jni->NewDirectByteBuffer(image._buffer, image._length); | 218 jobject buffer = jni->NewDirectByteBuffer(image._buffer, image._length); |
| 219 jfieldID frame_type_field; | 219 jfieldID frame_type_field; |
| 220 switch (image._frameType) { | 220 switch (image._frameType) { |
| 221 case webrtc::kEmptyFrame: | 221 case kEmptyFrame: |
| 222 frame_type_field = empty_frame_field_; | 222 frame_type_field = empty_frame_field_; |
| 223 break; | 223 break; |
| 224 case webrtc::kVideoFrameKey: | 224 case kVideoFrameKey: |
| 225 frame_type_field = video_frame_key_field_; | 225 frame_type_field = video_frame_key_field_; |
| 226 break; | 226 break; |
| 227 case webrtc::kVideoFrameDelta: | 227 case kVideoFrameDelta: |
| 228 frame_type_field = video_frame_delta_field_; | 228 frame_type_field = video_frame_delta_field_; |
| 229 break; | 229 break; |
| 230 default: | 230 default: |
| 231 RTC_NOTREACHED(); | 231 RTC_NOTREACHED(); |
| 232 return nullptr; | 232 return nullptr; |
| 233 } | 233 } |
| 234 jobject frame_type = | 234 jobject frame_type = |
| 235 jni->GetStaticObjectField(*frame_type_class_, frame_type_field); | 235 jni->GetStaticObjectField(*frame_type_class_, frame_type_field); |
| 236 jobject qp = nullptr; | 236 jobject qp = nullptr; |
| 237 if (image.qp_ != -1) { | 237 if (image.qp_ != -1) { |
| (...skipping 16 matching lines...) Expand all Loading... |
| 254 } | 254 } |
| 255 | 255 |
| 256 LOG(LS_WARNING) << "Falling back to software decoder."; | 256 LOG(LS_WARNING) << "Falling back to software decoder."; |
| 257 return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE; | 257 return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE; |
| 258 } else { | 258 } else { |
| 259 return value; | 259 return value; |
| 260 } | 260 } |
| 261 } | 261 } |
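Most of HandleReturnCode is elided in the hunk above. The sketch below only reconstructs the general shape suggested by the visible tail; the error check (negative status numbers) and the cleanup comment are assumptions, not the actual implementation.

    // Rough reconstruction only: most of HandleReturnCode is elided in the
    // hunk above, so this shows the general shape suggested by the visible
    // tail. The error check and the cleanup comment are assumptions.
    #include <jni.h>

    #include <cstdint>

    #include "webrtc/modules/video_coding/include/video_error_codes.h"
    #include "webrtc/rtc_base/logging.h"

    int32_t HandleStatusSketch(JNIEnv* jni,
                               jobject status,
                               jmethodID get_number_method) {
      const int32_t value = jni->CallIntMethod(status, get_number_method);
      if (value < 0) {  // Assumed: error statuses map to negative numbers.
        // Assumed: per-session state would be torn down here before
        // requesting fallback.
        LOG(LS_WARNING) << "Falling back to software decoder.";
        return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
      } else {
        return value;
      }
    }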
| 262 | 262 |
| 263 rtc::Optional<uint8_t> VideoDecoderWrapper::ParseQP( | 263 rtc::Optional<uint8_t> VideoDecoderWrapper::ParseQP( |
| 264 const webrtc::EncodedImage& input_image) { | 264 const EncodedImage& input_image) { |
| 265 if (input_image.qp_ != -1) { | 265 if (input_image.qp_ != -1) { |
| 266 return rtc::Optional<uint8_t>(input_image.qp_); | 266 return rtc::Optional<uint8_t>(input_image.qp_); |
| 267 } | 267 } |
| 268 | 268 |
| 269 rtc::Optional<uint8_t> qp; | 269 rtc::Optional<uint8_t> qp; |
| 270 switch (codec_settings_.codecType) { | 270 switch (codec_settings_.codecType) { |
| 271 case webrtc::kVideoCodecVP8: { | 271 case kVideoCodecVP8: { |
| 272 int qp_int; | 272 int qp_int; |
| 273 if (webrtc::vp8::GetQp(input_image._buffer, input_image._length, | 273 if (vp8::GetQp(input_image._buffer, input_image._length, &qp_int)) { |
| 274 &qp_int)) { | |
| 275 qp = rtc::Optional<uint8_t>(qp_int); | 274 qp = rtc::Optional<uint8_t>(qp_int); |
| 276 } | 275 } |
| 277 break; | 276 break; |
| 278 } | 277 } |
| 279 case webrtc::kVideoCodecVP9: { | 278 case kVideoCodecVP9: { |
| 280 int qp_int; | 279 int qp_int; |
| 281 if (webrtc::vp9::GetQp(input_image._buffer, input_image._length, | 280 if (vp9::GetQp(input_image._buffer, input_image._length, &qp_int)) { |
| 282 &qp_int)) { | |
| 283 qp = rtc::Optional<uint8_t>(qp_int); | 281 qp = rtc::Optional<uint8_t>(qp_int); |
| 284 } | 282 } |
| 285 break; | 283 break; |
| 286 } | 284 } |
| 287 case webrtc::kVideoCodecH264: { | 285 case kVideoCodecH264: { |
| 288 h264_bitstream_parser_.ParseBitstream(input_image._buffer, | 286 h264_bitstream_parser_.ParseBitstream(input_image._buffer, |
| 289 input_image._length); | 287 input_image._length); |
| 290 int qp_int; | 288 int qp_int; |
| 291 if (h264_bitstream_parser_.GetLastSliceQp(&qp_int)) { | 289 if (h264_bitstream_parser_.GetLastSliceQp(&qp_int)) { |
| 292 qp = rtc::Optional<uint8_t>(qp_int); | 290 qp = rtc::Optional<uint8_t>(qp_int); |
| 293 } | 291 } |
| 294 break; | 292 break; |
| 295 } | 293 } |
| 296 default: | 294 default: |
| 297 break; // Default is to not provide QP. | 295 break; // Default is to not provide QP. |
| 298 } | 296 } |
| 299 return qp; | 297 return qp; |
| 300 } | 298 } |
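QP handling in this file is a small state machine spread across the constructor, InitDecodeInternal(), OnDecodedFrame(), and ParseQP(): bitstream parsing starts enabled, is turned off once the Java decoder reports QP itself, and is turned back on after re-initialization or when the decoder stops reporting. The sketch below condenses that policy into one hypothetical helper; QpSource and ChooseQp are not names from this CL, and the optional.h path is assumed to match the file's other webrtc/rtc_base includes.

    // Condensed sketch of the QP policy used above; names are hypothetical.
    #include <cstdint>

    #include "webrtc/rtc_base/optional.h"

    class QpSource {
     public:
      // After (re)initialization the decoder may or may not report QP, so
      // bitstream parsing starts enabled (cf. qp_parsing_enabled_ above).
      void OnDecoderReinitialized() { parsing_enabled_ = true; }

      // decoder_qp is what the decoder reported for this frame, if anything;
      // parsed_qp is what was recovered from the bitstream before decoding.
      rtc::Optional<uint8_t> ChooseQp(const rtc::Optional<uint8_t>& decoder_qp,
                                      const rtc::Optional<uint8_t>& parsed_qp) {
        if (decoder_qp) {
          parsing_enabled_ = false;  // Decoder supplies QP; parsing is redundant.
          return decoder_qp;
        }
        parsing_enabled_ = true;  // Keep (or resume) parsing the bitstream.
        return parsed_qp;
      }

      bool parsing_enabled() const { return parsing_enabled_; }

     private:
      bool parsing_enabled_ = true;
    };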
| 301 | 299 |
| 302 JNI_FUNCTION_DECLARATION(void, | 300 JNI_FUNCTION_DECLARATION(void, |
| 303 VideoDecoderWrapperCallback_nativeOnDecodedFrame, | 301 VideoDecoderWrapperCallback_nativeOnDecodedFrame, |
| 304 JNIEnv* jni, | 302 JNIEnv* jni, |
| 305 jclass, | 303 jclass, |
| 306 jlong jnative_decoder, | 304 jlong jnative_decoder, |
| 307 jobject jframe, | 305 jobject jframe, |
| 308 jobject jdecode_time_ms, | 306 jobject jdecode_time_ms, |
| 309 jobject jqp) { | 307 jobject jqp) { |
| 310 VideoDecoderWrapper* native_decoder = | 308 VideoDecoderWrapper* native_decoder = |
| 311 reinterpret_cast<VideoDecoderWrapper*>(jnative_decoder); | 309 reinterpret_cast<VideoDecoderWrapper*>(jnative_decoder); |
| 312 native_decoder->OnDecodedFrame(jni, jframe, jdecode_time_ms, jqp); | 310 native_decoder->OnDecodedFrame(jni, jframe, jdecode_time_ms, jqp); |
| 313 } | 311 } |
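The JNI entry point above recovers the wrapper by casting a jlong handle back to a VideoDecoderWrapper*. A generic sketch of that handle round trip is shown below; the class and function names are hypothetical and only illustrate the pattern, including the point that some release-style native call must eventually delete the pointer.

    // Illustrative only: how a C++ object is commonly passed through Java as
    // an opaque jlong and recovered in a JNI entry point. All names here are
    // hypothetical.
    #include <jni.h>

    class NativeHandleExample {
     public:
      void OnCallback() { /* forward to the real implementation */ }
    };

    // Returned to Java once (e.g. at construction) and stored in a long field.
    jlong CreateNativeHandle() {
      return reinterpret_cast<jlong>(new NativeHandleExample());
    }

    // Called from a static native method, mirroring nativeOnDecodedFrame
    // above: the long comes back and is cast to the original pointer type.
    void DispatchCallback(jlong handle) {
      reinterpret_cast<NativeHandleExample*>(handle)->OnCallback();
    }

    // The Java side must eventually trigger deletion through some
    // release-style native method (hypothetical), or the object leaks.
    void DestroyNativeHandle(jlong handle) {
      delete reinterpret_cast<NativeHandleExample*>(handle);
    }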
| 314 | 312 |
| 315 } // namespace webrtc_jni | 313 } // namespace jni |
| 314 } // namespace webrtc |