| OLD | NEW |
| 1 /* | 1 /* |
| 2 * libjingle | 2 * libjingle |
| 3 * Copyright 2015 Google Inc. | 3 * Copyright 2015 Google Inc. |
| 4 * | 4 * |
| 5 * Redistribution and use in source and binary forms, with or without | 5 * Redistribution and use in source and binary forms, with or without |
| 6 * modification, are permitted provided that the following conditions are met: | 6 * modification, are permitted provided that the following conditions are met: |
| 7 * | 7 * |
| 8 * 1. Redistributions of source code must retain the above copyright notice, | 8 * 1. Redistributions of source code must retain the above copyright notice, |
| 9 * this list of conditions and the following disclaimer. | 9 * this list of conditions and the following disclaimer. |
| 10 * 2. Redistributions in binary form must reproduce the above copyright notice, | 10 * 2. Redistributions in binary form must reproduce the above copyright notice, |
| (...skipping 39 matching lines...) |
| 50 using rtc::scoped_ptr; | 50 using rtc::scoped_ptr; |
| 51 | 51 |
| 52 using webrtc::CodecSpecificInfo; | 52 using webrtc::CodecSpecificInfo; |
| 53 using webrtc::EncodedImage; | 53 using webrtc::EncodedImage; |
| 54 using webrtc::VideoFrame; | 54 using webrtc::VideoFrame; |
| 55 using webrtc::RTPFragmentationHeader; | 55 using webrtc::RTPFragmentationHeader; |
| 56 using webrtc::VideoCodec; | 56 using webrtc::VideoCodec; |
| 57 using webrtc::VideoCodecType; | 57 using webrtc::VideoCodecType; |
| 58 using webrtc::kVideoCodecH264; | 58 using webrtc::kVideoCodecH264; |
| 59 using webrtc::kVideoCodecVP8; | 59 using webrtc::kVideoCodecVP8; |
| 60 using webrtc::kVideoCodecVP9; |
| 60 | 61 |
| 61 namespace webrtc_jni { | 62 namespace webrtc_jni { |
| 62 | 63 |
| 63 // H.264 start code length. | 64 // H.264 start code length. |
| 64 #define H264_SC_LENGTH 4 | 65 #define H264_SC_LENGTH 4 |
| 65 // Maximum allowed NALUs in one output frame. | 66 // Maximum allowed NALUs in one output frame. |
| 66 #define MAX_NALUS_PERFRAME 32 | 67 #define MAX_NALUS_PERFRAME 32 |
| 67 // Maximum supported HW video encoder resolution. | 68 // Maximum supported HW video encoder resolution. |
| 68 #define MAX_VIDEO_WIDTH 1280 | 69 #define MAX_VIDEO_WIDTH 1280 |
| 69 #define MAX_VIDEO_HEIGHT 1280 | 70 #define MAX_VIDEO_HEIGHT 1280 |
| (...skipping 136 matching lines...) |
| 206 // and the next Encode() call being ignored. | 207 // and the next Encode() call being ignored. |
| 207 bool drop_next_input_frame_; | 208 bool drop_next_input_frame_; |
| 208 // Global references; must be deleted in Release(). | 209 // Global references; must be deleted in Release(). |
| 209 std::vector<jobject> input_buffers_; | 210 std::vector<jobject> input_buffers_; |
| 210 webrtc::QualityScaler quality_scaler_; | 211 webrtc::QualityScaler quality_scaler_; |
| 211 // Dynamic resolution change, off by default. | 212 // Dynamic resolution change, off by default. |
| 212 bool scale_; | 213 bool scale_; |
| 213 | 214 |
| 214 // H264 bitstream parser, used to extract QP from encoded bitstreams. | 215 // H264 bitstream parser, used to extract QP from encoded bitstreams. |
| 215 webrtc::H264BitstreamParser h264_bitstream_parser_; | 216 webrtc::H264BitstreamParser h264_bitstream_parser_; |
| 217 |
| 218 // VP9 variables to populate codec specific structure. |
| 219 webrtc::GofInfoVP9 gof_; // Contains each frame's temporal information for |
| 220 // non-flexible VP9 mode. |
| 221 uint8_t tl0_pic_idx_; |
| 222 size_t gof_idx_; |
| 216 }; | 223 }; |
| 217 | 224 |
| 218 MediaCodecVideoEncoder::~MediaCodecVideoEncoder() { | 225 MediaCodecVideoEncoder::~MediaCodecVideoEncoder() { |
| 219 // Call Release() to ensure no more callbacks to us after we are deleted. | 226 // Call Release() to ensure no more callbacks to us after we are deleted. |
| 220 Release(); | 227 Release(); |
| 221 } | 228 } |
| 222 | 229 |
| 223 MediaCodecVideoEncoder::MediaCodecVideoEncoder( | 230 MediaCodecVideoEncoder::MediaCodecVideoEncoder( |
| 224 JNIEnv* jni, VideoCodecType codecType) : | 231 JNIEnv* jni, VideoCodecType codecType) : |
| 225 codecType_(codecType), | 232 codecType_(codecType), |
| (...skipping 54 matching lines...) |
| 280 GetFieldID(jni, *j_media_codec_video_encoder_class_, "colorFormat", "I"); | 287 GetFieldID(jni, *j_media_codec_video_encoder_class_, "colorFormat", "I"); |
| 281 j_info_index_field_ = | 288 j_info_index_field_ = |
| 282 GetFieldID(jni, j_output_buffer_info_class, "index", "I"); | 289 GetFieldID(jni, j_output_buffer_info_class, "index", "I"); |
| 283 j_info_buffer_field_ = GetFieldID( | 290 j_info_buffer_field_ = GetFieldID( |
| 284 jni, j_output_buffer_info_class, "buffer", "Ljava/nio/ByteBuffer;"); | 291 jni, j_output_buffer_info_class, "buffer", "Ljava/nio/ByteBuffer;"); |
| 285 j_info_is_key_frame_field_ = | 292 j_info_is_key_frame_field_ = |
| 286 GetFieldID(jni, j_output_buffer_info_class, "isKeyFrame", "Z"); | 293 GetFieldID(jni, j_output_buffer_info_class, "isKeyFrame", "Z"); |
| 287 j_info_presentation_timestamp_us_field_ = GetFieldID( | 294 j_info_presentation_timestamp_us_field_ = GetFieldID( |
| 288 jni, j_output_buffer_info_class, "presentationTimestampUs", "J"); | 295 jni, j_output_buffer_info_class, "presentationTimestampUs", "J"); |
| 289 CHECK_EXCEPTION(jni) << "MediaCodecVideoEncoder ctor failed"; | 296 CHECK_EXCEPTION(jni) << "MediaCodecVideoEncoder ctor failed"; |
| 297 srand(time(NULL)); |
| 290 AllowBlockingCalls(); | 298 AllowBlockingCalls(); |
| 291 } | 299 } |
| 292 | 300 |
| 293 int32_t MediaCodecVideoEncoder::InitEncode( | 301 int32_t MediaCodecVideoEncoder::InitEncode( |
| 294 const webrtc::VideoCodec* codec_settings, | 302 const webrtc::VideoCodec* codec_settings, |
| 295 int32_t /* number_of_cores */, | 303 int32_t /* number_of_cores */, |
| 296 size_t /* max_payload_size */) { | 304 size_t /* max_payload_size */) { |
| 297 const int kMinWidth = 320; | 305 const int kMinWidth = 320; |
| 298 const int kMinHeight = 180; | 306 const int kMinHeight = 180; |
| 299 const int kLowQpThresholdDenominator = 3; | 307 const int kLowQpThresholdDenominator = 3; |
| 300 if (codec_settings == NULL) { | 308 if (codec_settings == NULL) { |
| 301 ALOGE << "NULL VideoCodec instance"; | 309 ALOGE << "NULL VideoCodec instance"; |
| 302 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; | 310 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; |
| 303 } | 311 } |
| 304 // Factory should guard against other codecs being used with us. | 312 // Factory should guard against other codecs being used with us. |
| 305 RTC_CHECK(codec_settings->codecType == codecType_) | 313 RTC_CHECK(codec_settings->codecType == codecType_) |
| 306 << "Unsupported codec " << codec_settings->codecType << " for " | 314 << "Unsupported codec " << codec_settings->codecType << " for " |
| 307 << codecType_; | 315 << codecType_; |
| 308 | 316 |
| 309 ALOGD << "InitEncode request"; | 317 ALOGD << "InitEncode request"; |
| 310 scale_ = webrtc::field_trial::FindFullName( | 318 scale_ = (codecType_ != kVideoCodecVP9) && (webrtc::field_trial::FindFullName( |
| 311 "WebRTC-MediaCodecVideoEncoder-AutomaticResize") == "Enabled"; | 319 "WebRTC-MediaCodecVideoEncoder-AutomaticResize") == "Enabled"); |
| 312 ALOGD << "Encoder automatic resize " << (scale_ ? "enabled" : "disabled"); | 320 ALOGD << "Encoder automatic resize " << (scale_ ? "enabled" : "disabled"); |
| 313 if (scale_) { | 321 if (scale_) { |
| 314 if (codecType_ == kVideoCodecVP8) { | 322 if (codecType_ == kVideoCodecVP8) { |
| 315 // QP is obtained from VP8-bitstream for HW, so the QP corresponds to the | 323 // QP is obtained from VP8-bitstream for HW, so the QP corresponds to the |
| 316 // (internal) range: [0, 127]. And we cannot change QP_max in HW, so it is | 324 // (internal) range: [0, 127]. And we cannot change QP_max in HW, so it is |
| 317 // always = 127. Note that in SW, QP is that of the user-level range [0, | 325 // always = 127. Note that in SW, QP is that of the user-level range [0, |
| 318 // 63]. | 326 // 63]. |
| 319 const int kMaxQp = 127; | 327 const int kMaxQp = 127; |
| 320 // TODO(pbos): Investigate whether high-QP thresholds make sense for VP8. | 328 // TODO(pbos): Investigate whether high-QP thresholds make sense for VP8. |
| 321 // This effectively disables high QP as VP8 QP can't go above this | 329 // This effectively disables high QP as VP8 QP can't go above this |
| (...skipping 129 matching lines...) |
| 451 current_encoding_time_ms_ = 0; | 459 current_encoding_time_ms_ = 0; |
| 452 last_input_timestamp_ms_ = -1; | 460 last_input_timestamp_ms_ = -1; |
| 453 last_output_timestamp_ms_ = -1; | 461 last_output_timestamp_ms_ = -1; |
| 454 output_timestamp_ = 0; | 462 output_timestamp_ = 0; |
| 455 output_render_time_ms_ = 0; | 463 output_render_time_ms_ = 0; |
| 456 timestamps_.clear(); | 464 timestamps_.clear(); |
| 457 render_times_ms_.clear(); | 465 render_times_ms_.clear(); |
| 458 frame_rtc_times_ms_.clear(); | 466 frame_rtc_times_ms_.clear(); |
| 459 drop_next_input_frame_ = false; | 467 drop_next_input_frame_ = false; |
| 460 picture_id_ = static_cast<uint16_t>(rand()) & 0x7FFF; | 468 picture_id_ = static_cast<uint16_t>(rand()) & 0x7FFF; |
| 469 gof_.SetGofInfoVP9(webrtc::TemporalStructureMode::kTemporalStructureMode1); |
| 470 tl0_pic_idx_ = static_cast<uint8_t>(rand()); |
| 471 gof_idx_ = 0; |
| 461 | 472 |
| 462 // We enforce no extra stride/padding in the format creation step. | 473 // We enforce no extra stride/padding in the format creation step. |
| 463 jobject j_video_codec_enum = JavaEnumFromIndex( | 474 jobject j_video_codec_enum = JavaEnumFromIndex( |
| 464 jni, "MediaCodecVideoEncoder$VideoCodecType", codecType_); | 475 jni, "MediaCodecVideoEncoder$VideoCodecType", codecType_); |
| 465 const bool encode_status = jni->CallBooleanMethod( | 476 const bool encode_status = jni->CallBooleanMethod( |
| 466 *j_media_codec_video_encoder_, j_init_encode_method_, | 477 *j_media_codec_video_encoder_, j_init_encode_method_, |
| 467 j_video_codec_enum, width, height, kbps, fps); | 478 j_video_codec_enum, width, height, kbps, fps); |
| 468 if (!encode_status) { | 479 if (!encode_status) { |
| 469 ALOGE << "Failed to configure encoder."; | 480 ALOGE << "Failed to configure encoder."; |
| 470 return WEBRTC_VIDEO_CODEC_ERROR; | 481 return WEBRTC_VIDEO_CODEC_ERROR; |
| (...skipping 358 matching lines...) |
| 829 memset(&info, 0, sizeof(info)); | 840 memset(&info, 0, sizeof(info)); |
| 830 info.codecType = codecType_; | 841 info.codecType = codecType_; |
| 831 if (codecType_ == kVideoCodecVP8) { | 842 if (codecType_ == kVideoCodecVP8) { |
| 832 info.codecSpecific.VP8.pictureId = picture_id_; | 843 info.codecSpecific.VP8.pictureId = picture_id_; |
| 833 info.codecSpecific.VP8.nonReference = false; | 844 info.codecSpecific.VP8.nonReference = false; |
| 834 info.codecSpecific.VP8.simulcastIdx = 0; | 845 info.codecSpecific.VP8.simulcastIdx = 0; |
| 835 info.codecSpecific.VP8.temporalIdx = webrtc::kNoTemporalIdx; | 846 info.codecSpecific.VP8.temporalIdx = webrtc::kNoTemporalIdx; |
| 836 info.codecSpecific.VP8.layerSync = false; | 847 info.codecSpecific.VP8.layerSync = false; |
| 837 info.codecSpecific.VP8.tl0PicIdx = webrtc::kNoTl0PicIdx; | 848 info.codecSpecific.VP8.tl0PicIdx = webrtc::kNoTl0PicIdx; |
| 838 info.codecSpecific.VP8.keyIdx = webrtc::kNoKeyIdx; | 849 info.codecSpecific.VP8.keyIdx = webrtc::kNoKeyIdx; |
| 839 picture_id_ = (picture_id_ + 1) & 0x7FFF; | 850 } else if (codecType_ == kVideoCodecVP9) { |
| 851 if (key_frame) { |
| 852 gof_idx_ = 0; |
| 853 } |
| 854 info.codecSpecific.VP9.picture_id = picture_id_; |
| 855 info.codecSpecific.VP9.inter_pic_predicted = key_frame ? false : true; |
| 856 info.codecSpecific.VP9.flexible_mode = false; |
| 857 info.codecSpecific.VP9.ss_data_available = key_frame ? true : false; |
| 858 info.codecSpecific.VP9.tl0_pic_idx = tl0_pic_idx_++; |
| 859 info.codecSpecific.VP9.temporal_idx = webrtc::kNoTemporalIdx; |
| 860 info.codecSpecific.VP9.spatial_idx = webrtc::kNoSpatialIdx; |
| 861 info.codecSpecific.VP9.temporal_up_switch = true; |
| 862 info.codecSpecific.VP9.inter_layer_predicted = false; |
| 863 info.codecSpecific.VP9.gof_idx = |
| 864 static_cast<uint8_t>(gof_idx_++ % gof_.num_frames_in_gof); |
| 865 info.codecSpecific.VP9.num_spatial_layers = 1; |
| 866 info.codecSpecific.VP9.spatial_layer_resolution_present = false; |
| 867 if (info.codecSpecific.VP9.ss_data_available) { |
| 868 info.codecSpecific.VP9.spatial_layer_resolution_present = true; |
| 869 info.codecSpecific.VP9.width[0] = width_; |
| 870 info.codecSpecific.VP9.height[0] = height_; |
| 871 info.codecSpecific.VP9.gof.CopyGofInfoVP9(gof_); |
| 872 } |
| 840 } | 873 } |
| 874 picture_id_ = (picture_id_ + 1) & 0x7FFF; |
| 841 | 875 |
| 842 // Generate a header describing a single fragment. | 876 // Generate a header describing a single fragment. |
| 843 webrtc::RTPFragmentationHeader header; | 877 webrtc::RTPFragmentationHeader header; |
| 844 memset(&header, 0, sizeof(header)); | 878 memset(&header, 0, sizeof(header)); |
| 845 if (codecType_ == kVideoCodecVP8) { | 879 if (codecType_ == kVideoCodecVP8 || codecType_ == kVideoCodecVP9) { |
| 846 header.VerifyAndAllocateFragmentationHeader(1); | 880 header.VerifyAndAllocateFragmentationHeader(1); |
| 847 header.fragmentationOffset[0] = 0; | 881 header.fragmentationOffset[0] = 0; |
| 848 header.fragmentationLength[0] = image->_length; | 882 header.fragmentationLength[0] = image->_length; |
| 849 header.fragmentationPlType[0] = 0; | 883 header.fragmentationPlType[0] = 0; |
| 850 header.fragmentationTimeDiff[0] = 0; | 884 header.fragmentationTimeDiff[0] = 0; |
| 851 if (scale_) { | 885 if (codecType_ == kVideoCodecVP8 && scale_) { |
| 852 int qp; | 886 int qp; |
| 853 if (webrtc::vp8::GetQp(payload, payload_size, &qp)) | 887 if (webrtc::vp8::GetQp(payload, payload_size, &qp)) |
| 854 quality_scaler_.ReportQP(qp); | 888 quality_scaler_.ReportQP(qp); |
| 855 } | 889 } |
| 856 } else if (codecType_ == kVideoCodecH264) { | 890 } else if (codecType_ == kVideoCodecH264) { |
| 857 if (scale_) { | 891 if (scale_) { |
| 858 h264_bitstream_parser_.ParseBitstream(payload, payload_size); | 892 h264_bitstream_parser_.ParseBitstream(payload, payload_size); |
| 859 int qp; | 893 int qp; |
| 860 if (h264_bitstream_parser_.GetLastSliceQp(&qp)) | 894 if (h264_bitstream_parser_.GetLastSliceQp(&qp)) |
| 861 quality_scaler_.ReportQP(qp); | 895 quality_scaler_.ReportQP(qp); |
| (...skipping 104 matching lines...) |
| 966 bool is_vp8_hw_supported = jni->CallStaticBooleanMethod( | 1000 bool is_vp8_hw_supported = jni->CallStaticBooleanMethod( |
| 967 j_encoder_class, | 1001 j_encoder_class, |
| 968 GetStaticMethodID(jni, j_encoder_class, "isVp8HwSupported", "()Z")); | 1002 GetStaticMethodID(jni, j_encoder_class, "isVp8HwSupported", "()Z")); |
| 969 CHECK_EXCEPTION(jni); | 1003 CHECK_EXCEPTION(jni); |
| 970 if (is_vp8_hw_supported) { | 1004 if (is_vp8_hw_supported) { |
| 971 ALOGD << "VP8 HW Encoder supported."; | 1005 ALOGD << "VP8 HW Encoder supported."; |
| 972 supported_codecs_.push_back(VideoCodec(kVideoCodecVP8, "VP8", | 1006 supported_codecs_.push_back(VideoCodec(kVideoCodecVP8, "VP8", |
| 973 MAX_VIDEO_WIDTH, MAX_VIDEO_HEIGHT, MAX_VIDEO_FPS)); | 1007 MAX_VIDEO_WIDTH, MAX_VIDEO_HEIGHT, MAX_VIDEO_FPS)); |
| 974 } | 1008 } |
| 975 | 1009 |
| 1010 bool is_vp9_hw_supported = jni->CallStaticBooleanMethod( |
| 1011 j_encoder_class, |
| 1012 GetStaticMethodID(jni, j_encoder_class, "isVp9HwSupported", "()Z")); |
| 1013 CHECK_EXCEPTION(jni); |
| 1014 if (is_vp9_hw_supported) { |
| 1015 ALOGD << "VP9 HW Encoder supported."; |
| 1016 supported_codecs_.push_back(VideoCodec(kVideoCodecVP9, "VP9", |
| 1017 MAX_VIDEO_WIDTH, MAX_VIDEO_HEIGHT, MAX_VIDEO_FPS)); |
| 1018 } |
| 1019 |
| 976 bool is_h264_hw_supported = jni->CallStaticBooleanMethod( | 1020 bool is_h264_hw_supported = jni->CallStaticBooleanMethod( |
| 977 j_encoder_class, | 1021 j_encoder_class, |
| 978 GetStaticMethodID(jni, j_encoder_class, "isH264HwSupported", "()Z")); | 1022 GetStaticMethodID(jni, j_encoder_class, "isH264HwSupported", "()Z")); |
| 979 CHECK_EXCEPTION(jni); | 1023 CHECK_EXCEPTION(jni); |
| 980 if (is_h264_hw_supported) { | 1024 if (is_h264_hw_supported) { |
| 981 ALOGD << "H.264 HW Encoder supported."; | 1025 ALOGD << "H.264 HW Encoder supported."; |
| 982 supported_codecs_.push_back(VideoCodec(kVideoCodecH264, "H264", | 1026 supported_codecs_.push_back(VideoCodec(kVideoCodecH264, "H264", |
| 983 MAX_VIDEO_WIDTH, MAX_VIDEO_HEIGHT, MAX_VIDEO_FPS)); | 1027 MAX_VIDEO_WIDTH, MAX_VIDEO_HEIGHT, MAX_VIDEO_FPS)); |
| 984 } | 1028 } |
| 985 } | 1029 } |
| 986 | 1030 |
| 987 MediaCodecVideoEncoderFactory::~MediaCodecVideoEncoderFactory() {} | 1031 MediaCodecVideoEncoderFactory::~MediaCodecVideoEncoderFactory() {} |
| 988 | 1032 |
| 989 webrtc::VideoEncoder* MediaCodecVideoEncoderFactory::CreateVideoEncoder( | 1033 webrtc::VideoEncoder* MediaCodecVideoEncoderFactory::CreateVideoEncoder( |
| 990 VideoCodecType type) { | 1034 VideoCodecType type) { |
| 991 if (supported_codecs_.empty()) { | 1035 if (supported_codecs_.empty()) { |
| 1036 ALOGW << "No HW video encoder for type " << (int)type; |
| 992 return NULL; | 1037 return NULL; |
| 993 } | 1038 } |
| 994 for (std::vector<VideoCodec>::const_iterator it = supported_codecs_.begin(); | 1039 for (std::vector<VideoCodec>::const_iterator it = supported_codecs_.begin(); |
| 995 it != supported_codecs_.end(); ++it) { | 1040 it != supported_codecs_.end(); ++it) { |
| 996 if (it->type == type) { | 1041 if (it->type == type) { |
| 997 ALOGD << "Create HW video encoder for type " << (int)type << | 1042 ALOGD << "Create HW video encoder for type " << (int)type << |
| 998 " (" << it->name << ")."; | 1043 " (" << it->name << ")."; |
| 999 return new MediaCodecVideoEncoder(AttachCurrentThreadIfNeeded(), type); | 1044 return new MediaCodecVideoEncoder(AttachCurrentThreadIfNeeded(), type); |
| 1000 } | 1045 } |
| 1001 } | 1046 } |
| 1047 ALOGW << "Can not find HW video encoder for type " << (int)type; |
| 1002 return NULL; | 1048 return NULL; |
| 1003 } | 1049 } |
| 1004 | 1050 |
| 1005 const std::vector<MediaCodecVideoEncoderFactory::VideoCodec>& | 1051 const std::vector<MediaCodecVideoEncoderFactory::VideoCodec>& |
| 1006 MediaCodecVideoEncoderFactory::codecs() const { | 1052 MediaCodecVideoEncoderFactory::codecs() const { |
| 1007 return supported_codecs_; | 1053 return supported_codecs_; |
| 1008 } | 1054 } |
| 1009 | 1055 |
| 1010 void MediaCodecVideoEncoderFactory::DestroyVideoEncoder( | 1056 void MediaCodecVideoEncoderFactory::DestroyVideoEncoder( |
| 1011 webrtc::VideoEncoder* encoder) { | 1057 webrtc::VideoEncoder* encoder) { |
| 1012 ALOGD << "Destroy video encoder."; | 1058 ALOGD << "Destroy video encoder."; |
| 1013 delete encoder; | 1059 delete encoder; |
| 1014 } | 1060 } |
| 1015 | 1061 |
| 1016 } // namespace webrtc_jni | 1062 } // namespace webrtc_jni |
| 1017 | 1063 |
| OLD | NEW |
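
For reference, below is a minimal standalone sketch that condenses the per-frame VP9 codec-specific bookkeeping this patch adds (new lines 851-872, with the state initialized at new lines 468-471). The helper name FillVp9CodecSpecific, the Vp9EncodeState struct, and the include path are illustrative assumptions, not part of the patch; the webrtc types, fields, and constants are the ones that appear in the diff above.

// Illustrative sketch only -- not part of this change. It mirrors the VP9
// branch added at new lines 851-872. The helper name, the Vp9EncodeState
// struct, and the include path are assumptions.
#include <stddef.h>
#include <stdint.h>

#include "webrtc/modules/video_coding/codecs/interface/video_codec_interface.h"

namespace {

struct Vp9EncodeState {
  webrtc::GofInfoVP9 gof;   // Group-of-frames template, non-flexible mode.
  uint16_t picture_id = 0;  // 15-bit picture id, wraps at 0x7FFF.
  uint8_t tl0_pic_idx = 0;  // Incremented on every frame (single layer).
  size_t gof_idx = 0;       // Position within the GOF, reset on key frames.
};

void FillVp9CodecSpecific(bool key_frame,
                          int width,
                          int height,
                          Vp9EncodeState* state,
                          webrtc::CodecSpecificInfo* info) {
  if (key_frame)
    state->gof_idx = 0;  // Restart the GOF cycle on every key frame.
  webrtc::CodecSpecificInfoVP9& vp9 = info->codecSpecific.VP9;
  vp9.picture_id = state->picture_id;
  vp9.inter_pic_predicted = !key_frame;
  vp9.flexible_mode = false;
  vp9.ss_data_available = key_frame;  // Scalability structure on key frames only.
  vp9.tl0_pic_idx = state->tl0_pic_idx++;
  vp9.temporal_idx = webrtc::kNoTemporalIdx;  // No temporal or spatial layering.
  vp9.spatial_idx = webrtc::kNoSpatialIdx;
  vp9.temporal_up_switch = true;
  vp9.inter_layer_predicted = false;
  vp9.gof_idx =
      static_cast<uint8_t>(state->gof_idx++ % state->gof.num_frames_in_gof);
  vp9.num_spatial_layers = 1;
  vp9.spatial_layer_resolution_present = vp9.ss_data_available;
  if (vp9.ss_data_available) {
    vp9.width[0] = width;
    vp9.height[0] = height;
    vp9.gof.CopyGofInfoVP9(state->gof);
  }
  state->picture_id = (state->picture_id + 1) & 0x7FFF;
}

}  // namespace

// State setup mirrors new lines 468-471: a random 15-bit picture id, a random
// tl0_pic_idx, and a single-layer GOF.
//   Vp9EncodeState state;
//   state.gof.SetGofInfoVP9(webrtc::TemporalStructureMode::kTemporalStructureMode1);
//   state.picture_id = static_cast<uint16_t>(rand()) & 0x7FFF;
//   state.tl0_pic_idx = static_cast<uint8_t>(rand());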