Index: webrtc/sdk/android/src/jni/androidmediaencoder_jni.cc
diff --git a/webrtc/sdk/android/src/jni/androidmediaencoder_jni.cc b/webrtc/sdk/android/src/jni/androidmediaencoder_jni.cc
index a8ff7530fc9e7cf9efb3d9b1795eb35f9ebd3798..7c40dac53dda6b25a4aba93a282d3fb0eb6db6ac 100644
--- a/webrtc/sdk/android/src/jni/androidmediaencoder_jni.cc
+++ b/webrtc/sdk/android/src/jni/androidmediaencoder_jni.cc
@@ -50,18 +50,8 @@ using rtc::Bind;
 using rtc::Thread;
 using rtc::ThreadManager;
 
-using webrtc::CodecSpecificInfo;
-using webrtc::EncodedImage;
-using webrtc::VideoFrame;
-using webrtc::RTPFragmentationHeader;
-using webrtc::VideoCodec;
-using webrtc::VideoCodecType;
-using webrtc::kVideoCodecH264;
-using webrtc::kVideoCodecVP8;
-using webrtc::kVideoCodecVP9;
-using webrtc::QualityScaler;
-
-namespace webrtc_jni {
+namespace webrtc {
+namespace jni {
 
 // Maximum supported HW video encoder fps.
 #define MAX_VIDEO_FPS 30
@@ -93,32 +83,32 @@ const char kH264HighProfileFieldTrial[] = "WebRTC-H264HighProfile";
 const char kCustomQPThresholdsFieldTrial[] = "WebRTC-CustomQPThresholds";
 }  // namespace
 
-// MediaCodecVideoEncoder is a webrtc::VideoEncoder implementation that uses
+// MediaCodecVideoEncoder is a VideoEncoder implementation that uses
 // Android's MediaCodec SDK API behind the scenes to implement (hopefully)
 // HW-backed video encode. This C++ class is implemented as a very thin shim,
 // delegating all of the interesting work to org.webrtc.MediaCodecVideoEncoder.
 // MediaCodecVideoEncoder must be operated on a single task queue, currently
 // this is the encoder queue from ViE encoder.
-class MediaCodecVideoEncoder : public webrtc::VideoEncoder {
+class MediaCodecVideoEncoder : public VideoEncoder {
  public:
   virtual ~MediaCodecVideoEncoder();
   MediaCodecVideoEncoder(JNIEnv* jni,
                          const cricket::VideoCodec& codec,
                          jobject egl_context);
 
-  // webrtc::VideoEncoder implementation.
-  int32_t InitEncode(const webrtc::VideoCodec* codec_settings,
+  // VideoEncoder implementation.
+  int32_t InitEncode(const VideoCodec* codec_settings,
                      int32_t /* number_of_cores */,
                      size_t /* max_payload_size */) override;
-  int32_t Encode(const webrtc::VideoFrame& input_image,
-                 const webrtc::CodecSpecificInfo* /* codec_specific_info */,
-                 const std::vector<webrtc::FrameType>* frame_types) override;
+  int32_t Encode(const VideoFrame& input_image,
+                 const CodecSpecificInfo* /* codec_specific_info */,
+                 const std::vector<FrameType>* frame_types) override;
   int32_t RegisterEncodeCompleteCallback(
-      webrtc::EncodedImageCallback* callback) override;
+      EncodedImageCallback* callback) override;
   int32_t Release() override;
   int32_t SetChannelParameters(uint32_t /* packet_loss */,
                                int64_t /* rtt */) override;
-  int32_t SetRateAllocation(const webrtc::BitrateAllocation& rate_allocation,
+  int32_t SetRateAllocation(const BitrateAllocation& rate_allocation,
                             uint32_t frame_rate) override;
 
   bool SupportsNativeHandle() const override { return egl_context_ != nullptr; }
@@ -170,19 +160,17 @@ class MediaCodecVideoEncoder : public webrtc::VideoEncoder {
   // Reconfigure to match |frame| in width, height. Also reconfigures the
   // encoder if |frame| is a texture/byte buffer and the encoder is initialized
   // for byte buffer/texture. Returns false if reconfiguring fails.
-  bool MaybeReconfigureEncoder(JNIEnv* jni, const webrtc::VideoFrame& frame);
+  bool MaybeReconfigureEncoder(JNIEnv* jni, const VideoFrame& frame);
 
   // Returns true if the frame is a texture frame and we should use surface
   // based encoding.
-  bool IsTextureFrame(JNIEnv* jni, const webrtc::VideoFrame& frame);
+  bool IsTextureFrame(JNIEnv* jni, const VideoFrame& frame);
 
   bool EncodeByteBuffer(JNIEnv* jni,
                         bool key_frame,
-                        const webrtc::VideoFrame& frame,
+                        const VideoFrame& frame,
                         int input_buffer_index);
-  bool EncodeTexture(JNIEnv* jni,
-                     bool key_frame,
-                     const webrtc::VideoFrame& frame);
+  bool EncodeTexture(JNIEnv* jni, bool key_frame, const VideoFrame& frame);
   // Encodes a new style org.webrtc.VideoFrame. Might be a I420 or a texture
   // frame.
   bool EncodeJavaFrame(JNIEnv* jni,
@@ -219,7 +207,7 @@ class MediaCodecVideoEncoder : public webrtc::VideoEncoder {
   // Type of video codec.
   const cricket::VideoCodec codec_;
 
-  webrtc::EncodedImageCallback* callback_;
+  EncodedImageCallback* callback_;
 
   // State that is constant for the lifetime of this object once the ctor
   // returns.
@@ -273,7 +261,7 @@ class MediaCodecVideoEncoder : public webrtc::VideoEncoder {
     InputFrameInfo(int64_t encode_start_time,
                    int32_t frame_timestamp,
                    int64_t frame_render_time_ms,
-                   webrtc::VideoRotation rotation)
+                   VideoRotation rotation)
         : encode_start_time(encode_start_time),
           frame_timestamp(frame_timestamp),
          frame_render_time_ms(frame_render_time_ms),
@@ -284,15 +272,15 @@ class MediaCodecVideoEncoder : public webrtc::VideoEncoder {
     // Input frame information.
     const int32_t frame_timestamp;
     const int64_t frame_render_time_ms;
-    const webrtc::VideoRotation rotation;
+    const VideoRotation rotation;
   };
   std::list<InputFrameInfo> input_frame_infos_;
   int32_t output_timestamp_;       // Last output frame timestamp from
                                    // |input_frame_infos_|.
   int64_t output_render_time_ms_;  // Last output frame render time from
                                    // |input_frame_infos_|.
-  webrtc::VideoRotation output_rotation_;  // Last output frame rotation from
-                                           // |input_frame_infos_|.
+  VideoRotation output_rotation_;  // Last output frame rotation from
+                                   // |input_frame_infos_|.
 
   // Frame size in bytes fed to MediaCodec.
   int yuv_size_;
@@ -300,14 +288,14 @@ class MediaCodecVideoEncoder : public webrtc::VideoEncoder {
   // value and the next Encode() call being ignored.
   bool drop_next_input_frame_;
   bool scale_;
-  webrtc::H264::Profile profile_;
+  H264::Profile profile_;
   // Global references; must be deleted in Release().
   std::vector<jobject> input_buffers_;
-  webrtc::H264BitstreamParser h264_bitstream_parser_;
+  H264BitstreamParser h264_bitstream_parser_;
 
   // VP9 variables to populate codec specific structure.
-  webrtc::GofInfoVP9 gof_;  // Contains each frame's temporal information for
-                            // non-flexible VP9 mode.
+  GofInfoVP9 gof_;  // Contains each frame's temporal information for
+                    // non-flexible VP9 mode.
   size_t gof_idx_;
 
   // EGL context - owned by factory, should not be allocated/destroyed
@@ -319,7 +307,7 @@ class MediaCodecVideoEncoder : public webrtc::VideoEncoder {
   // corresponding to a large image change).
   int64_t last_frame_received_ms_;
   int frames_received_since_last_key_;
-  webrtc::VideoCodecMode codec_mode_;
+  VideoCodecMode codec_mode_;
 
   // RTP state.
   uint16_t picture_id_;
@@ -412,15 +400,14 @@ MediaCodecVideoEncoder::MediaCodecVideoEncoder(JNIEnv* jni,
     ProcessHWError(true /* reset_if_fallback_unavailable */);
   }
 
-  webrtc::Random random(rtc::TimeMicros());
+  Random random(rtc::TimeMicros());
   picture_id_ = random.Rand<uint16_t>() & 0x7FFF;
   tl0_pic_idx_ = random.Rand<uint8_t>();
 }
 
-int32_t MediaCodecVideoEncoder::InitEncode(
-    const webrtc::VideoCodec* codec_settings,
-    int32_t /* number_of_cores */,
-    size_t /* max_payload_size */) {
+int32_t MediaCodecVideoEncoder::InitEncode(const VideoCodec* codec_settings,
+                                           int32_t /* number_of_cores */,
+                                           size_t /* max_payload_size */) {
   RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_checker_);
   if (codec_settings == NULL) {
     ALOGE << "NULL VideoCodec instance";
@@ -452,10 +439,10 @@ int32_t MediaCodecVideoEncoder::InitEncode(
   ALOGD << "Encoder automatic resize " << (scale_ ? "enabled" : "disabled");
 
   // Check allowed H.264 profile
-  profile_ = webrtc::H264::Profile::kProfileBaseline;
+  profile_ = H264::Profile::kProfileBaseline;
   if (codec_type == kVideoCodecH264) {
-    const rtc::Optional<webrtc::H264::ProfileLevelId> profile_level_id =
-        webrtc::H264::ParseSdpProfileLevelId(codec_.params);
+    const rtc::Optional<H264::ProfileLevelId> profile_level_id =
+        H264::ParseSdpProfileLevelId(codec_.params);
     RTC_DCHECK(profile_level_id);
     profile_ = profile_level_id->profile;
     ALOGD << "H.264 profile: " << profile_;
@@ -553,7 +540,7 @@ int32_t MediaCodecVideoEncoder::ProcessHWErrorOnEncode() {
 }
 
 VideoCodecType MediaCodecVideoEncoder::GetCodecType() const {
-  return webrtc::PayloadStringToCodecType(codec_.name);
+  return PayloadStringToCodecType(codec_.name);
 }
 
 int32_t MediaCodecVideoEncoder::InitEncodeInternal(int width,
@@ -602,7 +589,7 @@ int32_t MediaCodecVideoEncoder::InitEncodeInternal(int width,
   input_frame_infos_.clear();
   drop_next_input_frame_ = false;
   use_surface_ = use_surface;
-  gof_.SetGofInfoVP9(webrtc::TemporalStructureMode::kTemporalStructureMode1);
+  gof_.SetGofInfoVP9(TemporalStructureMode::kTemporalStructureMode1);
   gof_idx_ = 0;
   last_frame_received_ms_ = -1;
   frames_received_since_last_key_ = kMinKeyFrameInterval;
@@ -686,9 +673,9 @@ int32_t MediaCodecVideoEncoder::InitEncodeInternal(int width,
 }
 
 int32_t MediaCodecVideoEncoder::Encode(
-    const webrtc::VideoFrame& frame,
-    const webrtc::CodecSpecificInfo* /* codec_specific_info */,
-    const std::vector<webrtc::FrameType>* frame_types) {
+    const VideoFrame& frame,
+    const CodecSpecificInfo* /* codec_specific_info */,
+    const std::vector<FrameType>* frame_types) {
   RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_checker_);
   if (sw_fallback_required_)
     return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
@@ -701,7 +688,7 @@ int32_t MediaCodecVideoEncoder::Encode(
   }
 
   bool send_key_frame = false;
-  if (codec_mode_ == webrtc::kRealtimeVideo) {
+  if (codec_mode_ == kRealtimeVideo) {
     ++frames_received_since_last_key_;
     int64_t now_ms = rtc::TimeMillis();
     if (last_frame_received_ms_ != -1 &&
@@ -761,8 +748,7 @@ int32_t MediaCodecVideoEncoder::Encode(
   }
   consecutive_full_queue_frame_drops_ = 0;
 
-  rtc::scoped_refptr<webrtc::VideoFrameBuffer> input_buffer(
-      frame.video_frame_buffer());
+  rtc::scoped_refptr<VideoFrameBuffer> input_buffer(frame.video_frame_buffer());
 
   VideoFrame input_frame(input_buffer, frame.timestamp(),
                          frame.render_time_ms(), frame.rotation());
@@ -773,7 +759,7 @@ int32_t MediaCodecVideoEncoder::Encode(
   }
 
   const bool key_frame =
-      frame_types->front() != webrtc::kVideoFrameDelta || send_key_frame;
+      frame_types->front() != kVideoFrameDelta || send_key_frame;
   bool encode_status = true;
 
   int j_input_buffer_index = -1;
@@ -803,7 +789,7 @@ int32_t MediaCodecVideoEncoder::Encode(
   }
 
   if (input_frame.video_frame_buffer()->type() !=
-      webrtc::VideoFrameBuffer::Type::kNative) {
+      VideoFrameBuffer::Type::kNative) {
     encode_status =
         EncodeByteBuffer(jni, key_frame, input_frame, j_input_buffer_index);
   } else {
@@ -852,9 +838,8 @@ int32_t MediaCodecVideoEncoder::Encode(
   return WEBRTC_VIDEO_CODEC_OK;
 }
 
-bool MediaCodecVideoEncoder::MaybeReconfigureEncoder(
-    JNIEnv* jni,
-    const webrtc::VideoFrame& frame) {
+bool MediaCodecVideoEncoder::MaybeReconfigureEncoder(JNIEnv* jni,
+                                                     const VideoFrame& frame) {
   RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_checker_);
 
   bool is_texture = IsTextureFrame(jni, frame);
@@ -888,9 +873,8 @@ bool MediaCodecVideoEncoder::MaybeReconfigureEncoder(
 }
 
 bool MediaCodecVideoEncoder::IsTextureFrame(JNIEnv* jni,
-                                            const webrtc::VideoFrame& frame) {
-  if (frame.video_frame_buffer()->type() !=
-      webrtc::VideoFrameBuffer::Type::kNative) {
+                                            const VideoFrame& frame) {
+  if (frame.video_frame_buffer()->type() != VideoFrameBuffer::Type::kNative) {
     return false;
   }
 
@@ -911,12 +895,12 @@ bool MediaCodecVideoEncoder::IsTextureFrame(JNIEnv* jni,
 
 bool MediaCodecVideoEncoder::EncodeByteBuffer(JNIEnv* jni,
                                               bool key_frame,
-                                              const webrtc::VideoFrame& frame,
+                                              const VideoFrame& frame,
                                               int input_buffer_index) {
   RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_checker_);
   RTC_CHECK(!use_surface_);
 
-  rtc::scoped_refptr<webrtc::I420BufferInterface> i420_buffer =
+  rtc::scoped_refptr<I420BufferInterface> i420_buffer =
       frame.video_frame_buffer()->ToI420();
   if (!FillInputBuffer(jni, input_buffer_index, i420_buffer->DataY(),
                        i420_buffer->StrideY(), i420_buffer->DataU(),
@@ -962,7 +946,7 @@ bool MediaCodecVideoEncoder::FillInputBuffer(JNIEnv* jni,
 
 bool MediaCodecVideoEncoder::EncodeTexture(JNIEnv* jni,
                                            bool key_frame,
-                                           const webrtc::VideoFrame& frame) {
+                                           const VideoFrame& frame) {
   RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_checker_);
   RTC_CHECK(use_surface_);
   NativeHandleImpl handle =
@@ -997,7 +981,7 @@ bool MediaCodecVideoEncoder::EncodeJavaFrame(JNIEnv* jni,
 }
 
 int32_t MediaCodecVideoEncoder::RegisterEncodeCompleteCallback(
-    webrtc::EncodedImageCallback* callback) {
+    EncodedImageCallback* callback) {
   RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_checker_);
   JNIEnv* jni = AttachCurrentThreadIfNeeded();
   ScopedLocalRefFrame local_ref_frame(jni);
@@ -1038,7 +1022,7 @@ int32_t MediaCodecVideoEncoder::Release() {
 }
 
 int32_t MediaCodecVideoEncoder::SetRateAllocation(
-    const webrtc::BitrateAllocation& rate_allocation,
+    const BitrateAllocation& rate_allocation,
     uint32_t frame_rate) {
   RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_checker_);
   const uint32_t new_bit_rate = rate_allocation.get_sum_kbps();
@@ -1147,35 +1131,33 @@ bool MediaCodecVideoEncoder::DeliverPendingOutputs(JNIEnv* jni) {
 
     // Callback - return encoded frame.
     const VideoCodecType codec_type = GetCodecType();
-    webrtc::EncodedImageCallback::Result callback_result(
-        webrtc::EncodedImageCallback::Result::OK);
+    EncodedImageCallback::Result callback_result(
+        EncodedImageCallback::Result::OK);
     if (callback_) {
-      std::unique_ptr<webrtc::EncodedImage> image(
-          new webrtc::EncodedImage(payload, payload_size, payload_size));
+      std::unique_ptr<EncodedImage> image(
+          new EncodedImage(payload, payload_size, payload_size));
       image->_encodedWidth = width_;
       image->_encodedHeight = height_;
       image->_timeStamp = output_timestamp_;
       image->capture_time_ms_ = output_render_time_ms_;
       image->rotation_ = output_rotation_;
-      image->content_type_ =
-          (codec_mode_ == webrtc::VideoCodecMode::kScreensharing)
-              ? webrtc::VideoContentType::SCREENSHARE
-              : webrtc::VideoContentType::UNSPECIFIED;
-      image->timing_.flags = webrtc::TimingFrameFlags::kInvalid;
-      image->_frameType =
-          (key_frame ? webrtc::kVideoFrameKey : webrtc::kVideoFrameDelta);
+      image->content_type_ = (codec_mode_ == VideoCodecMode::kScreensharing)
+                                 ? VideoContentType::SCREENSHARE
+                                 : VideoContentType::UNSPECIFIED;
+      image->timing_.flags = TimingFrameFlags::kInvalid;
+      image->_frameType = (key_frame ? kVideoFrameKey : kVideoFrameDelta);
       image->_completeFrame = true;
-      webrtc::CodecSpecificInfo info;
+      CodecSpecificInfo info;
       memset(&info, 0, sizeof(info));
       info.codecType = codec_type;
       if (codec_type == kVideoCodecVP8) {
        info.codecSpecific.VP8.pictureId = picture_id_;
        info.codecSpecific.VP8.nonReference = false;
        info.codecSpecific.VP8.simulcastIdx = 0;
-        info.codecSpecific.VP8.temporalIdx = webrtc::kNoTemporalIdx;
+        info.codecSpecific.VP8.temporalIdx = kNoTemporalIdx;
        info.codecSpecific.VP8.layerSync = false;
-        info.codecSpecific.VP8.tl0PicIdx = webrtc::kNoTl0PicIdx;
-        info.codecSpecific.VP8.keyIdx = webrtc::kNoKeyIdx;
+        info.codecSpecific.VP8.tl0PicIdx = kNoTl0PicIdx;
+        info.codecSpecific.VP8.keyIdx = kNoKeyIdx;
      } else if (codec_type == kVideoCodecVP9) {
        if (key_frame) {
          gof_idx_ = 0;
@@ -1185,8 +1167,8 @@ bool MediaCodecVideoEncoder::DeliverPendingOutputs(JNIEnv* jni) {
        info.codecSpecific.VP9.flexible_mode = false;
        info.codecSpecific.VP9.ss_data_available = key_frame ? true : false;
        info.codecSpecific.VP9.tl0_pic_idx = tl0_pic_idx_++;
-        info.codecSpecific.VP9.temporal_idx = webrtc::kNoTemporalIdx;
-        info.codecSpecific.VP9.spatial_idx = webrtc::kNoSpatialIdx;
+        info.codecSpecific.VP9.temporal_idx = kNoTemporalIdx;
+        info.codecSpecific.VP9.spatial_idx = kNoSpatialIdx;
        info.codecSpecific.VP9.temporal_up_switch = true;
        info.codecSpecific.VP9.inter_layer_predicted = false;
        info.codecSpecific.VP9.gof_idx =
@@ -1203,7 +1185,7 @@ bool MediaCodecVideoEncoder::DeliverPendingOutputs(JNIEnv* jni) {
      picture_id_ = (picture_id_ + 1) & 0x7FFF;
 
      // Generate a header describing a single fragment.
-      webrtc::RTPFragmentationHeader header;
+      RTPFragmentationHeader header;
      memset(&header, 0, sizeof(header));
      if (codec_type == kVideoCodecVP8 || codec_type == kVideoCodecVP9) {
        header.VerifyAndAllocateFragmentationHeader(1);
@@ -1213,13 +1195,13 @@ bool MediaCodecVideoEncoder::DeliverPendingOutputs(JNIEnv* jni) {
        header.fragmentationTimeDiff[0] = 0;
        if (codec_type == kVideoCodecVP8) {
          int qp;
-          if (webrtc::vp8::GetQp(payload, payload_size, &qp)) {
+          if (vp8::GetQp(payload, payload_size, &qp)) {
            current_acc_qp_ += qp;
            image->qp_ = qp;
          }
        } else if (codec_type == kVideoCodecVP9) {
          int qp;
-          if (webrtc::vp9::GetQp(payload, payload_size, &qp)) {
+          if (vp9::GetQp(payload, payload_size, &qp)) {
            current_acc_qp_ += qp;
            image->qp_ = qp;
          }
@@ -1232,8 +1214,8 @@ bool MediaCodecVideoEncoder::DeliverPendingOutputs(JNIEnv* jni) {
          image->qp_ = qp;
        }
        // For H.264 search for start codes.
-        const std::vector<webrtc::H264::NaluIndex> nalu_idxs =
-            webrtc::H264::FindNaluIndices(payload, payload_size);
+        const std::vector<H264::NaluIndex> nalu_idxs =
+            H264::FindNaluIndices(payload, payload_size);
        if (nalu_idxs.empty()) {
          ALOGE << "Start code is not found!";
          ALOGE << "Data:" << image->_buffer[0] << " " << image->_buffer[1]
@@ -1316,12 +1298,12 @@ void MediaCodecVideoEncoder::LogStatistics(bool force_log) {
   }
 }
 
-webrtc::VideoEncoder::ScalingSettings
-MediaCodecVideoEncoder::GetScalingSettings() const {
-  if (webrtc::field_trial::IsEnabled(kCustomQPThresholdsFieldTrial)) {
+VideoEncoder::ScalingSettings MediaCodecVideoEncoder::GetScalingSettings()
+    const {
+  if (field_trial::IsEnabled(kCustomQPThresholdsFieldTrial)) {
     const VideoCodecType codec_type = GetCodecType();
     std::string experiment_string =
-        webrtc::field_trial::FindFullName(kCustomQPThresholdsFieldTrial);
+        field_trial::FindFullName(kCustomQPThresholdsFieldTrial);
     ALOGD << "QP custom thresholds: " << experiment_string << " for codec "
           << codec_type;
     int low_vp8_qp_threshold;
@@ -1391,11 +1373,11 @@ MediaCodecVideoEncoderFactory::MediaCodecVideoEncoderFactory()
     // TODO(magjed): Enumerate actual level instead of using hardcoded level
     // 3.1. Level 3.1 is 1280x720@30fps which is enough for now.
     cricket::VideoCodec constrained_high(cricket::kH264CodecName);
-    const webrtc::H264::ProfileLevelId constrained_high_profile(
-        webrtc::H264::kProfileConstrainedHigh, webrtc::H264::kLevel3_1);
+    const H264::ProfileLevelId constrained_high_profile(
+        H264::kProfileConstrainedHigh, H264::kLevel3_1);
     constrained_high.SetParam(
         cricket::kH264FmtpProfileLevelId,
-        *webrtc::H264::ProfileLevelIdToString(constrained_high_profile));
+        *H264::ProfileLevelIdToString(constrained_high_profile));
     constrained_high.SetParam(cricket::kH264FmtpLevelAsymmetryAllowed, "1");
     constrained_high.SetParam(cricket::kH264FmtpPacketizationMode, "1");
     supported_codecs_with_h264_hp_.push_back(constrained_high);
@@ -1412,11 +1394,11 @@ MediaCodecVideoEncoderFactory::MediaCodecVideoEncoderFactory()
     // profile as long as we have decode support for it and still send Baseline
    // since Baseline is a subset of the High profile.
    cricket::VideoCodec constrained_baseline(cricket::kH264CodecName);
-    const webrtc::H264::ProfileLevelId constrained_baseline_profile(
-        webrtc::H264::kProfileConstrainedBaseline, webrtc::H264::kLevel3_1);
+    const H264::ProfileLevelId constrained_baseline_profile(
+        H264::kProfileConstrainedBaseline, H264::kLevel3_1);
    constrained_baseline.SetParam(
        cricket::kH264FmtpProfileLevelId,
-        *webrtc::H264::ProfileLevelIdToString(constrained_baseline_profile));
+        *H264::ProfileLevelIdToString(constrained_baseline_profile));
    constrained_baseline.SetParam(cricket::kH264FmtpLevelAsymmetryAllowed, "1");
    constrained_baseline.SetParam(cricket::kH264FmtpPacketizationMode, "1");
    supported_codecs_.push_back(constrained_baseline);
@@ -1445,7 +1427,7 @@ void MediaCodecVideoEncoderFactory::SetEGLContext(
   }
 }
 
-webrtc::VideoEncoder* MediaCodecVideoEncoderFactory::CreateVideoEncoder(
+VideoEncoder* MediaCodecVideoEncoderFactory::CreateVideoEncoder(
     const cricket::VideoCodec& codec) {
   if (supported_codecs().empty()) {
     ALOGW << "No HW video encoder for codec " << codec.name;
@@ -1463,15 +1445,14 @@ webrtc::VideoEncoder* MediaCodecVideoEncoderFactory::CreateVideoEncoder(
 
 const std::vector<cricket::VideoCodec>&
 MediaCodecVideoEncoderFactory::supported_codecs() const {
-  if (webrtc::field_trial::IsEnabled(kH264HighProfileFieldTrial)) {
+  if (field_trial::IsEnabled(kH264HighProfileFieldTrial)) {
    return supported_codecs_with_h264_hp_;
  } else {
    return supported_codecs_;
  }
 }
 
-void MediaCodecVideoEncoderFactory::DestroyVideoEncoder(
-    webrtc::VideoEncoder* encoder) {
+void MediaCodecVideoEncoderFactory::DestroyVideoEncoder(VideoEncoder* encoder) {
   ALOGD << "Destroy video encoder.";
   delete encoder;
 }
@@ -1500,4 +1481,5 @@ JNI_FUNCTION_DECLARATION(void,
                         stride_u, buffer_v, stride_v);
 }
 
-}  // namespace webrtc_jni
+}  // namespace jni
+}  // namespace webrtc
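
The patch above is a mechanical namespace migration: the JNI shim moves from the standalone webrtc_jni namespace into the nested webrtc::jni, which is why the block of "using webrtc::..." declarations disappears and the explicit webrtc:: qualifiers are dropped throughout. Unqualified name lookup inside webrtc::jni already searches the enclosing webrtc namespace, so the shorter spellings resolve to the same types. A minimal sketch of the lookup rule the patch relies on, using hypothetical stand-in types rather than the real WebRTC headers:

#include <cstdint>

// Stand-ins for the real webrtc::VideoFrame and webrtc::VideoEncoder; this
// only illustrates the name-lookup behavior, not the actual interfaces.
namespace webrtc {

struct VideoFrame {};

class VideoEncoder {
 public:
  virtual ~VideoEncoder() = default;
  virtual int32_t Encode(const VideoFrame& frame) = 0;
};

namespace jni {

// Inside webrtc::jni, unqualified lookup walks outward through the enclosing
// webrtc namespace, so VideoEncoder and VideoFrame resolve without any
// using-declaration or webrtc:: prefix.
class MediaCodecVideoEncoder : public VideoEncoder {
 public:
  int32_t Encode(const VideoFrame& /* frame */) override { return 0; }
};

}  // namespace jni
}  // namespace webrtc

int main() {
  // Callers outside the namespace now qualify names with webrtc::jni::
  // instead of the old webrtc_jni::.
  webrtc::jni::MediaCodecVideoEncoder encoder;
  webrtc::VideoFrame frame;
  return encoder.Encode(frame);
}

For code outside the namespace, the only visible change is the qualifier: webrtc_jni::MediaCodecVideoEncoder becomes webrtc::jni::MediaCodecVideoEncoder; behavior is unchanged.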