OLD | NEW |
(Empty) | |
| 1 /* |
| 2 * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. |
| 3 * |
| 4 * Use of this source code is governed by a BSD-style license |
| 5 * that can be found in the LICENSE file in the root of the source |
| 6 * tree. An additional intellectual property rights grant can be found |
| 7 * in the file PATENTS. All contributing project authors may |
| 8 * be found in the AUTHORS file in the root of the source tree. |
| 9 */ |
| 10 |
| 11 #include "modules/video_coding/codecs/stereo/include/stereo_decoder_adapter.h" |
| 12 |
| 13 #include "api/video/i420_buffer.h" |
| 14 #include "common_video/include/video_frame.h" |
| 15 #include "common_video/include/video_frame_buffer.h" |
| 16 #include "common_video/libyuv/include/webrtc_libyuv.h" |
| 17 #include "rtc_base/keep_ref_until_done.h" |
| 18 #include "rtc_base/logging.h" |
| 19 |
| 20 namespace webrtc { |
| 21 |
// Per-stream callback that forwards decode results from one underlying
// decoder back to the owning StereoDecoderAdapter, tagged with the stereo
// stream (YUV or AXX) the result belongs to.
class StereoDecoderAdapter::AdapterDecodedImageCallback
    : public webrtc::DecodedImageCallback {
 public:
  AdapterDecodedImageCallback(webrtc::StereoDecoderAdapter* adapter,
                              StereoCodecStream stream_idx)
      : adapter_(adapter), stream_idx_(stream_idx) {}

  // Relays the decoded frame plus optional timing/QP metadata to the adapter,
  // which pairs it with the sibling stream of the same timestamp.
  void Decoded(VideoFrame& decodedImage,
               rtc::Optional<int32_t> decode_time_ms,
               rtc::Optional<uint8_t> qp) override {
    if (!adapter_)
      return;
    adapter_->Decoded(stream_idx_, decodedImage, decode_time_ms, qp);
  }
  // Legacy overloads: the wrapped decoders are expected to always use the
  // three-argument form above, so reaching these is a programming error.
  int32_t Decoded(VideoFrame& decodedImage) override {
    RTC_NOTREACHED();
    return WEBRTC_VIDEO_CODEC_OK;
  }
  int32_t Decoded(VideoFrame& decodedImage, int64_t decode_time_ms) override {
    RTC_NOTREACHED();
    return WEBRTC_VIDEO_CODEC_OK;
  }

 private:
  StereoDecoderAdapter* adapter_;       // Not owned; must outlive this callback.
  const StereoCodecStream stream_idx_;  // Which stereo stream this callback serves.
};
| 49 |
// Holds one stream's decoded result while waiting for the sibling stream with
// the same RTP timestamp to arrive.
struct StereoDecoderAdapter::DecodedImageData {
  // Placeholder entry for the alpha (AXX) stream when a frame carries only a
  // YUV stream: a dummy 1x1 frame with timestamp 0. The zero timestamp is the
  // sentinel MergeDecodedImages() uses to detect "no alpha present".
  explicit DecodedImageData(StereoCodecStream stream_idx)
      : stream_idx_(stream_idx),
        decodedImage_(I420Buffer::Create(1 /* width */, 1 /* height */),
                      0,
                      0,
                      kVideoRotation_0) {
    RTC_DCHECK_EQ(kAXXStream, stream_idx);
  }
  // Entry that stores an actual decode result together with its metadata.
  DecodedImageData(StereoCodecStream stream_idx,
                   const VideoFrame& decodedImage,
                   const rtc::Optional<int32_t>& decode_time_ms,
                   const rtc::Optional<uint8_t>& qp)
      : stream_idx_(stream_idx),
        decodedImage_(decodedImage),
        decode_time_ms_(decode_time_ms),
        qp_(qp) {}
  const StereoCodecStream stream_idx_;
  VideoFrame decodedImage_;
  const rtc::Optional<int32_t> decode_time_ms_;
  const rtc::Optional<uint8_t> qp_;

 private:
  RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(DecodedImageData);
};
| 75 |
// |factory| is used to create and destroy the per-stream underlying decoders.
// NOTE(review): whether factory_ takes ownership depends on its declaration in
// the header — confirm there.
StereoDecoderAdapter::StereoDecoderAdapter(VideoDecoderFactoryEx* factory)
    : factory_(factory) {}
| 78 |
// Releases and destroys any decoders still alive at teardown.
StereoDecoderAdapter::~StereoDecoderAdapter() {
  Release();
}
| 82 |
| 83 int32_t StereoDecoderAdapter::InitDecode(const VideoCodec* codec_settings, |
| 84 int32_t number_of_cores) { |
| 85 VideoCodec settings = *codec_settings; |
| 86 settings.codecType = kVideoCodecVP9; |
| 87 for (size_t i = 0; i < kStereoCodecStreams; ++i) { |
| 88 VideoDecoder* decoder = factory_->Create(); |
| 89 const int32_t rv = decoder->InitDecode(&settings, number_of_cores); |
| 90 if (rv) |
| 91 return rv; |
| 92 decoders_.push_back(decoder); |
| 93 adapter_callbacks_.emplace_back( |
| 94 new StereoDecoderAdapter::AdapterDecodedImageCallback( |
| 95 this, static_cast<StereoCodecStream>(i))); |
| 96 decoder->RegisterDecodeCompleteCallback(adapter_callbacks_.back().get()); |
| 97 } |
| 98 return WEBRTC_VIDEO_CODEC_OK; |
| 99 } |
| 100 |
| 101 int32_t StereoDecoderAdapter::Decode( |
| 102 const EncodedImage& input_image, |
| 103 bool missing_frames, |
| 104 const RTPFragmentationHeader* /*fragmentation*/, |
| 105 const CodecSpecificInfo* codec_specific_info, |
| 106 int64_t render_time_ms) { |
| 107 LOG(LS_ERROR) << __func__; |
| 108 LOG(LS_ERROR) << __func__ |
| 109 << static_cast<int>(codec_specific_info->stereoInfo.frameIndex); |
| 110 LOG(LS_ERROR) << __func__ |
| 111 << static_cast<int>(codec_specific_info->stereoInfo.frameCount); |
| 112 LOG(LS_ERROR) << __func__ |
| 113 << static_cast<int>( |
| 114 codec_specific_info->stereoInfo.pictureIndex); |
| 115 if (codec_specific_info->stereoInfo.frameCount == 1) { |
| 116 RTC_DCHECK(decoded_data_.find(input_image._timeStamp) == |
| 117 decoded_data_.end()); |
| 118 decoded_data_.emplace(std::piecewise_construct, |
| 119 std::forward_as_tuple(input_image._timeStamp), |
| 120 std::forward_as_tuple(kAXXStream)); |
| 121 } |
| 122 |
| 123 int32_t rv = decoders_[codec_specific_info->stereoInfo.frameIndex]->Decode( |
| 124 input_image, missing_frames, nullptr, codec_specific_info, |
| 125 render_time_ms); |
| 126 return rv; |
| 127 } |
| 128 |
// Stores the sink that receives the final merged (YUV+alpha) frames.
// The callback is not owned and must outlive this adapter.
int32_t StereoDecoderAdapter::RegisterDecodeCompleteCallback(
    DecodedImageCallback* callback) {
  decoded_complete_callback_ = callback;
  return WEBRTC_VIDEO_CODEC_OK;
}
| 134 |
| 135 int32_t StereoDecoderAdapter::Release() { |
| 136 for (auto decoder : decoders_) { |
| 137 const int32_t rv = decoder->Release(); |
| 138 if (rv) |
| 139 return rv; |
| 140 factory_->Destroy(decoder); |
| 141 } |
| 142 decoders_.clear(); |
| 143 adapter_callbacks_.clear(); |
| 144 return WEBRTC_VIDEO_CODEC_OK; |
| 145 } |
| 146 |
| 147 void StereoDecoderAdapter::Decoded(StereoCodecStream stream_idx, |
| 148 VideoFrame& decoded_image, |
| 149 rtc::Optional<int32_t> decode_time_ms, |
| 150 rtc::Optional<uint8_t> qp) { |
| 151 const auto& other_decoded_data_it = |
| 152 decoded_data_.find(decoded_image.timestamp()); |
| 153 if (other_decoded_data_it != decoded_data_.end()) { |
| 154 auto& other_image_data = other_decoded_data_it->second; |
| 155 if (stream_idx == kYUVStream) { |
| 156 RTC_DCHECK_EQ(kAXXStream, other_image_data.stream_idx_); |
| 157 MergeDecodedImages( |
| 158 decoded_image, decode_time_ms, qp, other_image_data.decodedImage_, |
| 159 other_image_data.decode_time_ms_, other_image_data.qp_); |
| 160 } else { |
| 161 RTC_DCHECK_EQ(kYUVStream, other_image_data.stream_idx_); |
| 162 RTC_DCHECK_EQ(kAXXStream, stream_idx); |
| 163 MergeDecodedImages(other_image_data.decodedImage_, |
| 164 other_image_data.decode_time_ms_, other_image_data.qp_, |
| 165 decoded_image, decode_time_ms, qp); |
| 166 } |
| 167 decoded_data_.erase(decoded_data_.begin(), other_decoded_data_it); |
| 168 return; |
| 169 } |
| 170 RTC_DCHECK(decoded_data_.find(decoded_image.timestamp()) == |
| 171 decoded_data_.end()); |
| 172 decoded_data_.emplace( |
| 173 std::piecewise_construct, |
| 174 std::forward_as_tuple(decoded_image.timestamp()), |
| 175 std::forward_as_tuple(stream_idx, decoded_image, decode_time_ms, qp)); |
| 176 } |
| 177 |
| 178 void StereoDecoderAdapter::MergeDecodedImages( |
| 179 VideoFrame& decodedImage, |
| 180 const rtc::Optional<int32_t>& decode_time_ms, |
| 181 const rtc::Optional<uint8_t>& qp, |
| 182 VideoFrame& alpha_decodedImage, |
| 183 const rtc::Optional<int32_t>& stereo_decode_time_ms, |
| 184 const rtc::Optional<uint8_t>& stereo_qp) { |
| 185 if (!alpha_decodedImage.timestamp()) { |
| 186 decoded_complete_callback_->Decoded(decodedImage, decode_time_ms, qp); |
| 187 return; |
| 188 } |
| 189 rtc::scoped_refptr<webrtc::I420BufferInterface> alpha_buffer = |
| 190 alpha_decodedImage.video_frame_buffer()->ToI420(); |
| 191 rtc::scoped_refptr<WrappedI420ABuffer> wrapped_buffer( |
| 192 new rtc::RefCountedObject<webrtc::WrappedI420ABuffer>( |
| 193 decodedImage.video_frame_buffer(), alpha_buffer->DataY(), |
| 194 alpha_buffer->StrideY(), |
| 195 rtc::KeepRefUntilDone(alpha_decodedImage.video_frame_buffer()))); |
| 196 VideoFrame wrapped_image(wrapped_buffer, decodedImage.timestamp(), |
| 197 0 /* render_time_ms */, decodedImage.rotation()); |
| 198 decoded_complete_callback_->Decoded(wrapped_image, decode_time_ms, qp); |
| 199 } |
| 200 |
| 201 } // namespace webrtc |
OLD | NEW |