| Index: webrtc/video/video_receive_stream.cc
|
| diff --git a/webrtc/video/video_receive_stream.cc b/webrtc/video/video_receive_stream.cc
|
| index be623cef56ade6bd4c3f60357bdc7eb47db151ad..dd2faf3e68f867da03d2bbd834d174ed3f166201 100644
|
| --- a/webrtc/video/video_receive_stream.cc
|
| +++ b/webrtc/video/video_receive_stream.cc
|
| @@ -20,6 +20,7 @@
|
| #include "webrtc/base/location.h"
|
| #include "webrtc/base/logging.h"
|
| #include "webrtc/base/optional.h"
|
| +#include "webrtc/base/timeutils.h"
|
| #include "webrtc/base/trace_event.h"
|
| #include "webrtc/common_video/h264/profile_level_id.h"
|
| #include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
|
| @@ -236,8 +237,12 @@ VideoReceiveStream::~VideoReceiveStream() {
|
| LOG(LS_INFO) << "~VideoReceiveStream: " << config_.ToString();
|
| Stop();
|
|
|
| + LOG(LS_INFO) << "~VideoReceiveStream: DeRegisterModule rtp_stream_sync_.";
|
| process_thread_->DeRegisterModule(&rtp_stream_sync_);
|
| + LOG(LS_INFO) << "~VideoReceiveStream: DeRegisterModule video_receiver_.";
|
| process_thread_->DeRegisterModule(&video_receiver_);
|
| +
|
| + LOG(LS_INFO) << "~VideoReceiveStream: Modules DeRegistered.";
|
| }
|
|
|
| void VideoReceiveStream::SignalNetworkState(NetworkState state) {
|
| @@ -310,29 +315,26 @@ void VideoReceiveStream::Start() {
|
| call_stats_->RegisterStatsObserver(video_stream_decoder_.get());
|
| // Start the decode thread
|
| decode_thread_.Start();
|
| -#if !defined(WEBRTC_ANDROID)
|
| - // On android, the decoding happens on a different thread and frames
|
| - // are delivered on that thread (that in itself needs to be fixed).
|
| - // In any event, the actual decoding work is higher priority than the
|
| - // |decode_thread_| on Android, so we only raise the deocode priority on
|
| - // platforms other than Android.
|
| - decode_thread_.SetPriority(rtc::kHighestPriority);
|
| -#endif
|
| rtp_stream_receiver_.StartReceive();
|
| }
|
|
|
| void VideoReceiveStream::Stop() {
|
| RTC_DCHECK_RUN_ON(&worker_thread_checker_);
|
| + LOG(LS_INFO) << "VideoReceiveStream::Stop: StopReceive";
|
| rtp_stream_receiver_.StopReceive();
|
| // TriggerDecoderShutdown will release any waiting decoder thread and make it
|
| // stop immediately, instead of waiting for a timeout. Needs to be called
|
| // before joining the decoder thread thread.
|
| + LOG(LS_INFO) << "VideoReceiveStream::Stop: TriggerDecoderShutdown";
|
| video_receiver_.TriggerDecoderShutdown();
|
|
|
| + LOG(LS_INFO) << "VideoReceiveStream::Stop: frame_buffer_";
|
| frame_buffer_->Stop();
|
| +  LOG(LS_INFO) << "VideoReceiveStream::Stop: call_stats_ (rtp_stream_receiver_)";
|
| call_stats_->DeregisterStatsObserver(&rtp_stream_receiver_);
|
|
|
| if (decode_thread_.IsRunning()) {
|
| + LOG(LS_INFO) << "VideoReceiveStream::Stop: decode_thread_";
|
| decode_thread_.Stop();
|
| // Deregister external decoders so they are no longer running during
|
| // destruction. This effectively stops the VCM since the decoder thread is
|
| @@ -342,10 +344,15 @@ void VideoReceiveStream::Stop() {
|
| video_receiver_.RegisterExternalDecoder(nullptr, decoder.payload_type);
|
| }
|
|
|
| +  LOG(LS_INFO) << "VideoReceiveStream::Stop: call_stats_ (video_stream_decoder_)";
|
| call_stats_->DeregisterStatsObserver(video_stream_decoder_.get());
|
| + LOG(LS_INFO) << "VideoReceiveStream::Stop: video_stream_decoder_";
|
| video_stream_decoder_.reset();
|
| + LOG(LS_INFO) << "VideoReceiveStream::Stop: incoming_video_stream_";
|
| incoming_video_stream_.reset();
|
| + LOG(LS_INFO) << "VideoReceiveStream::Stop: transport_adapter_";
|
| transport_adapter_.Disable();
|
| + LOG(LS_INFO) << "VideoReceiveStream::Stop: done";
|
| }
|
|
|
| VideoReceiveStream::Stats VideoReceiveStream::GetStats() const {
|
| @@ -477,28 +484,53 @@ void VideoReceiveStream::SetMinimumPlayoutDelay(int delay_ms) {
|
| video_receiver_.SetMinimumPlayoutDelay(delay_ms);
|
| }
|
|
|
| -bool VideoReceiveStream::DecodeThreadFunction(void* ptr) {
|
| - return static_cast<VideoReceiveStream*>(ptr)->Decode();
|
| +void VideoReceiveStream::DecodeThreadFunction(void* ptr) {
|
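| +  // Keep decoding until Decode() signals that the frame buffer was stopped.
|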
| + while (static_cast<VideoReceiveStream*>(ptr)->Decode()) {
|
| + }
|
| }
|
|
|
| bool VideoReceiveStream::Decode() {
|
| TRACE_EVENT0("webrtc", "VideoReceiveStream::Decode");
|
| static const int kMaxWaitForFrameMs = 3000;
|
| std::unique_ptr<video_coding::FrameObject> frame;
|
| - video_coding::FrameBuffer::ReturnReason res =
|
| - frame_buffer_->NextFrame(kMaxWaitForFrameMs, &frame);
|
| +
|
| + video_coding::FrameBuffer::ReturnReason res;
|
| +#if defined(WEBRTC_ANDROID)
|
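| +  // On Android, decoding happens on a separate thread and decoded frames are
|
| +  // delivered on that thread, so wait for the next frame in short intervals
|
| +  // and poll the decoder for completed frames in between.
|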
| + static const int kPollIntervalMs = 10;
|
| + int time_remaining = kMaxWaitForFrameMs;
|
| + do {
|
| + res = frame_buffer_->NextFrame(kPollIntervalMs, &frame);
|
| + if (res != video_coding::FrameBuffer::ReturnReason::kTimeout)
|
| + break;
|
| + time_remaining -= kPollIntervalMs;
|
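| +    // Poll the decoder for frames it has decoded in the meantime.
|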
| + video_receiver_.PollDecodedFrames();
|
| + } while (time_remaining > 0);
|
| +#else
|
| + res = frame_buffer_->NextFrame(kMaxWaitForFrameMs, &frame);
|
| +#endif
|
|
|
| if (res == video_coding::FrameBuffer::ReturnReason::kStopped)
|
| return false;
|
|
|
| if (frame) {
|
| - if (video_receiver_.Decode(frame.get()) == VCM_OK)
|
| + auto ret = video_receiver_.Decode(frame.get());
|
| + if (ret == VCM_OK) {
|
| rtp_stream_receiver_.FrameDecoded(frame->picture_id);
|
| + } else {
|
| +      LOG(LS_WARNING) << "video_receiver_.Decode failed, error: " << ret;
|
| + }
|
| } else {
|
| + RTC_DCHECK_EQ(res, video_coding::FrameBuffer::ReturnReason::kTimeout);
|
| LOG(LS_WARNING) << "No decodable frame in " << kMaxWaitForFrameMs
|
| << " ms, requesting keyframe.";
|
| RequestKeyFrame();
|
| }
|
| +
|
| return true;
|
| }
|
| } // namespace internal
|
|
|