| OLD | NEW |
| 1 /* | 1 /* |
| 2 * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. | 2 * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. |
| 3 * | 3 * |
| 4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
| 5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
| 6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
| 7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
| 8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
| 9 */ | 9 */ |
| 10 | 10 |
| (...skipping 273 matching lines...) |
| 284 int64_t nextRenderTimeMs; | 284 int64_t nextRenderTimeMs; |
| 285 bool supports_render_scheduling; | 285 bool supports_render_scheduling; |
| 286 { | 286 { |
| 287 CriticalSectionScoped cs(_receiveCritSect); | 287 CriticalSectionScoped cs(_receiveCritSect); |
| 288 supports_render_scheduling = _codecDataBase.SupportsRenderScheduling(); | 288 supports_render_scheduling = _codecDataBase.SupportsRenderScheduling(); |
| 289 } | 289 } |
| 290 | 290 |
| 291 VCMEncodedFrame* frame = _receiver.FrameForDecoding( | 291 VCMEncodedFrame* frame = _receiver.FrameForDecoding( |
| 292 maxWaitTimeMs, nextRenderTimeMs, supports_render_scheduling); | 292 maxWaitTimeMs, nextRenderTimeMs, supports_render_scheduling); |
| 293 | 293 |
| 294 if (frame == NULL) { | 294 if (frame == NULL) |
| 295 return VCM_FRAME_NOT_READY; | 295 return VCM_FRAME_NOT_READY; |
| 296 } else { | |
| 297 CriticalSectionScoped cs(_receiveCritSect); | |
| 298 | 296 |
| 299 // If this frame was too late, we should adjust the delay accordingly | 297 CriticalSectionScoped cs(_receiveCritSect); |
| 300 _timing.UpdateCurrentDelay(frame->RenderTimeMs(), | |
| 301 clock_->TimeInMilliseconds()); | |
| 302 | 298 |
| 303 if (pre_decode_image_callback_) { | 299 // If this frame was too late, we should adjust the delay accordingly |
| 304 EncodedImage encoded_image(frame->EncodedImage()); | 300 _timing.UpdateCurrentDelay(frame->RenderTimeMs(), |
| 305 int qp = -1; | 301 clock_->TimeInMilliseconds()); |
| 306 if (qp_parser_.GetQp(*frame, &qp)) { | 302 |
| 307 encoded_image.qp_ = qp; | 303 if (pre_decode_image_callback_) { |
| 308 } | 304 EncodedImage encoded_image(frame->EncodedImage()); |
| 309 pre_decode_image_callback_->Encoded( | 305 int qp = -1; |
| 310 encoded_image, frame->CodecSpecific(), NULL); | 306 if (qp_parser_.GetQp(*frame, &qp)) { |
| | 307 encoded_image.qp_ = qp; |
| 311 } | 308 } |
| | 309 pre_decode_image_callback_->Encoded(encoded_image, frame->CodecSpecific(), |
| | 310 NULL); |
| | 311 } |
| 312 | 312 |
| 313 #ifdef DEBUG_DECODER_BIT_STREAM | 313 #ifdef DEBUG_DECODER_BIT_STREAM |
| 314 if (_bitStreamBeforeDecoder != NULL) { | 314 if (_bitStreamBeforeDecoder != NULL) { |
| 315 // Write bit stream to file for debugging purposes | 315 // Write bit stream to file for debugging purposes |
| 316 if (fwrite( | 316 if (fwrite(frame->Buffer(), 1, frame->Length(), _bitStreamBeforeDecoder) != |
| 317 frame->Buffer(), 1, frame->Length(), _bitStreamBeforeDecoder) != | 317 frame->Length()) { |
| 318 frame->Length()) { | 318 return -1; |
| 319 return -1; | |
| 320 } | |
| 321 } | |
| 322 #endif | |
| 323 const int32_t ret = Decode(*frame); | |
| 324 _receiver.ReleaseFrame(frame); | |
| 325 frame = NULL; | |
| 326 if (ret != VCM_OK) { | |
| 327 return ret; | |
| 328 } | 319 } |
| 329 } | 320 } |
| 330 return VCM_OK; | 321 #endif |
| | 322 const int32_t ret = Decode(*frame); |
| | 323 _receiver.ReleaseFrame(frame); |
| | 324 return ret; |
| 331 } | 325 } |
| 332 | 326 |
| 333 int32_t VideoReceiver::RequestSliceLossIndication( | 327 int32_t VideoReceiver::RequestSliceLossIndication( |
| 334 const uint64_t pictureID) const { | 328 const uint64_t pictureID) const { |
| 335 TRACE_EVENT1("webrtc", "RequestSLI", "picture_id", pictureID); | 329 TRACE_EVENT1("webrtc", "RequestSLI", "picture_id", pictureID); |
| 336 CriticalSectionScoped cs(process_crit_sect_.get()); | 330 CriticalSectionScoped cs(process_crit_sect_.get()); |
| 337 if (_frameTypeCallback != NULL) { | 331 if (_frameTypeCallback != NULL) { |
| 338 const int32_t ret = | 332 const int32_t ret = |
| 339 _frameTypeCallback->SliceLossIndicationRequest(pictureID); | 333 _frameTypeCallback->SliceLossIndicationRequest(pictureID); |
| 340 if (ret < 0) { | 334 if (ret < 0) { |
| (...skipping 228 matching lines...) |
| 569 } | 563 } |
| 570 | 564 |
| 571 void VideoReceiver::RegisterPreDecodeImageCallback( | 565 void VideoReceiver::RegisterPreDecodeImageCallback( |
| 572 EncodedImageCallback* observer) { | 566 EncodedImageCallback* observer) { |
| 573 CriticalSectionScoped cs(_receiveCritSect); | 567 CriticalSectionScoped cs(_receiveCritSect); |
| 574 pre_decode_image_callback_ = observer; | 568 pre_decode_image_callback_ = observer; |
| 575 } | 569 } |
| 576 | 570 |
| 577 } // namespace vcm | 571 } // namespace vcm |
| 578 } // namespace webrtc | 572 } // namespace webrtc |
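
For reference, the NEW column of the first hunk above (the body of VideoReceiver::Decode) reads as a single listing roughly as follows. This is a sketch assembled from the right-hand column only; the enclosing signature `int32_t VideoReceiver::Decode(uint16_t maxWaitTimeMs)` is an assumption, since it sits in the skipped context and is not shown in the hunk.

```cpp
// Sketch of the NEW side of the Decode() hunk, assembled from the right-hand
// column above. The signature below is assumed (it lies outside the visible
// hunk); everything else mirrors the NEW column.
int32_t VideoReceiver::Decode(uint16_t maxWaitTimeMs) {
  int64_t nextRenderTimeMs;
  bool supports_render_scheduling;
  {
    CriticalSectionScoped cs(_receiveCritSect);
    supports_render_scheduling = _codecDataBase.SupportsRenderScheduling();
  }

  VCMEncodedFrame* frame = _receiver.FrameForDecoding(
      maxWaitTimeMs, nextRenderTimeMs, supports_render_scheduling);

  if (frame == NULL)
    return VCM_FRAME_NOT_READY;

  CriticalSectionScoped cs(_receiveCritSect);

  // If this frame was too late, we should adjust the delay accordingly
  _timing.UpdateCurrentDelay(frame->RenderTimeMs(),
                             clock_->TimeInMilliseconds());

  if (pre_decode_image_callback_) {
    EncodedImage encoded_image(frame->EncodedImage());
    int qp = -1;
    if (qp_parser_.GetQp(*frame, &qp)) {
      encoded_image.qp_ = qp;
    }
    pre_decode_image_callback_->Encoded(encoded_image, frame->CodecSpecific(),
                                        NULL);
  }

#ifdef DEBUG_DECODER_BIT_STREAM
  if (_bitStreamBeforeDecoder != NULL) {
    // Write bit stream to file for debugging purposes
    if (fwrite(frame->Buffer(), 1, frame->Length(), _bitStreamBeforeDecoder) !=
        frame->Length()) {
      return -1;
    }
  }
#endif
  // The early return on a NULL frame replaces the old else block, so the
  // frame is released unconditionally and the decoder's result returned
  // directly.
  const int32_t ret = Decode(*frame);
  _receiver.ReleaseFrame(frame);
  return ret;
}
```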