Chromium Code Reviews

Side by Side Diff: talk/app/webrtc/java/jni/androidmediadecoder_jni.cc

Issue 1396653002: Use WebRTC logging in MediaCodec JNI code. (Closed) Base URL: https://chromium.googlesource.com/external/webrtc@master
Patch Set: i Created 5 years, 2 months ago
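The patch converts the MediaCodec JNI decoder from printf-style ALOG* macros that take format strings to stream-style ALOG* macros that route through WebRTC's logging, so values are formatted with operator<< instead of printf specifiers. A representative before/after, copied from the hunks below:

    // Before: printf-style macro with format specifiers.
    ALOGD("InitDecodeOnCodecThread Type: %d. %d x %d. Fps: %d.",
          (int)codecType_, codec_.width, codec_.height, codec_.maxFramerate);

    // After: stream-style macro; each value is formatted by operator<<.
    ALOGD << "InitDecodeOnCodecThread Type: " << (int)codecType_ << ". "
          << codec_.width << " x " << codec_.height << ". Fps: "
          << (int)codec_.maxFramerate;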
OLD | NEW
1 /* 1 /*
2 * libjingle 2 * libjingle
3 * Copyright 2015 Google Inc. 3 * Copyright 2015 Google Inc.
4 * 4 *
5 * Redistribution and use in source and binary forms, with or without 5 * Redistribution and use in source and binary forms, with or without
6 * modification, are permitted provided that the following conditions are met: 6 * modification, are permitted provided that the following conditions are met:
7 * 7 *
8 * 1. Redistributions of source code must retain the above copyright notice, 8 * 1. Redistributions of source code must retain the above copyright notice,
9 * this list of conditions and the following disclaimer. 9 * this list of conditions and the following disclaimer.
10 * 2. Redistributions in binary form must reproduce the above copyright notice, 10 * 2. Redistributions in binary form must reproduce the above copyright notice,
(...skipping 222 matching lines...)
233 jni, j_decoder_decoded_byte_buffer_class, "index", "I"); 233 jni, j_decoder_decoded_byte_buffer_class, "index", "I");
234 j_info_offset_field_ = GetFieldID( 234 j_info_offset_field_ = GetFieldID(
235 jni, j_decoder_decoded_byte_buffer_class, "offset", "I"); 235 jni, j_decoder_decoded_byte_buffer_class, "offset", "I");
236 j_info_size_field_ = GetFieldID( 236 j_info_size_field_ = GetFieldID(
237 jni, j_decoder_decoded_byte_buffer_class, "size", "I"); 237 jni, j_decoder_decoded_byte_buffer_class, "size", "I");
238 j_info_presentation_timestamp_us_field_ = GetFieldID( 238 j_info_presentation_timestamp_us_field_ = GetFieldID(
239 jni, j_decoder_decoded_byte_buffer_class, "presentationTimestampUs", "J"); 239 jni, j_decoder_decoded_byte_buffer_class, "presentationTimestampUs", "J");
240 240
241 CHECK_EXCEPTION(jni) << "MediaCodecVideoDecoder ctor failed"; 241 CHECK_EXCEPTION(jni) << "MediaCodecVideoDecoder ctor failed";
242 use_surface_ = (render_egl_context_ != NULL); 242 use_surface_ = (render_egl_context_ != NULL);
243 ALOGD("MediaCodecVideoDecoder ctor. Use surface: %d", use_surface_); 243 ALOGD << "MediaCodecVideoDecoder ctor. Use surface: " << use_surface_;
244 memset(&codec_, 0, sizeof(codec_)); 244 memset(&codec_, 0, sizeof(codec_));
245 AllowBlockingCalls(); 245 AllowBlockingCalls();
246 } 246 }
247 247
248 MediaCodecVideoDecoder::~MediaCodecVideoDecoder() { 248 MediaCodecVideoDecoder::~MediaCodecVideoDecoder() {
249 // Call Release() to ensure no more callbacks to us after we are deleted. 249 // Call Release() to ensure no more callbacks to us after we are deleted.
250 Release(); 250 Release();
251 } 251 }
252 252
253 int32_t MediaCodecVideoDecoder::InitDecode(const VideoCodec* inst, 253 int32_t MediaCodecVideoDecoder::InitDecode(const VideoCodec* inst,
254 int32_t numberOfCores) { 254 int32_t numberOfCores) {
255 ALOGD("InitDecode."); 255 ALOGD << "InitDecode.";
256 if (inst == NULL) { 256 if (inst == NULL) {
257 ALOGE("NULL VideoCodec instance"); 257 ALOGE << "NULL VideoCodec instance";
258 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; 258 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
259 } 259 }
260 // Factory should guard against other codecs being used with us. 260 // Factory should guard against other codecs being used with us.
261 RTC_CHECK(inst->codecType == codecType_) 261 RTC_CHECK(inst->codecType == codecType_)
262 << "Unsupported codec " << inst->codecType << " for " << codecType_; 262 << "Unsupported codec " << inst->codecType << " for " << codecType_;
263 263
264 if (sw_fallback_required_) { 264 if (sw_fallback_required_) {
265 ALOGE("InitDecode() - fallback to SW decoder"); 265 ALOGE << "InitDecode() - fallback to SW decoder";
266 return WEBRTC_VIDEO_CODEC_OK; 266 return WEBRTC_VIDEO_CODEC_OK;
267 } 267 }
268 // Save VideoCodec instance for later. 268 // Save VideoCodec instance for later.
269 if (&codec_ != inst) { 269 if (&codec_ != inst) {
270 codec_ = *inst; 270 codec_ = *inst;
271 } 271 }
272 codec_.maxFramerate = (codec_.maxFramerate >= 1) ? codec_.maxFramerate : 1; 272 codec_.maxFramerate = (codec_.maxFramerate >= 1) ? codec_.maxFramerate : 1;
273 273
274 // Call Java init. 274 // Call Java init.
275 return codec_thread_->Invoke<int32_t>( 275 return codec_thread_->Invoke<int32_t>(
276 Bind(&MediaCodecVideoDecoder::InitDecodeOnCodecThread, this)); 276 Bind(&MediaCodecVideoDecoder::InitDecodeOnCodecThread, this));
277 } 277 }
278 278
279 int32_t MediaCodecVideoDecoder::InitDecodeOnCodecThread() { 279 int32_t MediaCodecVideoDecoder::InitDecodeOnCodecThread() {
280 CheckOnCodecThread(); 280 CheckOnCodecThread();
281 JNIEnv* jni = AttachCurrentThreadIfNeeded(); 281 JNIEnv* jni = AttachCurrentThreadIfNeeded();
282 ScopedLocalRefFrame local_ref_frame(jni); 282 ScopedLocalRefFrame local_ref_frame(jni);
283 ALOGD("InitDecodeOnCodecThread Type: %d. %d x %d. Fps: %d.", 283 ALOGD << "InitDecodeOnCodecThread Type: " << (int)codecType_ << ". "
284 (int)codecType_, codec_.width, codec_.height, 284 << codec_.width << " x " << codec_.height << ". Fps: " <<
285 codec_.maxFramerate); 285 (int)codec_.maxFramerate;
286 286
287 // Release previous codec first if it was allocated before. 287 // Release previous codec first if it was allocated before.
288 int ret_val = ReleaseOnCodecThread(); 288 int ret_val = ReleaseOnCodecThread();
289 if (ret_val < 0) { 289 if (ret_val < 0) {
290 ALOGE("Release failure: %d - fallback to SW codec", ret_val); 290 ALOGE << "Release failure: " << ret_val << " - fallback to SW codec";
291 sw_fallback_required_ = true; 291 sw_fallback_required_ = true;
292 return WEBRTC_VIDEO_CODEC_ERROR; 292 return WEBRTC_VIDEO_CODEC_ERROR;
293 } 293 }
294 294
295 // Always start with a complete key frame. 295 // Always start with a complete key frame.
296 key_frame_required_ = true; 296 key_frame_required_ = true;
297 frames_received_ = 0; 297 frames_received_ = 0;
298 frames_decoded_ = 0; 298 frames_decoded_ = 0;
299 299
300 if (use_surface_) { 300 if (use_surface_) {
301 surface_texture_helper_ = new rtc::RefCountedObject<SurfaceTextureHelper>( 301 surface_texture_helper_ = new rtc::RefCountedObject<SurfaceTextureHelper>(
302 jni, render_egl_context_); 302 jni, render_egl_context_);
303 } 303 }
304 304
305 jobject j_video_codec_enum = JavaEnumFromIndex( 305 jobject j_video_codec_enum = JavaEnumFromIndex(
306 jni, "MediaCodecVideoDecoder$VideoCodecType", codecType_); 306 jni, "MediaCodecVideoDecoder$VideoCodecType", codecType_);
307 bool success = jni->CallBooleanMethod( 307 bool success = jni->CallBooleanMethod(
308 *j_media_codec_video_decoder_, 308 *j_media_codec_video_decoder_,
309 j_init_decode_method_, 309 j_init_decode_method_,
310 j_video_codec_enum, 310 j_video_codec_enum,
311 codec_.width, 311 codec_.width,
312 codec_.height, 312 codec_.height,
313 use_surface_ ? surface_texture_helper_->GetJavaSurfaceTextureHelper() 313 use_surface_ ? surface_texture_helper_->GetJavaSurfaceTextureHelper()
314 : nullptr); 314 : nullptr);
315 if (CheckException(jni) || !success) { 315 if (CheckException(jni) || !success) {
316 ALOGE("Codec initialization error - fallback to SW codec."); 316 ALOGE << "Codec initialization error - fallback to SW codec.";
317 sw_fallback_required_ = true; 317 sw_fallback_required_ = true;
318 return WEBRTC_VIDEO_CODEC_ERROR; 318 return WEBRTC_VIDEO_CODEC_ERROR;
319 } 319 }
320 inited_ = true; 320 inited_ = true;
321 321
322 switch (codecType_) { 322 switch (codecType_) {
323 case kVideoCodecVP8: 323 case kVideoCodecVP8:
324 max_pending_frames_ = kMaxPendingFramesVp8; 324 max_pending_frames_ = kMaxPendingFramesVp8;
325 break; 325 break;
326 case kVideoCodecH264: 326 case kVideoCodecH264:
(...skipping 11 matching lines...)
338 frame_rtc_times_ms_.clear(); 338 frame_rtc_times_ms_.clear();
339 339
340 jobjectArray input_buffers = (jobjectArray)GetObjectField( 340 jobjectArray input_buffers = (jobjectArray)GetObjectField(
341 jni, *j_media_codec_video_decoder_, j_input_buffers_field_); 341 jni, *j_media_codec_video_decoder_, j_input_buffers_field_);
342 size_t num_input_buffers = jni->GetArrayLength(input_buffers); 342 size_t num_input_buffers = jni->GetArrayLength(input_buffers);
343 input_buffers_.resize(num_input_buffers); 343 input_buffers_.resize(num_input_buffers);
344 for (size_t i = 0; i < num_input_buffers; ++i) { 344 for (size_t i = 0; i < num_input_buffers; ++i) {
345 input_buffers_[i] = 345 input_buffers_[i] =
346 jni->NewGlobalRef(jni->GetObjectArrayElement(input_buffers, i)); 346 jni->NewGlobalRef(jni->GetObjectArrayElement(input_buffers, i));
347 if (CheckException(jni)) { 347 if (CheckException(jni)) {
348 ALOGE("NewGlobalRef error - fallback to SW codec."); 348 ALOGE << "NewGlobalRef error - fallback to SW codec.";
349 sw_fallback_required_ = true; 349 sw_fallback_required_ = true;
350 return WEBRTC_VIDEO_CODEC_ERROR; 350 return WEBRTC_VIDEO_CODEC_ERROR;
351 } 351 }
352 } 352 }
353 353
354 codec_thread_->PostDelayed(kMediaCodecPollMs, this); 354 codec_thread_->PostDelayed(kMediaCodecPollMs, this);
355 355
356 return WEBRTC_VIDEO_CODEC_OK; 356 return WEBRTC_VIDEO_CODEC_OK;
357 } 357 }
358 358
359 int32_t MediaCodecVideoDecoder::Release() { 359 int32_t MediaCodecVideoDecoder::Release() {
360 ALOGD("DecoderRelease request"); 360 ALOGD << "DecoderRelease request";
361 return codec_thread_->Invoke<int32_t>( 361 return codec_thread_->Invoke<int32_t>(
362 Bind(&MediaCodecVideoDecoder::ReleaseOnCodecThread, this)); 362 Bind(&MediaCodecVideoDecoder::ReleaseOnCodecThread, this));
363 } 363 }
364 364
365 int32_t MediaCodecVideoDecoder::ReleaseOnCodecThread() { 365 int32_t MediaCodecVideoDecoder::ReleaseOnCodecThread() {
366 if (!inited_) { 366 if (!inited_) {
367 return WEBRTC_VIDEO_CODEC_OK; 367 return WEBRTC_VIDEO_CODEC_OK;
368 } 368 }
369 CheckOnCodecThread(); 369 CheckOnCodecThread();
370 JNIEnv* jni = AttachCurrentThreadIfNeeded(); 370 JNIEnv* jni = AttachCurrentThreadIfNeeded();
371 ALOGD("DecoderReleaseOnCodecThread: Frames received: %d.", frames_received_); 371 ALOGD << "DecoderReleaseOnCodecThread: Frames received: " << frames_received_;
372 ScopedLocalRefFrame local_ref_frame(jni); 372 ScopedLocalRefFrame local_ref_frame(jni);
373 for (size_t i = 0; i < input_buffers_.size(); i++) { 373 for (size_t i = 0; i < input_buffers_.size(); i++) {
374 jni->DeleteGlobalRef(input_buffers_[i]); 374 jni->DeleteGlobalRef(input_buffers_[i]);
375 } 375 }
376 input_buffers_.clear(); 376 input_buffers_.clear();
377 jni->CallVoidMethod(*j_media_codec_video_decoder_, j_release_method_); 377 jni->CallVoidMethod(*j_media_codec_video_decoder_, j_release_method_);
378 surface_texture_helper_ = nullptr; 378 surface_texture_helper_ = nullptr;
379 inited_ = false; 379 inited_ = false;
380 rtc::MessageQueueManager::Clear(this); 380 rtc::MessageQueueManager::Clear(this);
381 if (CheckException(jni)) { 381 if (CheckException(jni)) {
382 ALOGE("Decoder release exception"); 382 ALOGE << "Decoder release exception";
383 return WEBRTC_VIDEO_CODEC_ERROR; 383 return WEBRTC_VIDEO_CODEC_ERROR;
384 } 384 }
385 ALOGD << "DecoderReleaseOnCodecThread done";
385 return WEBRTC_VIDEO_CODEC_OK; 386 return WEBRTC_VIDEO_CODEC_OK;
386 } 387 }
387 388
388 void MediaCodecVideoDecoder::CheckOnCodecThread() { 389 void MediaCodecVideoDecoder::CheckOnCodecThread() {
389 RTC_CHECK(codec_thread_ == ThreadManager::Instance()->CurrentThread()) 390 RTC_CHECK(codec_thread_ == ThreadManager::Instance()->CurrentThread())
390 << "Running on wrong thread!"; 391 << "Running on wrong thread!";
391 } 392 }
392 393
393 int32_t MediaCodecVideoDecoder::ProcessHWErrorOnCodecThread() { 394 int32_t MediaCodecVideoDecoder::ProcessHWErrorOnCodecThread() {
394 CheckOnCodecThread(); 395 CheckOnCodecThread();
395 int ret_val = ReleaseOnCodecThread(); 396 int ret_val = ReleaseOnCodecThread();
396 if (ret_val < 0) { 397 if (ret_val < 0) {
397 ALOGE("ProcessHWError: Release failure"); 398 ALOGE << "ProcessHWError: Release failure";
398 } 399 }
399 if (codecType_ == kVideoCodecH264) { 400 if (codecType_ == kVideoCodecH264) {
400 // For now there is no SW H.264 which can be used as fallback codec. 401 // For now there is no SW H.264 which can be used as fallback codec.
401 // So try to restart hw codec for now. 402 // So try to restart hw codec for now.
402 ret_val = InitDecodeOnCodecThread(); 403 ret_val = InitDecodeOnCodecThread();
403 ALOGE("Reset H.264 codec done. Status: %d", ret_val); 404 ALOGE << "Reset H.264 codec done. Status: " << ret_val;
404 if (ret_val == WEBRTC_VIDEO_CODEC_OK) { 405 if (ret_val == WEBRTC_VIDEO_CODEC_OK) {
405 // H.264 codec was successfully reset - return regular error code. 406 // H.264 codec was successfully reset - return regular error code.
406 return WEBRTC_VIDEO_CODEC_ERROR; 407 return WEBRTC_VIDEO_CODEC_ERROR;
407 } else { 408 } else {
408 // Fail to restart H.264 codec - return error code which should stop the 409 // Fail to restart H.264 codec - return error code which should stop the
409 // call. 410 // call.
410 return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE; 411 return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
411 } 412 }
412 } else { 413 } else {
413 sw_fallback_required_ = true; 414 sw_fallback_required_ = true;
414 ALOGE("Return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE"); 415 ALOGE << "Return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE";
415 return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE; 416 return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
416 } 417 }
417 } 418 }
418 419
419 int32_t MediaCodecVideoDecoder::Decode( 420 int32_t MediaCodecVideoDecoder::Decode(
420 const EncodedImage& inputImage, 421 const EncodedImage& inputImage,
421 bool missingFrames, 422 bool missingFrames,
422 const RTPFragmentationHeader* fragmentation, 423 const RTPFragmentationHeader* fragmentation,
423 const CodecSpecificInfo* codecSpecificInfo, 424 const CodecSpecificInfo* codecSpecificInfo,
424 int64_t renderTimeMs) { 425 int64_t renderTimeMs) {
425 if (sw_fallback_required_) { 426 if (sw_fallback_required_) {
426 ALOGE("Decode() - fallback to SW codec"); 427 ALOGE << "Decode() - fallback to SW codec";
427 return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE; 428 return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
428 } 429 }
429 if (callback_ == NULL) { 430 if (callback_ == NULL) {
430 ALOGE("Decode() - callback_ is NULL"); 431 ALOGE << "Decode() - callback_ is NULL";
431 return WEBRTC_VIDEO_CODEC_UNINITIALIZED; 432 return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
432 } 433 }
433 if (inputImage._buffer == NULL && inputImage._length > 0) { 434 if (inputImage._buffer == NULL && inputImage._length > 0) {
434 ALOGE("Decode() - inputImage is incorrect"); 435 ALOGE << "Decode() - inputImage is incorrect";
435 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; 436 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
436 } 437 }
437 if (!inited_) { 438 if (!inited_) {
438 ALOGE("Decode() - decoder is not initialized"); 439 ALOGE << "Decode() - decoder is not initialized";
439 return WEBRTC_VIDEO_CODEC_UNINITIALIZED; 440 return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
440 } 441 }
441 442
442 // Check if encoded frame dimension has changed. 443 // Check if encoded frame dimension has changed.
443 if ((inputImage._encodedWidth * inputImage._encodedHeight > 0) && 444 if ((inputImage._encodedWidth * inputImage._encodedHeight > 0) &&
444 (inputImage._encodedWidth != codec_.width || 445 (inputImage._encodedWidth != codec_.width ||
445 inputImage._encodedHeight != codec_.height)) { 446 inputImage._encodedHeight != codec_.height)) {
446 codec_.width = inputImage._encodedWidth; 447 codec_.width = inputImage._encodedWidth;
447 codec_.height = inputImage._encodedHeight; 448 codec_.height = inputImage._encodedHeight;
448 int32_t ret = InitDecode(&codec_, 1); 449 int32_t ret = InitDecode(&codec_, 1);
449 if (ret < 0) { 450 if (ret < 0) {
450 ALOGE("InitDecode failure: %d - fallback to SW codec", ret); 451 ALOGE << "InitDecode failure: " << ret << " - fallback to SW codec";
451 sw_fallback_required_ = true; 452 sw_fallback_required_ = true;
452 return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE; 453 return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
453 } 454 }
454 } 455 }
455 456
456 // Always start with a complete key frame. 457 // Always start with a complete key frame.
457 if (key_frame_required_) { 458 if (key_frame_required_) {
458 if (inputImage._frameType != webrtc::kKeyFrame) { 459 if (inputImage._frameType != webrtc::kKeyFrame) {
459 ALOGE("Decode() - key frame is required"); 460 ALOGE << "Decode() - key frame is required";
460 return WEBRTC_VIDEO_CODEC_ERROR; 461 return WEBRTC_VIDEO_CODEC_ERROR;
461 } 462 }
462 if (!inputImage._completeFrame) { 463 if (!inputImage._completeFrame) {
463 ALOGE("Decode() - complete frame is required"); 464 ALOGE << "Decode() - complete frame is required";
464 return WEBRTC_VIDEO_CODEC_ERROR; 465 return WEBRTC_VIDEO_CODEC_ERROR;
465 } 466 }
466 key_frame_required_ = false; 467 key_frame_required_ = false;
467 } 468 }
468 if (inputImage._length == 0) { 469 if (inputImage._length == 0) {
469 return WEBRTC_VIDEO_CODEC_ERROR; 470 return WEBRTC_VIDEO_CODEC_ERROR;
470 } 471 }
471 472
472 return codec_thread_->Invoke<int32_t>(Bind( 473 return codec_thread_->Invoke<int32_t>(Bind(
473 &MediaCodecVideoDecoder::DecodeOnCodecThread, this, inputImage)); 474 &MediaCodecVideoDecoder::DecodeOnCodecThread, this, inputImage));
474 } 475 }
475 476
476 int32_t MediaCodecVideoDecoder::DecodeOnCodecThread( 477 int32_t MediaCodecVideoDecoder::DecodeOnCodecThread(
477 const EncodedImage& inputImage) { 478 const EncodedImage& inputImage) {
478 CheckOnCodecThread(); 479 CheckOnCodecThread();
479 JNIEnv* jni = AttachCurrentThreadIfNeeded(); 480 JNIEnv* jni = AttachCurrentThreadIfNeeded();
480 ScopedLocalRefFrame local_ref_frame(jni); 481 ScopedLocalRefFrame local_ref_frame(jni);
481 482
482 // Try to drain the decoder and wait until output is not too 483 // Try to drain the decoder and wait until output is not too
483 // much behind the input. 484 // much behind the input.
484 if (frames_received_ > frames_decoded_ + max_pending_frames_) { 485 if (frames_received_ > frames_decoded_ + max_pending_frames_) {
485 ALOGV("Received: %d. Decoded: %d. Wait for output...", 486 ALOGV("Received: %d. Decoded: %d. Wait for output...",
486 frames_received_, frames_decoded_); 487 frames_received_, frames_decoded_);
487 if (!DeliverPendingOutputs(jni, kMediaCodecTimeoutMs)) { 488 if (!DeliverPendingOutputs(jni, kMediaCodecTimeoutMs)) {
488 ALOGE("DeliverPendingOutputs error"); 489 ALOGE << "DeliverPendingOutputs error";
489 return ProcessHWErrorOnCodecThread(); 490 return ProcessHWErrorOnCodecThread();
490 } 491 }
491 if (frames_received_ > frames_decoded_ + max_pending_frames_) { 492 if (frames_received_ > frames_decoded_ + max_pending_frames_) {
492 ALOGE("Output buffer dequeue timeout"); 493 ALOGE << "Output buffer dequeue timeout";
493 return ProcessHWErrorOnCodecThread(); 494 return ProcessHWErrorOnCodecThread();
494 } 495 }
495 } 496 }
496 497
497 // Get input buffer. 498 // Get input buffer.
498 int j_input_buffer_index = jni->CallIntMethod(*j_media_codec_video_decoder_, 499 int j_input_buffer_index = jni->CallIntMethod(*j_media_codec_video_decoder_,
499 j_dequeue_input_buffer_method_); 500 j_dequeue_input_buffer_method_);
500 if (CheckException(jni) || j_input_buffer_index < 0) { 501 if (CheckException(jni) || j_input_buffer_index < 0) {
501 ALOGE("dequeueInputBuffer error"); 502 ALOGE << "dequeueInputBuffer error";
502 return ProcessHWErrorOnCodecThread(); 503 return ProcessHWErrorOnCodecThread();
503 } 504 }
504 505
505 // Copy encoded data to Java ByteBuffer. 506 // Copy encoded data to Java ByteBuffer.
506 jobject j_input_buffer = input_buffers_[j_input_buffer_index]; 507 jobject j_input_buffer = input_buffers_[j_input_buffer_index];
507 uint8_t* buffer = 508 uint8_t* buffer =
508 reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(j_input_buffer)); 509 reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(j_input_buffer));
509 RTC_CHECK(buffer) << "Indirect buffer??"; 510 RTC_CHECK(buffer) << "Indirect buffer??";
510 int64_t buffer_capacity = jni->GetDirectBufferCapacity(j_input_buffer); 511 int64_t buffer_capacity = jni->GetDirectBufferCapacity(j_input_buffer);
511 if (CheckException(jni) || buffer_capacity < inputImage._length) { 512 if (CheckException(jni) || buffer_capacity < inputImage._length) {
512 ALOGE("Input frame size %d is bigger than buffer size %d.", 513 ALOGE << "Input frame size "<< inputImage._length <<
513 inputImage._length, buffer_capacity); 514 " is bigger than buffer size " << buffer_capacity;
514 return ProcessHWErrorOnCodecThread(); 515 return ProcessHWErrorOnCodecThread();
515 } 516 }
516 jlong timestamp_us = (frames_received_ * 1000000) / codec_.maxFramerate; 517 jlong timestamp_us = (frames_received_ * 1000000) / codec_.maxFramerate;
517 ALOGV("Decoder frame in # %d. Type: %d. Buffer # %d. TS: %lld. Size: %d", 518 ALOGV("Decoder frame in # %d. Type: %d. Buffer # %d. TS: %lld. Size: %d",
518 frames_received_, inputImage._frameType, j_input_buffer_index, 519 frames_received_, inputImage._frameType, j_input_buffer_index,
519 timestamp_us / 1000, inputImage._length); 520 timestamp_us / 1000, inputImage._length);
520 memcpy(buffer, inputImage._buffer, inputImage._length); 521 memcpy(buffer, inputImage._buffer, inputImage._length);
521 522
522 // Save input image timestamps for later output. 523 // Save input image timestamps for later output.
523 frames_received_++; 524 frames_received_++;
524 current_bytes_ += inputImage._length; 525 current_bytes_ += inputImage._length;
525 timestamps_.push_back(inputImage._timeStamp); 526 timestamps_.push_back(inputImage._timeStamp);
526 ntp_times_ms_.push_back(inputImage.ntp_time_ms_); 527 ntp_times_ms_.push_back(inputImage.ntp_time_ms_);
527 frame_rtc_times_ms_.push_back(GetCurrentTimeMs()); 528 frame_rtc_times_ms_.push_back(GetCurrentTimeMs());
528 529
529 // Feed input to decoder. 530 // Feed input to decoder.
530 bool success = jni->CallBooleanMethod(*j_media_codec_video_decoder_, 531 bool success = jni->CallBooleanMethod(*j_media_codec_video_decoder_,
531 j_queue_input_buffer_method_, 532 j_queue_input_buffer_method_,
532 j_input_buffer_index, 533 j_input_buffer_index,
533 inputImage._length, 534 inputImage._length,
534 timestamp_us); 535 timestamp_us);
535 if (CheckException(jni) || !success) { 536 if (CheckException(jni) || !success) {
536 ALOGE("queueInputBuffer error"); 537 ALOGE << "queueInputBuffer error";
537 return ProcessHWErrorOnCodecThread(); 538 return ProcessHWErrorOnCodecThread();
538 } 539 }
539 540
540 // Try to drain the decoder 541 // Try to drain the decoder
541 if (!DeliverPendingOutputs(jni, 0)) { 542 if (!DeliverPendingOutputs(jni, 0)) {
542 ALOGE("DeliverPendingOutputs error"); 543 ALOGE << "DeliverPendingOutputs error";
543 return ProcessHWErrorOnCodecThread(); 544 return ProcessHWErrorOnCodecThread();
544 } 545 }
545 546
546 return WEBRTC_VIDEO_CODEC_OK; 547 return WEBRTC_VIDEO_CODEC_OK;
547 } 548 }
548 549
549 bool MediaCodecVideoDecoder::DeliverPendingOutputs( 550 bool MediaCodecVideoDecoder::DeliverPendingOutputs(
550 JNIEnv* jni, int dequeue_timeout_ms) { 551 JNIEnv* jni, int dequeue_timeout_ms) {
551 if (frames_received_ <= frames_decoded_) { 552 if (frames_received_ <= frames_decoded_) {
552 // No need to query for output buffers - decoder is drained. 553 // No need to query for output buffers - decoder is drained.
553 return true; 554 return true;
554 } 555 }
555 // Get decoder output. 556 // Get decoder output.
556 jobject j_decoder_output_buffer = jni->CallObjectMethod( 557 jobject j_decoder_output_buffer = jni->CallObjectMethod(
557 *j_media_codec_video_decoder_, 558 *j_media_codec_video_decoder_,
558 j_dequeue_output_buffer_method_, 559 j_dequeue_output_buffer_method_,
559 dequeue_timeout_ms); 560 dequeue_timeout_ms);
560 if (CheckException(jni)) { 561 if (CheckException(jni)) {
561 ALOGE("dequeueOutputBuffer() error"); 562 ALOGE << "dequeueOutputBuffer() error";
562 return false; 563 return false;
563 } 564 }
564 if (IsNull(jni, j_decoder_output_buffer)) { 565 if (IsNull(jni, j_decoder_output_buffer)) {
565 // No decoded frame ready. 566 // No decoded frame ready.
566 return true; 567 return true;
567 } 568 }
568 569
569 // Get decoded video frame properties. 570 // Get decoded video frame properties.
570 int color_format = GetIntField(jni, *j_media_codec_video_decoder_, 571 int color_format = GetIntField(jni, *j_media_codec_video_decoder_,
571 j_color_format_field_); 572 j_color_format_field_);
(...skipping 25 matching lines...)
597 GetIntField(jni, j_decoder_output_buffer, j_info_index_field_); 598 GetIntField(jni, j_decoder_output_buffer, j_info_index_field_);
598 const int output_buffer_offset = 599 const int output_buffer_offset =
599 GetIntField(jni, j_decoder_output_buffer, j_info_offset_field_); 600 GetIntField(jni, j_decoder_output_buffer, j_info_offset_field_);
600 const int output_buffer_size = 601 const int output_buffer_size =
601 GetIntField(jni, j_decoder_output_buffer, j_info_size_field_); 602 GetIntField(jni, j_decoder_output_buffer, j_info_size_field_);
602 const int64_t timestamp_us = GetLongField( 603 const int64_t timestamp_us = GetLongField(
603 jni, j_decoder_output_buffer, j_info_presentation_timestamp_us_field_); 604 jni, j_decoder_output_buffer, j_info_presentation_timestamp_us_field_);
604 output_timestamps_ms = timestamp_us / rtc::kNumMicrosecsPerMillisec; 605 output_timestamps_ms = timestamp_us / rtc::kNumMicrosecsPerMillisec;
605 606
606 if (output_buffer_size < width * height * 3 / 2) { 607 if (output_buffer_size < width * height * 3 / 2) {
607 ALOGE("Insufficient output buffer size: %d", output_buffer_size); 608 ALOGE << "Insufficient output buffer size: " << output_buffer_size;
608 return false; 609 return false;
609 } 610 }
610 jobjectArray output_buffers = reinterpret_cast<jobjectArray>(GetObjectField( 611 jobjectArray output_buffers = reinterpret_cast<jobjectArray>(GetObjectField(
611 jni, *j_media_codec_video_decoder_, j_output_buffers_field_)); 612 jni, *j_media_codec_video_decoder_, j_output_buffers_field_));
612 jobject output_buffer = 613 jobject output_buffer =
613 jni->GetObjectArrayElement(output_buffers, output_buffer_index); 614 jni->GetObjectArrayElement(output_buffers, output_buffer_index);
614 uint8_t* payload = reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress( 615 uint8_t* payload = reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(
615 output_buffer)); 616 output_buffer));
616 if (CheckException(jni)) { 617 if (CheckException(jni)) {
617 return false; 618 return false;
(...skipping 34 matching lines...)
652 frame_buffer->MutableData(webrtc::kVPlane), 653 frame_buffer->MutableData(webrtc::kVPlane),
653 frame_buffer->stride(webrtc::kVPlane), 654 frame_buffer->stride(webrtc::kVPlane),
654 width, height); 655 width, height);
655 } 656 }
656 // Return output byte buffer back to codec. 657 // Return output byte buffer back to codec.
657 jni->CallVoidMethod( 658 jni->CallVoidMethod(
658 *j_media_codec_video_decoder_, 659 *j_media_codec_video_decoder_,
659 j_return_decoded_byte_buffer_method_, 660 j_return_decoded_byte_buffer_method_,
660 output_buffer_index); 661 output_buffer_index);
661 if (CheckException(jni)) { 662 if (CheckException(jni)) {
662 ALOGE("returnDecodedByteBuffer error"); 663 ALOGE << "returnDecodedByteBuffer error";
663 return false; 664 return false;
664 } 665 }
665 } 666 }
666 VideoFrame decoded_frame(frame_buffer, 0, 0, webrtc::kVideoRotation_0); 667 VideoFrame decoded_frame(frame_buffer, 0, 0, webrtc::kVideoRotation_0);
667 668
668 // Get frame timestamps from a queue. 669 // Get frame timestamps from a queue.
669 if (timestamps_.size() > 0) { 670 if (timestamps_.size() > 0) {
670 decoded_frame.set_timestamp(timestamps_.front()); 671 decoded_frame.set_timestamp(timestamps_.front());
671 timestamps_.erase(timestamps_.begin()); 672 timestamps_.erase(timestamps_.begin());
672 } 673 }
(...skipping 10 matching lines...)
683 " DecTime: %lld", frames_decoded_, width, height, stride, slice_height, 684 " DecTime: %lld", frames_decoded_, width, height, stride, slice_height,
684 color_format, output_timestamps_ms, frame_decoding_time_ms); 685 color_format, output_timestamps_ms, frame_decoding_time_ms);
685 686
686 // Calculate and print decoding statistics - every 3 seconds. 687 // Calculate and print decoding statistics - every 3 seconds.
687 frames_decoded_++; 688 frames_decoded_++;
688 current_frames_++; 689 current_frames_++;
689 current_decoding_time_ms_ += frame_decoding_time_ms; 690 current_decoding_time_ms_ += frame_decoding_time_ms;
690 int statistic_time_ms = GetCurrentTimeMs() - start_time_ms_; 691 int statistic_time_ms = GetCurrentTimeMs() - start_time_ms_;
691 if (statistic_time_ms >= kMediaCodecStatisticsIntervalMs && 692 if (statistic_time_ms >= kMediaCodecStatisticsIntervalMs &&
692 current_frames_ > 0) { 693 current_frames_ > 0) {
693 ALOGD("Decoder bitrate: %d kbps, fps: %d, decTime: %d for last %d ms", 694 ALOGD << "Decoded frames: " << frames_decoded_ << ". Bitrate: " <<
694 current_bytes_ * 8 / statistic_time_ms, 695 (current_bytes_ * 8 / statistic_time_ms) << " kbps, fps: " <<
695 (current_frames_ * 1000 + statistic_time_ms / 2) / statistic_time_ms, 696 ((current_frames_ * 1000 + statistic_time_ms / 2) / statistic_time_ms)
696 current_decoding_time_ms_ / current_frames_, statistic_time_ms); 697 << ". decTime: " << (current_decoding_time_ms_ / current_frames_) <<
698 " for last " << statistic_time_ms << " ms.";
697 start_time_ms_ = GetCurrentTimeMs(); 699 start_time_ms_ = GetCurrentTimeMs();
698 current_frames_ = 0; 700 current_frames_ = 0;
699 current_bytes_ = 0; 701 current_bytes_ = 0;
700 current_decoding_time_ms_ = 0; 702 current_decoding_time_ms_ = 0;
701 } 703 }
702 704
703 // Callback - output decoded frame. 705 // Callback - output decoded frame.
704 const int32_t callback_status = callback_->Decoded(decoded_frame); 706 const int32_t callback_status = callback_->Decoded(decoded_frame);
705 if (callback_status > 0) { 707 if (callback_status > 0) {
706 ALOGE("callback error"); 708 ALOGE << "callback error";
707 } 709 }
708 710
709 return true; 711 return true;
710 } 712 }
711 713
712 int32_t MediaCodecVideoDecoder::RegisterDecodeCompleteCallback( 714 int32_t MediaCodecVideoDecoder::RegisterDecodeCompleteCallback(
713 DecodedImageCallback* callback) { 715 DecodedImageCallback* callback) {
714 callback_ = callback; 716 callback_ = callback;
715 return WEBRTC_VIDEO_CODEC_OK; 717 return WEBRTC_VIDEO_CODEC_OK;
716 } 718 }
717 719
718 int32_t MediaCodecVideoDecoder::Reset() { 720 int32_t MediaCodecVideoDecoder::Reset() {
719 ALOGD("DecoderReset"); 721 ALOGD << "DecoderReset";
720 if (!inited_) { 722 if (!inited_) {
721 return WEBRTC_VIDEO_CODEC_UNINITIALIZED; 723 return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
722 } 724 }
723 return InitDecode(&codec_, 1); 725 return InitDecode(&codec_, 1);
724 } 726 }
725 727
726 void MediaCodecVideoDecoder::OnMessage(rtc::Message* msg) { 728 void MediaCodecVideoDecoder::OnMessage(rtc::Message* msg) {
727 JNIEnv* jni = AttachCurrentThreadIfNeeded(); 729 JNIEnv* jni = AttachCurrentThreadIfNeeded();
728 ScopedLocalRefFrame local_ref_frame(jni); 730 ScopedLocalRefFrame local_ref_frame(jni);
729 if (!inited_) { 731 if (!inited_) {
730 return; 732 return;
731 } 733 }
732 // We only ever send one message to |this| directly (not through a Bind()'d 734 // We only ever send one message to |this| directly (not through a Bind()'d
733 // functor), so expect no ID/data. 735 // functor), so expect no ID/data.
734 RTC_CHECK(!msg->message_id) << "Unexpected message!"; 736 RTC_CHECK(!msg->message_id) << "Unexpected message!";
735 RTC_CHECK(!msg->pdata) << "Unexpected message!"; 737 RTC_CHECK(!msg->pdata) << "Unexpected message!";
736 CheckOnCodecThread(); 738 CheckOnCodecThread();
737 739
738 if (!DeliverPendingOutputs(jni, 0)) { 740 if (!DeliverPendingOutputs(jni, 0)) {
739 ALOGE("OnMessage: DeliverPendingOutputs error"); 741 ALOGE << "OnMessage: DeliverPendingOutputs error";
740 ProcessHWErrorOnCodecThread(); 742 ProcessHWErrorOnCodecThread();
741 return; 743 return;
742 } 744 }
743 codec_thread_->PostDelayed(kMediaCodecPollMs, this); 745 codec_thread_->PostDelayed(kMediaCodecPollMs, this);
744 } 746 }
745 747
746 MediaCodecVideoDecoderFactory::MediaCodecVideoDecoderFactory() : 748 MediaCodecVideoDecoderFactory::MediaCodecVideoDecoderFactory() :
747 render_egl_context_(NULL) { 749 render_egl_context_(NULL) {
748 ALOGD("MediaCodecVideoDecoderFactory ctor"); 750 ALOGD << "MediaCodecVideoDecoderFactory ctor";
749 JNIEnv* jni = AttachCurrentThreadIfNeeded(); 751 JNIEnv* jni = AttachCurrentThreadIfNeeded();
750 ScopedLocalRefFrame local_ref_frame(jni); 752 ScopedLocalRefFrame local_ref_frame(jni);
751 jclass j_decoder_class = FindClass(jni, "org/webrtc/MediaCodecVideoDecoder"); 753 jclass j_decoder_class = FindClass(jni, "org/webrtc/MediaCodecVideoDecoder");
752 supported_codec_types_.clear(); 754 supported_codec_types_.clear();
753 755
754 bool is_vp8_hw_supported = jni->CallStaticBooleanMethod( 756 bool is_vp8_hw_supported = jni->CallStaticBooleanMethod(
755 j_decoder_class, 757 j_decoder_class,
756 GetStaticMethodID(jni, j_decoder_class, "isVp8HwSupported", "()Z")); 758 GetStaticMethodID(jni, j_decoder_class, "isVp8HwSupported", "()Z"));
757 if (CheckException(jni)) { 759 if (CheckException(jni)) {
758 is_vp8_hw_supported = false; 760 is_vp8_hw_supported = false;
759 } 761 }
760 if (is_vp8_hw_supported) { 762 if (is_vp8_hw_supported) {
761 ALOGD("VP8 HW Decoder supported."); 763 ALOGD << "VP8 HW Decoder supported.";
762 supported_codec_types_.push_back(kVideoCodecVP8); 764 supported_codec_types_.push_back(kVideoCodecVP8);
763 } 765 }
764 766
765 bool is_h264_hw_supported = jni->CallStaticBooleanMethod( 767 bool is_h264_hw_supported = jni->CallStaticBooleanMethod(
766 j_decoder_class, 768 j_decoder_class,
767 GetStaticMethodID(jni, j_decoder_class, "isH264HwSupported", "()Z")); 769 GetStaticMethodID(jni, j_decoder_class, "isH264HwSupported", "()Z"));
768 if (CheckException(jni)) { 770 if (CheckException(jni)) {
769 is_h264_hw_supported = false; 771 is_h264_hw_supported = false;
770 } 772 }
771 if (is_h264_hw_supported) { 773 if (is_h264_hw_supported) {
772 ALOGD("H264 HW Decoder supported."); 774 ALOGD << "H264 HW Decoder supported.";
773 supported_codec_types_.push_back(kVideoCodecH264); 775 supported_codec_types_.push_back(kVideoCodecH264);
774 } 776 }
775 } 777 }
776 778
777 MediaCodecVideoDecoderFactory::~MediaCodecVideoDecoderFactory() { 779 MediaCodecVideoDecoderFactory::~MediaCodecVideoDecoderFactory() {
778 ALOGD("MediaCodecVideoDecoderFactory dtor"); 780 ALOGD << "MediaCodecVideoDecoderFactory dtor";
779 if (render_egl_context_) { 781 if (render_egl_context_) {
780 JNIEnv* jni = AttachCurrentThreadIfNeeded(); 782 JNIEnv* jni = AttachCurrentThreadIfNeeded();
781 jni->DeleteGlobalRef(render_egl_context_); 783 jni->DeleteGlobalRef(render_egl_context_);
782 render_egl_context_ = NULL; 784 render_egl_context_ = NULL;
783 } 785 }
784 } 786 }
785 787
786 void MediaCodecVideoDecoderFactory::SetEGLContext( 788 void MediaCodecVideoDecoderFactory::SetEGLContext(
787 JNIEnv* jni, jobject render_egl_context) { 789 JNIEnv* jni, jobject render_egl_context) {
788 ALOGD("MediaCodecVideoDecoderFactory::SetEGLContext"); 790 ALOGD << "MediaCodecVideoDecoderFactory::SetEGLContext";
789 if (render_egl_context_) { 791 if (render_egl_context_) {
790 jni->DeleteGlobalRef(render_egl_context_); 792 jni->DeleteGlobalRef(render_egl_context_);
791 render_egl_context_ = NULL; 793 render_egl_context_ = NULL;
792 } 794 }
793 if (!IsNull(jni, render_egl_context)) { 795 if (!IsNull(jni, render_egl_context)) {
794 render_egl_context_ = jni->NewGlobalRef(render_egl_context); 796 render_egl_context_ = jni->NewGlobalRef(render_egl_context);
795 if (CheckException(jni)) { 797 if (CheckException(jni)) {
796 ALOGE("error calling NewGlobalRef for EGL Context."); 798 ALOGE << "error calling NewGlobalRef for EGL Context.";
797 render_egl_context_ = NULL; 799 render_egl_context_ = NULL;
798 } else { 800 } else {
799 jclass j_egl_context_class = FindClass(jni, "android/opengl/EGLContext"); 801 jclass j_egl_context_class = FindClass(jni, "android/opengl/EGLContext");
800 if (!jni->IsInstanceOf(render_egl_context_, j_egl_context_class)) { 802 if (!jni->IsInstanceOf(render_egl_context_, j_egl_context_class)) {
801 ALOGE("Wrong EGL Context."); 803 ALOGE << "Wrong EGL Context.";
802 jni->DeleteGlobalRef(render_egl_context_); 804 jni->DeleteGlobalRef(render_egl_context_);
803 render_egl_context_ = NULL; 805 render_egl_context_ = NULL;
804 } 806 }
805 } 807 }
806 } 808 }
807 if (render_egl_context_ == NULL) { 809 if (render_egl_context_ == NULL) {
808 ALOGW("NULL VideoDecoder EGL context - HW surface decoding is disabled."); 810 ALOGW << "NULL VideoDecoder EGL context - HW surface decoding is disabled.";
809 } 811 }
810 } 812 }
811 813
812 webrtc::VideoDecoder* MediaCodecVideoDecoderFactory::CreateVideoDecoder( 814 webrtc::VideoDecoder* MediaCodecVideoDecoderFactory::CreateVideoDecoder(
813 VideoCodecType type) { 815 VideoCodecType type) {
814 if (supported_codec_types_.empty()) { 816 if (supported_codec_types_.empty()) {
815 ALOGE("No HW video decoder for type %d.", (int)type); 817 ALOGE << "No HW video decoder for type " << (int)type;
816 return NULL; 818 return NULL;
817 } 819 }
818 for (VideoCodecType codec_type : supported_codec_types_) { 820 for (VideoCodecType codec_type : supported_codec_types_) {
819 if (codec_type == type) { 821 if (codec_type == type) {
820 ALOGD("Create HW video decoder for type %d.", (int)type); 822 ALOGD << "Create HW video decoder for type " << (int)type;
821 return new MediaCodecVideoDecoder( 823 return new MediaCodecVideoDecoder(
822 AttachCurrentThreadIfNeeded(), type, render_egl_context_); 824 AttachCurrentThreadIfNeeded(), type, render_egl_context_);
823 } 825 }
824 } 826 }
825 ALOGE("Can not find HW video decoder for type %d.", (int)type); 827 ALOGE << "Can not find HW video decoder for type " << (int)type;
826 return NULL; 828 return NULL;
827 } 829 }
828 830
829 void MediaCodecVideoDecoderFactory::DestroyVideoDecoder( 831 void MediaCodecVideoDecoderFactory::DestroyVideoDecoder(
830 webrtc::VideoDecoder* decoder) { 832 webrtc::VideoDecoder* decoder) {
833 ALOGD << "Destroy video decoder.";
831 delete decoder; 834 delete decoder;
832 } 835 }
833 836
834 } // namespace webrtc_jni 837 } // namespace webrtc_jni
835 838
Other files in this patch: talk/app/webrtc/java/jni/androidmediacodeccommon.h | talk/app/webrtc/java/jni/androidmediaencoder_jni.cc
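For context, a minimal sketch of how stream-style ALOGD/ALOGE macros could be layered on top of rtc::LogMessage. This is an illustration only; the actual macro definitions used by this file live in androidmediacodeccommon.h (listed above) and may differ, and the TAG value here is assumed:

    // Hypothetical sketch, not the actual macros from androidmediacodeccommon.h.
    #include "webrtc/base/logging.h"  // rtc::LogMessage, rtc::LS_INFO, rtc::LS_ERROR

    #define TAG "MediaCodecVideoDecoder"  // assumed tag, for illustration only

    // Each use constructs a temporary rtc::LogMessage; its destructor flushes
    // the accumulated stream to the WebRTC log when the statement ends.
    #define ALOGD rtc::LogMessage(__FILE__, __LINE__, rtc::LS_INFO).stream() << TAG << ": "
    #define ALOGE rtc::LogMessage(__FILE__, __LINE__, rtc::LS_ERROR).stream() << TAG << ": "

    // Usage then matches the new code in the diff:
    //   ALOGD << "InitDecode.";
    //   ALOGE << "Release failure: " << ret_val << " - fallback to SW codec";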
