Chromium Code Reviews

Side by Side Diff: talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoDecoder.java

Issue 1440343002: Reland again Android MediaCodecVideoDecoder: Manage lifetime of texture frames (Closed) Base URL: https://chromium.googlesource.com/external/webrtc.git@master
Patch Set: Drop frames if texture is not returned Created 5 years, 1 month ago
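For orientation before the diff: this patch set queues decoded output buffers, renders them one at a time to the decoder's output Surface via a texture listener, and drops the oldest pending frame once too many buffers are waiting for the single texture to be returned. The standalone Java sketch below (hypothetical class and method names, not code from this CL) illustrates that queue-and-drop flow under those assumptions:

    // Hypothetical, simplified model of the queue-and-drop flow in this patch.
    // FrameLifetimeModel and its methods are illustrative only.
    import java.util.LinkedList;
    import java.util.Queue;

    public class FrameLifetimeModel {
      private static final int MAX_QUEUED_OUTPUTBUFFERS = 3;
      private final Queue<Integer> pendingFrames = new LinkedList<Integer>();
      private boolean waitingForTexture = false;
      private int droppedFrames = 0;

      // Called when the decoder produces a new output buffer.
      public void onDecodedFrame(int bufferIndex) {
        pendingFrames.add(bufferIndex);
        maybeRenderNext();
      }

      // Called when the texture of the previously rendered frame is returned.
      public void onTextureReturned() {
        waitingForTexture = false;
        maybeRenderNext();
      }

      private void maybeRenderNext() {
        if (!waitingForTexture && !pendingFrames.isEmpty()) {
          // Only one frame at a time may be rendered to the shared texture.
          int index = pendingFrames.remove();
          waitingForTexture = true;
          System.out.println("Render buffer " + index + " to the output surface.");
        } else if (pendingFrames.size() >= MAX_QUEUED_OUTPUTBUFFERS) {
          // Too many frames waiting for the single texture: drop the oldest one.
          int dropped = pendingFrames.remove();
          droppedFrames++;
          System.out.println("Dropped buffer " + dropped + " (total " + droppedFrames + ")");
        }
      }

      public static void main(String[] args) {
        FrameLifetimeModel model = new FrameLifetimeModel();
        for (int i = 0; i < 6; i++) {
          model.onDecodedFrame(i);  // Decode faster than textures are returned.
        }
        model.onTextureReturned();  // Texture comes back; render the next frame.
      }
    }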
1 /* 1 /*
2 * libjingle 2 * libjingle
3 * Copyright 2014 Google Inc. 3 * Copyright 2014 Google Inc.
4 * 4 *
5 * Redistribution and use in source and binary forms, with or without 5 * Redistribution and use in source and binary forms, with or without
6 * modification, are permitted provided that the following conditions are met: 6 * modification, are permitted provided that the following conditions are met:
7 * 7 *
8 * 1. Redistributions of source code must retain the above copyright notice, 8 * 1. Redistributions of source code must retain the above copyright notice,
9 * this list of conditions and the following disclaimer. 9 * this list of conditions and the following disclaimer.
10 * 2. Redistributions in binary form must reproduce the above copyright notice, 10 * 2. Redistributions in binary form must reproduce the above copyright notice,
(...skipping 15 matching lines...)
26 */ 26 */
27 27
28 package org.webrtc; 28 package org.webrtc;
29 29
30 import android.graphics.SurfaceTexture; 30 import android.graphics.SurfaceTexture;
31 import android.media.MediaCodec; 31 import android.media.MediaCodec;
32 import android.media.MediaCodecInfo; 32 import android.media.MediaCodecInfo;
33 import android.media.MediaCodecInfo.CodecCapabilities; 33 import android.media.MediaCodecInfo.CodecCapabilities;
34 import android.media.MediaCodecList; 34 import android.media.MediaCodecList;
35 import android.media.MediaFormat; 35 import android.media.MediaFormat;
36 import android.opengl.GLES11Ext;
37 import android.opengl.GLES20;
38 import android.os.Build; 36 import android.os.Build;
37 import android.os.SystemClock;
39 import android.view.Surface; 38 import android.view.Surface;
40 39
41 import org.webrtc.Logging; 40 import org.webrtc.Logging;
42 41
43 import java.nio.ByteBuffer; 42 import java.nio.ByteBuffer;
44 import java.util.Arrays; 43 import java.util.Arrays;
44 import java.util.LinkedList;
45 import java.util.List; 45 import java.util.List;
46 import java.util.concurrent.CountDownLatch; 46 import java.util.concurrent.CountDownLatch;
47 47 import java.util.Queue;
48 import javax.microedition.khronos.egl.EGLContext; 48 import java.util.concurrent.TimeUnit;
49 49
50 // Java-side of peerconnection_jni.cc:MediaCodecVideoDecoder. 50 // Java-side of peerconnection_jni.cc:MediaCodecVideoDecoder.
51 // This class is an implementation detail of the Java PeerConnection API. 51 // This class is an implementation detail of the Java PeerConnection API.
52 @SuppressWarnings("deprecation") 52 @SuppressWarnings("deprecation")
53 public class MediaCodecVideoDecoder { 53 public class MediaCodecVideoDecoder {
54 // This class is constructed, operated, and destroyed by its C++ incarnation, 54 // This class is constructed, operated, and destroyed by its C++ incarnation,
55 // so the class and its methods have non-public visibility. The API this 55 // so the class and its methods have non-public visibility. The API this
56 // class exposes aims to mimic the webrtc::VideoDecoder API as closely as 56 // class exposes aims to mimic the webrtc::VideoDecoder API as closely as
57 // possibly to minimize the amount of translation work necessary. 57 // possibly to minimize the amount of translation work necessary.
58 58
(...skipping 38 matching lines...)
97 private static final List<Integer> supportedColorList = Arrays.asList( 97 private static final List<Integer> supportedColorList = Arrays.asList(
98 CodecCapabilities.COLOR_FormatYUV420Planar, 98 CodecCapabilities.COLOR_FormatYUV420Planar,
99 CodecCapabilities.COLOR_FormatYUV420SemiPlanar, 99 CodecCapabilities.COLOR_FormatYUV420SemiPlanar,
100 CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar, 100 CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar,
101 COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m); 101 COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m);
102 private int colorFormat; 102 private int colorFormat;
103 private int width; 103 private int width;
104 private int height; 104 private int height;
105 private int stride; 105 private int stride;
106 private int sliceHeight; 106 private int sliceHeight;
107 private boolean hasDecodedFirstFrame;
108 private final Queue<Long> decodeStartTimeMs = new LinkedList<Long>();
107 private boolean useSurface; 109 private boolean useSurface;
108 private int textureID = 0; 110
109 private SurfaceTexture surfaceTexture = null; 111 // The below variables are only used when decoding to a Surface.
112 private TextureListener textureListener;
113 // Max number of output buffers queued before starting to drop decoded frames.
114 private static final int MAX_QUEUED_OUTPUTBUFFERS = 3;
115 private int droppedFrames;
110 private Surface surface = null; 116 private Surface surface = null;
111 private EglBase eglBase; 117 private final Queue<DecodedOutputBuffer>
112 118 dequeuedSurfaceOutputBuffers = new LinkedList<DecodedOutputBuffer>();
113 private MediaCodecVideoDecoder() {
114 }
115 119
116 // MediaCodec error handler - invoked when critical error happens which may prevent 120 // MediaCodec error handler - invoked when critical error happens which may prevent
117 // further use of media codec API. Now it means that one of media codec instances 121 // further use of media codec API. Now it means that one of media codec instances
118 // is hanging and can no longer be used in the next call. 122 // is hanging and can no longer be used in the next call.
119 public static interface MediaCodecVideoDecoderErrorCallback { 123 public static interface MediaCodecVideoDecoderErrorCallback {
120 void onMediaCodecVideoDecoderCriticalError(int codecErrors); 124 void onMediaCodecVideoDecoderCriticalError(int codecErrors);
121 } 125 }
122 126
123 public static void setErrorCallback(MediaCodecVideoDecoderErrorCallback errorCallback) { 127 public static void setErrorCallback(MediaCodecVideoDecoderErrorCallback errorCallback) {
124 Logging.d(TAG, "Set error callback"); 128 Logging.d(TAG, "Set error callback");
(...skipping 91 matching lines...)
216 } 220 }
217 221
218 private void checkOnMediaCodecThread() throws IllegalStateException { 222 private void checkOnMediaCodecThread() throws IllegalStateException {
219 if (mediaCodecThread.getId() != Thread.currentThread().getId()) { 223 if (mediaCodecThread.getId() != Thread.currentThread().getId()) {
220 throw new IllegalStateException( 224 throw new IllegalStateException(
221 "MediaCodecVideoDecoder previously operated on " + mediaCodecThread + 225 "MediaCodecVideoDecoder previously operated on " + mediaCodecThread +
222 " but is now called on " + Thread.currentThread()); 226 " but is now called on " + Thread.currentThread());
223 } 227 }
224 } 228 }
225 229
226 // Pass null in |sharedContext| to configure the codec for ByteBuffer output. 230 // Pass null in |surfaceTextureHelper| to configure the codec for ByteBuffer output.
227 private boolean initDecode(VideoCodecType type, int width, int height, EGLContext sharedContext) { 231 private boolean initDecode(
232 VideoCodecType type, int width, int height, SurfaceTextureHelper surfaceTextureHelper) {
228 if (mediaCodecThread != null) { 233 if (mediaCodecThread != null) {
229 throw new RuntimeException("Forgot to release()?"); 234 throw new RuntimeException("Forgot to release()?");
230 } 235 }
231 useSurface = (sharedContext != null); 236 useSurface = (surfaceTextureHelper != null);
232 String mime = null; 237 String mime = null;
233 String[] supportedCodecPrefixes = null; 238 String[] supportedCodecPrefixes = null;
234 if (type == VideoCodecType.VIDEO_CODEC_VP8) { 239 if (type == VideoCodecType.VIDEO_CODEC_VP8) {
235 mime = VP8_MIME_TYPE; 240 mime = VP8_MIME_TYPE;
236 supportedCodecPrefixes = supportedVp8HwCodecPrefixes; 241 supportedCodecPrefixes = supportedVp8HwCodecPrefixes;
237 } else if (type == VideoCodecType.VIDEO_CODEC_VP9) { 242 } else if (type == VideoCodecType.VIDEO_CODEC_VP9) {
238 mime = VP9_MIME_TYPE; 243 mime = VP9_MIME_TYPE;
239 supportedCodecPrefixes = supportedVp9HwCodecPrefixes; 244 supportedCodecPrefixes = supportedVp9HwCodecPrefixes;
240 } else if (type == VideoCodecType.VIDEO_CODEC_H264) { 245 } else if (type == VideoCodecType.VIDEO_CODEC_H264) {
241 mime = H264_MIME_TYPE; 246 mime = H264_MIME_TYPE;
242 supportedCodecPrefixes = supportedH264HwCodecPrefixes; 247 supportedCodecPrefixes = supportedH264HwCodecPrefixes;
243 } else { 248 } else {
244 throw new RuntimeException("Non supported codec " + type); 249 throw new RuntimeException("Non supported codec " + type);
245 } 250 }
246 DecoderProperties properties = findDecoder(mime, supportedCodecPrefixes); 251 DecoderProperties properties = findDecoder(mime, supportedCodecPrefixes);
247 if (properties == null) { 252 if (properties == null) {
248 throw new RuntimeException("Cannot find HW decoder for " + type); 253 throw new RuntimeException("Cannot find HW decoder for " + type);
249 } 254 }
250 Logging.d(TAG, "Java initDecode: " + type + " : "+ width + " x " + height + 255 Logging.d(TAG, "Java initDecode: " + type + " : "+ width + " x " + height +
251 ". Color: 0x" + Integer.toHexString(properties.colorFormat) + 256 ". Color: 0x" + Integer.toHexString(properties.colorFormat) +
252 ". Use Surface: " + useSurface); 257 ". Use Surface: " + useSurface);
253 if (sharedContext != null) {
254 Logging.d(TAG, "Decoder shared EGL Context: " + sharedContext);
255 }
256 runningInstance = this; // Decoder is now running and can be queried for stack traces. 258 runningInstance = this; // Decoder is now running and can be queried for stack traces.
257 mediaCodecThread = Thread.currentThread(); 259 mediaCodecThread = Thread.currentThread();
258 try { 260 try {
259 this.width = width; 261 this.width = width;
260 this.height = height; 262 this.height = height;
261 stride = width; 263 stride = width;
262 sliceHeight = height; 264 sliceHeight = height;
263 265
264 if (useSurface) { 266 if (useSurface) {
265 // Create shared EGL context. 267 textureListener = new TextureListener(surfaceTextureHelper);
266 eglBase = new EglBase(sharedContext, EglBase.ConfigType.PIXEL_BUFFER); 268 surface = new Surface(surfaceTextureHelper.getSurfaceTexture());
267 eglBase.createDummyPbufferSurface();
268 eglBase.makeCurrent();
269
270 // Create output surface
271 textureID = GlUtil.generateTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
272 Logging.d(TAG, "Video decoder TextureID = " + textureID);
273 surfaceTexture = new SurfaceTexture(textureID);
274 surface = new Surface(surfaceTexture);
275 } 269 }
276 270
277 MediaFormat format = MediaFormat.createVideoFormat(mime, width, height); 271 MediaFormat format = MediaFormat.createVideoFormat(mime, width, height);
278 if (!useSurface) { 272 if (!useSurface) {
279 format.setInteger(MediaFormat.KEY_COLOR_FORMAT, properties.colorFormat); 273 format.setInteger(MediaFormat.KEY_COLOR_FORMAT, properties.colorFormat);
280 } 274 }
281 Logging.d(TAG, " Format: " + format); 275 Logging.d(TAG, " Format: " + format);
282 mediaCodec = 276 mediaCodec =
283 MediaCodecVideoEncoder.createByCodecName(properties.codecName); 277 MediaCodecVideoEncoder.createByCodecName(properties.codecName);
284 if (mediaCodec == null) { 278 if (mediaCodec == null) {
285 Logging.e(TAG, "Can not create media decoder"); 279 Logging.e(TAG, "Can not create media decoder");
286 return false; 280 return false;
287 } 281 }
288 mediaCodec.configure(format, surface, null, 0); 282 mediaCodec.configure(format, surface, null, 0);
289 mediaCodec.start(); 283 mediaCodec.start();
290 colorFormat = properties.colorFormat; 284 colorFormat = properties.colorFormat;
291 outputBuffers = mediaCodec.getOutputBuffers(); 285 outputBuffers = mediaCodec.getOutputBuffers();
292 inputBuffers = mediaCodec.getInputBuffers(); 286 inputBuffers = mediaCodec.getInputBuffers();
287 decodeStartTimeMs.clear();
288 hasDecodedFirstFrame = false;
289 dequeuedSurfaceOutputBuffers.clear();
290 droppedFrames = 0;
293 Logging.d(TAG, "Input buffers: " + inputBuffers.length + 291 Logging.d(TAG, "Input buffers: " + inputBuffers.length +
294 ". Output buffers: " + outputBuffers.length); 292 ". Output buffers: " + outputBuffers.length);
295 return true; 293 return true;
296 } catch (IllegalStateException e) { 294 } catch (IllegalStateException e) {
297 Logging.e(TAG, "initDecode failed", e); 295 Logging.e(TAG, "initDecode failed", e);
298 return false; 296 return false;
299 } 297 }
300 } 298 }
301 299
302 private void release() { 300 private void release() {
303 Logging.d(TAG, "Java releaseDecoder"); 301 Logging.d(TAG, "Java releaseDecoder. Total number of dropped frames: " + droppedFrames);
304 checkOnMediaCodecThread(); 302 checkOnMediaCodecThread();
305 303
306 // Run Mediacodec stop() and release() on separate thread since sometime 304 // Run Mediacodec stop() and release() on separate thread since sometime
307 // Mediacodec.stop() may hang. 305 // Mediacodec.stop() may hang.
308 final CountDownLatch releaseDone = new CountDownLatch(1); 306 final CountDownLatch releaseDone = new CountDownLatch(1);
309 307
310 Runnable runMediaCodecRelease = new Runnable() { 308 Runnable runMediaCodecRelease = new Runnable() {
311 @Override 309 @Override
312 public void run() { 310 public void run() {
313 try { 311 try {
(...skipping 17 matching lines...)
331 errorCallback.onMediaCodecVideoDecoderCriticalError(codecErrors); 329 errorCallback.onMediaCodecVideoDecoderCriticalError(codecErrors);
332 } 330 }
333 } 331 }
334 332
335 mediaCodec = null; 333 mediaCodec = null;
336 mediaCodecThread = null; 334 mediaCodecThread = null;
337 runningInstance = null; 335 runningInstance = null;
338 if (useSurface) { 336 if (useSurface) {
339 surface.release(); 337 surface.release();
340 surface = null; 338 surface = null;
341 Logging.d(TAG, "Delete video decoder TextureID " + textureID); 339 textureListener.release();
342 GLES20.glDeleteTextures(1, new int[] {textureID}, 0);
343 textureID = 0;
344 eglBase.release();
345 eglBase = null;
346 } 340 }
347 Logging.d(TAG, "Java releaseDecoder done"); 341 Logging.d(TAG, "Java releaseDecoder done");
348 } 342 }
349 343
350 // Dequeue an input buffer and return its index, -1 if no input buffer is 344 // Dequeue an input buffer and return its index, -1 if no input buffer is
351 // available, or -2 if the codec is no longer operative. 345 // available, or -2 if the codec is no longer operative.
352 private int dequeueInputBuffer() { 346 private int dequeueInputBuffer() {
353 checkOnMediaCodecThread(); 347 checkOnMediaCodecThread();
354 try { 348 try {
355 return mediaCodec.dequeueInputBuffer(DEQUEUE_INPUT_TIMEOUT); 349 return mediaCodec.dequeueInputBuffer(DEQUEUE_INPUT_TIMEOUT);
356 } catch (IllegalStateException e) { 350 } catch (IllegalStateException e) {
357 Logging.e(TAG, "dequeueIntputBuffer failed", e); 351 Logging.e(TAG, "dequeueIntputBuffer failed", e);
358 return -2; 352 return -2;
359 } 353 }
360 } 354 }
361 355
362 private boolean queueInputBuffer( 356 private boolean queueInputBuffer(
363 int inputBufferIndex, int size, long timestampUs) { 357 int inputBufferIndex, int size, long timestampUs) {
364 checkOnMediaCodecThread(); 358 checkOnMediaCodecThread();
365 try { 359 try {
366 inputBuffers[inputBufferIndex].position(0); 360 inputBuffers[inputBufferIndex].position(0);
367 inputBuffers[inputBufferIndex].limit(size); 361 inputBuffers[inputBufferIndex].limit(size);
362 decodeStartTimeMs.add(SystemClock.elapsedRealtime());
368 mediaCodec.queueInputBuffer(inputBufferIndex, 0, size, timestampUs, 0); 363 mediaCodec.queueInputBuffer(inputBufferIndex, 0, size, timestampUs, 0);
369 return true; 364 return true;
370 } 365 }
371 catch (IllegalStateException e) { 366 catch (IllegalStateException e) {
372 Logging.e(TAG, "decode failed", e); 367 Logging.e(TAG, "decode failed", e);
373 return false; 368 return false;
374 } 369 }
375 } 370 }
376 371
377 // Helper structs for dequeueOutputBuffer() below. 372 // Helper struct for dequeueOutputBuffer() below.
378 private static class DecodedByteBuffer { 373 private static class DecodedOutputBuffer {
379 public DecodedByteBuffer(int index, int offset, int size, long presentationTimestampUs) { 374 public DecodedOutputBuffer(int index, int offset, int size, long presentationTimestampUs,
375 long decodeTime, long endDecodeTime) {
380 this.index = index; 376 this.index = index;
381 this.offset = offset; 377 this.offset = offset;
382 this.size = size; 378 this.size = size;
383 this.presentationTimestampUs = presentationTimestampUs; 379 this.presentationTimestampUs = presentationTimestampUs;
380 this.decodeTimeMs = decodeTime;
381 this.endDecodeTimeMs = endDecodeTime;
384 } 382 }
385 383
386 private final int index; 384 private final int index;
387 private final int offset; 385 private final int offset;
388 private final int size; 386 private final int size;
389 private final long presentationTimestampUs; 387 private final long presentationTimestampUs;
388 // Number of ms it took to decode this frame.
389 private final long decodeTimeMs;
390 // System time when this frame finished decoding.
391 private final long endDecodeTimeMs;
390 } 392 }
391 393
394 // Helper struct for dequeueTextureBuffer() below.
392 private static class DecodedTextureBuffer { 395 private static class DecodedTextureBuffer {
393 private final int textureID; 396 private final int textureID;
397 private final float[] transformMatrix;
394 private final long presentationTimestampUs; 398 private final long presentationTimestampUs;
399 private final long decodeTimeMs;
400 // Interval from when the frame finished decoding until this buffer has been created.
401 // Since there is only one texture, this interval depend on the time from when
402 // a frame is decoded and provided to C++ and until that frame is returned to the MediaCodec
403 // so that the texture can be updated with the next decoded frame.
404 private final long frameDelayMs;
395 405
396 public DecodedTextureBuffer(int textureID, long presentationTimestampUs) { 406 // A DecodedTextureBuffer with zero |textureID| has special meaning and represents a frame
407 // that was dropped.
408 public DecodedTextureBuffer(int textureID, float[] transformMatrix,
409 long presentationTimestampUs, long decodeTimeMs, long frameDelay) {
397 this.textureID = textureID; 410 this.textureID = textureID;
411 this.transformMatrix = transformMatrix;
398 this.presentationTimestampUs = presentationTimestampUs; 412 this.presentationTimestampUs = presentationTimestampUs;
413 this.decodeTimeMs = decodeTimeMs;
414 this.frameDelayMs = frameDelay;
399 } 415 }
400 } 416 }
401 417
402 // Returns null if no decoded buffer is available, and otherwise either a DecodedByteBuffer or 418 // Poll based texture listener.
403 // DecodedTexturebuffer depending on |useSurface| configuration. 419 private static class TextureListener
420 implements SurfaceTextureHelper.OnTextureFrameAvailableListener {
421 private final SurfaceTextureHelper surfaceTextureHelper;
422 // |newFrameLock| is used to synchronize arrival of new frames with wait()/notifyAll().
423 private final Object newFrameLock = new Object();
424 private DecodedOutputBuffer bufferToRender;
425 private DecodedTextureBuffer renderedBuffer;
426 // |isWaitingForTexture| is true when waiting for the transition:
magjed_webrtc 2015/11/17 11:58:19 Keep comment that bufferToRender is non-null when
perkj_webrtc 2015/11/17 12:44:19 Done.
427 // addBufferToRender() -> onTextureFrameAvailable().
428 private boolean isWaitingForTexture;
429
430 public TextureListener(SurfaceTextureHelper surfaceTextureHelper) {
431 this.surfaceTextureHelper = surfaceTextureHelper;
432 surfaceTextureHelper.setListener(this);
433 }
434
435 public void addBufferToRender(DecodedOutputBuffer buffer) {
436 if (isWaitingForTexture) {
437 Logging.e(TAG,
438 "Unexpected addBufferToRender() called while waiting for a texture.");
439 throw new IllegalStateException("Waiting for a texture.");
440 }
441 bufferToRender = buffer;
442 isWaitingForTexture = true;
443 }
444
445 public boolean isWaitingForTexture() {
446 return isWaitingForTexture;
447 }
448
449 // Callback from |surfaceTextureHelper|. May be called on an arbitrary thread.
450 @Override
451 public void onTextureFrameAvailable(
452 int oesTextureId, float[] transformMatrix, long timestampNs) {
453 synchronized (newFrameLock) {
454 if (renderedBuffer != null) {
455 Logging.e(TAG,
456 "Unexpected onTextureFrameAvailable() called while already holding a texture.");
457 throw new IllegalStateException("Already holding a texture.");
458 }
459 // |timestampNs| is always zero on some Android versions.
460 renderedBuffer = new DecodedTextureBuffer(oesTextureId, transformMatrix,
461 bufferToRender.presentationTimestampUs, bufferToRender.decodeTimeMs,
462 SystemClock.elapsedRealtime() - bufferToRender.endDecodeTimeMs);
463
magjed_webrtc 2015/11/16 12:56:47 You can set |isWaitingForTexture| to false here in
464 newFrameLock.notifyAll();
465 }
466 }
467
468 // Dequeues and returns a DecodedTextureBuffer if available, or null otherwise.
469 public DecodedTextureBuffer dequeueTextureBuffer(int timeoutMs) {
470 synchronized (newFrameLock) {
471 if (renderedBuffer == null && timeoutMs > 0) {
472 try {
473 newFrameLock.wait(timeoutMs);
474 } catch(InterruptedException e) {
475 // Restore the interrupted status by reinterrupting the thread.
476 Thread.currentThread().interrupt();
477 }
478 }
479 DecodedTextureBuffer returnedBuffer = renderedBuffer;
480 renderedBuffer = null;
481 isWaitingForTexture = (returnedBuffer == null);
482 return returnedBuffer;
483 }
484 }
485
486 public void release() {
487 // SurfaceTextureHelper.disconnect() will block until any onTextureFrameAvailable() in
488 // progress is done. Therefore, the call to disconnect() must be outside any synchronized
489 // statement that is also used in the onTextureFrameAvailable() above to avoid deadlocks.
490 surfaceTextureHelper.disconnect();
491 synchronized (newFrameLock) {
492 if (renderedBuffer != null) {
493 surfaceTextureHelper.returnTextureFrame();
494 renderedBuffer = null;
495 }
496 }
497 }
498 }
499
500 // Returns null if no decoded buffer is available, and otherwise a DecodedByteBuffer.
404 // Throws IllegalStateException if call is made on the wrong thread, if color format changes to an 501 // Throws IllegalStateException if call is made on the wrong thread, if color format changes to an
405 // unsupported format, or if |mediaCodec| is not in the Executing state. Throws CodecException 502 // unsupported format, or if |mediaCodec| is not in the Executing state. Throws CodecException
406 // upon codec error. 503 // upon codec error.
407 private Object dequeueOutputBuffer(int dequeueTimeoutUs) 504 private DecodedOutputBuffer dequeueOutputBuffer(int dequeueTimeoutMs) {
408 throws IllegalStateException, MediaCodec.CodecException {
409 checkOnMediaCodecThread(); 505 checkOnMediaCodecThread();
410 506 if (decodeStartTimeMs.isEmpty()) {
507 return null;
508 }
411 // Drain the decoder until receiving a decoded buffer or hitting 509 // Drain the decoder until receiving a decoded buffer or hitting
412 // MediaCodec.INFO_TRY_AGAIN_LATER. 510 // MediaCodec.INFO_TRY_AGAIN_LATER.
413 final MediaCodec.BufferInfo info = new MediaCodec.BufferInfo(); 511 final MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
414 while (true) { 512 while (true) {
415 final int result = mediaCodec.dequeueOutputBuffer(info, dequeueTimeoutUs); 513 final int result = mediaCodec.dequeueOutputBuffer(
514 info, TimeUnit.MILLISECONDS.toMicros(dequeueTimeoutMs));
416 switch (result) { 515 switch (result) {
417 case MediaCodec.INFO_TRY_AGAIN_LATER:
418 return null;
419 case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED: 516 case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
420 outputBuffers = mediaCodec.getOutputBuffers(); 517 outputBuffers = mediaCodec.getOutputBuffers();
421 Logging.d(TAG, "Decoder output buffers changed: " + outputBuffers.length); 518 Logging.d(TAG, "Decoder output buffers changed: " + outputBuffers.length);
519 if (hasDecodedFirstFrame) {
520 throw new RuntimeException("Unexpected output buffer change event.");
521 }
422 break; 522 break;
423 case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED: 523 case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
424 MediaFormat format = mediaCodec.getOutputFormat(); 524 MediaFormat format = mediaCodec.getOutputFormat();
425 Logging.d(TAG, "Decoder format changed: " + format.toString()); 525 Logging.d(TAG, "Decoder format changed: " + format.toString());
526 int new_width = format.getInteger(MediaFormat.KEY_WIDTH);
527 int new_height = format.getInteger(MediaFormat.KEY_HEIGHT);
528 if (hasDecodedFirstFrame && (new_width != width || new_height != height)) {
529 throw new RuntimeException("Unexpected size change. Configured " + width + "*" +
530 height + ". New " + new_width + "*" + new_height);
531 }
426 width = format.getInteger(MediaFormat.KEY_WIDTH); 532 width = format.getInteger(MediaFormat.KEY_WIDTH);
427 height = format.getInteger(MediaFormat.KEY_HEIGHT); 533 height = format.getInteger(MediaFormat.KEY_HEIGHT);
534
428 if (!useSurface && format.containsKey(MediaFormat.KEY_COLOR_FORMAT)) { 535 if (!useSurface && format.containsKey(MediaFormat.KEY_COLOR_FORMAT)) {
429 colorFormat = format.getInteger(MediaFormat.KEY_COLOR_FORMAT); 536 colorFormat = format.getInteger(MediaFormat.KEY_COLOR_FORMAT);
430 Logging.d(TAG, "Color: 0x" + Integer.toHexString(colorFormat)); 537 Logging.d(TAG, "Color: 0x" + Integer.toHexString(colorFormat));
431 if (!supportedColorList.contains(colorFormat)) { 538 if (!supportedColorList.contains(colorFormat)) {
432 throw new IllegalStateException("Non supported color format: " + colorFormat); 539 throw new IllegalStateException("Non supported color format: " + colorFormat);
433 } 540 }
434 } 541 }
435 if (format.containsKey("stride")) { 542 if (format.containsKey("stride")) {
436 stride = format.getInteger("stride"); 543 stride = format.getInteger("stride");
437 } 544 }
438 if (format.containsKey("slice-height")) { 545 if (format.containsKey("slice-height")) {
439 sliceHeight = format.getInteger("slice-height"); 546 sliceHeight = format.getInteger("slice-height");
440 } 547 }
441 Logging.d(TAG, "Frame stride and slice height: " + stride + " x " + sliceHeight); 548 Logging.d(TAG, "Frame stride and slice height: " + stride + " x " + sliceHeight);
442 stride = Math.max(width, stride); 549 stride = Math.max(width, stride);
443 sliceHeight = Math.max(height, sliceHeight); 550 sliceHeight = Math.max(height, sliceHeight);
444 break; 551 break;
552 case MediaCodec.INFO_TRY_AGAIN_LATER:
553 return null;
445 default: 554 default:
446 // Output buffer decoded. 555 hasDecodedFirstFrame = true;
447 if (useSurface) { 556 return new DecodedOutputBuffer(result, info.offset, info.size, info.presentationTimeUs,
448 mediaCodec.releaseOutputBuffer(result, true /* render */); 557 SystemClock.elapsedRealtime() - decodeStartTimeMs.remove(),
449 // TODO(magjed): Wait for SurfaceTexture.onFrameAvailable() before returning a texture 558 SystemClock.elapsedRealtime());
450 // frame. 559 }
451 return new DecodedTextureBuffer(textureID, info.presentationTimeUs);
452 } else {
453 return new DecodedByteBuffer(result, info.offset, info.size, info.presentationTimeUs);
454 }
455 }
456 } 560 }
457 } 561 }
458 562
563 // Returns null if no decoded buffer is available, and otherwise a DecodedTextureBuffer.
564 // Throws IllegalStateException if call is made on the wrong thread, if color format changes to an
565 // unsupported format, or if |mediaCodec| is not in the Executing state. Throws CodecException
566 // upon codec error.
567 private DecodedTextureBuffer dequeueTextureBuffer(int dequeueTimeoutMs) {
568 checkOnMediaCodecThread();
569 if (!useSurface) {
570 throw new IllegalStateException("dequeueTexture() called for byte buffer decoding.");
571 }
572
573 DecodedOutputBuffer outputBuffer = dequeueOutputBuffer(dequeueTimeoutMs);
AlexG 2015/11/17 00:47:35 If dequeueTimeoutMs > 0 (in case decoder is draini
perkj_webrtc 2015/11/17 11:02:19 That is what I also discovered. And that in turns
574 if (outputBuffer != null) {
575 dequeuedSurfaceOutputBuffers.add(outputBuffer);
576 }
577
578 if (dequeuedSurfaceOutputBuffers.isEmpty()) {
579 return null;
580 }
581
582 if (!textureListener.isWaitingForTexture()) {
magjed_webrtc 2015/11/16 12:56:47 I would prefer if you add !dequeuedSurfaceOutputBu
perkj_webrtc 2015/11/17 11:02:19 Done.
583 // Get the first frame in the queue and render to the decoder output surface.
584 final DecodedOutputBuffer buffer = dequeuedSurfaceOutputBuffers.remove();
585 textureListener.addBufferToRender(buffer);
586 mediaCodec.releaseOutputBuffer(buffer.index, true /* render */);
587 }
588
589 // We are waiting for a frame to be rendered to the decoder surface.
590 // Check if it is ready now by waiting max |dequeueTimeoutMs|. There can only be one frame
591 // rendered at a time.
592 DecodedTextureBuffer renderedBuffer = textureListener.dequeueTextureBuffer(dequeueTimeoutMs);
593 if (renderedBuffer != null) {
594 if (!dequeuedSurfaceOutputBuffers.isEmpty()) {
595 // Get the next frame in the queue and render to the decoder output surface.
596 final DecodedOutputBuffer buffer = dequeuedSurfaceOutputBuffers.remove();
597 textureListener.addBufferToRender(buffer);
598 mediaCodec.releaseOutputBuffer(buffer.index, true /* render */);
599 }
600 return renderedBuffer;
601 }
602
603 if ((dequeuedSurfaceOutputBuffers.size() >= Math.min(
604 MAX_QUEUED_OUTPUTBUFFERS, outputBuffers.length)) || dequeueTimeoutMs > 0) {
magjed_webrtc 2015/11/16 12:56:47 I think for the case 'dequeueTimeoutMs > 0', |dequ
605 ++droppedFrames;
606 if (dequeueTimeoutMs > 0) {
AlexG 2015/11/17 00:47:35 Have you checked that you can still receive onText
607 Logging.w(TAG, "Waiting for texture to be rendered, dropping next frame. "
608 + " Total number of dropped frames: " + droppedFrames);
609 } else {
610 Logging.w(TAG, "Too many output buffers. Dropping frame. Total number of dropped frames: "
611 + droppedFrames);
612 }
613 // Drop the oldest frame still in dequeuedSurfaceOutputBuffers.
614 // The oldest frame is owned by |textureListener| and can't be dropped since
615 // mediaCodec.releaseOutputBuffer has already been called. Dropping this frame will lead
magjed_webrtc 2015/11/16 12:56:47 I think you should remove the timestamp queue in C
AlexG 2015/11/17 00:47:35 "frames_received_ - frames_decoded_" are used by C
perkj_webrtc 2015/11/17 11:02:18 Acknowledged.
perkj_webrtc 2015/11/17 11:02:19 Done.
616 // to a shift of timestamps by one frame in MediaCodecVideoDecoder::DeliverPendingOutputs.
617 final DecodedOutputBuffer droppedFrame = dequeuedSurfaceOutputBuffers.remove();
618 mediaCodec.releaseOutputBuffer(droppedFrame.index, false /* render */);
619 return new DecodedTextureBuffer(0, null, droppedFrame.presentationTimestampUs,
620 droppedFrame.decodeTimeMs,
621 SystemClock.elapsedRealtime() - droppedFrame.endDecodeTimeMs);
622 }
623 return null;
624 }
625
459 // Release a dequeued output byte buffer back to the codec for re-use. Should only be called for 626 // Release a dequeued output byte buffer back to the codec for re-use. Should only be called for
460 // non-surface decoding. 627 // non-surface decoding.
461 // Throws IllegalStateException if the call is made on the wrong thread, if codec is configured 628 // Throws IllegalStateException if the call is made on the wrong thread, if codec is configured
462 // for surface decoding, or if |mediaCodec| is not in the Executing state. Throws 629 // for surface decoding, or if |mediaCodec| is not in the Executing state. Throws
463 // MediaCodec.CodecException upon codec error. 630 // MediaCodec.CodecException upon codec error.
464 private void returnDecodedByteBuffer(int index) 631 private void returnDecodedOutputBuffer(int index)
465 throws IllegalStateException, MediaCodec.CodecException { 632 throws IllegalStateException, MediaCodec.CodecException {
466 checkOnMediaCodecThread(); 633 checkOnMediaCodecThread();
467 if (useSurface) { 634 if (useSurface) {
468 throw new IllegalStateException("returnDecodedByteBuffer() called for surface decoding."); 635 throw new IllegalStateException("returnDecodedOutputBuffer() called for surface decoding.");
469 } 636 }
470 mediaCodec.releaseOutputBuffer(index, false /* render */); 637 mediaCodec.releaseOutputBuffer(index, false /* render */);
471 } 638 }
472 } 639 }