Chromium Code Reviews
Unified Diff: talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoDecoder.java

Issue 1441363002: Revert of Android MediaCodecVideoDecoder: Manage lifetime of texture frames (Closed)
Base URL: https://chromium.googlesource.com/external/webrtc.git@master
Patch Set: Manual revert. Created 5 years, 1 month ago
 /*
  * libjingle
  * Copyright 2014 Google Inc.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions are met:
  *
  * 1. Redistributions of source code must retain the above copyright notice,
  *    this list of conditions and the following disclaimer.
  * 2. Redistributions in binary form must reproduce the above copyright notice,
(...skipping 15 matching lines...)
  */

 package org.webrtc;

 import android.graphics.SurfaceTexture;
 import android.media.MediaCodec;
 import android.media.MediaCodecInfo;
 import android.media.MediaCodecInfo.CodecCapabilities;
 import android.media.MediaCodecList;
 import android.media.MediaFormat;
+import android.opengl.GLES11Ext;
+import android.opengl.GLES20;
 import android.os.Build;
-import android.os.SystemClock;
 import android.view.Surface;

 import org.webrtc.Logging;

 import java.nio.ByteBuffer;
 import java.util.Arrays;
-import java.util.LinkedList;
 import java.util.List;
 import java.util.concurrent.CountDownLatch;
-import java.util.Queue;
-import java.util.concurrent.TimeUnit;
+
+import javax.microedition.khronos.egl.EGLContext;

 // Java-side of peerconnection_jni.cc:MediaCodecVideoDecoder.
 // This class is an implementation detail of the Java PeerConnection API.
 @SuppressWarnings("deprecation")
 public class MediaCodecVideoDecoder {
   // This class is constructed, operated, and destroyed by its C++ incarnation,
   // so the class and its methods have non-public visibility. The API this
   // class exposes aims to mimic the webrtc::VideoDecoder API as closely as
   // possibly to minimize the amount of translation work necessary.

(...skipping 38 matching lines...)
   private static final List<Integer> supportedColorList = Arrays.asList(
     CodecCapabilities.COLOR_FormatYUV420Planar,
     CodecCapabilities.COLOR_FormatYUV420SemiPlanar,
     CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar,
     COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m);
   private int colorFormat;
   private int width;
   private int height;
   private int stride;
   private int sliceHeight;
-  private boolean hasDecodedFirstFrame;
-  private final Queue<Long> decodeStartTimeMs = new LinkedList<Long>();
   private boolean useSurface;
+  private int textureID = 0;
+  private SurfaceTexture surfaceTexture = null;
+  private Surface surface = null;
+  private EglBase eglBase;

-  // The below variables are only used when decoding to a Surface.
-  private TextureListener textureListener;
-  // Max number of output buffers queued before starting to drop decoded frames.
-  private static final int MAX_QUEUED_OUTPUTBUFFERS = 3;
-  private int droppedFrames;
-  // |isWaitingForTexture| is true when waiting for the transition:
-  // MediaCodec.releaseOutputBuffer() -> onTextureFrameAvailable().
-  private boolean isWaitingForTexture;
-  private Surface surface = null;
-  private final Queue<DecodedOutputBuffer>
-      dequeuedSurfaceOutputBuffers = new LinkedList<DecodedOutputBuffer>();
+  private MediaCodecVideoDecoder() {
+  }

   // MediaCodec error handler - invoked when critical error happens which may prevent
   // further use of media codec API. Now it means that one of media codec instances
   // is hanging and can no longer be used in the next call.
   public static interface MediaCodecVideoDecoderErrorCallback {
     void onMediaCodecVideoDecoderCriticalError(int codecErrors);
   }

   public static void setErrorCallback(MediaCodecVideoDecoderErrorCallback errorCallback) {
     Logging.d(TAG, "Set error callback");
(...skipping 91 matching lines...)
   }

   private void checkOnMediaCodecThread() throws IllegalStateException {
     if (mediaCodecThread.getId() != Thread.currentThread().getId()) {
       throw new IllegalStateException(
           "MediaCodecVideoDecoder previously operated on " + mediaCodecThread +
           " but is now called on " + Thread.currentThread());
     }
   }

-  // Pass null in |surfaceTextureHelper| to configure the codec for ByteBuffer output.
-  private boolean initDecode(
-      VideoCodecType type, int width, int height, SurfaceTextureHelper surfaceTextureHelper) {
+  // Pass null in |sharedContext| to configure the codec for ByteBuffer output.
+  private boolean initDecode(VideoCodecType type, int width, int height, EGLContext sharedContext) {
     if (mediaCodecThread != null) {
       throw new RuntimeException("Forgot to release()?");
     }
-    useSurface = (surfaceTextureHelper != null);
+    useSurface = (sharedContext != null);
     String mime = null;
     String[] supportedCodecPrefixes = null;
     if (type == VideoCodecType.VIDEO_CODEC_VP8) {
       mime = VP8_MIME_TYPE;
       supportedCodecPrefixes = supportedVp8HwCodecPrefixes;
     } else if (type == VideoCodecType.VIDEO_CODEC_VP9) {
       mime = VP9_MIME_TYPE;
       supportedCodecPrefixes = supportedVp9HwCodecPrefixes;
     } else if (type == VideoCodecType.VIDEO_CODEC_H264) {
       mime = H264_MIME_TYPE;
       supportedCodecPrefixes = supportedH264HwCodecPrefixes;
     } else {
       throw new RuntimeException("Non supported codec " + type);
     }
     DecoderProperties properties = findDecoder(mime, supportedCodecPrefixes);
     if (properties == null) {
       throw new RuntimeException("Cannot find HW decoder for " + type);
     }
     Logging.d(TAG, "Java initDecode: " + type + " : "+ width + " x " + height +
         ". Color: 0x" + Integer.toHexString(properties.colorFormat) +
         ". Use Surface: " + useSurface);
+    if (sharedContext != null) {
+      Logging.d(TAG, "Decoder shared EGL Context: " + sharedContext);
+    }
     runningInstance = this; // Decoder is now running and can be queried for stack traces.
     mediaCodecThread = Thread.currentThread();
     try {
       this.width = width;
       this.height = height;
       stride = width;
       sliceHeight = height;

       if (useSurface) {
-        textureListener = new TextureListener(surfaceTextureHelper);
-        surface = new Surface(surfaceTextureHelper.getSurfaceTexture());
+        // Create shared EGL context.
+        eglBase = new EglBase(sharedContext, EglBase.ConfigType.PIXEL_BUFFER);
+        eglBase.createDummyPbufferSurface();
+        eglBase.makeCurrent();
+
+        // Create output surface
+        textureID = GlUtil.generateTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
+        Logging.d(TAG, "Video decoder TextureID = " + textureID);
+        surfaceTexture = new SurfaceTexture(textureID);
+        surface = new Surface(surfaceTexture);
       }

       MediaFormat format = MediaFormat.createVideoFormat(mime, width, height);
       if (!useSurface) {
         format.setInteger(MediaFormat.KEY_COLOR_FORMAT, properties.colorFormat);
       }
       Logging.d(TAG, "  Format: " + format);
       mediaCodec =
           MediaCodecVideoEncoder.createByCodecName(properties.codecName);
       if (mediaCodec == null) {
         Logging.e(TAG, "Can not create media decoder");
         return false;
       }
       mediaCodec.configure(format, surface, null, 0);
       mediaCodec.start();
       colorFormat = properties.colorFormat;
       outputBuffers = mediaCodec.getOutputBuffers();
       inputBuffers = mediaCodec.getInputBuffers();
-      decodeStartTimeMs.clear();
-      hasDecodedFirstFrame = false;
-      dequeuedSurfaceOutputBuffers.clear();
-      droppedFrames = 0;
-      isWaitingForTexture = false;
       Logging.d(TAG, "Input buffers: " + inputBuffers.length +
           ". Output buffers: " + outputBuffers.length);
       return true;
     } catch (IllegalStateException e) {
       Logging.e(TAG, "initDecode failed", e);
       return false;
     }
   }

   private void release() {
-    Logging.d(TAG, "Java releaseDecoder. Total number of dropped frames: " + droppedFrames);
+    Logging.d(TAG, "Java releaseDecoder");
     checkOnMediaCodecThread();

     // Run Mediacodec stop() and release() on separate thread since sometime
     // Mediacodec.stop() may hang.
     final CountDownLatch releaseDone = new CountDownLatch(1);

     Runnable runMediaCodecRelease = new Runnable() {
       @Override
       public void run() {
         try {
(...skipping 17 matching lines...)
           errorCallback.onMediaCodecVideoDecoderCriticalError(codecErrors);
       }
     }

     mediaCodec = null;
     mediaCodecThread = null;
     runningInstance = null;
     if (useSurface) {
       surface.release();
       surface = null;
-      textureListener.release();
+      Logging.d(TAG, "Delete video decoder TextureID " + textureID);
+      GLES20.glDeleteTextures(1, new int[] {textureID}, 0);
+      textureID = 0;
+      eglBase.release();
+      eglBase = null;
     }
     Logging.d(TAG, "Java releaseDecoder done");
   }

   // Dequeue an input buffer and return its index, -1 if no input buffer is
   // available, or -2 if the codec is no longer operative.
   private int dequeueInputBuffer() {
     checkOnMediaCodecThread();
     try {
       return mediaCodec.dequeueInputBuffer(DEQUEUE_INPUT_TIMEOUT);
     } catch (IllegalStateException e) {
       Logging.e(TAG, "dequeueIntputBuffer failed", e);
       return -2;
     }
   }

   private boolean queueInputBuffer(
       int inputBufferIndex, int size, long timestampUs) {
     checkOnMediaCodecThread();
     try {
       inputBuffers[inputBufferIndex].position(0);
       inputBuffers[inputBufferIndex].limit(size);
-      decodeStartTimeMs.add(SystemClock.elapsedRealtime());
       mediaCodec.queueInputBuffer(inputBufferIndex, 0, size, timestampUs, 0);
       return true;
     }
     catch (IllegalStateException e) {
       Logging.e(TAG, "decode failed", e);
       return false;
     }
   }

-  // Helper struct for dequeueOutputBuffer() below.
-  private static class DecodedOutputBuffer {
-    public DecodedOutputBuffer(int index, int offset, int size, long presentationTimestampUs,
-        long decodeTime, long endDecodeTime) {
+  // Helper structs for dequeueOutputBuffer() below.
+  private static class DecodedByteBuffer {
+    public DecodedByteBuffer(int index, int offset, int size, long presentationTimestampUs) {
       this.index = index;
       this.offset = offset;
       this.size = size;
       this.presentationTimestampUs = presentationTimestampUs;
-      this.decodeTimeMs = decodeTime;
-      this.endDecodeTimeMs = endDecodeTime;
     }

     private final int index;
     private final int offset;
     private final int size;
     private final long presentationTimestampUs;
-    // Number of ms it took to decode this frame.
-    private final long decodeTimeMs;
-    // System time when this frame finished decoding.
-    private final long endDecodeTimeMs;
   }

-  // Helper struct for dequeueTextureBuffer() below.
   private static class DecodedTextureBuffer {
     private final int textureID;
-    private final float[] transformMatrix;
     private final long presentationTimestampUs;
-    private final long decodeTimeMs;
-    // Interval from when the frame finished decoding until this buffer has been created.
-    // Since there is only one texture, this interval depend on the time from when
-    // a frame is decoded and provided to C++ and until that frame is returned to the MediaCodec
-    // so that the texture can be updated with the next decoded frame.
-    private final long frameDelayMs;

-    // A DecodedTextureBuffer with zero |textureID| has special meaning and represents a frame
-    // that was dropped.
-    public DecodedTextureBuffer(int textureID, float[] transformMatrix,
-        long presentationTimestampUs, long decodeTimeMs, long frameDelay) {
+    public DecodedTextureBuffer(int textureID, long presentationTimestampUs) {
       this.textureID = textureID;
-      this.transformMatrix = transformMatrix;
       this.presentationTimestampUs = presentationTimestampUs;
-      this.decodeTimeMs = decodeTimeMs;
-      this.frameDelayMs = frameDelay;
     }
   }

-  // Poll based texture listener.
-  private static class TextureListener
-      implements SurfaceTextureHelper.OnTextureFrameAvailableListener {
-    public static class TextureInfo {
-      private final int textureID;
-      private final float[] transformMatrix;
-
-      TextureInfo(int textureId, float[] transformMatrix) {
-        this.textureID = textureId;
-        this.transformMatrix = transformMatrix;
-      }
-    }
-    private final SurfaceTextureHelper surfaceTextureHelper;
-    private TextureInfo textureInfo;
-    // |newFrameLock| is used to synchronize arrival of new frames with wait()/notifyAll().
-    private final Object newFrameLock = new Object();
-
-    public TextureListener(SurfaceTextureHelper surfaceTextureHelper) {
-      this.surfaceTextureHelper = surfaceTextureHelper;
-      surfaceTextureHelper.setListener(this);
-    }
-
-    // Callback from |surfaceTextureHelper|. May be called on an arbitrary thread.
-    @Override
-    public void onTextureFrameAvailable(
-        int oesTextureId, float[] transformMatrix, long timestampNs) {
-      synchronized (newFrameLock) {
-        if (textureInfo != null) {
-          Logging.e(TAG,
-              "Unexpected onTextureFrameAvailable() called while already holding a texture.");
-          throw new IllegalStateException("Already holding a texture.");
-        }
-        // |timestampNs| is always zero on some Android versions.
-        textureInfo = new TextureInfo(oesTextureId, transformMatrix);
-        newFrameLock.notifyAll();
-      }
-    }
-
-    // Dequeues and returns a TextureInfo if available, or null otherwise.
-    public TextureInfo dequeueTextureInfo(int timeoutMs) {
-      synchronized (newFrameLock) {
-        if (textureInfo == null && timeoutMs > 0) {
-          try {
-            newFrameLock.wait(timeoutMs);
-          } catch(InterruptedException e) {
-            // Restore the interrupted status by reinterrupting the thread.
-            Thread.currentThread().interrupt();
-          }
-        }
-        TextureInfo returnedInfo = textureInfo;
-        textureInfo = null;
-        return returnedInfo;
-      }
-    }
-
-    public void release() {
-      // SurfaceTextureHelper.disconnect() will block until any onTextureFrameAvailable() in
-      // progress is done. Therefore, the call to disconnect() must be outside any synchronized
-      // statement that is also used in the onTextureFrameAvailable() above to avoid deadlocks.
-      surfaceTextureHelper.disconnect();
-      synchronized (newFrameLock) {
-        if (textureInfo != null) {
-          surfaceTextureHelper.returnTextureFrame();
-          textureInfo = null;
-        }
-      }
-    }
-  }
-
-  // Returns null if no decoded buffer is available, and otherwise a DecodedByteBuffer.
+  // Returns null if no decoded buffer is available, and otherwise either a DecodedByteBuffer or
+  // DecodedTexturebuffer depending on |useSurface| configuration.
   // Throws IllegalStateException if call is made on the wrong thread, if color format changes to an
   // unsupported format, or if |mediaCodec| is not in the Executing state. Throws CodecException
   // upon codec error.
-  private DecodedOutputBuffer dequeueOutputBuffer(int dequeueTimeoutMs) {
+  private Object dequeueOutputBuffer(int dequeueTimeoutUs)
+      throws IllegalStateException, MediaCodec.CodecException {
     checkOnMediaCodecThread();
-    if (decodeStartTimeMs.isEmpty()) {
-      return null;
-    }
+
     // Drain the decoder until receiving a decoded buffer or hitting
     // MediaCodec.INFO_TRY_AGAIN_LATER.
     final MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
     while (true) {
-      final int result = mediaCodec.dequeueOutputBuffer(
-          info, TimeUnit.MILLISECONDS.toMicros(dequeueTimeoutMs));
+      final int result = mediaCodec.dequeueOutputBuffer(info, dequeueTimeoutUs);
       switch (result) {
+        case MediaCodec.INFO_TRY_AGAIN_LATER:
+          return null;
         case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
           outputBuffers = mediaCodec.getOutputBuffers();
           Logging.d(TAG, "Decoder output buffers changed: " + outputBuffers.length);
-          if (hasDecodedFirstFrame) {
-            throw new RuntimeException("Unexpected output buffer change event.");
-          }
           break;
         case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
           MediaFormat format = mediaCodec.getOutputFormat();
           Logging.d(TAG, "Decoder format changed: " + format.toString());
-          int new_width = format.getInteger(MediaFormat.KEY_WIDTH);
-          int new_height = format.getInteger(MediaFormat.KEY_HEIGHT);
-          if (hasDecodedFirstFrame && (new_width != width || new_height != height)) {
-            throw new RuntimeException("Unexpected size change. Configured " + width + "*" +
-                height + ". New " + new_width + "*" + new_height);
-          }
           width = format.getInteger(MediaFormat.KEY_WIDTH);
           height = format.getInteger(MediaFormat.KEY_HEIGHT);
-
           if (!useSurface && format.containsKey(MediaFormat.KEY_COLOR_FORMAT)) {
             colorFormat = format.getInteger(MediaFormat.KEY_COLOR_FORMAT);
             Logging.d(TAG, "Color: 0x" + Integer.toHexString(colorFormat));
             if (!supportedColorList.contains(colorFormat)) {
               throw new IllegalStateException("Non supported color format: " + colorFormat);
             }
           }
           if (format.containsKey("stride")) {
             stride = format.getInteger("stride");
           }
           if (format.containsKey("slice-height")) {
             sliceHeight = format.getInteger("slice-height");
           }
           Logging.d(TAG, "Frame stride and slice height: " + stride + " x " + sliceHeight);
           stride = Math.max(width, stride);
           sliceHeight = Math.max(height, sliceHeight);
           break;
-        case MediaCodec.INFO_TRY_AGAIN_LATER:
-          return null;
         default:
-          hasDecodedFirstFrame = true;
-          return new DecodedOutputBuffer(result, info.offset, info.size, info.presentationTimeUs,
-              SystemClock.elapsedRealtime() - decodeStartTimeMs.remove(),
-              SystemClock.elapsedRealtime());
+          // Output buffer decoded.
+          if (useSurface) {
+            mediaCodec.releaseOutputBuffer(result, true /* render */);
+            // TODO(magjed): Wait for SurfaceTexture.onFrameAvailable() before returning a texture
+            // frame.
+            return new DecodedTextureBuffer(textureID, info.presentationTimeUs);
+          } else {
+            return new DecodedByteBuffer(result, info.offset, info.size, info.presentationTimeUs);
+          }
       }
     }
   }

-  // Returns null if no decoded buffer is available, and otherwise a DecodedTextureBuffer.
-  // Throws IllegalStateException if call is made on the wrong thread, if color format changes to an
-  // unsupported format, or if |mediaCodec| is not in the Executing state. Throws CodecException
-  // upon codec error.
-  private DecodedTextureBuffer dequeueTextureBuffer(int dequeueTimeoutMs) {
-    checkOnMediaCodecThread();
-    if (!useSurface) {
-      throw new IllegalStateException("dequeueTexture() called for byte buffer decoding.");
-    }
-
-    DecodedOutputBuffer outputBuffer = dequeueOutputBuffer(dequeueTimeoutMs);
-    if (outputBuffer != null) {
-      if (dequeuedSurfaceOutputBuffers.size() >= Math.min(
-          MAX_QUEUED_OUTPUTBUFFERS, outputBuffers.length)) {
-        ++droppedFrames;
-        Logging.w(TAG, "Too many output buffers. Dropping frame. Total number of dropped frames: "
-            + droppedFrames);
-        // Drop the newest frame. Don't drop the oldest since if |isWaitingForTexture|
-        // releaseOutputBuffer has already been called. Dropping the newest frame will lead to a
-        // shift of timestamps by one frame in MediaCodecVideoDecoder::DeliverPendingOutputs.
-        mediaCodec.releaseOutputBuffer(outputBuffer.index, false /* render */);
-        return new DecodedTextureBuffer(0, null, outputBuffer.presentationTimestampUs,
-            outputBuffer.decodeTimeMs,
-            SystemClock.elapsedRealtime() - outputBuffer.endDecodeTimeMs);
-      }
-      dequeuedSurfaceOutputBuffers.add(outputBuffer);
-    }
-
-    if (dequeuedSurfaceOutputBuffers.isEmpty()) {
-      return null;
-    }
-
-    if (!isWaitingForTexture) {
-      // Get the first frame in the queue and render to the decoder output surface.
-      mediaCodec.releaseOutputBuffer(dequeuedSurfaceOutputBuffers.peek().index, true /* render */);
-      isWaitingForTexture = true;
-    }
-
-    // We are waiting for a frame to be rendered to the decoder surface.
-    // Check if it is ready now by waiting max |dequeueTimeoutMs|. There can only be one frame
-    // rendered at a time.
-    TextureListener.TextureInfo info = textureListener.dequeueTextureInfo(dequeueTimeoutMs);
-    if (info != null) {
-      isWaitingForTexture = false;
-      final DecodedOutputBuffer renderedBuffer =
-          dequeuedSurfaceOutputBuffers.remove();
-      if (!dequeuedSurfaceOutputBuffers.isEmpty()) {
-        // Get the next frame in the queue and render to the decoder output surface.
-        mediaCodec.releaseOutputBuffer(
-            dequeuedSurfaceOutputBuffers.peek().index, true /* render */);
-        isWaitingForTexture = true;
-      }
-
-      return new DecodedTextureBuffer(info.textureID, info.transformMatrix,
-          renderedBuffer.presentationTimestampUs, renderedBuffer.decodeTimeMs,
-          SystemClock.elapsedRealtime() - renderedBuffer.endDecodeTimeMs);
-    }
-    return null;
-  }
-
   // Release a dequeued output byte buffer back to the codec for re-use. Should only be called for
   // non-surface decoding.
   // Throws IllegalStateException if the call is made on the wrong thread, if codec is configured
   // for surface decoding, or if |mediaCodec| is not in the Executing state. Throws
   // MediaCodec.CodecException upon codec error.
-  private void returnDecodedOutputBuffer(int index)
+  private void returnDecodedByteBuffer(int index)
       throws IllegalStateException, MediaCodec.CodecException {
     checkOnMediaCodecThread();
     if (useSurface) {
-      throw new IllegalStateException("returnDecodedOutputBuffer() called for surface decoding.");
+      throw new IllegalStateException("returnDecodedByteBuffer() called for surface decoding.");
     }
     mediaCodec.releaseOutputBuffer(index, false /* render */);
   }
 }
