Chromium Code Reviews

Unified diff: talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoDecoder.java

Issue 1422963003: Android MediaCodecVideoDecoder: Manage lifetime of texture frames (Closed) Base URL: https://chromium.googlesource.com/external/webrtc.git@master
Patch Set: Added frame dropping. Created 5 years, 1 month ago
 /*
  * libjingle
  * Copyright 2014 Google Inc.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions are met:
  *
  * 1. Redistributions of source code must retain the above copyright notice,
  *    this list of conditions and the following disclaimer.
  * 2. Redistributions in binary form must reproduce the above copyright notice,
  *    this list of conditions and the following disclaimer in the documentation
  *    and/or other materials provided with the distribution.
  * 3. The name of the author may not be used to endorse or promote products
  *    derived from this software without specific prior written permission.
  *
  * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
  * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
  * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
  * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
  * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
  * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
  * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
  * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
  * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
  */

 package org.webrtc;

-import android.graphics.SurfaceTexture;
 import android.media.MediaCodec;
 import android.media.MediaCodecInfo;
 import android.media.MediaCodecInfo.CodecCapabilities;
 import android.media.MediaCodecList;
 import android.media.MediaFormat;
-import android.opengl.GLES11Ext;
-import android.opengl.GLES20;
 import android.os.Build;
+import android.os.SystemClock;
 import android.view.Surface;

 import org.webrtc.Logging;

 import java.nio.ByteBuffer;
 import java.util.Arrays;
+import java.util.LinkedList;
 import java.util.List;
-
-import javax.microedition.khronos.egl.EGLContext;
+import java.util.Queue;
+import java.util.concurrent.TimeUnit;

 // Java-side of peerconnection_jni.cc:MediaCodecVideoDecoder.
 // This class is an implementation detail of the Java PeerConnection API.
 // MediaCodec is thread-hostile so this class must be operated on a single
 // thread.
 public class MediaCodecVideoDecoder {
   // This class is constructed, operated, and destroyed by its C++ incarnation,
   // so the class and its methods have non-public visibility. The API this
   // class exposes aims to mimic the webrtc::VideoDecoder API as closely as
   // possibly to minimize the amount of translation work necessary.
(...skipping 31 matching lines...)
   private static final List<Integer> supportedColorList = Arrays.asList(
     CodecCapabilities.COLOR_FormatYUV420Planar,
     CodecCapabilities.COLOR_FormatYUV420SemiPlanar,
     CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar,
     COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m);
   private int colorFormat;
   private int width;
   private int height;
   private int stride;
   private int sliceHeight;
+  private boolean hasDecodedFirstFrame;
+  private final Queue<Long> decodeStartTimeMs = new LinkedList<Long>();
   private boolean useSurface;
-  private int textureID = 0;
-  private SurfaceTexture surfaceTexture = null;
+
+  // The below variables are only used when the decode decodes to a Surface.
magjed_webrtc 2015/11/09 16:30:52 Strange sentence, maybe "when the decoder decodes"
perkj_webrtc 2015/11/09 16:53:50 Done.
+  private TextureListener textureListener;
+  // |isWaitingForTexture| is true when waiting for the transition:
+  // MediaCodec.releaseOutputBuffer() -> onTextureFrameAvailable().
+  private boolean isWaitingForTexture;
   private Surface surface = null;
-  private EglBase eglBase;
-
-  private MediaCodecVideoDecoder() {
-  }
+  private final Queue<DecodedOutputBuffer>
+      dequeuedSurfaceOutputBuffers = new LinkedList<DecodedOutputBuffer>();

   // Helper struct for findVp8Decoder() below.
   private static class DecoderProperties {
     public DecoderProperties(String codecName, int colorFormat) {
       this.codecName = codecName;
       this.colorFormat = colorFormat;
     }
     public final String codecName; // OpenMax component name for VP8 codec.
     public final int colorFormat; // Color format supported by codec.
   }
(...skipping 73 matching lines...)
   }

   private void checkOnMediaCodecThread() throws IllegalStateException {
     if (mediaCodecThread.getId() != Thread.currentThread().getId()) {
       throw new IllegalStateException(
           "MediaCodecVideoDecoder previously operated on " + mediaCodecThread +
           " but is now called on " + Thread.currentThread());
     }
   }

-  // Pass null in |sharedContext| to configure the codec for ByteBuffer output.
-  private boolean initDecode(VideoCodecType type, int width, int height, EGLContext sharedContext) {
+  // Pass null in |surfaceTextureHelper| to configure the codec for ByteBuffer output.
+  private boolean initDecode(
+      VideoCodecType type, int width, int height, SurfaceTextureHelper surfaceTextureHelper) {
     if (mediaCodecThread != null) {
       throw new RuntimeException("Forgot to release()?");
     }
-    useSurface = (sharedContext != null);
+    useSurface = (surfaceTextureHelper != null);
     String mime = null;
     String[] supportedCodecPrefixes = null;
     if (type == VideoCodecType.VIDEO_CODEC_VP8) {
       mime = VP8_MIME_TYPE;
       supportedCodecPrefixes = supportedVp8HwCodecPrefixes;
     } else if (type == VideoCodecType.VIDEO_CODEC_H264) {
       mime = H264_MIME_TYPE;
       supportedCodecPrefixes = supportedH264HwCodecPrefixes;
     } else {
       throw new RuntimeException("Non supported codec " + type);
     }
     DecoderProperties properties = findDecoder(mime, supportedCodecPrefixes);
     if (properties == null) {
       throw new RuntimeException("Cannot find HW decoder for " + type);
     }
     Logging.d(TAG, "Java initDecode: " + type + " : "+ width + " x " + height +
         ". Color: 0x" + Integer.toHexString(properties.colorFormat) +
         ". Use Surface: " + useSurface);
-    if (sharedContext != null) {
-      Logging.d(TAG, "Decoder shared EGL Context: " + sharedContext);
-    }
     runningInstance = this; // Decoder is now running and can be queried for stack traces.
     mediaCodecThread = Thread.currentThread();
     try {
       this.width = width;
       this.height = height;
       stride = width;
       sliceHeight = height;

       if (useSurface) {
-        // Create shared EGL context.
-        eglBase = new EglBase(sharedContext, EglBase.ConfigType.PIXEL_BUFFER);
-        eglBase.createDummyPbufferSurface();
-        eglBase.makeCurrent();
-
-        // Create output surface
-        textureID = GlUtil.generateTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
-        Logging.d(TAG, "Video decoder TextureID = " + textureID);
-        surfaceTexture = new SurfaceTexture(textureID);
-        surface = new Surface(surfaceTexture);
+        textureListener = new TextureListener(surfaceTextureHelper);
+        surface = new Surface(surfaceTextureHelper.getSurfaceTexture());
       }

       MediaFormat format = MediaFormat.createVideoFormat(mime, width, height);
       if (!useSurface) {
         format.setInteger(MediaFormat.KEY_COLOR_FORMAT, properties.colorFormat);
       }
       Logging.d(TAG, " Format: " + format);
       mediaCodec =
           MediaCodecVideoEncoder.createByCodecName(properties.codecName);
       if (mediaCodec == null) {
(...skipping 18 matching lines...)
     Logging.d(TAG, "Java releaseDecoder");
     checkOnMediaCodecThread();
     try {
       mediaCodec.stop();
       mediaCodec.release();
     } catch (IllegalStateException e) {
       Logging.e(TAG, "release failed", e);
     }
     mediaCodec = null;
     mediaCodecThread = null;
+    decodeStartTimeMs.clear();
+    hasDecodedFirstFrame = false;
     runningInstance = null;
+    dequeuedSurfaceOutputBuffers.clear();
+    isWaitingForTexture = false;
     if (useSurface) {
       surface.release();
       surface = null;
-      Logging.d(TAG, "Delete video decoder TextureID " + textureID);
-      GLES20.glDeleteTextures(1, new int[] {textureID}, 0);
-      textureID = 0;
-      eglBase.release();
-      eglBase = null;
+      textureListener.release();
     }
     Logging.d(TAG, "Java releaseDecoder done");
   }

   // Dequeue an input buffer and return its index, -1 if no input buffer is
   // available, or -2 if the codec is no longer operative.
   private int dequeueInputBuffer() {
     checkOnMediaCodecThread();
     try {
       return mediaCodec.dequeueInputBuffer(DEQUEUE_INPUT_TIMEOUT);
     } catch (IllegalStateException e) {
       Logging.e(TAG, "dequeueIntputBuffer failed", e);
       return -2;
     }
   }

   private boolean queueInputBuffer(
       int inputBufferIndex, int size, long timestampUs) {
     checkOnMediaCodecThread();
     try {
       inputBuffers[inputBufferIndex].position(0);
       inputBuffers[inputBufferIndex].limit(size);
+      decodeStartTimeMs.add(SystemClock.elapsedRealtime());
       mediaCodec.queueInputBuffer(inputBufferIndex, 0, size, timestampUs, 0);
       return true;
     }
     catch (IllegalStateException e) {
       Logging.e(TAG, "decode failed", e);
       return false;
     }
   }
-  // Helper structs for dequeueOutputBuffer() below.
-  private static class DecodedByteBuffer {
-    public DecodedByteBuffer(int index, int offset, int size, long presentationTimestampUs) {
+  // Helper struct for dequeueOutputBuffer() below.
+  private static class DecodedOutputBuffer {
+    public DecodedOutputBuffer(int index, int offset, int size, long presentationTimestampUs,
+        long decodeTime) {
       this.index = index;
       this.offset = offset;
       this.size = size;
       this.presentationTimestampUs = presentationTimestampUs;
+      this.decodeTimeMs = decodeTime;
     }

     private final int index;
     private final int offset;
     private final int size;
     private final long presentationTimestampUs;
+    private final long decodeTimeMs;
   }

+  // Helper struct for dequeueTextureBuffer() below.
   private static class DecodedTextureBuffer {
     private final int textureID;
+    private final float[] transformMatrix;
     private final long presentationTimestampUs;
+    private final long decodeTimeMs;

-    public DecodedTextureBuffer(int textureID, long presentationTimestampUs) {
+    // A DecodedTextureBuffer with zero |textureID| has special meaning and represents a frame
+    // that was dropped.
+    public DecodedTextureBuffer(int textureID, float[] transformMatrix,
+        long presentationTimestampUs, long decodeTimeMs) {
       this.textureID = textureID;
+      this.transformMatrix = transformMatrix;
       this.presentationTimestampUs = presentationTimestampUs;
+      this.decodeTimeMs = decodeTimeMs;
     }
   }

-  // Returns null if no decoded buffer is available, and otherwise either a DecodedByteBuffer or
-  // DecodedTexturebuffer depending on |useSurface| configuration.
+  // Poll based texture listener.
+  private static class TextureListener
+      implements SurfaceTextureHelper.OnTextureFrameAvailableListener {
+    public static class TextureInfo {
+      private final int textureID;
+      private final float[] transformMatrix;
+
+      TextureInfo(int textureId, float[] transformMatrix) {
+        this.textureID = textureId;
+        this.transformMatrix = transformMatrix;
+      }
+    }
+    private final SurfaceTextureHelper surfaceTextureHelper;
+    private TextureInfo textureInfo;
+    // |newFrameLock| is used to synchronize arrival of new frames with wait()/notifyAll().
+    private final Object newFrameLock = new Object();
+
+    public TextureListener(SurfaceTextureHelper surfaceTextureHelper) {
+      this.surfaceTextureHelper = surfaceTextureHelper;
+      surfaceTextureHelper.setListener(this);
+    }
+
+    // Callback from |surfaceTextureHelper|. May be called on an arbitrary thread.
+    @Override
+    public void onTextureFrameAvailable(
+        int oesTextureId, float[] transformMatrix, long timestampNs) {
+      synchronized (newFrameLock) {
+        if (textureInfo != null) {
+          Logging.e(TAG,
+              "Unexpected onTextureFrameAvailable() called while already holding a texture.");
+          throw new IllegalStateException("Already holding a texture.");
+        }
+        // |timestampNs| is always zero on some Android versions.
+        textureInfo = new TextureInfo(oesTextureId, transformMatrix);
+        newFrameLock.notifyAll();
+      }
+    }
+
+    // Dequeues and returns a TextureInfo if available, or null otherwise.
+    public TextureInfo dequeueTextureInfo(int timeoutMs) {
+      synchronized (newFrameLock) {
+        if (textureInfo == null && timeoutMs > 0) {
+          try {
+            newFrameLock.wait(timeoutMs);
+          } catch(InterruptedException e) {
+            // Restore the interrupted status by reinterrupting the thread.
+            Thread.currentThread().interrupt();
+          }
+        }
+        TextureInfo returnedInfo = textureInfo;
+        textureInfo = null;
+        return returnedInfo;
+      }
+    }
+
+    public void release() {
+      // SurfaceTextureHelper.disconnect() will block until any onTextureFrameAvailable() in
+      // progress is done. Therefore, the call to disconnect() must be outside any synchronized
+      // statement that is also used in the onTextureFrameAvailable() above to avoid deadlocks.
+      surfaceTextureHelper.disconnect();
+      synchronized (newFrameLock) {
+        if (textureInfo != null) {
+          surfaceTextureHelper.returnTextureFrame();
+          textureInfo = null;
+        }
+      }
+    }
+  }
+
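The TextureListener above bridges the asynchronous onTextureFrameAvailable() callback from SurfaceTextureHelper to the synchronous, poll-driven decoder thread: the callback publishes at most one pending texture under a lock and notifies, while dequeueTextureInfo() waits on the same lock with a bounded timeout. A minimal, non-Android sketch of that same handshake is shown below; the FrameSignal class and all names in it are hypothetical and exist only for illustration.

import java.util.concurrent.atomic.AtomicInteger;

// Minimal sketch of the poll-based handshake used by TextureListener: an asynchronous
// producer publishes at most one pending item under a lock, and a consumer polls for it
// with a bounded wait() timeout.
final class FrameSignal {
  private final Object lock = new Object();
  private Integer pendingFrame;  // At most one pending frame, like |textureInfo|.

  // Producer side, analogous to onTextureFrameAvailable(): may run on any thread.
  void publish(int frameId) {
    synchronized (lock) {
      if (pendingFrame != null) {
        throw new IllegalStateException("Already holding a frame.");
      }
      pendingFrame = frameId;
      lock.notifyAll();
    }
  }

  // Consumer side, analogous to dequeueTextureInfo(): waits at most |timeoutMs|.
  Integer poll(long timeoutMs) {
    synchronized (lock) {
      if (pendingFrame == null && timeoutMs > 0) {
        try {
          lock.wait(timeoutMs);
        } catch (InterruptedException e) {
          Thread.currentThread().interrupt();  // Restore the interrupted status.
        }
      }
      Integer result = pendingFrame;
      pendingFrame = null;
      return result;
    }
  }

  public static void main(String[] args) {
    FrameSignal signal = new FrameSignal();
    AtomicInteger frameId = new AtomicInteger();
    new Thread(() -> signal.publish(frameId.incrementAndGet())).start();
    // Poll with a 20 ms budget, like dequeueTextureBuffer(dequeueTimeoutMs).
    System.out.println("Got frame: " + signal.poll(20));
  }
}

Checking the pending slot and waiting happen under the same lock, so a frame published before the consumer starts waiting cannot be missed.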
+  // Returns null if no decoded buffer is available, and otherwise a DecodedByteBuffer.
   // Throws IllegalStateException if call is made on the wrong thread, if color format changes to an
   // unsupported format, or if |mediaCodec| is not in the Executing state. Throws CodecException
   // upon codec error.
-  private Object dequeueOutputBuffer(int dequeueTimeoutUs)
-      throws IllegalStateException, MediaCodec.CodecException {
+  private DecodedOutputBuffer dequeueOutputBuffer(int dequeueTimeoutMs) {
     checkOnMediaCodecThread();
+    if (decodeStartTimeMs.isEmpty()) {
+      return null;
+    }
     // Drain the decoder until receiving a decoded buffer or hitting
     // MediaCodec.INFO_TRY_AGAIN_LATER.
     final MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
     while (true) {
-      final int result = mediaCodec.dequeueOutputBuffer(info, dequeueTimeoutUs);
+      final int result = mediaCodec.dequeueOutputBuffer(
+          info, TimeUnit.MILLISECONDS.toMicros(dequeueTimeoutMs));
       switch (result) {
-        case MediaCodec.INFO_TRY_AGAIN_LATER:
-          return null;
         case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
           outputBuffers = mediaCodec.getOutputBuffers();
           Logging.d(TAG, "Decoder output buffers changed: " + outputBuffers.length);
+          if (hasDecodedFirstFrame) {
+            throw new RuntimeException("Unexpected output buffer change event.");
+          }
           break;
         case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
           MediaFormat format = mediaCodec.getOutputFormat();
           Logging.d(TAG, "Decoder format changed: " + format.toString());
+          int new_width = format.getInteger(MediaFormat.KEY_WIDTH);
+          int new_height = format.getInteger(MediaFormat.KEY_HEIGHT);
+          if (hasDecodedFirstFrame && (new_width != width || new_height != height)) {
+            throw new RuntimeException("Unexpected size change. Configured " + width + "*" +
+                height + ". New " + new_width + "*" + new_height);
+          }
           width = format.getInteger(MediaFormat.KEY_WIDTH);
           height = format.getInteger(MediaFormat.KEY_HEIGHT);
+
           if (!useSurface && format.containsKey(MediaFormat.KEY_COLOR_FORMAT)) {
             colorFormat = format.getInteger(MediaFormat.KEY_COLOR_FORMAT);
             Logging.d(TAG, "Color: 0x" + Integer.toHexString(colorFormat));
             if (!supportedColorList.contains(colorFormat)) {
               throw new IllegalStateException("Non supported color format: " + colorFormat);
             }
           }
           if (format.containsKey("stride")) {
             stride = format.getInteger("stride");
           }
           if (format.containsKey("slice-height")) {
             sliceHeight = format.getInteger("slice-height");
           }
           Logging.d(TAG, "Frame stride and slice height: " + stride + " x " + sliceHeight);
           stride = Math.max(width, stride);
           sliceHeight = Math.max(height, sliceHeight);
           break;
+        case MediaCodec.INFO_TRY_AGAIN_LATER:
+          return null;
         default:
-          // Output buffer decoded.
-          if (useSurface) {
-            mediaCodec.releaseOutputBuffer(result, true /* render */);
-            // TODO(magjed): Wait for SurfaceTexture.onFrameAvailable() before returning a texture
-            // frame.
-            return new DecodedTextureBuffer(textureID, info.presentationTimeUs);
-          } else {
-            return new DecodedByteBuffer(result, info.offset, info.size, info.presentationTimeUs);
-          }
+          hasDecodedFirstFrame = true;
+          return new DecodedOutputBuffer(result, info.offset, info.size, info.presentationTimeUs,
+              SystemClock.elapsedRealtime() - decodeStartTimeMs.remove());
       }
     }
   }

+  // Returns null if no decoded buffer is available, and otherwise a DecodedTextureBuffer.
+  // Throws IllegalStateException if call is made on the wrong thread, if color format changes to an
+  // unsupported format, or if |mediaCodec| is not in the Executing state. Throws CodecException
+  // upon codec error.
+  private DecodedTextureBuffer dequeueTextureBuffer(int dequeueTimeoutMs) {
+    checkOnMediaCodecThread();
+    if (!useSurface) {
+      throw new IllegalStateException("dequeueTexture() called for byte buffer decoding.");
+    }
+
+    DecodedOutputBuffer outputBuffer = dequeueOutputBuffer(dequeueTimeoutMs);
+    if (outputBuffer != null) {
+      if (dequeuedSurfaceOutputBuffers.size() > 2) {
+        Logging.w(TAG, "Too many output buffers. Dropping frame.");
+        // Drop the newest frame. Don't drop the oldest since, if |isWaitingForTexture| is set,
+        // releaseOutputBuffer has already been called for it. Dropping the newest frame will lead
+        // to a shift of timestamps by one frame in MediaCodecVideoDecoder::DeliverPendingOutputs.
+        mediaCodec.releaseOutputBuffer(outputBuffer.index, false /* render */);
+        return new DecodedTextureBuffer(0, null, outputBuffer.presentationTimestampUs,
+            outputBuffer.decodeTimeMs);
+      }
+      dequeuedSurfaceOutputBuffers.add(outputBuffer);
+    }
+
+    if (dequeuedSurfaceOutputBuffers.isEmpty()) {
+      return null;
+    }
+
+    if (!isWaitingForTexture) {
+      // Get the first frame in the queue and render to the decoder output surface.
+      mediaCodec.releaseOutputBuffer(dequeuedSurfaceOutputBuffers.peek().index, true /* render */);
+      isWaitingForTexture = true;
+    }
+
+    // We are waiting for a frame to be rendered to the decoder surface.
+    // Check if it is ready now by waiting max |dequeueTimeoutMs|. There can only be one frame
+    // rendered at a time.
+    TextureListener.TextureInfo info = textureListener.dequeueTextureInfo(dequeueTimeoutMs);
+    if (info != null) {
+      isWaitingForTexture = false;
+      final DecodedOutputBuffer rendererdBuffer =
+          dequeuedSurfaceOutputBuffers.remove();
+      return new DecodedTextureBuffer(info.textureID, info.transformMatrix,
+          rendererdBuffer.presentationTimestampUs, rendererdBuffer.decodeTimeMs);
+    }
+    return null;
+  }
+
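dequeueTextureBuffer() above keeps at most one frame in flight on the output surface (tracked by |isWaitingForTexture|) plus a small backlog in |dequeuedSurfaceOutputBuffers|; once the backlog holds more than two buffers it drops the newest decoded frame and signals the drop to the caller with a zero textureID. The following rough, non-Android model of that bookkeeping uses hypothetical names (PendingFrames, Frame) and is only meant to make the drop-newest policy concrete.

import java.util.LinkedList;
import java.util.Queue;

// Rough model of the dequeueTextureBuffer() bookkeeping: a bounded backlog of decoded
// buffers, a drop-newest policy on overflow, and at most one frame rendered to the
// output surface at a time.
final class PendingFrames {
  static final class Frame {
    final int index;
    Frame(int index) { this.index = index; }
  }

  private final Queue<Frame> pending = new LinkedList<Frame>();
  private boolean waitingForRender;  // Mirrors |isWaitingForTexture|.

  // Offer a newly decoded buffer. Returns the frame itself if it had to be dropped
  // (the real code returns a DecodedTextureBuffer with textureID == 0), else null.
  Frame offer(int decodedIndex) {
    if (pending.size() > 2) {  // Mirrors the dequeuedSurfaceOutputBuffers.size() > 2 check.
      return new Frame(decodedIndex);  // Drop the newest; the backlog stays intact.
    }
    pending.add(new Frame(decodedIndex));
    if (!waitingForRender) {
      // Real code: mediaCodec.releaseOutputBuffer(pending.peek().index, true /* render */);
      waitingForRender = true;
    }
    return null;
  }

  // Called once the rendered texture arrives (onTextureFrameAvailable() in the real code).
  Frame onRendered() {
    waitingForRender = false;
    return pending.remove();
  }

  public static void main(String[] args) {
    PendingFrames frames = new PendingFrames();
    for (int i = 0; i < 5; i++) {
      System.out.println(frames.offer(i) == null ? "queued " + i : "dropped " + i);
    }
    System.out.println("rendered " + frames.onRendered().index);  // Oldest frame, index 0.
  }
}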
   // Release a dequeued output byte buffer back to the codec for re-use. Should only be called for
   // non-surface decoding.
   // Throws IllegalStateException if the call is made on the wrong thread, if codec is configured
   // for surface decoding, or if |mediaCodec| is not in the Executing state. Throws
   // MediaCodec.CodecException upon codec error.
-  private void returnDecodedByteBuffer(int index)
+  private void returnDecodedOutputBuffer(int index)
       throws IllegalStateException, MediaCodec.CodecException {
     checkOnMediaCodecThread();
     if (useSurface) {
-      throw new IllegalStateException("returnDecodedByteBuffer() called for surface decoding.");
+      throw new IllegalStateException("returnDecodedOutputBuffer() called for surface decoding.");
     }
     mediaCodec.releaseOutputBuffer(index, false /* render */);
   }
 }
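For orientation, the class in this diff is a thin wrapper over the platform MediaCodec API; the texture path ultimately relies on configuring the codec with a Surface and calling releaseOutputBuffer(index, true) to render into it. A bare-bones sketch of that underlying decode-to-Surface flow is given below. It drives the codec directly from plain Java with an assumed VP8 stream, size, and caller-supplied Surface, whereas the class above is driven by the C++ side in peerconnection_jni.cc.

import android.media.MediaCodec;
import android.media.MediaFormat;
import android.view.Surface;
import java.nio.ByteBuffer;

// Bare-bones MediaCodec decode-to-Surface loop, for orientation only. The encoded frame
// and the output Surface are assumed to be provided by the caller.
final class SurfaceDecodeSketch {
  static void decodeOneFrame(Surface outputSurface, byte[] encodedFrame) throws Exception {
    MediaFormat format = MediaFormat.createVideoFormat("video/x-vnd.on2.vp8", 640, 480);
    MediaCodec codec = MediaCodec.createDecoderByType("video/x-vnd.on2.vp8");
    // Passing a Surface here is what makes releaseOutputBuffer(index, true) render the frame.
    codec.configure(format, outputSurface, null, 0);
    codec.start();

    int inputIndex = codec.dequeueInputBuffer(500000 /* timeoutUs */);
    if (inputIndex >= 0) {
      ByteBuffer input = codec.getInputBuffers()[inputIndex];
      input.clear();
      input.put(encodedFrame);
      codec.queueInputBuffer(inputIndex, 0, encodedFrame.length, 0 /* presentationTimeUs */, 0);
    }

    MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
    int outputIndex = codec.dequeueOutputBuffer(info, 500000 /* timeoutUs */);
    if (outputIndex >= 0) {
      // render == true hands the frame to the Surface; the SurfaceTexture owner is then
      // notified via its frame-available callback, which is the transition this CL waits for.
      codec.releaseOutputBuffer(outputIndex, true /* render */);
    }

    codec.stop();
    codec.release();
  }
}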