Chromium Code Reviews

Side by Side Diff: talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoDecoder.java

Issue 1422963003: Android MediaCodecVideoDecoder: Manage lifetime of texture frames (Closed) Base URL: https://chromium.googlesource.com/external/webrtc.git@master
Patch Set: Addressed comments and further refactored. Created 5 years, 1 month ago
1 /* 1 /*
2 * libjingle 2 * libjingle
3 * Copyright 2014 Google Inc. 3 * Copyright 2014 Google Inc.
4 * 4 *
5 * Redistribution and use in source and binary forms, with or without 5 * Redistribution and use in source and binary forms, with or without
6 * modification, are permitted provided that the following conditions are met: 6 * modification, are permitted provided that the following conditions are met:
7 * 7 *
8 * 1. Redistributions of source code must retain the above copyright notice, 8 * 1. Redistributions of source code must retain the above copyright notice,
9 * this list of conditions and the following disclaimer. 9 * this list of conditions and the following disclaimer.
10 * 2. Redistributions in binary form must reproduce the above copyright notice, 10 * 2. Redistributions in binary form must reproduce the above copyright notice,
11 * this list of conditions and the following disclaimer in the documentation 11 * this list of conditions and the following disclaimer in the documentation
12 * and/or other materials provided with the distribution. 12 * and/or other materials provided with the distribution.
13 * 3. The name of the author may not be used to endorse or promote products 13 * 3. The name of the author may not be used to endorse or promote products
14 * derived from this software without specific prior written permission. 14 * derived from this software without specific prior written permission.
15 * 15 *
16 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED 16 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
17 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF 17 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18 * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO 18 * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
19 * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 19 * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
20 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, 20 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
21 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; 21 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
22 * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, 22 * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
23 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR 23 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
24 * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF 24 * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
25 * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 25 * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
26 */ 26 */
27 27
28 package org.webrtc; 28 package org.webrtc;
29 29
30 import android.graphics.SurfaceTexture;
31 import android.media.MediaCodec; 30 import android.media.MediaCodec;
32 import android.media.MediaCodecInfo; 31 import android.media.MediaCodecInfo;
33 import android.media.MediaCodecInfo.CodecCapabilities; 32 import android.media.MediaCodecInfo.CodecCapabilities;
34 import android.media.MediaCodecList; 33 import android.media.MediaCodecList;
35 import android.media.MediaFormat; 34 import android.media.MediaFormat;
36 import android.opengl.EGLContext;
37 import android.opengl.GLES11Ext;
38 import android.opengl.GLES20;
39 import android.os.Build; 35 import android.os.Build;
36 import android.os.SystemClock;
40 import android.view.Surface; 37 import android.view.Surface;
41 38
42 import org.webrtc.Logging; 39 import org.webrtc.Logging;
43 40
44 import java.nio.ByteBuffer; 41 import java.nio.ByteBuffer;
42 import java.util.ArrayList;
45 import java.util.Arrays; 43 import java.util.Arrays;
44 import java.util.LinkedList;
46 import java.util.List; 45 import java.util.List;
46 import java.util.Queue;
47 import java.util.concurrent.TimeUnit;
47 48
48 // Java-side of peerconnection_jni.cc:MediaCodecVideoDecoder. 49 // Java-side of peerconnection_jni.cc:MediaCodecVideoDecoder.
49 // This class is an implementation detail of the Java PeerConnection API. 50 // This class is an implementation detail of the Java PeerConnection API.
50 // MediaCodec is thread-hostile so this class must be operated on a single 51 // MediaCodec is thread-hostile so this class must be operated on a single
51 // thread. 52 // thread.
52 public class MediaCodecVideoDecoder { 53 public class MediaCodecVideoDecoder {
53 // This class is constructed, operated, and destroyed by its C++ incarnation, 54 // This class is constructed, operated, and destroyed by its C++ incarnation,
54 // so the class and its methods have non-public visibility. The API this 55 // so the class and its methods have non-public visibility. The API this
55 // class exposes aims to mimic the webrtc::VideoDecoder API as closely as 56 // class exposes aims to mimic the webrtc::VideoDecoder API as closely as
56 // possibly to minimize the amount of translation work necessary. 57 // possibly to minimize the amount of translation work necessary.
(...skipping 32 matching lines...)
89 CodecCapabilities.COLOR_FormatYUV420Planar, 90 CodecCapabilities.COLOR_FormatYUV420Planar,
90 CodecCapabilities.COLOR_FormatYUV420SemiPlanar, 91 CodecCapabilities.COLOR_FormatYUV420SemiPlanar,
91 CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar, 92 CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar,
92 COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m); 93 COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m);
93 private int colorFormat; 94 private int colorFormat;
94 private int width; 95 private int width;
95 private int height; 96 private int height;
96 private int stride; 97 private int stride;
97 private int sliceHeight; 98 private int sliceHeight;
98 private boolean useSurface; 99 private boolean useSurface;
99 private int textureID = 0; 100 private TextureListener textureListener;
100 private SurfaceTexture surfaceTexture = null;
101 private Surface surface = null; 101 private Surface surface = null;
102 private EglBase eglBase;
103 102
104 private MediaCodecVideoDecoder() { 103 private final Queue<Long> decodeStartTimeMs = new LinkedList<Long>();
105 } 104 private final Queue<DecodedTextureBuffer>
105 dequeuedTextureBuffers = new LinkedList<DecodedTextureBuffer>();
106 106
107 // Helper struct for findVp8Decoder() below. 107 // Helper struct for findVp8Decoder() below.
108 private static class DecoderProperties { 108 private static class DecoderProperties {
109 public DecoderProperties(String codecName, int colorFormat) { 109 public DecoderProperties(String codecName, int colorFormat) {
110 this.codecName = codecName; 110 this.codecName = codecName;
111 this.colorFormat = colorFormat; 111 this.colorFormat = colorFormat;
112 } 112 }
113 public final String codecName; // OpenMax component name for VP8 codec. 113 public final String codecName; // OpenMax component name for VP8 codec.
114 public final int colorFormat; // Color format supported by codec. 114 public final int colorFormat; // Color format supported by codec.
115 } 115 }
(...skipping 73 matching lines...)
189 } 189 }
190 190
191 private void checkOnMediaCodecThread() throws IllegalStateException { 191 private void checkOnMediaCodecThread() throws IllegalStateException {
192 if (mediaCodecThread.getId() != Thread.currentThread().getId()) { 192 if (mediaCodecThread.getId() != Thread.currentThread().getId()) {
193 throw new IllegalStateException( 193 throw new IllegalStateException(
194 "MediaCodecVideoDecoder previously operated on " + mediaCodecThread + 194 "MediaCodecVideoDecoder previously operated on " + mediaCodecThread +
195 " but is now called on " + Thread.currentThread()); 195 " but is now called on " + Thread.currentThread());
196 } 196 }
197 } 197 }
198 198
199 // Pass null in |sharedContext| to configure the codec for ByteBuffer output. 199 // Pass null in |surfaceTextureHelper| to configure the codec for ByteBuffer output.
200 private boolean initDecode(VideoCodecType type, int width, int height, EGLContext sharedContext) { 200 private boolean initDecode(
201 VideoCodecType type, int width, int height, SurfaceTextureHelper surfaceTextureHelper) {
201 if (mediaCodecThread != null) { 202 if (mediaCodecThread != null) {
202 throw new RuntimeException("Forgot to release()?"); 203 throw new RuntimeException("Forgot to release()?");
203 } 204 }
204 useSurface = (sharedContext != null); 205 useSurface = (surfaceTextureHelper != null);
205 String mime = null; 206 String mime = null;
206 String[] supportedCodecPrefixes = null; 207 String[] supportedCodecPrefixes = null;
207 if (type == VideoCodecType.VIDEO_CODEC_VP8) { 208 if (type == VideoCodecType.VIDEO_CODEC_VP8) {
208 mime = VP8_MIME_TYPE; 209 mime = VP8_MIME_TYPE;
209 supportedCodecPrefixes = supportedVp8HwCodecPrefixes; 210 supportedCodecPrefixes = supportedVp8HwCodecPrefixes;
210 } else if (type == VideoCodecType.VIDEO_CODEC_H264) { 211 } else if (type == VideoCodecType.VIDEO_CODEC_H264) {
211 mime = H264_MIME_TYPE; 212 mime = H264_MIME_TYPE;
212 supportedCodecPrefixes = supportedH264HwCodecPrefixes; 213 supportedCodecPrefixes = supportedH264HwCodecPrefixes;
213 } else { 214 } else {
214 throw new RuntimeException("Non supported codec " + type); 215 throw new RuntimeException("Non supported codec " + type);
215 } 216 }
216 DecoderProperties properties = findDecoder(mime, supportedCodecPrefixes); 217 DecoderProperties properties = findDecoder(mime, supportedCodecPrefixes);
217 if (properties == null) { 218 if (properties == null) {
218 throw new RuntimeException("Cannot find HW decoder for " + type); 219 throw new RuntimeException("Cannot find HW decoder for " + type);
219 } 220 }
220 Logging.d(TAG, "Java initDecode: " + type + " : "+ width + " x " + height + 221 Logging.d(TAG, "Java initDecode: " + type + " : "+ width + " x " + height +
221 ". Color: 0x" + Integer.toHexString(properties.colorFormat) + 222 ". Color: 0x" + Integer.toHexString(properties.colorFormat) +
222 ". Use Surface: " + useSurface); 223 ". Use Surface: " + useSurface);
223 if (sharedContext != null) {
224 Logging.d(TAG, "Decoder shared EGL Context: " + sharedContext);
225 }
226 runningInstance = this; // Decoder is now running and can be queried for stack traces. 224 runningInstance = this; // Decoder is now running and can be queried for stack traces.
227 mediaCodecThread = Thread.currentThread(); 225 mediaCodecThread = Thread.currentThread();
228 try { 226 try {
229 this.width = width; 227 this.width = width;
230 this.height = height; 228 this.height = height;
231 stride = width; 229 stride = width;
232 sliceHeight = height; 230 sliceHeight = height;
233 231
234 if (useSurface) { 232 if (useSurface) {
235 // Create shared EGL context. 233 textureListener = new TextureListener(surfaceTextureHelper);
236 eglBase = new EglBase(sharedContext, EglBase.ConfigType.PIXEL_BUFFER); 234 surface = new Surface(surfaceTextureHelper.getSurfaceTexture());
237 eglBase.createDummyPbufferSurface();
238 eglBase.makeCurrent();
239
240 // Create output surface
241 textureID = GlUtil.generateTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
242 Logging.d(TAG, "Video decoder TextureID = " + textureID);
243 surfaceTexture = new SurfaceTexture(textureID);
244 surface = new Surface(surfaceTexture);
245 } 235 }
246 236
247 MediaFormat format = MediaFormat.createVideoFormat(mime, width, height); 237 MediaFormat format = MediaFormat.createVideoFormat(mime, width, height);
248 if (!useSurface) { 238 if (!useSurface) {
249 format.setInteger(MediaFormat.KEY_COLOR_FORMAT, properties.colorFormat); 239 format.setInteger(MediaFormat.KEY_COLOR_FORMAT, properties.colorFormat);
250 } 240 }
251 Logging.d(TAG, " Format: " + format); 241 Logging.d(TAG, " Format: " + format);
252 mediaCodec = 242 mediaCodec =
253 MediaCodecVideoEncoder.createByCodecName(properties.codecName); 243 MediaCodecVideoEncoder.createByCodecName(properties.codecName);
254 if (mediaCodec == null) { 244 if (mediaCodec == null) {
(...skipping 22 matching lines...)
277 mediaCodec.release(); 267 mediaCodec.release();
278 } catch (IllegalStateException e) { 268 } catch (IllegalStateException e) {
279 Logging.e(TAG, "release failed", e); 269 Logging.e(TAG, "release failed", e);
280 } 270 }
281 mediaCodec = null; 271 mediaCodec = null;
282 mediaCodecThread = null; 272 mediaCodecThread = null;
283 runningInstance = null; 273 runningInstance = null;
284 if (useSurface) { 274 if (useSurface) {
285 surface.release(); 275 surface.release();
286 surface = null; 276 surface = null;
287 Logging.d(TAG, "Delete video decoder TextureID " + textureID); 277 textureListener.release();
288 GLES20.glDeleteTextures(1, new int[] {textureID}, 0);
289 textureID = 0;
290 eglBase.release();
291 eglBase = null;
292 } 278 }
293 Logging.d(TAG, "Java releaseDecoder done"); 279 Logging.d(TAG, "Java releaseDecoder done");
294 } 280 }
295 281
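Note on the surface path that initDecode() sets up above and releaseDecoder() tears down: it follows the stock Android pattern of backing the codec's output Surface with a SurfaceTexture attached to an externally owned OES texture (owned here by SurfaceTextureHelper). A minimal generic sketch of that wiring, using plain Android API and a hypothetical helper method name, not code from this CL:

  // Assumes the usual android.media, android.graphics and android.view imports.
  static Surface attachDecoderSurface(MediaCodec codec, MediaFormat format, int oesTextureId) {
    // The SurfaceTexture streams decoded frames into the given OES texture; in this
    // patch that texture is owned by SurfaceTextureHelper rather than created here.
    SurfaceTexture surfaceTexture = new SurfaceTexture(oesTextureId);
    Surface decoderSurface = new Surface(surfaceTexture);
    // A non-null Surface selects surface output; passing null selects ByteBuffer
    // output, matching the |surfaceTextureHelper| == null case in initDecode().
    codec.configure(format, decoderSurface, null /* crypto */, 0 /* flags */);
    // Frame arrival is signalled via surfaceTexture.setOnFrameAvailableListener(...),
    // which SurfaceTextureHelper forwards as onTextureFrameAvailable() callbacks.
    return decoderSurface;
  }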
296 // Dequeue an input buffer and return its index, -1 if no input buffer is 282 // Dequeue an input buffer and return its index, -1 if no input buffer is
297 // available, or -2 if the codec is no longer operative. 283 // available, or -2 if the codec is no longer operative.
298 private int dequeueInputBuffer() { 284 private int dequeueInputBuffer() {
299 checkOnMediaCodecThread(); 285 checkOnMediaCodecThread();
300 try { 286 try {
301 return mediaCodec.dequeueInputBuffer(DEQUEUE_INPUT_TIMEOUT); 287 return mediaCodec.dequeueInputBuffer(DEQUEUE_INPUT_TIMEOUT);
302 } catch (IllegalStateException e) { 288 } catch (IllegalStateException e) {
303 Logging.e(TAG, "dequeueIntputBuffer failed", e); 289 Logging.e(TAG, "dequeueIntputBuffer failed", e);
304 return -2; 290 return -2;
305 } 291 }
306 } 292 }
307 293
308 private boolean queueInputBuffer( 294 private boolean queueInputBuffer(
309 int inputBufferIndex, int size, long timestampUs) { 295 int inputBufferIndex, int size, long timestampUs) {
310 checkOnMediaCodecThread(); 296 checkOnMediaCodecThread();
311 try { 297 try {
312 inputBuffers[inputBufferIndex].position(0); 298 inputBuffers[inputBufferIndex].position(0);
313 inputBuffers[inputBufferIndex].limit(size); 299 inputBuffers[inputBufferIndex].limit(size);
300 decodeStartTimeMs.add(SystemClock.elapsedRealtime());
301
314 mediaCodec.queueInputBuffer(inputBufferIndex, 0, size, timestampUs, 0); 302 mediaCodec.queueInputBuffer(inputBufferIndex, 0, size, timestampUs, 0);
315 return true; 303 return true;
316 } 304 }
317 catch (IllegalStateException e) { 305 catch (IllegalStateException e) {
318 Logging.e(TAG, "decode failed", e); 306 Logging.e(TAG, "decode failed", e);
319 return false; 307 return false;
320 } 308 }
321 } 309 }
322 310
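The decodeStartTimeMs queue introduced above gives a per-frame decode time: queueInputBuffer() pushes SystemClock.elapsedRealtime() just before handing the buffer to MediaCodec, and the dequeue path later polls the queue and subtracts. Because MediaCodec emits output buffers in submission order, a plain FIFO is enough. A standalone sketch of that pairing, with hypothetical names that are not part of the patch:

  import android.os.SystemClock;
  import java.util.LinkedList;
  import java.util.Queue;

  class DecodeTimer {
    private final Queue<Long> startTimesMs = new LinkedList<Long>();

    // Call just before MediaCodec.queueInputBuffer().
    void onInputQueued() {
      startTimesMs.add(SystemClock.elapsedRealtime());
    }

    // Call when the matching output buffer is dequeued; returns decode time in ms.
    long onOutputDequeued() {
      return SystemClock.elapsedRealtime() - startTimesMs.poll();
    }
  }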
323 // Helper structs for dequeueOutputBuffer() below. 311 // Helper struct for dequeueByteBuffer() below.
324 private static class DecodedByteBuffer { 312 private static class DecodedByteBuffer {
325 public DecodedByteBuffer(int index, int offset, int size, long presentationTimestampUs) { 313 public DecodedByteBuffer(int index, int offset, int size, long presentationTimestampUs) {
326 this.index = index; 314 this.index = index;
327 this.offset = offset; 315 this.offset = offset;
328 this.size = size; 316 this.size = size;
329 this.presentationTimestampUs = presentationTimestampUs; 317 this.presentationTimestampUs = presentationTimestampUs;
330 } 318 }
331 319
332 private final int index; 320 private final int index;
333 private final int offset; 321 private final int offset;
334 private final int size; 322 private final int size;
335 private final long presentationTimestampUs; 323 private final long presentationTimestampUs;
336 } 324 }
337 325
326 // Helper struct for dequeueTextureBuffer() below.
338 private static class DecodedTextureBuffer { 327 private static class DecodedTextureBuffer {
magjed_webrtc 2015/10/29 09:44:13 I think this class has become bloated with too muc
perkj_webrtc 2015/10/29 19:26:44 Acknowledged.
339 private final int textureID; 328 private final int bufferIndex;
340 private final long presentationTimestampUs; 329 private final long decodeTimeMs;
341 330 private final int width;
342 public DecodedTextureBuffer(int textureID, long presentationTimestampUs) { 331 private final int height;
332 private int textureID;
333 private float[] transformMatrix;
334 private long timestampNs;
335 private State state;
336 public enum State {
magjed_webrtc 2015/10/29 09:44:13 I don't think this State makes sense, because it's
perkj_webrtc 2015/10/29 19:26:45 Done.
337 DECODED,
338 RENDERING,
339 RENDERED,
340 }
341
342 public DecodedTextureBuffer(int bufferIndex, int width, int height, long decodeTimeMs) {
343 this.state = State.DECODED;
344 this.bufferIndex = bufferIndex;
345 this.width = width;
346 this.height = height;
347 this.decodeTimeMs = decodeTimeMs;
348 }
349
350 public void setTextureInfo(int textureID, float[] transformMatrix, long timestampNs) {
343 this.textureID = textureID; 351 this.textureID = textureID;
344 this.presentationTimestampUs = presentationTimestampUs; 352 this.transformMatrix = transformMatrix;
345 } 353 this.timestampNs = timestampNs;
346 } 354 this.state = State.RENDERED;
347 355 }
348 // Returns null if no decoded buffer is available, and otherwise either a DecodedByteBuffer or 356 }
349 // DecodedTexturebuffer depending on |useSurface| configuration. 357
358 // Poll based texture listener.
359 private static class TextureListener
360 implements SurfaceTextureHelper.OnTextureFrameAvailableListener {
361 public static class TextureInfo {
362 private final int textureID;
363 private final float[] transformMatrix;
364 private final long timestampNs;
365
366 TextureInfo(int textureId, float[] transformMatrix, long timestampNs) {
367 this.textureID = textureId;
368 this.transformMatrix = transformMatrix;
369 this.timestampNs = timestampNs;
370 }
371 }
372 private final SurfaceTextureHelper surfaceTextureHelper;
373 private TextureInfo textureProperties;
374 // |newFrameLock| is used to synchronize arrival of new frames with wait()/notifyAll().
375 private final Object newFrameLock = new Object();
376
377 public TextureListener(SurfaceTextureHelper surfaceTextureHelper) {
378 this.surfaceTextureHelper = surfaceTextureHelper;
379 surfaceTextureHelper.setListener(this);
380 }
381
382 // Callback from |surfaceTextureHelper|. May be called on an arbitrary thread.
383 @Override
384 public void onTextureFrameAvailable(
385 int oesTextureId, float[] transformMatrix, long timestampNs) {
386 synchronized (newFrameLock) {
387 if (textureProperties != null) {
388 Logging.e(TAG,
389 "Unexpected onTextureFrameAvailable() called while already holding a texture.");
390 throw new IllegalStateException("Already holding a texture.");
391 }
392 textureProperties = new TextureInfo(oesTextureId, transformMatrix, timestampNs);
393 newFrameLock.notifyAll();
394 }
395 }
396
397 // Dequeues and returns a TextureInfo if available, or null otherwise.
398 public TextureInfo dequeueTextureInfo(int timeoutMs) {
399 synchronized (newFrameLock) {
400 if (textureProperties == null && timeoutMs > 0) {
401 try {
402 newFrameLock.wait(timeoutMs);
403 } catch(InterruptedException e) {
404 // Restore the interrupted status by reinterrupting the thread.
405 Thread.currentThread().interrupt();
406 }
407 }
408 TextureInfo returnedInfo = textureProperties;
409 textureProperties = null;
410 return returnedInfo;
411 }
412 }
413
414 public void release() {
415 // SurfaceTextureHelper.disconnect() will block until any onTextureFrameAvailable() in
416 // progress is done. Therefore, the call to disconnect() must be outside any synchronized
417 // statement that is also used in the onTextureFrameAvailable() above to avoid deadlocks.
418 surfaceTextureHelper.disconnect();
419 synchronized (newFrameLock) {
420 if (textureProperties != null) {
421 surfaceTextureHelper.returnTextureFrame();
422 textureProperties = null;
423 }
424 }
425 }
426 }
427
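The TextureListener above turns the asynchronous onTextureFrameAvailable() callback, which SurfaceTextureHelper may fire on an arbitrary thread, into a blocking poll on the MediaCodec thread, holding at most one frame at a time. Stripped of the WebRTC types, the handoff is a classic single-slot wait()/notifyAll() exchange; a self-contained sketch with hypothetical names:

  class SingleSlotHandoff<T> {
    private final Object lock = new Object();
    private T pending;

    // Producer side (arbitrary thread): publish one item and wake any waiter.
    void publish(T item) {
      synchronized (lock) {
        if (pending != null) {
          throw new IllegalStateException("Already holding an item.");
        }
        pending = item;
        lock.notifyAll();
      }
    }

    // Consumer side (decoder thread): wait up to |timeoutMs| for an item, or return null.
    T poll(long timeoutMs) {
      synchronized (lock) {
        if (pending == null && timeoutMs > 0) {
          try {
            lock.wait(timeoutMs);
          } catch (InterruptedException e) {
            Thread.currentThread().interrupt();  // preserve the interrupt status
          }
        }
        T item = pending;
        pending = null;
        return item;
      }
    }
  }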
428 // Returns null if no decoded buffer is available, and otherwise a DecodedByteBuffer.
350 // Throws IllegalStateException if call is made on the wrong thread, if color format changes to an 429 // Throws IllegalStateException if call is made on the wrong thread, if color format changes to an
351 // unsupported format, or if |mediaCodec| is not in the Executing state. Throws CodecException 430 // unsupported format, or if |mediaCodec| is not in the Executing state. Throws CodecException
352 // upon codec error. 431 // upon codec error.
353 private Object dequeueOutputBuffer(int dequeueTimeoutUs) 432 private DecodedByteBuffer dequeueByteBuffer(int dequeueTimeoutMs)
354 throws IllegalStateException, MediaCodec.CodecException { 433 throws IllegalStateException, MediaCodec.CodecException {
355 checkOnMediaCodecThread(); 434 checkOnMediaCodecThread();
435 if (useSurface) {
436 throw new IllegalStateException("dequeueOutputBuffer() called for surface decoding.");
437 }
438
439 final MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
440 int result = dequeueOutputBuffer(info, dequeueTimeoutMs);
441 if (result < 0)
442 return null;
443 return new DecodedByteBuffer(result, info.offset, info.size, info.presentationTimeUs);
444 }
445
446 // Returns null if no decoded buffer is available, and otherwise a DecodedTextureBuffer.
447 // Throws IllegalStateException if call is made on the wrong thread, if color format changes to an
448 // unsupported format, or if |mediaCodec| is not in the Executing state. Throws CodecException
449 // upon codec error.
450 private DecodedTextureBuffer dequeueTextureBuffer(int dequeueTimeoutMs) {
magjed_webrtc 2015/10/29 09:44:13 This function has become too bloated and complicat
perkj_webrtc 2015/10/29 19:26:44 Done.
451 if (!useSurface) {
452 throw new IllegalStateException("dequeueTexture() called for byte buffer decoding.");
453 }
454
455 if (!decodeStartTimeMs.isEmpty()) {
456 final MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
457 final int result = dequeueOutputBuffer(info, dequeueTimeoutMs);
458 if (result >= 0) {
459 // Output buffer decoded.
460 dequeuedTextureBuffers.add(
461 new DecodedTextureBuffer(
462 result, width, height, SystemClock.elapsedRealtime() - decodeStartTimeMs.poll()));
463 }
464 }
465
466 if (dequeuedTextureBuffers.isEmpty())
467 return null;
468
469 DecodedTextureBuffer textureBuffer = dequeuedTextureBuffers.peek();
470
471 if (textureBuffer.state == DecodedTextureBuffer.State.DECODED) {
472 // releaseOutputBuffer renders to the output surface.
473 mediaCodec.releaseOutputBuffer(textureBuffer.bufferIndex, true /* render */);
474 textureBuffer.state = DecodedTextureBuffer.State.RENDERING;
475 }
476
477 if (textureBuffer.state == DecodedTextureBuffer.State.RENDERING) {
478 // If we are waiting for a frame to be rendered to the decoder surface,
479 // check if it is ready now by waiting max |dequeueTimeoutMs|. There can only be one frame
480 // rendered at the time.
481 TextureListener.TextureInfo info = textureListener.dequeueTextureInfo(dequeueTimeoutMs);
482 if (info != null) {
483 textureBuffer.setTextureInfo(info.textureID, info.transformMatrix, info. timestampNs);
484 textureBuffer.state = DecodedTextureBuffer.State.RENDERED;
485 }
486 }
487
488 if (textureBuffer.state == DecodedTextureBuffer.State.RENDERED) {
489 dequeuedTextureBuffers.remove();
490 if (!dequeuedTextureBuffers.isEmpty()) {
491 // If we are not waiting for a frame to be rendered, we can render the next decoder output
492 // buffer to the decoder surface to prepare for the next run.
493 DecodedTextureBuffer nextFrame = dequeuedTextureBuffers.peek();
494 mediaCodec.releaseOutputBuffer(nextFrame.bufferIndex, true /* render */);
495 nextFrame.state = DecodedTextureBuffer.State.RENDERING;
496 }
497 return textureBuffer;
498 }
499 return null;
500 }
501
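The queueing logic in dequeueTextureBuffer() above keeps at most one frame in flight on the decoder Surface and walks each output through DECODED, then RENDERING (releaseOutputBuffer with render=true has been called), then RENDERED (the SurfaceTexture has delivered the frame). A toy model of just that progression, detached from MediaCodec and using hypothetical names, may make the flow easier to follow:

  class RenderQueueModel {
    enum State { DECODED, RENDERING, RENDERED }

    static class Slot {
      final int bufferIndex;
      State state = State.DECODED;
      Slot(int bufferIndex) { this.bufferIndex = bufferIndex; }
    }

    private final java.util.Queue<Slot> slots = new java.util.LinkedList<Slot>();

    // A new output buffer was dequeued from the codec.
    void onDecoded(int bufferIndex) { slots.add(new Slot(bufferIndex)); }

    // One polling pass; |frameArrived| stands in for the TextureListener reporting a frame.
    Slot poll(boolean frameArrived) {
      Slot head = slots.peek();
      if (head == null) return null;
      if (head.state == State.DECODED) head.state = State.RENDERING;    // releaseOutputBuffer(render=true)
      if (head.state == State.RENDERING && frameArrived) head.state = State.RENDERED;
      if (head.state != State.RENDERED) return null;
      slots.remove();
      Slot next = slots.peek();
      if (next != null) next.state = State.RENDERING;                   // start rendering the next frame
      return head;
    }
  }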
502 // Release a dequeued output byte buffer back to the codec for re-use. Should only be called for
magjed_webrtc 2015/10/29 09:44:13 Move this back to the old place so it's easier to
503 // non-surface decoding.
504 // Throws IllegalStateException if the call is made on the wrong thread, if codec is configured
505 // for surface decoding, or if |mediaCodec| is not in the Executing state. Throws
506 // MediaCodec.CodecException upon codec error.
507 private void returnDecodedByteBuffer(int index)
508 throws IllegalStateException, MediaCodec.CodecException {
509 checkOnMediaCodecThread();
510 if (useSurface) {
511 throw new IllegalStateException("returnDecodedByteBuffer() called for surface decoding.");
512 }
513 mediaCodec.releaseOutputBuffer(index, false /* render */);
514 }
515
516 // Returns the index of |outputBuffers| that has just been decoded or
517 // MediaCodec.INFO_TRY_AGAIN_LATER if no output buffer has been filled.
518 // Throws IllegalStateException if color format changes to an
519 // unsupported format, or if |mediaCodec| is not in the Executing state. Throws CodecException
520 // upon codec error.
521 private int dequeueOutputBuffer(MediaCodec.BufferInfo info, int dequeueTimeoutMs) {
356 // Drain the decoder until receiving a decoded buffer or hitting 522 // Drain the decoder until receiving a decoded buffer or hitting
357 // MediaCodec.INFO_TRY_AGAIN_LATER. 523 // MediaCodec.INFO_TRY_AGAIN_LATER.
358 final MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
359 while (true) { 524 while (true) {
360 final int result = mediaCodec.dequeueOutputBuffer(info, dequeueTimeoutUs); 525 final int result = mediaCodec.dequeueOutputBuffer(
526 info, TimeUnit.MILLISECONDS.toMicros(dequeueTimeoutMs));
361 switch (result) { 527 switch (result) {
362 case MediaCodec.INFO_TRY_AGAIN_LATER:
363 return null;
364 case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED: 528 case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
365 outputBuffers = mediaCodec.getOutputBuffers(); 529 outputBuffers = mediaCodec.getOutputBuffers();
366 Logging.d(TAG, "Decoder output buffers changed: " + outputBuffers.length); 530 Logging.d(TAG, "Decoder output buffers changed: " + outputBuffers.length);
367 break; 531 break;
368 case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED: 532 case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
369 MediaFormat format = mediaCodec.getOutputFormat(); 533 MediaFormat format = mediaCodec.getOutputFormat();
370 Logging.d(TAG, "Decoder format changed: " + format.toString()); 534 Logging.d(TAG, "Decoder format changed: " + format.toString());
371 width = format.getInteger(MediaFormat.KEY_WIDTH); 535 width = format.getInteger(MediaFormat.KEY_WIDTH);
372 height = format.getInteger(MediaFormat.KEY_HEIGHT); 536 height = format.getInteger(MediaFormat.KEY_HEIGHT);
373 if (!useSurface && format.containsKey(MediaFormat.KEY_COLOR_FORMAT)) { 537 if (!useSurface && format.containsKey(MediaFormat.KEY_COLOR_FORMAT)) {
374 colorFormat = format.getInteger(MediaFormat.KEY_COLOR_FORMAT); 538 colorFormat = format.getInteger(MediaFormat.KEY_COLOR_FORMAT);
375 Logging.d(TAG, "Color: 0x" + Integer.toHexString(colorFormat)); 539 Logging.d(TAG, "Color: 0x" + Integer.toHexString(colorFormat));
376 if (!supportedColorList.contains(colorFormat)) { 540 if (!supportedColorList.contains(colorFormat)) {
377 throw new IllegalStateException("Non supported color format: " + colorFormat); 541 throw new IllegalStateException("Non supported color format: " + colorFormat);
378 } 542 }
379 } 543 }
380 if (format.containsKey("stride")) { 544 if (format.containsKey("stride")) {
381 stride = format.getInteger("stride"); 545 stride = format.getInteger("stride");
382 } 546 }
383 if (format.containsKey("slice-height")) { 547 if (format.containsKey("slice-height")) {
384 sliceHeight = format.getInteger("slice-height"); 548 sliceHeight = format.getInteger("slice-height");
385 } 549 }
386 Logging.d(TAG, "Frame stride and slice height: " + stride + " x " + sliceHeight); 550 Logging.d(TAG, "Frame stride and slice height: " + stride + " x " + sliceHeight);
387 stride = Math.max(width, stride); 551 stride = Math.max(width, stride);
388 sliceHeight = Math.max(height, sliceHeight); 552 sliceHeight = Math.max(height, sliceHeight);
389 break; 553 break;
554 case MediaCodec.INFO_TRY_AGAIN_LATER:
390 default: 555 default:
391 // Output buffer decoded. 556 return result;
magjed_webrtc 2015/10/29 09:44:13 I think you should add a |decodedTimeMs| to Decode
perkj_webrtc 2015/10/29 19:26:45 Done.
392 if (useSurface) {
393 mediaCodec.releaseOutputBuffer(result, true /* render */);
394 // TODO(magjed): Wait for SurfaceTexture.onFrameAvailable() before returning a texture
395 // frame.
396 return new DecodedTextureBuffer(textureID, info.presentationTimeUs);
397 } else {
398 return new DecodedByteBuffer(result, info.offset, info.size, info.presentationTimeUs);
399 }
400 } 557 }
401 } 558 }
402 } 559 }
403
404 // Release a dequeued output byte buffer back to the codec for re-use. Should only be called for
405 // non-surface decoding.
406 // Throws IllegalStateException if the call is made on the wrong thread, if codec is configured
407 // for surface decoding, or if |mediaCodec| is not in the Executing state. Throws
408 // MediaCodec.CodecException upon codec error.
409 private void returnDecodedByteBuffer(int index)
410 throws IllegalStateException, MediaCodec.CodecException {
411 checkOnMediaCodecThread();
412 if (useSurface) {
413 throw new IllegalStateException("returnDecodedByteBuffer() called for surface decoding.");
414 }
415 mediaCodec.releaseOutputBuffer(index, false /* render */);
416 }
417 } 560 }
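For readers less familiar with the Android API this class wraps: underneath the WebRTC-specific bookkeeping, the feed/drain cycle is the standard pre-API-21 MediaCodec loop (getInputBuffers()/getOutputBuffers(), as used by this class). A heavily simplified generic sketch, assuming the codec has already been configured (with or without an output Surface) and started; it is not code from this CL:

  // Generic MediaCodec usage; error handling and end-of-stream handling omitted.
  static void decodeOneFrame(MediaCodec codec, byte[] encodedFrame, long presentationTimeUs,
      boolean haveOutputSurface) {
    ByteBuffer[] inputBuffers = codec.getInputBuffers();

    // Feed one encoded frame.
    int inIndex = codec.dequeueInputBuffer(500000 /* timeout us */);
    if (inIndex >= 0) {
      inputBuffers[inIndex].clear();
      inputBuffers[inIndex].put(encodedFrame);
      codec.queueInputBuffer(inIndex, 0, encodedFrame.length, presentationTimeUs, 0);
    }

    // Drain one decoded frame.
    MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
    int outIndex = codec.dequeueOutputBuffer(info, 0 /* timeout us */);
    if (outIndex >= 0) {
      // render == true pushes the frame to the Surface passed to configure();
      // with ByteBuffer output, read from getOutputBuffers()[outIndex] and pass false.
      codec.releaseOutputBuffer(outIndex, haveOutputSurface);
    }
  }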