Chromium Code Reviews

Side by Side Diff: talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoDecoder.java

Issue 1422963003: Android MediaCodecVideoDecoder: Manage lifetime of texture frames (Closed) Base URL: https://chromium.googlesource.com/external/webrtc.git@master
Patch Set: Created 5 years, 1 month ago
1 /* 1 /*
2 * libjingle 2 * libjingle
3 * Copyright 2014 Google Inc. 3 * Copyright 2014 Google Inc.
4 * 4 *
5 * Redistribution and use in source and binary forms, with or without 5 * Redistribution and use in source and binary forms, with or without
6 * modification, are permitted provided that the following conditions are met: 6 * modification, are permitted provided that the following conditions are met:
7 * 7 *
8 * 1. Redistributions of source code must retain the above copyright notice, 8 * 1. Redistributions of source code must retain the above copyright notice,
9 * this list of conditions and the following disclaimer. 9 * this list of conditions and the following disclaimer.
10 * 2. Redistributions in binary form must reproduce the above copyright notice, 10 * 2. Redistributions in binary form must reproduce the above copyright notice,
11 * this list of conditions and the following disclaimer in the documentation 11 * this list of conditions and the following disclaimer in the documentation
12 * and/or other materials provided with the distribution. 12 * and/or other materials provided with the distribution.
13 * 3. The name of the author may not be used to endorse or promote products 13 * 3. The name of the author may not be used to endorse or promote products
14 * derived from this software without specific prior written permission. 14 * derived from this software without specific prior written permission.
15 * 15 *
16 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED 16 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
17 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF 17 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18 * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO 18 * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
19 * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 19 * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
20 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, 20 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
21 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; 21 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
22 * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, 22 * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
23 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR 23 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
24 * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF 24 * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
25 * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 25 * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
26 */ 26 */
27 27
28 package org.webrtc; 28 package org.webrtc;
29 29
30 import android.graphics.SurfaceTexture;
31 import android.media.MediaCodec; 30 import android.media.MediaCodec;
32 import android.media.MediaCodecInfo; 31 import android.media.MediaCodecInfo;
33 import android.media.MediaCodecInfo.CodecCapabilities; 32 import android.media.MediaCodecInfo.CodecCapabilities;
34 import android.media.MediaCodecList; 33 import android.media.MediaCodecList;
35 import android.media.MediaFormat; 34 import android.media.MediaFormat;
36 import android.opengl.EGLContext;
37 import android.opengl.GLES11Ext;
38 import android.opengl.GLES20;
39 import android.os.Build; 35 import android.os.Build;
36 import android.os.SystemClock;
40 import android.view.Surface; 37 import android.view.Surface;
41 38
42 import org.webrtc.Logging; 39 import org.webrtc.Logging;
43 40
44 import java.nio.ByteBuffer; 41 import java.nio.ByteBuffer;
42 import java.util.ArrayList;
45 import java.util.Arrays; 43 import java.util.Arrays;
46 import java.util.List; 44 import java.util.List;
45 import java.util.concurrent.TimeUnit;
47 46
48 // Java-side of peerconnection_jni.cc:MediaCodecVideoDecoder. 47 // Java-side of peerconnection_jni.cc:MediaCodecVideoDecoder.
49 // This class is an implementation detail of the Java PeerConnection API. 48 // This class is an implementation detail of the Java PeerConnection API.
50 // MediaCodec is thread-hostile so this class must be operated on a single 49 // MediaCodec is thread-hostile so this class must be operated on a single
51 // thread. 50 // thread.
52 public class MediaCodecVideoDecoder { 51 public class MediaCodecVideoDecoder {
53 // This class is constructed, operated, and destroyed by its C++ incarnation, 52 // This class is constructed, operated, and destroyed by its C++ incarnation,
54 // so the class and its methods have non-public visibility. The API this 53 // so the class and its methods have non-public visibility. The API this
55 // class exposes aims to mimic the webrtc::VideoDecoder API as closely as 54 // class exposes aims to mimic the webrtc::VideoDecoder API as closely as
 56 // possible to minimize the amount of translation work necessary. 55 // possible to minimize the amount of translation work necessary.
(...skipping 32 matching lines...)
89 CodecCapabilities.COLOR_FormatYUV420Planar, 88 CodecCapabilities.COLOR_FormatYUV420Planar,
90 CodecCapabilities.COLOR_FormatYUV420SemiPlanar, 89 CodecCapabilities.COLOR_FormatYUV420SemiPlanar,
91 CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar, 90 CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar,
92 COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m); 91 COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m);
93 private int colorFormat; 92 private int colorFormat;
94 private int width; 93 private int width;
95 private int height; 94 private int height;
96 private int stride; 95 private int stride;
97 private int sliceHeight; 96 private int sliceHeight;
98 private boolean useSurface; 97 private boolean useSurface;
 99 private int textureID = 0; 98 // |isWaitingForTextureToFinishRendering| is true when waiting for the transition:
100 private SurfaceTexture surfaceTexture = null; 99 // MediaCodec.releaseOutputBuffer() -> onTextureFrameAvailable().
100 private boolean isWaitingForTextureToFinishRendering = false;
101 private TextureListener textureListener;
101 private Surface surface = null; 102 private Surface surface = null;
102 private EglBase eglBase; 103
104 private final List<Long> decodeStartTime = new ArrayList<Long>();
magjed_webrtc 2015/10/28 11:57:16 These Lists should be queues. Also add ms suffix t
perkj_webrtc 2015/10/28 21:12:39 Done.
105 private final List<Long> decodeTime = new ArrayList<Long>();
106 private final List<Integer> dequeuedOutputBuffers = new ArrayList<Integer>();
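A minimal sketch of the queue-based bookkeeping magjed_webrtc suggests above, assuming ArrayDeque-backed queues and ms-suffixed field names (both are illustrative assumptions, not taken from a later patch set; requires java.util.ArrayDeque and java.util.Queue imports at the top of the file):

  // FIFO bookkeeping: a start time is added in queueInputBuffer() and removed
  // in the same order when the matching output buffer is dequeued.
  private final Queue<Long> decodeStartTimeMs = new ArrayDeque<Long>();
  private final Queue<Long> decodeTimeMs = new ArrayDeque<Long>();
  private final Queue<Integer> dequeuedOutputBuffers = new ArrayDeque<Integer>();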
103 107
104 private MediaCodecVideoDecoder() { 108 private MediaCodecVideoDecoder() {
105 } 109 }
106 110
107 // Helper struct for findVp8Decoder() below. 111 // Helper struct for findVp8Decoder() below.
108 private static class DecoderProperties { 112 private static class DecoderProperties {
109 public DecoderProperties(String codecName, int colorFormat) { 113 public DecoderProperties(String codecName, int colorFormat) {
110 this.codecName = codecName; 114 this.codecName = codecName;
111 this.colorFormat = colorFormat; 115 this.colorFormat = colorFormat;
112 } 116 }
(...skipping 76 matching lines...)
189 } 193 }
190 194
191 private void checkOnMediaCodecThread() throws IllegalStateException { 195 private void checkOnMediaCodecThread() throws IllegalStateException {
192 if (mediaCodecThread.getId() != Thread.currentThread().getId()) { 196 if (mediaCodecThread.getId() != Thread.currentThread().getId()) {
193 throw new IllegalStateException( 197 throw new IllegalStateException(
194 "MediaCodecVideoDecoder previously operated on " + mediaCodecThread + 198 "MediaCodecVideoDecoder previously operated on " + mediaCodecThread +
195 " but is now called on " + Thread.currentThread()); 199 " but is now called on " + Thread.currentThread());
196 } 200 }
197 } 201 }
198 202
 199 // Pass null in |sharedContext| to configure the codec for ByteBuffer output. 203 // Pass null in |surfaceTextureHelper| to configure the codec for ByteBuffer output.
 200 private boolean initDecode(VideoCodecType type, int width, int height, EGLContext sharedContext) { 204 private boolean initDecode(
 205 VideoCodecType type, int width, int height, SurfaceTextureHelper surfaceTextureHelper) {
201 if (mediaCodecThread != null) { 206 if (mediaCodecThread != null) {
202 throw new RuntimeException("Forgot to release()?"); 207 throw new RuntimeException("Forgot to release()?");
203 } 208 }
204 useSurface = (sharedContext != null); 209 useSurface = (surfaceTextureHelper != null);
205 String mime = null; 210 String mime = null;
206 String[] supportedCodecPrefixes = null; 211 String[] supportedCodecPrefixes = null;
207 if (type == VideoCodecType.VIDEO_CODEC_VP8) { 212 if (type == VideoCodecType.VIDEO_CODEC_VP8) {
208 mime = VP8_MIME_TYPE; 213 mime = VP8_MIME_TYPE;
209 supportedCodecPrefixes = supportedVp8HwCodecPrefixes; 214 supportedCodecPrefixes = supportedVp8HwCodecPrefixes;
210 } else if (type == VideoCodecType.VIDEO_CODEC_H264) { 215 } else if (type == VideoCodecType.VIDEO_CODEC_H264) {
211 mime = H264_MIME_TYPE; 216 mime = H264_MIME_TYPE;
212 supportedCodecPrefixes = supportedH264HwCodecPrefixes; 217 supportedCodecPrefixes = supportedH264HwCodecPrefixes;
213 } else { 218 } else {
214 throw new RuntimeException("Non supported codec " + type); 219 throw new RuntimeException("Non supported codec " + type);
215 } 220 }
216 DecoderProperties properties = findDecoder(mime, supportedCodecPrefixes); 221 DecoderProperties properties = findDecoder(mime, supportedCodecPrefixes);
217 if (properties == null) { 222 if (properties == null) {
218 throw new RuntimeException("Cannot find HW decoder for " + type); 223 throw new RuntimeException("Cannot find HW decoder for " + type);
219 } 224 }
220 Logging.d(TAG, "Java initDecode: " + type + " : "+ width + " x " + height + 225 Logging.d(TAG, "Java initDecode: " + type + " : "+ width + " x " + height +
221 ". Color: 0x" + Integer.toHexString(properties.colorFormat) + 226 ". Color: 0x" + Integer.toHexString(properties.colorFormat) +
222 ". Use Surface: " + useSurface); 227 ". Use Surface: " + useSurface);
223 if (sharedContext != null) {
224 Logging.d(TAG, "Decoder shared EGL Context: " + sharedContext);
225 }
 226 runningInstance = this; // Decoder is now running and can be queried for stack traces. 228 runningInstance = this; // Decoder is now running and can be queried for stack traces.
227 mediaCodecThread = Thread.currentThread(); 229 mediaCodecThread = Thread.currentThread();
228 try { 230 try {
229 this.width = width; 231 this.width = width;
230 this.height = height; 232 this.height = height;
231 stride = width; 233 stride = width;
232 sliceHeight = height; 234 sliceHeight = height;
233 235
234 if (useSurface) { 236 if (useSurface) {
235 // Create shared EGL context. 237 textureListener = new TextureListener(surfaceTextureHelper);
236 eglBase = new EglBase(sharedContext, EglBase.ConfigType.PIXEL_BUFFER); 238 surface = new Surface(surfaceTextureHelper.getSurfaceTexture());
237 eglBase.createDummyPbufferSurface();
238 eglBase.makeCurrent();
239
240 // Create output surface
241 textureID = GlUtil.generateTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
242 Logging.d(TAG, "Video decoder TextureID = " + textureID);
243 surfaceTexture = new SurfaceTexture(textureID);
244 surface = new Surface(surfaceTexture);
245 } 239 }
246 240
247 MediaFormat format = MediaFormat.createVideoFormat(mime, width, height); 241 MediaFormat format = MediaFormat.createVideoFormat(mime, width, height);
248 if (!useSurface) { 242 if (!useSurface) {
249 format.setInteger(MediaFormat.KEY_COLOR_FORMAT, properties.colorFormat); 243 format.setInteger(MediaFormat.KEY_COLOR_FORMAT, properties.colorFormat);
250 } 244 }
251 Logging.d(TAG, " Format: " + format); 245 Logging.d(TAG, " Format: " + format);
252 mediaCodec = 246 mediaCodec =
253 MediaCodecVideoEncoder.createByCodecName(properties.codecName); 247 MediaCodecVideoEncoder.createByCodecName(properties.codecName);
254 if (mediaCodec == null) { 248 if (mediaCodec == null) {
(...skipping 22 matching lines...)
277 mediaCodec.release(); 271 mediaCodec.release();
278 } catch (IllegalStateException e) { 272 } catch (IllegalStateException e) {
279 Logging.e(TAG, "release failed", e); 273 Logging.e(TAG, "release failed", e);
280 } 274 }
281 mediaCodec = null; 275 mediaCodec = null;
282 mediaCodecThread = null; 276 mediaCodecThread = null;
283 runningInstance = null; 277 runningInstance = null;
284 if (useSurface) { 278 if (useSurface) {
285 surface.release(); 279 surface.release();
286 surface = null; 280 surface = null;
287 Logging.d(TAG, "Delete video decoder TextureID " + textureID); 281 textureListener.release();
288 GLES20.glDeleteTextures(1, new int[] {textureID}, 0);
289 textureID = 0;
290 eglBase.release();
291 eglBase = null;
292 } 282 }
293 Logging.d(TAG, "Java releaseDecoder done"); 283 Logging.d(TAG, "Java releaseDecoder done");
294 } 284 }
295 285
296 // Dequeue an input buffer and return its index, -1 if no input buffer is 286 // Dequeue an input buffer and return its index, -1 if no input buffer is
297 // available, or -2 if the codec is no longer operative. 287 // available, or -2 if the codec is no longer operative.
298 private int dequeueInputBuffer() { 288 private int dequeueInputBuffer() {
299 checkOnMediaCodecThread(); 289 checkOnMediaCodecThread();
300 try { 290 try {
301 return mediaCodec.dequeueInputBuffer(DEQUEUE_INPUT_TIMEOUT); 291 return mediaCodec.dequeueInputBuffer(DEQUEUE_INPUT_TIMEOUT);
302 } catch (IllegalStateException e) { 292 } catch (IllegalStateException e) {
 303 Logging.e(TAG, "dequeueInputBuffer failed", e); 293 Logging.e(TAG, "dequeueInputBuffer failed", e);
304 return -2; 294 return -2;
305 } 295 }
306 } 296 }
307 297
308 private boolean queueInputBuffer( 298 private boolean queueInputBuffer(
309 int inputBufferIndex, int size, long timestampUs) { 299 int inputBufferIndex, int size, long timestampUs) {
310 checkOnMediaCodecThread(); 300 checkOnMediaCodecThread();
311 try { 301 try {
312 inputBuffers[inputBufferIndex].position(0); 302 inputBuffers[inputBufferIndex].position(0);
313 inputBuffers[inputBufferIndex].limit(size); 303 inputBuffers[inputBufferIndex].limit(size);
304 decodeStartTime.add(SystemClock.elapsedRealtime());
305
314 mediaCodec.queueInputBuffer(inputBufferIndex, 0, size, timestampUs, 0); 306 mediaCodec.queueInputBuffer(inputBufferIndex, 0, size, timestampUs, 0);
315 return true; 307 return true;
316 } 308 }
317 catch (IllegalStateException e) { 309 catch (IllegalStateException e) {
318 Logging.e(TAG, "decode failed", e); 310 Logging.e(TAG, "decode failed", e);
319 return false; 311 return false;
320 } 312 }
321 } 313 }
322 314
323 // Helper structs for dequeueOutputBuffer() below. 315 // Helper structs for dequeueOutputBuffer() below.
324 private static class DecodedByteBuffer { 316 private static class DecodedByteBuffer {
 325 public DecodedByteBuffer(int index, int offset, int size, long presentationTimestampUs) { 317 public DecodedByteBuffer(int index, int offset, int size, long presentationTimestampUs) {
326 this.index = index; 318 this.index = index;
327 this.offset = offset; 319 this.offset = offset;
328 this.size = size; 320 this.size = size;
329 this.presentationTimestampUs = presentationTimestampUs; 321 this.presentationTimestampUs = presentationTimestampUs;
330 } 322 }
331 323
332 private final int index; 324 private final int index;
333 private final int offset; 325 private final int offset;
334 private final int size; 326 private final int size;
335 private final long presentationTimestampUs; 327 private final long presentationTimestampUs;
336 } 328 }
337 329
338 private static class DecodedTextureBuffer { 330 private static class DecodedTextureBuffer {
339 private final int textureID; 331 private final int textureID;
340 private final long presentationTimestampUs; 332 private final float[] transformMatrix;
333 private final long timestampNs;
334 private long decodeTimeMs;
perkj_webrtc 2015/10/27 20:46:15 decodeTime will be needed in the byte buffer as we
magjed_webrtc 2015/10/28 11:57:16 Or use the decode latency calculated in jni as bef
perkj_webrtc 2015/10/28 21:12:39 Done.
341 335
 342 public DecodedTextureBuffer(int textureID, long presentationTimestampUs) { 336 public DecodedTextureBuffer(int textureID, float[] transformMatrix, long timestampNs) {
343 this.textureID = textureID; 337 this.textureID = textureID;
344 this.presentationTimestampUs = presentationTimestampUs; 338 this.transformMatrix = transformMatrix;
339 this.timestampNs = timestampNs;
345 } 340 }
346 } 341 }
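For the decode-time discussion in the comments above, a sketch of one possible shape: carry the decode time in the buffer via the constructor so the field stays final. Whether the later patch set does this, or instead keeps the JNI-side latency calculation magjed_webrtc mentions, is not shown here.

  // Illustrative variant only: an immutable texture buffer that carries its decode time.
  private static class DecodedTextureBuffer {
    private final int textureID;
    private final float[] transformMatrix;
    private final long timestampNs;
    private final long decodeTimeMs;

    public DecodedTextureBuffer(
        int textureID, float[] transformMatrix, long timestampNs, long decodeTimeMs) {
      this.textureID = textureID;
      this.transformMatrix = transformMatrix;
      this.timestampNs = timestampNs;
      this.decodeTimeMs = decodeTimeMs;
    }
  }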
347 342
343 // Poll based texture listener.
344 private static class TextureListener
345 implements SurfaceTextureHelper.OnTextureFrameAvailableListener {
346 private final SurfaceTextureHelper surfaceTextureHelper;
347 private DecodedTextureBuffer textureBuffer;
348 private long decodeTime;
perkj_webrtc 2015/10/27 20:46:15 remove, unused.
perkj_webrtc 2015/10/28 21:12:39 Done.
 349 // |newFrameLock| is used to synchronize arrival of new frames with wait()/notifyAll().
350 private final Object newFrameLock = new Object();
351
352 public TextureListener(SurfaceTextureHelper surfaceTextureHelper) {
353 this.surfaceTextureHelper = surfaceTextureHelper;
354 surfaceTextureHelper.setListener(this);
355 }
356
 357 // Callback from |surfaceTextureHelper|. May be called on an arbitrary thread.
358 @Override
359 public void onTextureFrameAvailable(
360 int oesTextureId, float[] transformMatrix, long timestampNs) {
361 synchronized (newFrameLock) {
362 if (textureBuffer != null) {
363 Logging.e(TAG,
364 "Unexpected onTextureFrameAvailable() called while already holding a texture.");
365 throw new IllegalStateException("Already holding a texture.");
366 }
367 textureBuffer = new DecodedTextureBuffer(oesTextureId, transformMatrix, timestampNs);
368 newFrameLock.notifyAll();
369 }
370 }
371
372 // Dequeues and returns a texture buffer if available, or null otherwise.
373 public DecodedTextureBuffer dequeueTextureFrame(int timeoutMs) {
374 synchronized (newFrameLock) {
375 if (textureBuffer == null && timeoutMs > 0) {
376 try {
377 newFrameLock.wait(timeoutMs);
378 } catch(InterruptedException e) {
379 // Restore the interrupted status by reinterrupting the thread.
380 Thread.currentThread().interrupt();
381 }
382 }
383 final DecodedTextureBuffer textureBuffer = this.textureBuffer;
384 this.textureBuffer = null;
385 return textureBuffer;
386 }
387 }
388
389 public void release() {
 390 // SurfaceTextureHelper.disconnect() will block until any onTextureFrameAvailable() in
 391 // progress is done. Therefore, the call to disconnect() must be outside any synchronized
 392 // statement that is also used in the onTextureFrameAvailable() above to avoid deadlocks.
393 surfaceTextureHelper.disconnect();
394 synchronized (newFrameLock) {
395 if (textureBuffer != null) {
396 surfaceTextureHelper.returnTextureFrame();
397 textureBuffer = null;
398 }
399 }
400 }
401 }
402
 348 // Returns null if no decoded buffer is available, and otherwise either a DecodedByteBuffer or 403 // Returns null if no decoded buffer is available, and otherwise either a DecodedByteBuffer or
 349 // DecodedTextureBuffer depending on |useSurface| configuration. 404 // DecodedTextureBuffer depending on |useSurface| configuration.
 350 // Throws IllegalStateException if call is made on the wrong thread, if color format changes to an 405 // Throws IllegalStateException if call is made on the wrong thread, if color format changes to an
 351 // unsupported format, or if |mediaCodec| is not in the Executing state. Throws CodecException 406 // unsupported format, or if |mediaCodec| is not in the Executing state. Throws CodecException
352 // upon codec error. 407 // upon codec error.
353 private Object dequeueOutputBuffer(int dequeueTimeoutUs) 408 private Object dequeueOutputBuffer(int dequeueTimeoutMs)
perkj_webrtc 2015/10/27 20:46:15 I think this should be completely separate from th
perkj_webrtc 2015/10/28 21:12:39 Done.
354 throws IllegalStateException, MediaCodec.CodecException { 409 throws IllegalStateException, MediaCodec.CodecException {
355 checkOnMediaCodecThread(); 410 checkOnMediaCodecThread();
411
412 if (useSurface)
413 return dequeueTexture(dequeueTimeoutMs);
perkj_webrtc 2015/10/27 20:46:15 remove and use dequeueTexture from c++
perkj_webrtc 2015/10/28 21:12:39 Done.
414
356 // Drain the decoder until receiving a decoded buffer or hitting 415 // Drain the decoder until receiving a decoded buffer or hitting
357 // MediaCodec.INFO_TRY_AGAIN_LATER. 416 // MediaCodec.INFO_TRY_AGAIN_LATER.
358 final MediaCodec.BufferInfo info = new MediaCodec.BufferInfo(); 417 final MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
359 while (true) { 418 while (true) {
360 final int result = mediaCodec.dequeueOutputBuffer(info, dequeueTimeoutUs); 419 final int result = mediaCodec.dequeueOutputBuffer(
420 info, TimeUnit.MILLISECONDS.toMicros(dequeueTimeoutMs));
361 switch (result) { 421 switch (result) {
362 case MediaCodec.INFO_TRY_AGAIN_LATER: 422 case MediaCodec.INFO_TRY_AGAIN_LATER:
363 return null; 423 return null;
364 case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED: 424 case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
365 outputBuffers = mediaCodec.getOutputBuffers(); 425 outputBuffers = mediaCodec.getOutputBuffers();
 366 Logging.d(TAG, "Decoder output buffers changed: " + outputBuffers.length); 426 Logging.d(TAG, "Decoder output buffers changed: " + outputBuffers.length);
367 break; 427 break;
368 case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED: 428 case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
369 MediaFormat format = mediaCodec.getOutputFormat(); 429 MediaFormat format = mediaCodec.getOutputFormat();
370 Logging.d(TAG, "Decoder format changed: " + format.toString()); 430 Logging.d(TAG, "Decoder format changed: " + format.toString());
(...skipping 10 matching lines...)
381 stride = format.getInteger("stride"); 441 stride = format.getInteger("stride");
382 } 442 }
383 if (format.containsKey("slice-height")) { 443 if (format.containsKey("slice-height")) {
384 sliceHeight = format.getInteger("slice-height"); 444 sliceHeight = format.getInteger("slice-height");
385 } 445 }
 386 Logging.d(TAG, "Frame stride and slice height: " + stride + " x " + sliceHeight); 446 Logging.d(TAG, "Frame stride and slice height: " + stride + " x " + sliceHeight);
387 stride = Math.max(width, stride); 447 stride = Math.max(width, stride);
388 sliceHeight = Math.max(height, sliceHeight); 448 sliceHeight = Math.max(height, sliceHeight);
389 break; 449 break;
390 default: 450 default:
 391 // Output buffer decoded. 451 return new DecodedByteBuffer(result, info.offset, info.size, info.presentationTimeUs);
392 if (useSurface) {
393 mediaCodec.releaseOutputBuffer(result, true /* render */);
 394 // TODO(magjed): Wait for SurfaceTexture.onFrameAvailable() before returning a texture
395 // frame.
396 return new DecodedTextureBuffer(textureID, info.presentationTimeUs);
397 } else {
 398 return new DecodedByteBuffer(result, info.offset, info.size, info.presentationTimeUs);
399 }
400 } 452 }
401 } 453 }
402 } 454 }
403 455
456 private Object dequeueTexture(int dequeueTimeoutMs) {
457 if (!useSurface) {
 458 throw new IllegalStateException("dequeueTexture() called for byte buffer decoding.");
459 }
460
461 if (decodeStartTime.size() > 0) {
magjed_webrtc 2015/10/28 11:57:16 s/decodeStartTime.size() > 0/!decodeStartTime.isEm
perkj_webrtc 2015/10/28 21:12:39 Done.
462 // Drain the decoder until receiving a decoded buffer or hitting
463 // MediaCodec.INFO_TRY_AGAIN_LATER.
464 final MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
465
466 int result = MediaCodec.INFO_TRY_AGAIN_LATER;
467 do {
perkj_webrtc 2015/10/27 20:46:15 Move all of do {} into a separate method and use i
perkj_webrtc 2015/10/28 21:12:39 Done.
468 result = mediaCodec.dequeueOutputBuffer(
469 info, TimeUnit.MILLISECONDS.toMicros(dequeueTimeoutMs));
470 switch (result) {
471 case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
472 outputBuffers = mediaCodec.getOutputBuffers();
magjed_webrtc 2015/10/28 11:57:16 You need to update |dequeuedOutputBuffers| and |de
perkj_webrtc 2015/10/28 21:12:39 I hope I don't have to. This variable is actually
magjed_webrtc 2015/10/29 09:44:12 If INFO_OUTPUT_BUFFERS_CHANGED should never happen
 473 Logging.d(TAG, "Decoder output buffers changed: " + outputBuffers.length);
474 break;
475 case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
476 MediaFormat format = mediaCodec.getOutputFormat();
477 Logging.d(TAG, "Decoder format changed: " + format.toString());
478 width = format.getInteger(MediaFormat.KEY_WIDTH);
perkj_webrtc 2015/10/28 21:12:39 width and height need to be stored with the frames
479 height = format.getInteger(MediaFormat.KEY_HEIGHT);
480 if (format.containsKey("stride")) {
481 stride = format.getInteger("stride");
482 }
483 if (format.containsKey("slice-height")) {
484 sliceHeight = format.getInteger("slice-height");
485 }
 486 Logging.d(TAG, "Frame stride and slice height: " + stride + " x " + sliceHeight);
487 stride = Math.max(width, stride);
488 sliceHeight = Math.max(height, sliceHeight);
489
490 break;
491 default:
492 break;
493 }
494 } while (result < 0 && result != MediaCodec.INFO_TRY_AGAIN_LATER);
495
496
497 if (result >= 0) {
498 // Output buffer decoded.
499 decodeTime.add(SystemClock.elapsedRealtime() - decodeStartTime.get(0));
500 decodeStartTime.remove(0);
501 dequeuedOutputBuffers.add(result);
502 }
503 }
504
505 DecodedTextureBuffer textureBuffer = null;
magjed_webrtc 2015/10/28 11:57:16 You can write the remaining code simpler like this
perkj_webrtc 2015/10/28 21:12:39 Yes, but the idea was that if isWaitingForTextureT
magjed_webrtc 2015/10/29 09:44:12 I think you should trust the C++ class to poll fre
506 if (isWaitingForTextureToFinishRendering) {
507 // If we are waiting for a frame to be rendered to the decoder surface,
 508 // check if it is ready now by waiting at most |dequeueTimeoutMs|. There can only be one frame
 509 // rendered at a time.
510 textureBuffer = textureListener.dequeueTextureFrame(dequeueTimeoutMs);
511 isWaitingForTextureToFinishRendering = (textureBuffer == null);
512 }
513
514 if (!isWaitingForTextureToFinishRendering) {
515 // If we are not waiting for a frame to be rendered, we can render the nex t decoder output
516 // buffer to the decoder surface and wait for it |dequeueTimeoutMs|.
517 if (dequeuedOutputBuffers.size() > 0) {
518 int bufferIndex = dequeuedOutputBuffers.get(0);
519 dequeuedOutputBuffers.remove(0);
520 // releaseOutputBuffer renders to the output surface.
521 mediaCodec.releaseOutputBuffer(bufferIndex, true /* render */);
522 isWaitingForTextureToFinishRendering = true;
523
524 if (textureBuffer == null) {
525 // Wait max |dequeueTimeoutMs| for the rendering to finish.
526 textureBuffer = textureListener.dequeueTextureFrame(dequeueTimeoutMs);
527 isWaitingForTextureToFinishRendering = (textureBuffer == null);
528 }
529 }
530 }
531
532 if (textureBuffer != null) {
533 textureBuffer.decodeTimeMs = decodeTime.get(0);
534 decodeTime.remove(0);
535 }
536
 537 // Logging.d(TAG, " pending decoding: " + decodeStartTime.size() + " pending rendering: " + dequeuedOutputBuffers.size()
 538 //     + " pending for release: " + decodeTime.size() + " isWaitingForTexture: " + (isWaitingForTextureToFinishRendering ? "True" : "False") + " textureBuffer: " + (textureBuffer != null ? "Set" : "Null"));
539
540 return textureBuffer;
541 }
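A sketch of the refactoring perkj_webrtc mentions in the comments above: pulling the do {} drain loop shared by dequeueOutputBuffer() and dequeueTexture() into one helper. The helper name is an assumption for illustration, not necessarily the name used in a later patch set.

  // Drains INFO_OUTPUT_BUFFERS_CHANGED / INFO_OUTPUT_FORMAT_CHANGED events and
  // returns either a buffer index (>= 0) or MediaCodec.INFO_TRY_AGAIN_LATER.
  private int dequeueOutputBufferIndex(MediaCodec.BufferInfo info, int dequeueTimeoutMs) {
    int result;
    do {
      result = mediaCodec.dequeueOutputBuffer(
          info, TimeUnit.MILLISECONDS.toMicros(dequeueTimeoutMs));
      switch (result) {
        case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
          outputBuffers = mediaCodec.getOutputBuffers();
          Logging.d(TAG, "Decoder output buffers changed: " + outputBuffers.length);
          break;
        case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
          // Same stride / slice-height bookkeeping as in the loops above.
          MediaFormat format = mediaCodec.getOutputFormat();
          Logging.d(TAG, "Decoder format changed: " + format.toString());
          width = format.getInteger(MediaFormat.KEY_WIDTH);
          height = format.getInteger(MediaFormat.KEY_HEIGHT);
          if (format.containsKey("stride")) {
            stride = format.getInteger("stride");
          }
          if (format.containsKey("slice-height")) {
            sliceHeight = format.getInteger("slice-height");
          }
          stride = Math.max(width, stride);
          sliceHeight = Math.max(height, sliceHeight);
          break;
        default:
          break;
      }
    } while (result < 0 && result != MediaCodec.INFO_TRY_AGAIN_LATER);
    return result;
  }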
542
404 // Release a dequeued output byte buffer back to the codec for re-use. Should only be called for 543 // Release a dequeued output byte buffer back to the codec for re-use. Should only be called for
405 // non-surface decoding. 544 // non-surface decoding.
 406 // Throws IllegalStateException if the call is made on the wrong thread, if codec is configured 545 // Throws IllegalStateException if the call is made on the wrong thread, if codec is configured
 407 // for surface decoding, or if |mediaCodec| is not in the Executing state. Throws 546 // for surface decoding, or if |mediaCodec| is not in the Executing state. Throws
408 // MediaCodec.CodecException upon codec error. 547 // MediaCodec.CodecException upon codec error.
409 private void returnDecodedByteBuffer(int index) 548 private void returnDecodedByteBuffer(int index)
410 throws IllegalStateException, MediaCodec.CodecException { 549 throws IllegalStateException, MediaCodec.CodecException {
411 checkOnMediaCodecThread(); 550 checkOnMediaCodecThread();
412 if (useSurface) { 551 if (useSurface) {
 413 throw new IllegalStateException("returnDecodedByteBuffer() called for surface decoding."); 552 throw new IllegalStateException("returnDecodedByteBuffer() called for surface decoding.");
414 } 553 }
415 mediaCodec.releaseOutputBuffer(index, false /* render */); 554 mediaCodec.releaseOutputBuffer(index, false /* render */);
416 } 555 }
417 } 556 }