Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 /* | 1 /* |
| 2 * Copyright 2014 The WebRTC project authors. All Rights Reserved. | 2 * Copyright 2014 The WebRTC project authors. All Rights Reserved. |
| 3 * | 3 * |
| 4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
| 5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
| 6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
| 7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
| 8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
| 9 */ | 9 */ |
| 10 | 10 |
| (...skipping 24 matching lines...) | |
| 35 public class MediaCodecVideoDecoder { | 35 public class MediaCodecVideoDecoder { |
| 36 // This class is constructed, operated, and destroyed by its C++ incarnation, | 36 // This class is constructed, operated, and destroyed by its C++ incarnation, |
| 37 // so the class and its methods have non-public visibility. The API this | 37 // so the class and its methods have non-public visibility. The API this |
| 38 // class exposes aims to mimic the webrtc::VideoDecoder API as closely as | 38 // class exposes aims to mimic the webrtc::VideoDecoder API as closely as |
| 39 // possibly to minimize the amount of translation work necessary. | 39 // possibly to minimize the amount of translation work necessary. |
| 40 | 40 |
| 41 private static final String TAG = "MediaCodecVideoDecoder"; | 41 private static final String TAG = "MediaCodecVideoDecoder"; |
| 42 private static final long MAX_DECODE_TIME_MS = 200; | 42 private static final long MAX_DECODE_TIME_MS = 200; |
| 43 | 43 |
| 44 // Tracks webrtc::VideoCodecType. | 44 // Tracks webrtc::VideoCodecType. |
| 45 public enum VideoCodecType { | 45 public enum VideoCodecType { VIDEO_CODEC_VP8, VIDEO_CODEC_VP9, VIDEO_CODEC_H264 } |
| 46 VIDEO_CODEC_VP8, | |
| 47 VIDEO_CODEC_VP9, | |
| 48 VIDEO_CODEC_H264 | |
| 49 } | |
| 50 | 46 |
| 51 // Timeout for input buffer dequeue. | 47 // Timeout for input buffer dequeue. |
| 52 private static final int DEQUEUE_INPUT_TIMEOUT = 500000; | 48 private static final int DEQUEUE_INPUT_TIMEOUT = 500000; |
| 53 // Timeout for codec releasing. | 49 // Timeout for codec releasing. |
| 54 private static final int MEDIA_CODEC_RELEASE_TIMEOUT_MS = 5000; | 50 private static final int MEDIA_CODEC_RELEASE_TIMEOUT_MS = 5000; |
| 55 // Max number of output buffers queued before starting to drop decoded frames. | 51 // Max number of output buffers queued before starting to drop decoded frames. |
| 56 private static final int MAX_QUEUED_OUTPUTBUFFERS = 3; | 52 private static final int MAX_QUEUED_OUTPUTBUFFERS = 3; |
| 57 // Active running decoder instance. Set in initDecode() (called from native code) | 53 // Active running decoder instance. Set in initDecode() (called from native code) |
| 58 // and reset to null in release() call. | 54 // and reset to null in release() call. |
| 59 private static MediaCodecVideoDecoder runningInstance = null; | 55 private static MediaCodecVideoDecoder runningInstance = null; |
| 60 private static MediaCodecVideoDecoderErrorCallback errorCallback = null; | 56 private static MediaCodecVideoDecoderErrorCallback errorCallback = null; |
| 61 private static int codecErrors = 0; | 57 private static int codecErrors = 0; |
| 62 // List of disabled codec types - can be set from application. | 58 // List of disabled codec types - can be set from application. |
| 63 private static Set<String> hwDecoderDisabledTypes = new HashSet<String>(); | 59 private static Set<String> hwDecoderDisabledTypes = new HashSet<String>(); |
| 64 | 60 |
| 65 private Thread mediaCodecThread; | 61 private Thread mediaCodecThread; |
| 66 private MediaCodec mediaCodec; | 62 private MediaCodec mediaCodec; |
| 67 private ByteBuffer[] inputBuffers; | 63 private ByteBuffer[] inputBuffers; |
| 68 private ByteBuffer[] outputBuffers; | 64 private ByteBuffer[] outputBuffers; |
| 69 private static final String VP8_MIME_TYPE = "video/x-vnd.on2.vp8"; | 65 private static final String VP8_MIME_TYPE = "video/x-vnd.on2.vp8"; |
| 70 private static final String VP9_MIME_TYPE = "video/x-vnd.on2.vp9"; | 66 private static final String VP9_MIME_TYPE = "video/x-vnd.on2.vp9"; |
| 71 private static final String H264_MIME_TYPE = "video/avc"; | 67 private static final String H264_MIME_TYPE = "video/avc"; |
| 72 // List of supported HW VP8 decoders. | 68 // List of supported HW VP8 decoders. |
| 73 private static final String[] supportedVp8HwCodecPrefixes = | 69 private static final String[] supportedVp8HwCodecPrefixes = { |
| 74 {"OMX.qcom.", "OMX.Nvidia.", "OMX.Exynos.", "OMX.Intel." }; | 70 "OMX.qcom.", "OMX.Nvidia.", "OMX.Exynos.", "OMX.Intel."}; |
| 75 // List of supported HW VP9 decoders. | 71 // List of supported HW VP9 decoders. |
| 76 private static final String[] supportedVp9HwCodecPrefixes = | 72 private static final String[] supportedVp9HwCodecPrefixes = {"OMX.qcom.", "OMX.Exynos."}; |
| 77 {"OMX.qcom.", "OMX.Exynos." }; | |
| 78 // List of supported HW H.264 decoders. | 73 // List of supported HW H.264 decoders. |
| 79 private static final String[] supportedH264HwCodecPrefixes = | 74 private static final String[] supportedH264HwCodecPrefixes = { |
| 80 {"OMX.qcom.", "OMX.Intel.", "OMX.Exynos." }; | 75 "OMX.qcom.", "OMX.Intel.", "OMX.Exynos."}; |
| 81 | 76 |
| 82 // NV12 color format supported by QCOM codec, but not declared in MediaCodec - | 77 // NV12 color format supported by QCOM codec, but not declared in MediaCodec - |
| 83 // see /hardware/qcom/media/mm-core/inc/OMX_QCOMExtns.h | 78 // see /hardware/qcom/media/mm-core/inc/OMX_QCOMExtns.h |
| 84 private static final int COLOR_QCOM_FORMATYVU420PackedSemiPlanar32m4ka = 0x7FA30C01; | 79 private static final int COLOR_QCOM_FORMATYVU420PackedSemiPlanar32m4ka = 0x7FA30C01; |
| 85 private static final int COLOR_QCOM_FORMATYVU420PackedSemiPlanar16m4ka = 0x7FA30C02; | 80 private static final int COLOR_QCOM_FORMATYVU420PackedSemiPlanar16m4ka = 0x7FA30C02; |
| 86 private static final int COLOR_QCOM_FORMATYVU420PackedSemiPlanar64x32Tile2m8ka = 0x7FA30C03; | 81 private static final int COLOR_QCOM_FORMATYVU420PackedSemiPlanar64x32Tile2m8ka = 0x7FA30C03; |
| 87 private static final int COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m = 0x7FA30C04; | 82 private static final int COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m = 0x7FA30C04; |
| 88 // Allowable color formats supported by codec - in order of preference. | 83 // Allowable color formats supported by codec - in order of preference. |
| 89 private static final List<Integer> supportedColorList = Arrays.asList( | 84 private static final List<Integer> supportedColorList = Arrays.asList( |
| 90 CodecCapabilities.COLOR_FormatYUV420Planar, | 85 CodecCapabilities.COLOR_FormatYUV420Planar, CodecCapabilities.COLOR_FormatYUV420SemiPlanar, |

magjed_webrtc 2016/09/28 13:45:04
You should probably revert this change as well.

sakal 2016/09/28 15:05:13
I kept it.

| 91 CodecCapabilities.COLOR_FormatYUV420SemiPlanar, | 86 CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar, |
| 92 CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar, | 87 COLOR_QCOM_FORMATYVU420PackedSemiPlanar32m4ka, COLOR_QCOM_FORMATYVU420PackedSemiPlanar16m4ka, |
| 93 COLOR_QCOM_FORMATYVU420PackedSemiPlanar32m4ka, | 88 COLOR_QCOM_FORMATYVU420PackedSemiPlanar64x32Tile2m8ka, |
| 94 COLOR_QCOM_FORMATYVU420PackedSemiPlanar16m4ka, | 89 COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m); |
| 95 COLOR_QCOM_FORMATYVU420PackedSemiPlanar64x32Tile2m8ka, | |
| 96 COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m); | |
| 97 | 90 |
| 98 private int colorFormat; | 91 private int colorFormat; |
| 99 private int width; | 92 private int width; |
| 100 private int height; | 93 private int height; |
| 101 private int stride; | 94 private int stride; |
| 102 private int sliceHeight; | 95 private int sliceHeight; |
| 103 private boolean hasDecodedFirstFrame; | 96 private boolean hasDecodedFirstFrame; |
| 104 private final Queue<TimeStamps> decodeStartTimeMs = new LinkedList<TimeStamps>(); | 97 private final Queue<TimeStamps> decodeStartTimeMs = new LinkedList<TimeStamps>(); |
| 105 private boolean useSurface; | 98 private boolean useSurface; |
| 106 | 99 |
| 107 // The below variables are only used when decoding to a Surface. | 100 // The below variables are only used when decoding to a Surface. |
| 108 private TextureListener textureListener; | 101 private TextureListener textureListener; |
| 109 private int droppedFrames; | 102 private int droppedFrames; |
| 110 private Surface surface = null; | 103 private Surface surface = null; |
| 111 private final Queue<DecodedOutputBuffer> | 104 private final Queue<DecodedOutputBuffer> dequeuedSurfaceOutputBuffers = |
| 112 dequeuedSurfaceOutputBuffers = new LinkedList<DecodedOutputBuffer>(); | 105 new LinkedList<DecodedOutputBuffer>(); |
| 113 | 106 |
| 114 // MediaCodec error handler - invoked when critical error happens which may prevent | 107 // MediaCodec error handler - invoked when critical error happens which may prevent |
| 115 // further use of media codec API. Now it means that one of media codec instances | 108 // further use of media codec API. Now it means that one of media codec instances |
| 116 // is hanging and can no longer be used in the next call. | 109 // is hanging and can no longer be used in the next call. |
| 117 public static interface MediaCodecVideoDecoderErrorCallback { | 110 public static interface MediaCodecVideoDecoderErrorCallback { |
| 118 void onMediaCodecVideoDecoderCriticalError(int codecErrors); | 111 void onMediaCodecVideoDecoderCriticalError(int codecErrors); |
| 119 } | 112 } |
| 120 | 113 |
| 121 public static void setErrorCallback(MediaCodecVideoDecoderErrorCallback errorCallback) { | 114 public static void setErrorCallback(MediaCodecVideoDecoderErrorCallback errorCallback) { |
| 122 Logging.d(TAG, "Set error callback"); | 115 Logging.d(TAG, "Set error callback"); |
| (...skipping 12 matching lines...) | |
| 135 hwDecoderDisabledTypes.add(VP9_MIME_TYPE); | 128 hwDecoderDisabledTypes.add(VP9_MIME_TYPE); |
| 136 } | 129 } |
| 137 | 130 |
| 138 public static void disableH264HwCodec() { | 131 public static void disableH264HwCodec() { |
| 139 Logging.w(TAG, "H.264 decoding is disabled by application."); | 132 Logging.w(TAG, "H.264 decoding is disabled by application."); |
| 140 hwDecoderDisabledTypes.add(H264_MIME_TYPE); | 133 hwDecoderDisabledTypes.add(H264_MIME_TYPE); |
| 141 } | 134 } |
| 142 | 135 |
| 143 // Functions to query if HW decoding is supported. | 136 // Functions to query if HW decoding is supported. |
| 144 public static boolean isVp8HwSupported() { | 137 public static boolean isVp8HwSupported() { |
| 145 return !hwDecoderDisabledTypes.contains(VP8_MIME_TYPE) && | 138 return !hwDecoderDisabledTypes.contains(VP8_MIME_TYPE) |
| 146 (findDecoder(VP8_MIME_TYPE, supportedVp8HwCodecPrefixes) != null); | 139 && (findDecoder(VP8_MIME_TYPE, supportedVp8HwCodecPrefixes) != null); |
| 147 } | 140 } |
| 148 | 141 |
| 149 public static boolean isVp9HwSupported() { | 142 public static boolean isVp9HwSupported() { |
| 150 return !hwDecoderDisabledTypes.contains(VP9_MIME_TYPE) && | 143 return !hwDecoderDisabledTypes.contains(VP9_MIME_TYPE) |
| 151 (findDecoder(VP9_MIME_TYPE, supportedVp9HwCodecPrefixes) != null); | 144 && (findDecoder(VP9_MIME_TYPE, supportedVp9HwCodecPrefixes) != null); |
| 152 } | 145 } |
| 153 | 146 |
| 154 public static boolean isH264HwSupported() { | 147 public static boolean isH264HwSupported() { |
| 155 return !hwDecoderDisabledTypes.contains(H264_MIME_TYPE) && | 148 return !hwDecoderDisabledTypes.contains(H264_MIME_TYPE) |
| 156 (findDecoder(H264_MIME_TYPE, supportedH264HwCodecPrefixes) != null); | 149 && (findDecoder(H264_MIME_TYPE, supportedH264HwCodecPrefixes) != null); |
| 157 } | 150 } |
| 158 | 151 |
| 159 public static void printStackTrace() { | 152 public static void printStackTrace() { |
| 160 if (runningInstance != null && runningInstance.mediaCodecThread != null) { | 153 if (runningInstance != null && runningInstance.mediaCodecThread != null) { |
| 161 StackTraceElement[] mediaCodecStackTraces = runningInstance.mediaCodecThread.getStackTrace(); | 154 StackTraceElement[] mediaCodecStackTraces = runningInstance.mediaCodecThread.getStackTrace(); |
| 162 if (mediaCodecStackTraces.length > 0) { | 155 if (mediaCodecStackTraces.length > 0) { |
| 163 Logging.d(TAG, "MediaCodecVideoDecoder stacks trace:"); | 156 Logging.d(TAG, "MediaCodecVideoDecoder stacks trace:"); |
| 164 for (StackTraceElement stackTrace : mediaCodecStackTraces) { | 157 for (StackTraceElement stackTrace : mediaCodecStackTraces) { |
| 165 Logging.d(TAG, stackTrace.toString()); | 158 Logging.d(TAG, stackTrace.toString()); |
| 166 } | 159 } |
| 167 } | 160 } |
| 168 } | 161 } |
| 169 } | 162 } |
| 170 | 163 |
| 171 // Helper struct for findDecoder() below. | 164 // Helper struct for findDecoder() below. |
| 172 private static class DecoderProperties { | 165 private static class DecoderProperties { |
| 173 public DecoderProperties(String codecName, int colorFormat) { | 166 public DecoderProperties(String codecName, int colorFormat) { |
| 174 this.codecName = codecName; | 167 this.codecName = codecName; |
| 175 this.colorFormat = colorFormat; | 168 this.colorFormat = colorFormat; |
| 176 } | 169 } |
| 177 public final String codecName; // OpenMax component name for VP8 codec. | 170 public final String codecName; // OpenMax component name for VP8 codec. |
| 178 public final int colorFormat; // Color format supported by codec. | 171 public final int colorFormat; // Color format supported by codec. |
| 179 } | 172 } |
| 180 | 173 |
| 181 private static DecoderProperties findDecoder( | 174 private static DecoderProperties findDecoder(String mime, String[] supportedCodecPrefixes) { |
| 182 String mime, String[] supportedCodecPrefixes) { | |
| 183 if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) { | 175 if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) { |
| 184 return null; // MediaCodec.setParameters is missing. | 176 return null; // MediaCodec.setParameters is missing. |
| 185 } | 177 } |
| 186 Logging.d(TAG, "Trying to find HW decoder for mime " + mime); | 178 Logging.d(TAG, "Trying to find HW decoder for mime " + mime); |
| 187 for (int i = 0; i < MediaCodecList.getCodecCount(); ++i) { | 179 for (int i = 0; i < MediaCodecList.getCodecCount(); ++i) { |
| 188 MediaCodecInfo info = null; | 180 MediaCodecInfo info = null; |
| 189 try { | 181 try { |
| 190 info = MediaCodecList.getCodecInfoAt(i); | 182 info = MediaCodecList.getCodecInfoAt(i); |
| 191 } catch (IllegalArgumentException e) { | 183 } catch (IllegalArgumentException e) { |
| 192 Logging.e(TAG, "Cannot retrieve decoder codec info", e); | 184 Logging.e(TAG, "Cannot retrieve decoder codec info", e); |
| 193 } | 185 } |
| 194 if (info == null || info.isEncoder()) { | 186 if (info == null || info.isEncoder()) { |
| 195 continue; | 187 continue; |
| 196 } | 188 } |
| 197 String name = null; | 189 String name = null; |
| 198 for (String mimeType : info.getSupportedTypes()) { | 190 for (String mimeType : info.getSupportedTypes()) { |
| 199 if (mimeType.equals(mime)) { | 191 if (mimeType.equals(mime)) { |
| 200 name = info.getName(); | 192 name = info.getName(); |
| 201 break; | 193 break; |
| 202 } | 194 } |
| 203 } | 195 } |
| 204 if (name == null) { | 196 if (name == null) { |
| 205 continue; // No HW support in this codec; try the next one. | 197 continue; // No HW support in this codec; try the next one. |
| 206 } | 198 } |
| 207 Logging.d(TAG, "Found candidate decoder " + name); | 199 Logging.d(TAG, "Found candidate decoder " + name); |
| 208 | 200 |
| 209 // Check if this is supported decoder. | 201 // Check if this is supported decoder. |
| 210 boolean supportedCodec = false; | 202 boolean supportedCodec = false; |
| 211 for (String codecPrefix : supportedCodecPrefixes) { | 203 for (String codecPrefix : supportedCodecPrefixes) { |
| 212 if (name.startsWith(codecPrefix)) { | 204 if (name.startsWith(codecPrefix)) { |
| 213 supportedCodec = true; | 205 supportedCodec = true; |
| 214 break; | 206 break; |
| 215 } | 207 } |
| 216 } | 208 } |
| 217 if (!supportedCodec) { | 209 if (!supportedCodec) { |
| 218 continue; | 210 continue; |
| 219 } | 211 } |
| 220 | 212 |
| 221 // Check if codec supports either yuv420 or nv12. | 213 // Check if codec supports either yuv420 or nv12. |
| 222 CodecCapabilities capabilities; | 214 CodecCapabilities capabilities; |
| 223 try { | 215 try { |
| 224 capabilities = info.getCapabilitiesForType(mime); | 216 capabilities = info.getCapabilitiesForType(mime); |
| 225 } catch (IllegalArgumentException e) { | 217 } catch (IllegalArgumentException e) { |
| 226 Logging.e(TAG, "Cannot retrieve decoder capabilities", e); | 218 Logging.e(TAG, "Cannot retrieve decoder capabilities", e); |
| 227 continue; | 219 continue; |
| 228 } | 220 } |
| 229 for (int colorFormat : capabilities.colorFormats) { | 221 for (int colorFormat : capabilities.colorFormats) { |
| 230 Logging.v(TAG, " Color: 0x" + Integer.toHexString(colorFormat)); | 222 Logging.v(TAG, " Color: 0x" + Integer.toHexString(colorFormat)); |
| 231 } | 223 } |
| 232 for (int supportedColorFormat : supportedColorList) { | 224 for (int supportedColorFormat : supportedColorList) { |
| 233 for (int codecColorFormat : capabilities.colorFormats) { | 225 for (int codecColorFormat : capabilities.colorFormats) { |
| 234 if (codecColorFormat == supportedColorFormat) { | 226 if (codecColorFormat == supportedColorFormat) { |
| 235 // Found supported HW decoder. | 227 // Found supported HW decoder. |
| 236 Logging.d(TAG, "Found target decoder " + name + | 228 Logging.d(TAG, "Found target decoder " + name + ". Color: 0x" |
| 237 ". Color: 0x" + Integer.toHexString(codecColorFormat)); | 229 + Integer.toHexString(codecColorFormat)); |
| 238 return new DecoderProperties(name, codecColorFormat); | 230 return new DecoderProperties(name, codecColorFormat); |
| 239 } | 231 } |
| 240 } | 232 } |
| 241 } | 233 } |
| 242 } | 234 } |
| 243 Logging.d(TAG, "No HW decoder found for mime " + mime); | 235 Logging.d(TAG, "No HW decoder found for mime " + mime); |
| 244 return null; // No HW decoder. | 236 return null; // No HW decoder. |
| 245 } | 237 } |
| 246 | 238 |
| 247 private void checkOnMediaCodecThread() throws IllegalStateException { | 239 private void checkOnMediaCodecThread() throws IllegalStateException { |
| 248 if (mediaCodecThread.getId() != Thread.currentThread().getId()) { | 240 if (mediaCodecThread.getId() != Thread.currentThread().getId()) { |
| 249 throw new IllegalStateException( | 241 throw new IllegalStateException("MediaCodecVideoDecoder previously operated on |
| 250 "MediaCodecVideoDecoder previously operated on " + mediaCodecThread + | 242 + mediaCodecThread + " but is now called on " + Thread.currentThread()); |
| 251 " but is now called on " + Thread.currentThread()); | |
| 252 } | 243 } |
| 253 } | 244 } |
| 254 | 245 |
| 255 // Pass null in |surfaceTextureHelper| to configure the codec for ByteBuffer output. | 246 // Pass null in |surfaceTextureHelper| to configure the codec for ByteBuffer output. |
| 256 private boolean initDecode( | 247 private boolean initDecode( |
| 257 VideoCodecType type, int width, int height, | 248 VideoCodecType type, int width, int height, SurfaceTextureHelper surfaceTextureHelper) { |
| 258 SurfaceTextureHelper surfaceTextureHelper) { | |
| 259 if (mediaCodecThread != null) { | 249 if (mediaCodecThread != null) { |
| 260 throw new RuntimeException("initDecode: Forgot to release()?"); | 250 throw new RuntimeException("initDecode: Forgot to release()?"); |
| 261 } | 251 } |
| 262 | 252 |
| 263 String mime = null; | 253 String mime = null; |
| 264 useSurface = (surfaceTextureHelper != null); | 254 useSurface = (surfaceTextureHelper != null); |
| 265 String[] supportedCodecPrefixes = null; | 255 String[] supportedCodecPrefixes = null; |
| 266 if (type == VideoCodecType.VIDEO_CODEC_VP8) { | 256 if (type == VideoCodecType.VIDEO_CODEC_VP8) { |
| 267 mime = VP8_MIME_TYPE; | 257 mime = VP8_MIME_TYPE; |
| 268 supportedCodecPrefixes = supportedVp8HwCodecPrefixes; | 258 supportedCodecPrefixes = supportedVp8HwCodecPrefixes; |
| 269 } else if (type == VideoCodecType.VIDEO_CODEC_VP9) { | 259 } else if (type == VideoCodecType.VIDEO_CODEC_VP9) { |
| 270 mime = VP9_MIME_TYPE; | 260 mime = VP9_MIME_TYPE; |
| 271 supportedCodecPrefixes = supportedVp9HwCodecPrefixes; | 261 supportedCodecPrefixes = supportedVp9HwCodecPrefixes; |
| 272 } else if (type == VideoCodecType.VIDEO_CODEC_H264) { | 262 } else if (type == VideoCodecType.VIDEO_CODEC_H264) { |
| 273 mime = H264_MIME_TYPE; | 263 mime = H264_MIME_TYPE; |
| 274 supportedCodecPrefixes = supportedH264HwCodecPrefixes; | 264 supportedCodecPrefixes = supportedH264HwCodecPrefixes; |
| 275 } else { | 265 } else { |
| 276 throw new RuntimeException("initDecode: Non-supported codec " + type); | 266 throw new RuntimeException("initDecode: Non-supported codec " + type); |
| 277 } | 267 } |
| 278 DecoderProperties properties = findDecoder(mime, supportedCodecPrefixes); | 268 DecoderProperties properties = findDecoder(mime, supportedCodecPrefixes); |
| 279 if (properties == null) { | 269 if (properties == null) { |
| 280 throw new RuntimeException("Cannot find HW decoder for " + type); | 270 throw new RuntimeException("Cannot find HW decoder for " + type); |
| 281 } | 271 } |
| 282 | 272 |
| 283 Logging.d(TAG, "Java initDecode: " + type + " : "+ width + " x " + height + | 273 Logging.d(TAG, "Java initDecode: " + type + " : " + width + " x " + height + ". Color: 0x" |
| 284 ". Color: 0x" + Integer.toHexString(properties.colorFormat) + | 274 + Integer.toHexString(properties.colorFormat) + ". Use Surface: " + useSurface); |
| 285 ". Use Surface: " + useSurface); | |
| 286 | 275 |
| 287 runningInstance = this; // Decoder is now running and can be queried for stack traces. | 276 runningInstance = this; // Decoder is now running and can be queried for stack traces. |
| 288 mediaCodecThread = Thread.currentThread(); | 277 mediaCodecThread = Thread.currentThread(); |
| 289 try { | 278 try { |
| 290 this.width = width; | 279 this.width = width; |
| 291 this.height = height; | 280 this.height = height; |
| 292 stride = width; | 281 stride = width; |
| 293 sliceHeight = height; | 282 sliceHeight = height; |
| 294 | 283 |
| 295 if (useSurface) { | 284 if (useSurface) { |
| (...skipping 14 matching lines...) | |
| 310 mediaCodec.configure(format, surface, null, 0); | 299 mediaCodec.configure(format, surface, null, 0); |
| 311 mediaCodec.start(); | 300 mediaCodec.start(); |
| 312 | 301 |
| 313 colorFormat = properties.colorFormat; | 302 colorFormat = properties.colorFormat; |
| 314 outputBuffers = mediaCodec.getOutputBuffers(); | 303 outputBuffers = mediaCodec.getOutputBuffers(); |
| 315 inputBuffers = mediaCodec.getInputBuffers(); | 304 inputBuffers = mediaCodec.getInputBuffers(); |
| 316 decodeStartTimeMs.clear(); | 305 decodeStartTimeMs.clear(); |
| 317 hasDecodedFirstFrame = false; | 306 hasDecodedFirstFrame = false; |
| 318 dequeuedSurfaceOutputBuffers.clear(); | 307 dequeuedSurfaceOutputBuffers.clear(); |
| 319 droppedFrames = 0; | 308 droppedFrames = 0; |
| 320 Logging.d(TAG, "Input buffers: " + inputBuffers.length + | 309 Logging.d(TAG, |
| 321 ". Output buffers: " + outputBuffers.length); | 310 "Input buffers: " + inputBuffers.length + ". Output buffers: " + outpu tBuffers.length); |
| 322 return true; | 311 return true; |
| 323 } catch (IllegalStateException e) { | 312 } catch (IllegalStateException e) { |
| 324 Logging.e(TAG, "initDecode failed", e); | 313 Logging.e(TAG, "initDecode failed", e); |
| 325 return false; | 314 return false; |
| 326 } | 315 } |
| 327 } | 316 } |
| 328 | 317 |
| 329 // Resets the decoder so it can start decoding frames with new resolution. | 318 // Resets the decoder so it can start decoding frames with new resolution. |
| 330 // Flushes MediaCodec and clears decoder output buffers. | 319 // Flushes MediaCodec and clears decoder output buffers. |
| 331 private void reset(int width, int height) { | 320 private void reset(int width, int height) { |
| (...skipping 67 matching lines...) | |
| 399 return -2; | 388 return -2; |
| 400 } | 389 } |
| 401 } | 390 } |
| 402 | 391 |
| 403 private boolean queueInputBuffer(int inputBufferIndex, int size, long presentationTimeStamUs, | 392 private boolean queueInputBuffer(int inputBufferIndex, int size, long presentationTimeStamUs, |
| 404 long timeStampMs, long ntpTimeStamp) { | 393 long timeStampMs, long ntpTimeStamp) { |
| 405 checkOnMediaCodecThread(); | 394 checkOnMediaCodecThread(); |
| 406 try { | 395 try { |
| 407 inputBuffers[inputBufferIndex].position(0); | 396 inputBuffers[inputBufferIndex].position(0); |
| 408 inputBuffers[inputBufferIndex].limit(size); | 397 inputBuffers[inputBufferIndex].limit(size); |
| 409 decodeStartTimeMs.add(new TimeStamps(SystemClock.elapsedRealtime(), timeStampMs, | 398 decodeStartTimeMs.add( |
| 410 ntpTimeStamp)); | 399 new TimeStamps(SystemClock.elapsedRealtime(), timeStampMs, ntpTimeStamp)); |
| 411 mediaCodec.queueInputBuffer(inputBufferIndex, 0, size, presentationTimeStamUs, 0); | 400 mediaCodec.queueInputBuffer(inputBufferIndex, 0, size, presentationTimeStamUs, 0); |
| 412 return true; | 401 return true; |
| 413 } | 402 } catch (IllegalStateException e) { |
| 414 catch (IllegalStateException e) { | |
| 415 Logging.e(TAG, "decode failed", e); | 403 Logging.e(TAG, "decode failed", e); |
| 416 return false; | 404 return false; |
| 417 } | 405 } |
| 418 } | 406 } |
| 419 | 407 |
| 420 private static class TimeStamps { | 408 private static class TimeStamps { |
| 421 public TimeStamps(long decodeStartTimeMs, long timeStampMs, long ntpTimeStampMs) { | 409 public TimeStamps(long decodeStartTimeMs, long timeStampMs, long ntpTimeStampMs) { |
| 422 this.decodeStartTimeMs = decodeStartTimeMs; | 410 this.decodeStartTimeMs = decodeStartTimeMs; |
| 423 this.timeStampMs = timeStampMs; | 411 this.timeStampMs = timeStampMs; |
| 424 this.ntpTimeStampMs = ntpTimeStampMs; | 412 this.ntpTimeStampMs = ntpTimeStampMs; |
| (...skipping 79 matching lines...) | |
| 504 private DecodedOutputBuffer bufferToRender; | 492 private DecodedOutputBuffer bufferToRender; |
| 505 private DecodedTextureBuffer renderedBuffer; | 493 private DecodedTextureBuffer renderedBuffer; |
| 506 | 494 |
| 507 public TextureListener(SurfaceTextureHelper surfaceTextureHelper) { | 495 public TextureListener(SurfaceTextureHelper surfaceTextureHelper) { |
| 508 this.surfaceTextureHelper = surfaceTextureHelper; | 496 this.surfaceTextureHelper = surfaceTextureHelper; |
| 509 surfaceTextureHelper.startListening(this); | 497 surfaceTextureHelper.startListening(this); |
| 510 } | 498 } |
| 511 | 499 |
| 512 public void addBufferToRender(DecodedOutputBuffer buffer) { | 500 public void addBufferToRender(DecodedOutputBuffer buffer) { |
| 513 if (bufferToRender != null) { | 501 if (bufferToRender != null) { |
| 514 Logging.e(TAG, | 502 Logging.e(TAG, "Unexpected addBufferToRender() called while waiting for a texture."); |
| 515 "Unexpected addBufferToRender() called while waiting for a texture." ); | |
| 516 throw new IllegalStateException("Waiting for a texture."); | 503 throw new IllegalStateException("Waiting for a texture."); |
| 517 } | 504 } |
| 518 bufferToRender = buffer; | 505 bufferToRender = buffer; |
| 519 } | 506 } |
| 520 | 507 |
| 521 public boolean isWaitingForTexture() { | 508 public boolean isWaitingForTexture() { |
| 522 synchronized (newFrameLock) { | 509 synchronized (newFrameLock) { |
| 523 return bufferToRender != null; | 510 return bufferToRender != null; |
| 524 } | 511 } |
| 525 } | 512 } |
| 526 | 513 |
| 527 // Callback from |surfaceTextureHelper|. May be called on an arbitrary thread. | 514 // Callback from |surfaceTextureHelper|. May be called on an arbitrary thread. |
| 528 @Override | 515 @Override |
| 529 public void onTextureFrameAvailable( | 516 public void onTextureFrameAvailable( |
| 530 int oesTextureId, float[] transformMatrix, long timestampNs) { | 517 int oesTextureId, float[] transformMatrix, long timestampNs) { |
| 531 synchronized (newFrameLock) { | 518 synchronized (newFrameLock) { |
| 532 if (renderedBuffer != null) { | 519 if (renderedBuffer != null) { |
| 533 Logging.e(TAG, | 520 Logging.e( |
| 534 "Unexpected onTextureFrameAvailable() called while already holding a texture."); | 521 TAG, "Unexpected onTextureFrameAvailable() called while already ho lding a texture."); |
| 535 throw new IllegalStateException("Already holding a texture."); | 522 throw new IllegalStateException("Already holding a texture."); |
| 536 } | 523 } |
| 537 // |timestampNs| is always zero on some Android versions. | 524 // |timestampNs| is always zero on some Android versions. |
| 538 renderedBuffer = new DecodedTextureBuffer(oesTextureId, transformMatrix, | 525 renderedBuffer = new DecodedTextureBuffer(oesTextureId, transformMatrix, |
| 539 bufferToRender.presentationTimeStampMs, bufferToRender.timeStampMs, | 526 bufferToRender.presentationTimeStampMs, bufferToRender.timeStampMs, |
| 540 bufferToRender.ntpTimeStampMs, bufferToRender.decodeTimeMs, | 527 bufferToRender.ntpTimeStampMs, bufferToRender.decodeTimeMs, |
| 541 SystemClock.elapsedRealtime() - bufferToRender.endDecodeTimeMs); | 528 SystemClock.elapsedRealtime() - bufferToRender.endDecodeTimeMs); |
| 542 bufferToRender = null; | 529 bufferToRender = null; |
| 543 newFrameLock.notifyAll(); | 530 newFrameLock.notifyAll(); |
| 544 } | 531 } |
| 545 } | 532 } |
| 546 | 533 |
| 547 // Dequeues and returns a DecodedTextureBuffer if available, or null otherwise. | 534 // Dequeues and returns a DecodedTextureBuffer if available, or null otherwise. |
| 548 public DecodedTextureBuffer dequeueTextureBuffer(int timeoutMs) { | 535 public DecodedTextureBuffer dequeueTextureBuffer(int timeoutMs) { |
| 549 synchronized (newFrameLock) { | 536 synchronized (newFrameLock) { |
| 550 if (renderedBuffer == null && timeoutMs > 0 && isWaitingForTexture()) { | 537 if (renderedBuffer == null && timeoutMs > 0 && isWaitingForTexture()) { |
| 551 try { | 538 try { |
| 552 newFrameLock.wait(timeoutMs); | 539 newFrameLock.wait(timeoutMs); |
| 553 } catch(InterruptedException e) { | 540 } catch (InterruptedException e) { |
| 554 // Restore the interrupted status by reinterrupting the thread. | 541 // Restore the interrupted status by reinterrupting the thread. |
| 555 Thread.currentThread().interrupt(); | 542 Thread.currentThread().interrupt(); |
| 556 } | 543 } |
| 557 } | 544 } |
| 558 DecodedTextureBuffer returnedBuffer = renderedBuffer; | 545 DecodedTextureBuffer returnedBuffer = renderedBuffer; |
| 559 renderedBuffer = null; | 546 renderedBuffer = null; |
| 560 return returnedBuffer; | 547 return returnedBuffer; |
| 561 } | 548 } |
| 562 } | 549 } |
| 563 | 550 |
| (...skipping 17 matching lines...) | |
| 581 // upon codec error. | 568 // upon codec error. |
| 582 private DecodedOutputBuffer dequeueOutputBuffer(int dequeueTimeoutMs) { | 569 private DecodedOutputBuffer dequeueOutputBuffer(int dequeueTimeoutMs) { |
| 583 checkOnMediaCodecThread(); | 570 checkOnMediaCodecThread(); |
| 584 if (decodeStartTimeMs.isEmpty()) { | 571 if (decodeStartTimeMs.isEmpty()) { |
| 585 return null; | 572 return null; |
| 586 } | 573 } |
| 587 // Drain the decoder until receiving a decoded buffer or hitting | 574 // Drain the decoder until receiving a decoded buffer or hitting |
| 588 // MediaCodec.INFO_TRY_AGAIN_LATER. | 575 // MediaCodec.INFO_TRY_AGAIN_LATER. |
| 589 final MediaCodec.BufferInfo info = new MediaCodec.BufferInfo(); | 576 final MediaCodec.BufferInfo info = new MediaCodec.BufferInfo(); |
| 590 while (true) { | 577 while (true) { |
| 591 final int result = mediaCodec.dequeueOutputBuffer( | 578 final int result = |
| 592 info, TimeUnit.MILLISECONDS.toMicros(dequeueTimeoutMs)); | 579 mediaCodec.dequeueOutputBuffer(info, TimeUnit.MILLISECONDS.toMicros(dequeueTimeoutMs)); |
| 593 switch (result) { | 580 switch (result) { |
| 594 case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED: | 581 case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED: |
| 595 outputBuffers = mediaCodec.getOutputBuffers(); | 582 outputBuffers = mediaCodec.getOutputBuffers(); |
| 596 Logging.d(TAG, "Decoder output buffers changed: " + outputBuffers.length); | 583 Logging.d(TAG, "Decoder output buffers changed: " + outputBuffers.length); |
| 597 if (hasDecodedFirstFrame) { | 584 if (hasDecodedFirstFrame) { |
| 598 throw new RuntimeException("Unexpected output buffer change event."); | 585 throw new RuntimeException("Unexpected output buffer change event."); |
| 599 } | 586 } |
| 600 break; | 587 break; |
| 601 case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED: | 588 case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED: |
| 602 MediaFormat format = mediaCodec.getOutputFormat(); | 589 MediaFormat format = mediaCodec.getOutputFormat(); |
| 603 Logging.d(TAG, "Decoder format changed: " + format.toString()); | 590 Logging.d(TAG, "Decoder format changed: " + format.toString()); |
| 604 int new_width = format.getInteger(MediaFormat.KEY_WIDTH); | 591 int new_width = format.getInteger(MediaFormat.KEY_WIDTH); |
| 605 int new_height = format.getInteger(MediaFormat.KEY_HEIGHT); | 592 int new_height = format.getInteger(MediaFormat.KEY_HEIGHT); |
| 606 if (hasDecodedFirstFrame && (new_width != width || new_height != height)) { | 593 if (hasDecodedFirstFrame && (new_width != width || new_height != height)) { |
| 607 throw new RuntimeException("Unexpected size change. Configured " + width + "*" + | 594 throw new RuntimeException("Unexpected size change. Configured " + width + "*" + height |
| 608 height + ". New " + new_width + "*" + new_height); | 595 + ". New " + new_width + "*" + new_height); |
| 609 } | 596 } |
| 610 width = format.getInteger(MediaFormat.KEY_WIDTH); | 597 width = format.getInteger(MediaFormat.KEY_WIDTH); |
| 611 height = format.getInteger(MediaFormat.KEY_HEIGHT); | 598 height = format.getInteger(MediaFormat.KEY_HEIGHT); |
| 612 | 599 |
| 613 if (!useSurface && format.containsKey(MediaFormat.KEY_COLOR_FORMAT)) { | 600 if (!useSurface && format.containsKey(MediaFormat.KEY_COLOR_FORMAT)) { |
| 614 colorFormat = format.getInteger(MediaFormat.KEY_COLOR_FORMAT); | 601 colorFormat = format.getInteger(MediaFormat.KEY_COLOR_FORMAT); |
| 615 Logging.d(TAG, "Color: 0x" + Integer.toHexString(colorFormat)); | 602 Logging.d(TAG, "Color: 0x" + Integer.toHexString(colorFormat)); |
| 616 if (!supportedColorList.contains(colorFormat)) { | 603 if (!supportedColorList.contains(colorFormat)) { |
| 617 throw new IllegalStateException("Non supported color format: " + colorFormat); | 604 throw new IllegalStateException("Non supported color format: " + colorFormat); |
| 618 } | 605 } |
| 619 } | 606 } |
| 620 if (format.containsKey("stride")) { | 607 if (format.containsKey("stride")) { |
| 621 stride = format.getInteger("stride"); | 608 stride = format.getInteger("stride"); |
| 622 } | 609 } |
| 623 if (format.containsKey("slice-height")) { | 610 if (format.containsKey("slice-height")) { |
| 624 sliceHeight = format.getInteger("slice-height"); | 611 sliceHeight = format.getInteger("slice-height"); |
| 625 } | 612 } |
| 626 Logging.d(TAG, "Frame stride and slice height: " + stride + " x " + sliceHeight); | 613 Logging.d(TAG, "Frame stride and slice height: " + stride + " x " + sliceHeight); |
| 627 stride = Math.max(width, stride); | 614 stride = Math.max(width, stride); |
| 628 sliceHeight = Math.max(height, sliceHeight); | 615 sliceHeight = Math.max(height, sliceHeight); |
| 629 break; | 616 break; |
| 630 case MediaCodec.INFO_TRY_AGAIN_LATER: | 617 case MediaCodec.INFO_TRY_AGAIN_LATER: |
| 631 return null; | 618 return null; |
| 632 default: | 619 default: |
| 633 hasDecodedFirstFrame = true; | 620 hasDecodedFirstFrame = true; |
| 634 TimeStamps timeStamps = decodeStartTimeMs.remove(); | 621 TimeStamps timeStamps = decodeStartTimeMs.remove(); |
| 635 long decodeTimeMs = SystemClock.elapsedRealtime() - timeStamps.decodeStartTimeMs; | 622 long decodeTimeMs = SystemClock.elapsedRealtime() - timeStamps.decodeStartTimeMs; |
| 636 if (decodeTimeMs > MAX_DECODE_TIME_MS) { | 623 if (decodeTimeMs > MAX_DECODE_TIME_MS) { |
| 637 Logging.e(TAG, "Very high decode time: " + decodeTimeMs + "ms" | 624 Logging.e(TAG, "Very high decode time: " + decodeTimeMs + "ms" |
| 638 + ". Q size: " + decodeStartTimeMs.size() | 625 + ". Q size: " + decodeStartTimeMs.size() |
| 639 + ". Might be caused by resuming H264 decoding after a pause."); | 626 + ". Might be caused by resuming H264 decoding after a pause ."); |
| 640 decodeTimeMs = MAX_DECODE_TIME_MS; | 627 decodeTimeMs = MAX_DECODE_TIME_MS; |
| 641 } | 628 } |
| 642 return new DecodedOutputBuffer(result, | 629 return new DecodedOutputBuffer(result, info.offset, info.size, |
| 644 info.offset, | 630 TimeUnit.MICROSECONDS.toMillis(info.presentationTimeUs), timeStamps.timeStampMs, |
| 645 info.size, | 631 timeStamps.ntpTimeStampMs, decodeTimeMs, SystemClock.elapsedRealtime()); |
| 645 TimeUnit.MICROSECONDS.toMillis(info.presentationTimeUs), | 632 } |
| 646 timeStamps.timeStampMs, | |
| 647 timeStamps.ntpTimeStampMs, | |
| 648 decodeTimeMs, | |
| 649 SystemClock.elapsedRealtime()); | |
| 650 } | |
| 651 } | 633 } |
| 652 } | 634 } |
| 653 | 635 |
| 654 // Returns null if no decoded buffer is available, and otherwise a DecodedTextureBuffer. | 636 // Returns null if no decoded buffer is available, and otherwise a DecodedTextureBuffer. |
| 655 // Throws IllegalStateException if call is made on the wrong thread, if color format changes to an | 637 // Throws IllegalStateException if call is made on the wrong thread, if color format changes to an |
| 656 // unsupported format, or if |mediaCodec| is not in the Executing state. Throws CodecException | 638 // unsupported format, or if |mediaCodec| is not in the Executing state. Throws CodecException |
| 657 // upon codec error. If |dequeueTimeoutMs| > 0, the oldest decoded frame will be dropped if | 639 // upon codec error. If |dequeueTimeoutMs| > 0, the oldest decoded frame will be dropped if |
| 658 // a frame can't be returned. | 640 // a frame can't be returned. |
| 659 private DecodedTextureBuffer dequeueTextureBuffer(int dequeueTimeoutMs) { | 641 private DecodedTextureBuffer dequeueTextureBuffer(int dequeueTimeoutMs) { |
| 660 checkOnMediaCodecThread(); | 642 checkOnMediaCodecThread(); |
| 661 if (!useSurface) { | 643 if (!useSurface) { |
| 662 throw new IllegalStateException("dequeueTexture() called for byte buffer decoding."); | 644 throw new IllegalStateException("dequeueTexture() called for byte buffer decoding."); |
| 663 } | 645 } |
| 664 DecodedOutputBuffer outputBuffer = dequeueOutputBuffer(dequeueTimeoutMs); | 646 DecodedOutputBuffer outputBuffer = dequeueOutputBuffer(dequeueTimeoutMs); |
| 665 if (outputBuffer != null) { | 647 if (outputBuffer != null) { |
| 666 dequeuedSurfaceOutputBuffers.add(outputBuffer); | 648 dequeuedSurfaceOutputBuffers.add(outputBuffer); |
| 667 } | 649 } |
| 668 | 650 |
| 669 MaybeRenderDecodedTextureBuffer(); | 651 MaybeRenderDecodedTextureBuffer(); |
| 670 // Check if there is texture ready now by waiting max |dequeueTimeoutMs|. | 652 // Check if there is texture ready now by waiting max |dequeueTimeoutMs|. |
| 671 DecodedTextureBuffer renderedBuffer = textureListener.dequeueTextureBuffer(dequeueTimeoutMs); | 653 DecodedTextureBuffer renderedBuffer = textureListener.dequeueTextureBuffer(dequeueTimeoutMs); |
| 672 if (renderedBuffer != null) { | 654 if (renderedBuffer != null) { |
| 673 MaybeRenderDecodedTextureBuffer(); | 655 MaybeRenderDecodedTextureBuffer(); |
| 674 return renderedBuffer; | 656 return renderedBuffer; |
| 675 } | 657 } |
| 676 | 658 |
| 677 if ((dequeuedSurfaceOutputBuffers.size() | 659 if ((dequeuedSurfaceOutputBuffers.size() |
| 678 >= Math.min(MAX_QUEUED_OUTPUTBUFFERS, outputBuffers.length) | 660 >= Math.min(MAX_QUEUED_OUTPUTBUFFERS, outputBuffers.length) |
| 679 || (dequeueTimeoutMs > 0 && !dequeuedSurfaceOutputBuffers.isEmpty()))) { | 661 || (dequeueTimeoutMs > 0 && !dequeuedSurfaceOutputBuffers.isEmpty()))) { |
| 680 ++droppedFrames; | 662 ++droppedFrames; |
| 681 // Drop the oldest frame still in dequeuedSurfaceOutputBuffers. | 663 // Drop the oldest frame still in dequeuedSurfaceOutputBuffers. |
| 682 // The oldest frame is owned by |textureListener| and can't be dropped since | 664 // The oldest frame is owned by |textureListener| and can't be dropped since |
| 683 // mediaCodec.releaseOutputBuffer has already been called. | 665 // mediaCodec.releaseOutputBuffer has already been called. |
| 684 final DecodedOutputBuffer droppedFrame = dequeuedSurfaceOutputBuffers.remove(); | 666 final DecodedOutputBuffer droppedFrame = dequeuedSurfaceOutputBuffers.remove(); |
| 685 if (dequeueTimeoutMs > 0) { | 667 if (dequeueTimeoutMs > 0) { |
| 686 // TODO(perkj): Re-add the below log when VideoRenderGUI has been removed or fixed to | 668 // TODO(perkj): Re-add the below log when VideoRenderGUI has been removed or fixed to |
| 687 // return the one and only texture even if it does not render. | 669 // return the one and only texture even if it does not render. |
| 688 Logging.w(TAG, "Draining decoder. Dropping frame with TS: " | 670 Logging.w(TAG, "Draining decoder. Dropping frame with TS: " |
| 689 + droppedFrame.presentationTimeStampMs + | 671 + droppedFrame.presentationTimeStampMs + ". Total number of dropped frames: " |
| 690 ". Total number of dropped frames: " + droppedFrames); | 672 + droppedFrames); |
| 691 } else { | 673 } else { |
| 692 Logging.w(TAG, "Too many output buffers " + dequeuedSurfaceOutputBuffers.size() + | 674 Logging.w(TAG, "Too many output buffers " + dequeuedSurfaceOutputBuffers.size() |
| 693 ". Dropping frame with TS: " + droppedFrame.presentationTimeStampMs + | 675 + ". Dropping frame with TS: " + droppedFrame.presentationTimeStampMs |
| 694 ". Total number of dropped frames: " + droppedFrames); | 676 + ". Total number of dropped frames: " + droppedFrames); |
| 695 } | 677 } |
| 696 | 678 |
| 697 mediaCodec.releaseOutputBuffer(droppedFrame.index, false /* render */); | 679 mediaCodec.releaseOutputBuffer(droppedFrame.index, false /* render */); |
| 698 return new DecodedTextureBuffer(0, null, | 680 return new DecodedTextureBuffer(0, null, droppedFrame.presentationTimeStampMs, |
| 699 droppedFrame.presentationTimeStampMs, droppedFrame.timeStampMs, | 681 droppedFrame.timeStampMs, droppedFrame.ntpTimeStampMs, droppedFrame.decodeTimeMs, |
| 700 droppedFrame.ntpTimeStampMs, droppedFrame.decodeTimeMs, | |
| 701 SystemClock.elapsedRealtime() - droppedFrame.endDecodeTimeMs); | 682 SystemClock.elapsedRealtime() - droppedFrame.endDecodeTimeMs); |
| 702 } | 683 } |
| 703 return null; | 684 return null; |
| 704 } | 685 } |
| 705 | 686 |
| 706 private void MaybeRenderDecodedTextureBuffer() { | 687 private void MaybeRenderDecodedTextureBuffer() { |
| 707 if (dequeuedSurfaceOutputBuffers.isEmpty() || textureListener.isWaitingForTexture()) { | 688 if (dequeuedSurfaceOutputBuffers.isEmpty() || textureListener.isWaitingForTexture()) { |
| 708 return; | 689 return; |
| 709 } | 690 } |
| 710 // Get the first frame in the queue and render to the decoder output surface. | 691 // Get the first frame in the queue and render to the decoder output surface. |
| 711 final DecodedOutputBuffer buffer = dequeuedSurfaceOutputBuffers.remove(); | 692 final DecodedOutputBuffer buffer = dequeuedSurfaceOutputBuffers.remove(); |
| 712 textureListener.addBufferToRender(buffer); | 693 textureListener.addBufferToRender(buffer); |
| 713 mediaCodec.releaseOutputBuffer(buffer.index, true /* render */); | 694 mediaCodec.releaseOutputBuffer(buffer.index, true /* render */); |
| 714 } | 695 } |
| 715 | 696 |
| 716 // Release a dequeued output byte buffer back to the codec for re-use. Should only be called for | 697 // Release a dequeued output byte buffer back to the codec for re-use. Should only be called for |
| 717 // non-surface decoding. | 698 // non-surface decoding. |
| 718 // Throws IllegalStateException if the call is made on the wrong thread, if codec is configured | 699 // Throws IllegalStateException if the call is made on the wrong thread, if codec is configured |
| 719 // for surface decoding, or if |mediaCodec| is not in the Executing state. Throws | 700 // for surface decoding, or if |mediaCodec| is not in the Executing state. Throws |
| 720 // MediaCodec.CodecException upon codec error. | 701 // MediaCodec.CodecException upon codec error. |
| 721 private void returnDecodedOutputBuffer(int index) | 702 private void returnDecodedOutputBuffer(int index) |
| 722 throws IllegalStateException, MediaCodec.CodecException { | 703 throws IllegalStateException, MediaCodec.CodecException { |
| 723 checkOnMediaCodecThread(); | 704 checkOnMediaCodecThread(); |
| 724 if (useSurface) { | 705 if (useSurface) { |
| 725 throw new IllegalStateException("returnDecodedOutputBuffer() called for surface decoding."); | 706 throw new IllegalStateException("returnDecodedOutputBuffer() called for surface decoding."); |
| 726 } | 707 } |
| 727 mediaCodec.releaseOutputBuffer(index, false /* render */); | 708 mediaCodec.releaseOutputBuffer(index, false /* render */); |
| 728 } | 709 } |
| 729 } | 710 } |
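
Reader's note (not part of the CL): the public static surface touched above — the disable*HwCodec(), is*HwSupported() and setErrorCallback() methods — is what applications call directly. Below is a minimal sketch of how an application might drive that API; the HwDecoderSetup class, the allowH264 flag, and the logging policy are illustrative assumptions, only the MediaCodecVideoDecoder calls themselves come from this file.

```java
import org.webrtc.MediaCodecVideoDecoder;

// Hypothetical application-side helper; not part of the reviewed change.
public class HwDecoderSetup {
  public static void configureHwDecoding(boolean allowH264) {
    if (!allowH264) {
      // Ask WebRTC to skip the HW H.264 decoder and fall back to SW decoding.
      MediaCodecVideoDecoder.disableH264HwCodec();
    }

    // Query which HW decoders this device actually exposes.
    boolean vp8 = MediaCodecVideoDecoder.isVp8HwSupported();
    boolean vp9 = MediaCodecVideoDecoder.isVp9HwSupported();
    boolean h264 = allowH264 && MediaCodecVideoDecoder.isH264HwSupported();

    // Get notified if a MediaCodec instance hangs, so the app can log or recover.
    MediaCodecVideoDecoder.setErrorCallback(
        new MediaCodecVideoDecoder.MediaCodecVideoDecoderErrorCallback() {
          @Override
          public void onMediaCodecVideoDecoderCriticalError(int codecErrors) {
            android.util.Log.e("HwDecoderSetup",
                "MediaCodec decoder critical errors: " + codecErrors);
          }
        });

    android.util.Log.d("HwDecoderSetup",
        "HW decode support - VP8: " + vp8 + " VP9: " + vp9 + " H264: " + h264);
  }
}
```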