Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(272)

Side by Side Diff: webrtc/sdk/android/src/java/org/webrtc/HardwareVideoDecoder.java

Issue 2977643002: Add texture support to HardwareVideoDecoder. (Closed)
Patch Set: Remove unused imports Created 3 years, 5 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
OLDNEW
1 /* 1 /*
2 * Copyright 2017 The WebRTC project authors. All Rights Reserved. 2 * Copyright 2017 The WebRTC project authors. All Rights Reserved.
3 * 3 *
4 * Use of this source code is governed by a BSD-style license 4 * Use of this source code is governed by a BSD-style license
5 * that can be found in the LICENSE file in the root of the source 5 * that can be found in the LICENSE file in the root of the source
6 * tree. An additional intellectual property rights grant can be found 6 * tree. An additional intellectual property rights grant can be found
7 * in the file PATENTS. All contributing project authors may 7 * in the file PATENTS. All contributing project authors may
8 * be found in the AUTHORS file in the root of the source tree. 8 * be found in the AUTHORS file in the root of the source tree.
9 */ 9 */
10 10
11 package org.webrtc; 11 package org.webrtc;
12 12
13 import android.annotation.TargetApi; 13 import android.annotation.TargetApi;
14 import android.graphics.Matrix; 14 import android.graphics.Matrix;
15 import android.media.MediaCodec; 15 import android.media.MediaCodec;
16 import android.media.MediaCodecInfo.CodecCapabilities; 16 import android.media.MediaCodecInfo.CodecCapabilities;
17 import android.media.MediaFormat; 17 import android.media.MediaFormat;
18 import android.os.SystemClock; 18 import android.os.SystemClock;
19 import android.view.Surface;
19 import java.io.IOException; 20 import java.io.IOException;
20 import java.nio.ByteBuffer; 21 import java.nio.ByteBuffer;
21 import java.util.Arrays; 22 import java.util.Arrays;
22 import java.util.Deque; 23 import java.util.Deque;
23 import java.util.concurrent.CountDownLatch; 24 import java.util.concurrent.CountDownLatch;
24 import java.util.concurrent.LinkedBlockingDeque; 25 import java.util.concurrent.LinkedBlockingDeque;
25 import org.webrtc.ThreadUtils.ThreadChecker; 26 import org.webrtc.ThreadUtils.ThreadChecker;
26 27
27 /** Android hardware video decoder. */ 28 /** Android hardware video decoder. */
28 @TargetApi(16) 29 @TargetApi(16)
29 @SuppressWarnings("deprecation") // Cannot support API 16 without using deprecated methods. 30 @SuppressWarnings("deprecation") // Cannot support API 16 without using deprecated methods.
30 class HardwareVideoDecoder implements VideoDecoder { 31 class HardwareVideoDecoder
32 implements VideoDecoder, SurfaceTextureHelper.OnTextureFrameAvailableListener {
31 private static final String TAG = "HardwareVideoDecoder"; 33 private static final String TAG = "HardwareVideoDecoder";
32 34
33 // TODO(magjed): Use MediaFormat.KEY_* constants when part of the public API. 35 // TODO(magjed): Use MediaFormat.KEY_* constants when part of the public API.
34 private static final String MEDIA_FORMAT_KEY_STRIDE = "stride"; 36 private static final String MEDIA_FORMAT_KEY_STRIDE = "stride";
35 private static final String MEDIA_FORMAT_KEY_SLICE_HEIGHT = "slice-height"; 37 private static final String MEDIA_FORMAT_KEY_SLICE_HEIGHT = "slice-height";
36 private static final String MEDIA_FORMAT_KEY_CROP_LEFT = "crop-left"; 38 private static final String MEDIA_FORMAT_KEY_CROP_LEFT = "crop-left";
37 private static final String MEDIA_FORMAT_KEY_CROP_RIGHT = "crop-right"; 39 private static final String MEDIA_FORMAT_KEY_CROP_RIGHT = "crop-right";
38 private static final String MEDIA_FORMAT_KEY_CROP_TOP = "crop-top"; 40 private static final String MEDIA_FORMAT_KEY_CROP_TOP = "crop-top";
39 private static final String MEDIA_FORMAT_KEY_CROP_BOTTOM = "crop-bottom"; 41 private static final String MEDIA_FORMAT_KEY_CROP_BOTTOM = "crop-bottom";
40 42
41 // MediaCodec.release() occasionally hangs. Release stops waiting and reports failure after 43 // MediaCodec.release() occasionally hangs. Release stops waiting and reports failure after
42 // this timeout. 44 // this timeout.
43 private static final int MEDIA_CODEC_RELEASE_TIMEOUT_MS = 5000; 45 private static final int MEDIA_CODEC_RELEASE_TIMEOUT_MS = 5000;
44 46
45 // WebRTC queues input frames quickly in the beginning on the call. Wait for input buffers with a 47 // WebRTC queues input frames quickly in the beginning on the call. Wait for input buffers with a
46 // long timeout (500 ms) to prevent this from causing the codec to return an error. 48 // long timeout (500 ms) to prevent this from causing the codec to return an error.
47 private static final int DEQUEUE_INPUT_TIMEOUT_US = 500000; 49 private static final int DEQUEUE_INPUT_TIMEOUT_US = 500000;
48 50
49 // Dequeuing an output buffer will block until a buffer is available (up to 100 milliseconds). 51 // Dequeuing an output buffer will block until a buffer is available (up to 100 milliseconds).
50 // If this timeout is exceeded, the output thread will unblock and check if the decoder is still 52 // If this timeout is exceeded, the output thread will unblock and check if the decoder is still
51 // running. If it is, it will block on dequeue again. Otherwise, it will stop and release the 53 // running. If it is, it will block on dequeue again. Otherwise, it will stop and release the
52 // MediaCodec. 54 // MediaCodec.
53 private static final int DEQUEUE_OUTPUT_BUFFER_TIMEOUT_US = 100000; 55 private static final int DEQUEUE_OUTPUT_BUFFER_TIMEOUT_US = 100000;
54 56
57 // Max number of output buffers queued before starting to drop decoded frames.
sakal 2017/07/18 08:53:40 Not used anymore?
mellem 2017/07/18 16:50:03 Done.
58 private static final int MAX_QUEUED_OUTPUTBUFFERS = 3;
59
55 private final String codecName; 60 private final String codecName;
56 private final VideoCodecType codecType; 61 private final VideoCodecType codecType;
57 62
58 private static class FrameInfo { 63 private static class FrameInfo {
59 final long decodeStartTimeMs; 64 final long decodeStartTimeMs;
60 final int rotation; 65 final int rotation;
61 66
62 FrameInfo(long decodeStartTimeMs, int rotation) { 67 FrameInfo(long decodeStartTimeMs, int rotation) {
63 this.decodeStartTimeMs = decodeStartTimeMs; 68 this.decodeStartTimeMs = decodeStartTimeMs;
64 this.rotation = rotation; 69 this.rotation = rotation;
(...skipping 28 matching lines...) Expand all
93 private int height; 98 private int height;
94 private int stride; 99 private int stride;
95 private int sliceHeight; 100 private int sliceHeight;
96 101
97 // Whether the decoder has finished the first frame. The codec may not change output dimensions 102 // Whether the decoder has finished the first frame. The codec may not change output dimensions
98 // after delivering the first frame. 103 // after delivering the first frame.
99 private boolean hasDecodedFirstFrame; 104 private boolean hasDecodedFirstFrame;
100 // Whether the decoder has seen a key frame. The first frame must be a key fr ame. 105 // Whether the decoder has seen a key frame. The first frame must be a key fr ame.
101 private boolean keyFrameRequired; 106 private boolean keyFrameRequired;
102 107
108 private final EglBase.Context sharedContext;
109 private SurfaceTextureHelper surfaceTextureHelper;
110 private Surface surface = null;
111
112 private static class DecodedTextureMetadata {
113 final int width;
114 final int height;
115 final int rotation;
116 final long presentationTimestampUs;
117 final Integer decodeTimeMs;
118
119 DecodedTextureMetadata(
120 int width, int height, int rotation, long presentationTimestampUs, Integ er decodeTimeMs) {
121 this.width = width;
122 this.height = height;
123 this.rotation = rotation;
124 this.presentationTimestampUs = presentationTimestampUs;
125 this.decodeTimeMs = decodeTimeMs;
126 }
127 }
128
129 // Metadata for the last frame rendered to the texture. Only accessed on the texture helper's
130 // thread.
131 private DecodedTextureMetadata renderedTextureMetadata;
132
133 private int droppedFrames = 0;
sakal 2017/07/18 08:53:40 Not used?
mellem 2017/07/18 16:50:03 Done.
134
103 // Decoding proceeds asynchronously. This callback returns decoded frames to the caller. 135 // Decoding proceeds asynchronously. This callback returns decoded frames to the caller.
104 private Callback callback; 136 private Callback callback;
105 137
106 private MediaCodec codec = null; 138 private MediaCodec codec = null;
107 139
108 HardwareVideoDecoder(String codecName, VideoCodecType codecType, int colorForm at) { 140 HardwareVideoDecoder(
141 String codecName, VideoCodecType codecType, int colorFormat, EglBase.Context sharedContext) {
109 if (!isSupportedColorFormat(colorFormat)) { 142 if (!isSupportedColorFormat(colorFormat)) {
110 throw new IllegalArgumentException("Unsupported color format: " + colorFor mat); 143 throw new IllegalArgumentException("Unsupported color format: " + colorFor mat);
111 } 144 }
112 this.codecName = codecName; 145 this.codecName = codecName;
113 this.codecType = codecType; 146 this.codecType = codecType;
114 this.colorFormat = colorFormat; 147 this.colorFormat = colorFormat;
148 this.sharedContext = sharedContext;
115 this.frameInfos = new LinkedBlockingDeque<>(); 149 this.frameInfos = new LinkedBlockingDeque<>();
116 } 150 }
117 151
118 @Override 152 @Override
119 public VideoCodecStatus initDecode(Settings settings, Callback callback) { 153 public VideoCodecStatus initDecode(Settings settings, Callback callback) {
120 this.decoderThreadChecker = new ThreadChecker(); 154 this.decoderThreadChecker = new ThreadChecker();
121 return initDecodeInternal(settings.width, settings.height, callback); 155 return initDecodeInternal(settings.width, settings.height, callback);
122 } 156 }
123 157
124 private VideoCodecStatus initDecodeInternal(int width, int height, Callback ca llback) { 158 private VideoCodecStatus initDecodeInternal(int width, int height, Callback ca llback) {
125 decoderThreadChecker.checkIsOnValidThread(); 159 decoderThreadChecker.checkIsOnValidThread();
126 if (outputThread != null) { 160 if (outputThread != null) {
127 Logging.e(TAG, "initDecodeInternal called while the codec is already runni ng"); 161 Logging.e(TAG, "initDecodeInternal called while the codec is already runni ng");
128 return VideoCodecStatus.ERROR; 162 return VideoCodecStatus.ERROR;
129 } 163 }
130 164
131 // Note: it is not necessary to initialize dimensions under the lock, since the output thread 165 // Note: it is not necessary to initialize dimensions under the lock, since the output thread
132 // is not running. 166 // is not running.
133 this.callback = callback; 167 this.callback = callback;
134 this.width = width; 168 this.width = width;
135 this.height = height; 169 this.height = height;
136 170
137 stride = width; 171 stride = width;
138 sliceHeight = height; 172 sliceHeight = height;
139 hasDecodedFirstFrame = false; 173 hasDecodedFirstFrame = false;
140 keyFrameRequired = true; 174 keyFrameRequired = true;
175 droppedFrames = 0;
141 176
142 try { 177 try {
143 codec = MediaCodec.createByCodecName(codecName); 178 codec = MediaCodec.createByCodecName(codecName);
144 } catch (IOException | IllegalArgumentException e) { 179 } catch (IOException | IllegalArgumentException e) {
145 Logging.e(TAG, "Cannot create media decoder " + codecName); 180 Logging.e(TAG, "Cannot create media decoder " + codecName);
146 return VideoCodecStatus.ERROR; 181 return VideoCodecStatus.ERROR;
147 } 182 }
148 try { 183 try {
149 MediaFormat format = MediaFormat.createVideoFormat(codecType.mimeType(), w idth, height); 184 MediaFormat format = MediaFormat.createVideoFormat(codecType.mimeType(), w idth, height);
150 format.setInteger(MediaFormat.KEY_COLOR_FORMAT, colorFormat); 185 if (sharedContext == null) {
151 codec.configure(format, null, null, 0); 186 format.setInteger(MediaFormat.KEY_COLOR_FORMAT, colorFormat);
187 } else {
188 surfaceTextureHelper = SurfaceTextureHelper.create("decoder-texture-thre ad", sharedContext);
189 surface = new Surface(surfaceTextureHelper.getSurfaceTexture());
190 surfaceTextureHelper.startListening(this);
191 }
192 codec.configure(format, surface, null, 0);
152 codec.start(); 193 codec.start();
153 } catch (IllegalStateException e) { 194 } catch (IllegalStateException e) {
154 Logging.e(TAG, "initDecode failed", e); 195 Logging.e(TAG, "initDecode failed", e);
155 release(); 196 release();
156 return VideoCodecStatus.ERROR; 197 return VideoCodecStatus.ERROR;
157 } 198 }
158 199
159 running = true; 200 running = true;
160 outputThread = createOutputThread(); 201 outputThread = createOutputThread();
161 outputThread.start(); 202 outputThread.start();
(...skipping 40 matching lines...) Expand 10 before | Expand all | Expand 10 after
202 if (frame.frameType != EncodedImage.FrameType.VideoFrameKey) { 243 if (frame.frameType != EncodedImage.FrameType.VideoFrameKey) {
203 Logging.e(TAG, "decode() - key frame required first"); 244 Logging.e(TAG, "decode() - key frame required first");
204 return VideoCodecStatus.ERROR; 245 return VideoCodecStatus.ERROR;
205 } 246 }
206 if (!frame.completeFrame) { 247 if (!frame.completeFrame) {
207 Logging.e(TAG, "decode() - complete frame required first"); 248 Logging.e(TAG, "decode() - complete frame required first");
208 return VideoCodecStatus.ERROR; 249 return VideoCodecStatus.ERROR;
209 } 250 }
210 } 251 }
211 252
212 // TODO(mellem): Support textures.
213 int index; 253 int index;
214 try { 254 try {
215 index = codec.dequeueInputBuffer(DEQUEUE_INPUT_TIMEOUT_US); 255 index = codec.dequeueInputBuffer(DEQUEUE_INPUT_TIMEOUT_US);
216 } catch (IllegalStateException e) { 256 } catch (IllegalStateException e) {
217 Logging.e(TAG, "dequeueInputBuffer failed", e); 257 Logging.e(TAG, "dequeueInputBuffer failed", e);
218 return VideoCodecStatus.ERROR; 258 return VideoCodecStatus.ERROR;
219 } 259 }
220 if (index < 0) { 260 if (index < 0) {
221 // Decoder is falling behind. No input buffers available. 261 // Decoder is falling behind. No input buffers available.
222 // The decoder can't simply drop frames; it might lose a key frame. 262 // The decoder can't simply drop frames; it might lose a key frame.
(...skipping 58 matching lines...) Expand 10 before | Expand all | Expand 10 after
281 // capture both the output thread's stack trace and this thread's stack trace. 321 // capture both the output thread's stack trace and this thread's stack trace.
282 Logging.e(TAG, "Media encoder release error", new RuntimeException(shutd ownException)); 322 Logging.e(TAG, "Media encoder release error", new RuntimeException(shutd ownException));
283 shutdownException = null; 323 shutdownException = null;
284 return VideoCodecStatus.ERROR; 324 return VideoCodecStatus.ERROR;
285 } 325 }
286 } finally { 326 } finally {
287 codec = null; 327 codec = null;
288 callback = null; 328 callback = null;
289 outputThread = null; 329 outputThread = null;
290 frameInfos.clear(); 330 frameInfos.clear();
331 if (surface != null) {
332 surface.release();
333 surface = null;
334 surfaceTextureHelper.stopListening();
335 surfaceTextureHelper.dispose();
336 surfaceTextureHelper = null;
337 }
291 } 338 }
292 return VideoCodecStatus.OK; 339 return VideoCodecStatus.OK;
293 } 340 }
294 341
295 private VideoCodecStatus reinitDecode(int newWidth, int newHeight) { 342 private VideoCodecStatus reinitDecode(int newWidth, int newHeight) {
296 decoderThreadChecker.checkIsOnValidThread(); 343 decoderThreadChecker.checkIsOnValidThread();
297 VideoCodecStatus status = release(); 344 VideoCodecStatus status = release();
298 if (status != VideoCodecStatus.OK) { 345 if (status != VideoCodecStatus.OK) {
299 return status; 346 return status;
300 } 347 }
(...skipping 35 matching lines...) Expand 10 before | Expand all | Expand 10 after
336 FrameInfo frameInfo = frameInfos.poll(); 383 FrameInfo frameInfo = frameInfos.poll();
337 Integer decodeTimeMs = null; 384 Integer decodeTimeMs = null;
338 int rotation = 0; 385 int rotation = 0;
339 if (frameInfo != null) { 386 if (frameInfo != null) {
340 decodeTimeMs = (int) (SystemClock.elapsedRealtime() - frameInfo.decodeSt artTimeMs); 387 decodeTimeMs = (int) (SystemClock.elapsedRealtime() - frameInfo.decodeSt artTimeMs);
341 rotation = frameInfo.rotation; 388 rotation = frameInfo.rotation;
342 } 389 }
343 390
344 hasDecodedFirstFrame = true; 391 hasDecodedFirstFrame = true;
345 392
346 // Load dimensions from shared memory under the dimension lock. 393 if (surfaceTextureHelper != null) {
347 int width, height, stride, sliceHeight; 394 deliverTextureFrame(result, info, rotation, decodeTimeMs);
348 synchronized (dimensionLock) { 395 } else {
349 width = this.width; 396 deliverByteFrame(result, info, rotation, decodeTimeMs);
350 height = this.height;
351 stride = this.stride;
352 sliceHeight = this.sliceHeight;
353 } 397 }
354 398
355 // Output must be at least width * height bytes for Y channel, plus (width / 2) * (height / 2)
356 // bytes for each of the U and V channels.
357 if (info.size < width * height * 3 / 2) {
358 Logging.e(TAG, "Insufficient output buffer size: " + info.size);
359 return;
360 }
361
362 if (info.size < stride * height * 3 / 2 && sliceHeight == height && stride > width) {
363 // Some codecs (Exynos) report an incorrect stride. Correct it here.
364 // Expected size == stride * height * 3 / 2. A bit of algebra gives the correct stride as
365 // 2 * size / (3 * height).
366 stride = info.size * 2 / (height * 3);
367 }
368
369 ByteBuffer buffer = codec.getOutputBuffers()[result];
370 buffer.position(info.offset);
371 buffer.limit(info.size);
372
373 final VideoFrame.I420Buffer frameBuffer;
374
375 // TODO(mellem): As an optimization, use libyuv via JNI to copy/reformatt ing data.
376 if (colorFormat == CodecCapabilities.COLOR_FormatYUV420Planar) {
377 if (sliceHeight % 2 == 0) {
378 frameBuffer =
379 createBufferFromI420(buffer, result, info.offset, stride, sliceHei ght, width, height);
380 } else {
381 frameBuffer = new I420BufferImpl(width, height);
382 // Optimal path is not possible because we have to copy the last rows of U- and V-planes.
383 copyI420(buffer, info.offset, frameBuffer, stride, sliceHeight, width, height);
384 codec.releaseOutputBuffer(result, false);
385 }
386 } else {
387 frameBuffer = new I420BufferImpl(width, height);
388 // All other supported color formats are NV12.
389 nv12ToI420(buffer, info.offset, frameBuffer, stride, sliceHeight, width, height);
390 codec.releaseOutputBuffer(result, false);
391 }
392
393 long presentationTimeNs = info.presentationTimeUs * 1000;
394 VideoFrame frame = new VideoFrame(frameBuffer, rotation, presentationTimeN s, new Matrix());
395
396 // Note that qp is parsed on the C++ side.
397 callback.onDecodedFrame(frame, decodeTimeMs, null /* qp */);
398 frame.release();
399 } catch (IllegalStateException e) { 399 } catch (IllegalStateException e) {
400 Logging.e(TAG, "deliverDecodedFrame failed", e); 400 Logging.e(TAG, "deliverDecodedFrame failed", e);
401 } 401 }
402 } 402 }
403 403
404 private void deliverTextureFrame(final int index, final MediaCodec.BufferInfo info,
405 final int rotation, final Integer decodeTimeMs) {
406 // Load dimensions from shared memory under the dimension lock.
407 final int width, height;
408 synchronized (dimensionLock) {
409 width = this.width;
410 height = this.height;
411 }
412
413 surfaceTextureHelper.getHandler().post(new Runnable() {
414 @Override
415 public void run() {
416 renderedTextureMetadata = new DecodedTextureMetadata(
417 width, height, rotation, info.presentationTimeUs, decodeTimeMs);
418 codec.releaseOutputBuffer(index, true);
419 }
420 });
421 }
422
423 @Override
424 public void onTextureFrameAvailable(int oesTextureId, float[] transformMatrix, long timestampNs) {
425 VideoFrame.TextureBuffer oesBuffer = surfaceTextureHelper.createTextureBuffe r(
426 renderedTextureMetadata.width, renderedTextureMetadata.height, transform Matrix);
427
428 Matrix matrix = RendererCommon.convertMatrixToAndroidGraphicsMatrix(transfor mMatrix);
429
430 VideoFrame frame = new VideoFrame(oesBuffer, renderedTextureMetadata.rotatio n,
431 renderedTextureMetadata.presentationTimestampUs * 1000, matrix);
432 callback.onDecodedFrame(frame, renderedTextureMetadata.decodeTimeMs, null /* qp */);
433 frame.release();
434 }
435
436 private void deliverByteFrame(
437 int result, MediaCodec.BufferInfo info, int rotation, Integer decodeTimeMs ) {
438 // Load dimensions from shared memory under the dimension lock.
439 int width, height, stride, sliceHeight;
440 synchronized (dimensionLock) {
441 width = this.width;
442 height = this.height;
443 stride = this.stride;
444 sliceHeight = this.sliceHeight;
445 }
446
447 // Output must be at least width * height bytes for Y channel, plus (width / 2) * (height / 2)
448 // bytes for each of the U and V channels.
449 if (info.size < width * height * 3 / 2) {
450 Logging.e(TAG, "Insufficient output buffer size: " + info.size);
451 return;
452 }
453
454 if (info.size < stride * height * 3 / 2 && sliceHeight == height && stride > width) {
455 // Some codecs (Exynos) report an incorrect stride. Correct it here.
456 // Expected size == stride * height * 3 / 2. A bit of algebra gives the c orrect stride as
457 // 2 * size / (3 * height).
458 stride = info.size * 2 / (height * 3);
459 }
460
461 ByteBuffer buffer = codec.getOutputBuffers()[result];
462 buffer.position(info.offset);
463 buffer.limit(info.size);
464
465 final VideoFrame.I420Buffer frameBuffer;
466
467 // TODO(mellem): As an optimization, use libyuv via JNI to copy/reformattin g data.
468 if (colorFormat == CodecCapabilities.COLOR_FormatYUV420Planar) {
469 if (sliceHeight % 2 == 0) {
470 frameBuffer =
471 createBufferFromI420(buffer, result, info.offset, stride, sliceHeigh t, width, height);
472 } else {
473 frameBuffer = I420BufferImpl.allocate(width, height);
474 // Optimal path is not possible because we have to copy the last rows of U- and V-planes.
475 copyI420(buffer, info.offset, frameBuffer, stride, sliceHeight, width, h eight);
476 codec.releaseOutputBuffer(result, false);
477 }
478 } else {
479 frameBuffer = I420BufferImpl.allocate(width, height);
480 // All other supported color formats are NV12.
481 nv12ToI420(buffer, info.offset, frameBuffer, stride, sliceHeight, width, h eight);
482 codec.releaseOutputBuffer(result, false);
483 }
484
485 long presentationTimeNs = info.presentationTimeUs * 1000;
486 VideoFrame frame = new VideoFrame(frameBuffer, rotation, presentationTimeNs, new Matrix());
487
488 // Note that qp is parsed on the C++ side.
489 callback.onDecodedFrame(frame, decodeTimeMs, null /* qp */);
490 frame.release();
491 }
492
404 private void reformat(MediaFormat format) { 493 private void reformat(MediaFormat format) {
405 outputThreadChecker.checkIsOnValidThread(); 494 outputThreadChecker.checkIsOnValidThread();
406 Logging.d(TAG, "Decoder format changed: " + format.toString()); 495 Logging.d(TAG, "Decoder format changed: " + format.toString());
407 final int newWidth; 496 final int newWidth;
408 final int newHeight; 497 final int newHeight;
409 if (format.containsKey(MEDIA_FORMAT_KEY_CROP_LEFT) 498 if (format.containsKey(MEDIA_FORMAT_KEY_CROP_LEFT)
410 && format.containsKey(MEDIA_FORMAT_KEY_CROP_RIGHT) 499 && format.containsKey(MEDIA_FORMAT_KEY_CROP_RIGHT)
411 && format.containsKey(MEDIA_FORMAT_KEY_CROP_BOTTOM) 500 && format.containsKey(MEDIA_FORMAT_KEY_CROP_BOTTOM)
412 && format.containsKey(MEDIA_FORMAT_KEY_CROP_TOP)) { 501 && format.containsKey(MEDIA_FORMAT_KEY_CROP_TOP)) {
413 newWidth = 1 + format.getInteger(MEDIA_FORMAT_KEY_CROP_RIGHT) 502 newWidth = 1 + format.getInteger(MEDIA_FORMAT_KEY_CROP_RIGHT)
414 - format.getInteger(MEDIA_FORMAT_KEY_CROP_LEFT); 503 - format.getInteger(MEDIA_FORMAT_KEY_CROP_LEFT);
415 newHeight = 1 + format.getInteger(MEDIA_FORMAT_KEY_CROP_BOTTOM) 504 newHeight = 1 + format.getInteger(MEDIA_FORMAT_KEY_CROP_BOTTOM)
416 - format.getInteger(MEDIA_FORMAT_KEY_CROP_TOP); 505 - format.getInteger(MEDIA_FORMAT_KEY_CROP_TOP);
417 } else { 506 } else {
418 newWidth = format.getInteger(MediaFormat.KEY_WIDTH); 507 newWidth = format.getInteger(MediaFormat.KEY_WIDTH);
419 newHeight = format.getInteger(MediaFormat.KEY_HEIGHT); 508 newHeight = format.getInteger(MediaFormat.KEY_HEIGHT);
420 } 509 }
421 // Compare to existing width, height, and save values under the dimension lo ck. 510 // Compare to existing width, height, and save values under the dimension lo ck.
422 synchronized (dimensionLock) { 511 synchronized (dimensionLock) {
423 if (hasDecodedFirstFrame && (width != newWidth || height != newHeight)) { 512 if (hasDecodedFirstFrame && (width != newWidth || height != newHeight)) {
424 stopOnOutputThread(new RuntimeException("Unexpected size change. Configu red " + width + "*" 513 stopOnOutputThread(new RuntimeException("Unexpected size change. Configu red " + width + "*"
425 + height + ". New " + newWidth + "*" + newHeight)); 514 + height + ". New " + newWidth + "*" + newHeight));
426 return; 515 return;
427 } 516 }
428 width = newWidth; 517 width = newWidth;
429 height = newHeight; 518 height = newHeight;
430 } 519 }
431 520
432 if (format.containsKey(MediaFormat.KEY_COLOR_FORMAT)) { 521 // Note: texture mode ignores colorFormat. Hence, if the texture helper is non-null, skip
522 // color format updates.
523 if (surfaceTextureHelper == null && format.containsKey(MediaFormat.KEY_COLOR _FORMAT)) {
433 colorFormat = format.getInteger(MediaFormat.KEY_COLOR_FORMAT); 524 colorFormat = format.getInteger(MediaFormat.KEY_COLOR_FORMAT);
434 Logging.d(TAG, "Color: 0x" + Integer.toHexString(colorFormat)); 525 Logging.d(TAG, "Color: 0x" + Integer.toHexString(colorFormat));
435 if (!isSupportedColorFormat(colorFormat)) { 526 if (!isSupportedColorFormat(colorFormat)) {
436 stopOnOutputThread(new IllegalStateException("Unsupported color format: " + colorFormat)); 527 stopOnOutputThread(new IllegalStateException("Unsupported color format: " + colorFormat));
437 return; 528 return;
438 } 529 }
439 } 530 }
440 531
441 // Save stride and sliceHeight under the dimension lock. 532 // Save stride and sliceHeight under the dimension lock.
442 synchronized (dimensionLock) { 533 synchronized (dimensionLock) {
(...skipping 69 matching lines...) Expand 10 before | Expand all | Expand 10 after
512 final int chromaWidth = (width + 1) / 2; 603 final int chromaWidth = (width + 1) / 2;
513 final int chromaHeight = (height + 1) / 2; 604 final int chromaHeight = (height + 1) / 2;
514 605
515 final int yPos = offset; 606 final int yPos = offset;
516 final int uPos = yPos + stride * sliceHeight; 607 final int uPos = yPos + stride * sliceHeight;
517 final int vPos = uPos + uvStride * sliceHeight / 2; 608 final int vPos = uPos + uvStride * sliceHeight / 2;
518 609
519 synchronized (activeOutputBuffersLock) { 610 synchronized (activeOutputBuffersLock) {
520 activeOutputBuffers++; 611 activeOutputBuffers++;
521 } 612 }
522 return new VideoFrame.I420Buffer() {
523 private int refCount = 1;
524 613
614 I420BufferImpl.ReleaseCallback callback = new I420BufferImpl.ReleaseCallback () {
525 @Override 615 @Override
526 public ByteBuffer getDataY() { 616 public void onRelease() {
527 ByteBuffer data = buffer.slice(); 617 codec.releaseOutputBuffer(outputBufferIndex, false);
528 data.position(yPos); 618 synchronized (activeOutputBuffersLock) {
529 data.limit(yPos + getStrideY() * height); 619 activeOutputBuffers--;
530 return data; 620 activeOutputBuffersLock.notifyAll();
531 }
532
533 @Override
534 public ByteBuffer getDataU() {
535 ByteBuffer data = buffer.slice();
536 data.position(uPos);
537 data.limit(uPos + getStrideU() * chromaHeight);
538 return data;
539 }
540
541 @Override
542 public ByteBuffer getDataV() {
543 ByteBuffer data = buffer.slice();
544 data.position(vPos);
545 data.limit(vPos + getStrideV() * chromaHeight);
546 return data;
547 }
548
549 @Override
550 public int getStrideY() {
551 return stride;
552 }
553
554 @Override
555 public int getStrideU() {
556 return uvStride;
557 }
558
559 @Override
560 public int getStrideV() {
561 return uvStride;
562 }
563
564 @Override
565 public int getWidth() {
566 return width;
567 }
568
569 @Override
570 public int getHeight() {
571 return height;
572 }
573
574 @Override
575 public VideoFrame.I420Buffer toI420() {
576 return this;
577 }
578
579 @Override
580 public void retain() {
581 refCount++;
582 }
583
584 @Override
585 public void release() {
586 refCount--;
587
588 if (refCount == 0) {
589 codec.releaseOutputBuffer(outputBufferIndex, false);
590 synchronized (activeOutputBuffersLock) {
591 activeOutputBuffers--;
592 activeOutputBuffersLock.notifyAll();
593 }
594 } 621 }
595 } 622 }
596 }; 623 };
624
625 return new I420BufferImpl(
626 buffer, width, height, yPos, stride, uPos, uvStride, vPos, uvStride, cal lback);
597 } 627 }
598 628
599 private static void copyI420(ByteBuffer src, int offset, VideoFrame.I420Buffer frameBuffer, 629 private static void copyI420(ByteBuffer src, int offset, VideoFrame.I420Buffer frameBuffer,
600 int stride, int sliceHeight, int width, int height) { 630 int stride, int sliceHeight, int width, int height) {
601 int uvStride = stride / 2; 631 int uvStride = stride / 2;
602 int chromaWidth = (width + 1) / 2; 632 int chromaWidth = (width + 1) / 2;
603 // Note that hardware truncates instead of rounding. WebRTC expects roundin g, so the last 633 // Note that hardware truncates instead of rounding. WebRTC expects roundin g, so the last
604 // row will be duplicated if the sliceHeight is odd. 634 // row will be duplicated if the sliceHeight is odd.
605 int chromaHeight = (sliceHeight % 2 == 0) ? (height + 1) / 2 : height / 2; 635 int chromaHeight = (sliceHeight % 2 == 0) ? (height + 1) / 2 : height / 2;
606 636
(...skipping 52 matching lines...) Expand 10 before | Expand all | Expand 10 after
659 dstPos += dstStride; 689 dstPos += dstStride;
660 } 690 }
661 } 691 }
662 692
663 private static void copyRow(ByteBuffer src, int srcPos, ByteBuffer dst, int ds tPos, int width) { 693 private static void copyRow(ByteBuffer src, int srcPos, ByteBuffer dst, int ds tPos, int width) {
664 for (int i = 0; i < width; ++i) { 694 for (int i = 0; i < width; ++i) {
665 dst.put(dstPos + i, src.get(srcPos + i)); 695 dst.put(dstPos + i, src.get(srcPos + i));
666 } 696 }
667 } 697 }
668 } 698 }
OLDNEW

Powered by Google App Engine
This is Rietveld 408576698