Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(300)

Side by Side Diff: webrtc/sdk/android/src/java/org/webrtc/HardwareVideoDecoder.java

Issue 2977643002: Add texture support to HardwareVideoDecoder. (Closed)
Patch Set: Created 3 years, 5 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
OLDNEW
1 /* 1 /*
2 * Copyright 2017 The WebRTC project authors. All Rights Reserved. 2 * Copyright 2017 The WebRTC project authors. All Rights Reserved.
3 * 3 *
4 * Use of this source code is governed by a BSD-style license 4 * Use of this source code is governed by a BSD-style license
5 * that can be found in the LICENSE file in the root of the source 5 * that can be found in the LICENSE file in the root of the source
6 * tree. An additional intellectual property rights grant can be found 6 * tree. An additional intellectual property rights grant can be found
7 * in the file PATENTS. All contributing project authors may 7 * in the file PATENTS. All contributing project authors may
8 * be found in the AUTHORS file in the root of the source tree. 8 * be found in the AUTHORS file in the root of the source tree.
9 */ 9 */
10 10
11 package org.webrtc; 11 package org.webrtc;
12 12
13 import android.annotation.TargetApi; 13 import android.annotation.TargetApi;
14 import android.graphics.Matrix; 14 import android.graphics.Matrix;
15 import android.media.MediaCodec; 15 import android.media.MediaCodec;
16 import android.media.MediaCodecInfo.CodecCapabilities; 16 import android.media.MediaCodecInfo.CodecCapabilities;
17 import android.media.MediaFormat; 17 import android.media.MediaFormat;
18 import android.os.SystemClock; 18 import android.os.SystemClock;
19 import android.view.Surface;
19 import java.io.IOException; 20 import java.io.IOException;
20 import java.nio.ByteBuffer; 21 import java.nio.ByteBuffer;
21 import java.util.Arrays; 22 import java.util.Arrays;
22 import java.util.Deque; 23 import java.util.Deque;
24 import java.util.concurrent.BlockingDeque;
23 import java.util.concurrent.CountDownLatch; 25 import java.util.concurrent.CountDownLatch;
24 import java.util.concurrent.LinkedBlockingDeque; 26 import java.util.concurrent.LinkedBlockingDeque;
27 import java.util.concurrent.atomic.AtomicReference;
25 import org.webrtc.ThreadUtils.ThreadChecker; 28 import org.webrtc.ThreadUtils.ThreadChecker;
26 29
27 /** Android hardware video decoder. */ 30 /** Android hardware video decoder. */
28 @TargetApi(16) 31 @TargetApi(16)
29 @SuppressWarnings("deprecation") // Cannot support API 16 without using deprecated methods. 32 @SuppressWarnings("deprecation") // Cannot support API 16 without using deprecated methods.
30 class HardwareVideoDecoder implements VideoDecoder { 33 class HardwareVideoDecoder
34 implements VideoDecoder, SurfaceTextureHelper.OnTextureFrameAvailableListener {
31 private static final String TAG = "HardwareVideoDecoder"; 35 private static final String TAG = "HardwareVideoDecoder";
32 36
33 // TODO(magjed): Use MediaFormat.KEY_* constants when part of the public API. 37 // TODO(magjed): Use MediaFormat.KEY_* constants when part of the public API.
34 private static final String MEDIA_FORMAT_KEY_STRIDE = "stride"; 38 private static final String MEDIA_FORMAT_KEY_STRIDE = "stride";
35 private static final String MEDIA_FORMAT_KEY_SLICE_HEIGHT = "slice-height"; 39 private static final String MEDIA_FORMAT_KEY_SLICE_HEIGHT = "slice-height";
36 private static final String MEDIA_FORMAT_KEY_CROP_LEFT = "crop-left"; 40 private static final String MEDIA_FORMAT_KEY_CROP_LEFT = "crop-left";
37 private static final String MEDIA_FORMAT_KEY_CROP_RIGHT = "crop-right"; 41 private static final String MEDIA_FORMAT_KEY_CROP_RIGHT = "crop-right";
38 private static final String MEDIA_FORMAT_KEY_CROP_TOP = "crop-top"; 42 private static final String MEDIA_FORMAT_KEY_CROP_TOP = "crop-top";
39 private static final String MEDIA_FORMAT_KEY_CROP_BOTTOM = "crop-bottom"; 43 private static final String MEDIA_FORMAT_KEY_CROP_BOTTOM = "crop-bottom";
40 44
41 // MediaCodec.release() occasionally hangs. Release stops waiting and reports failure after 45 // MediaCodec.release() occasionally hangs. Release stops waiting and reports failure after
42 // this timeout. 46 // this timeout.
43 private static final int MEDIA_CODEC_RELEASE_TIMEOUT_MS = 5000; 47 private static final int MEDIA_CODEC_RELEASE_TIMEOUT_MS = 5000;
44 48
45 // WebRTC queues input frames quickly in the beginning on the call. Wait for input buffers with a 49 // WebRTC queues input frames quickly in the beginning on the call. Wait for input buffers with a
46 // long timeout (500 ms) to prevent this from causing the codec to return an error. 50 // long timeout (500 ms) to prevent this from causing the codec to return an error.
47 private static final int DEQUEUE_INPUT_TIMEOUT_US = 500000; 51 private static final int DEQUEUE_INPUT_TIMEOUT_US = 500000;
48 52
49 // Dequeuing an output buffer will block until a buffer is available (up to 100 milliseconds). 53 // Dequeuing an output buffer will block until a buffer is available (up to 100 milliseconds).
50 // If this timeout is exceeded, the output thread will unblock and check if the decoder is still 54 // If this timeout is exceeded, the output thread will unblock and check if the decoder is still
51 // running. If it is, it will block on dequeue again. Otherwise, it will stop and release the 55 // running. If it is, it will block on dequeue again. Otherwise, it will stop and release the
52 // MediaCodec. 56 // MediaCodec.
53 private static final int DEQUEUE_OUTPUT_BUFFER_TIMEOUT_US = 100000; 57 private static final int DEQUEUE_OUTPUT_BUFFER_TIMEOUT_US = 100000;
54 58
59 // Max number of output buffers queued before starting to drop decoded frames.
60 private static final int MAX_QUEUED_OUTPUTBUFFERS = 3;
61
55 private final String codecName; 62 private final String codecName;
56 private final VideoCodecType codecType; 63 private final VideoCodecType codecType;
57 64
58 private static class FrameInfo { 65 private static class FrameInfo {
59 final long decodeStartTimeMs; 66 final long decodeStartTimeMs;
60 final int rotation; 67 final int rotation;
61 68
62 FrameInfo(long decodeStartTimeMs, int rotation) { 69 FrameInfo(long decodeStartTimeMs, int rotation) {
63 this.decodeStartTimeMs = decodeStartTimeMs; 70 this.decodeStartTimeMs = decodeStartTimeMs;
64 this.rotation = rotation; 71 this.rotation = rotation;
(...skipping 28 matching lines...) Expand all
93 private int height; 100 private int height;
94 private int stride; 101 private int stride;
95 private int sliceHeight; 102 private int sliceHeight;
96 103
97 // Whether the decoder has finished the first frame. The codec may not change output dimensions 104 // Whether the decoder has finished the first frame. The codec may not change output dimensions
98 // after delivering the first frame. 105 // after delivering the first frame.
99 private boolean hasDecodedFirstFrame; 106 private boolean hasDecodedFirstFrame;
100 // Whether the decoder has seen a key frame. The first frame must be a key frame. 107 // Whether the decoder has seen a key frame. The first frame must be a key frame.
101 private boolean keyFrameRequired; 108 private boolean keyFrameRequired;
102 109
110 private final SurfaceTextureHelper surfaceTextureHelper;
111 private Surface surface = null;
112
113 private static class DecodedTextureBuffer {
114 final int index;
115 final int width;
116 final int height;
117 final int rotation;
118 final long presentationTimestampUs;
119 final Integer decodeTimeMs;
120
121 DecodedTextureBuffer(int index, int width, int height, int rotation,
122 long presentationTimestampUs, Integer decodeTimeMs) {
123 this.index = index;
124 this.width = width;
125 this.height = height;
126 this.rotation = rotation;
127 this.presentationTimestampUs = presentationTimestampUs;
128 this.decodeTimeMs = decodeTimeMs;
129 }
130 }
131
132 private final BlockingDeque<DecodedTextureBuffer> decodedTextureBuffers;
133 private final AtomicReference<DecodedTextureBuffer> renderedTextureBuffer;
134
135 private int droppedFrames = 0;
136
103 // Decoding proceeds asynchronously. This callback returns decoded frames to the caller. 137 // Decoding proceeds asynchronously. This callback returns decoded frames to the caller.
104 private Callback callback; 138 private Callback callback;
105 139
106 private MediaCodec codec = null; 140 private MediaCodec codec = null;
107 141
108 HardwareVideoDecoder(String codecName, VideoCodecType codecType, int colorFormat) { 142 HardwareVideoDecoder(String codecName, VideoCodecType codecType, int colorFormat,
143 SurfaceTextureHelper surfaceTextureHelper) {
sakal 2017/07/17 13:44:56 I don't think we have to pass in the SurfaceTextur
mellem 2017/07/17 21:57:15 Done.
109 if (!isSupportedColorFormat(colorFormat)) { 144 if (!isSupportedColorFormat(colorFormat)) {
110 throw new IllegalArgumentException("Unsupported color format: " + colorFormat); 145 throw new IllegalArgumentException("Unsupported color format: " + colorFormat);
111 } 146 }
112 this.codecName = codecName; 147 this.codecName = codecName;
113 this.codecType = codecType; 148 this.codecType = codecType;
114 this.colorFormat = colorFormat; 149 this.colorFormat = colorFormat;
150 this.surfaceTextureHelper = surfaceTextureHelper;
151 this.decodedTextureBuffers = new LinkedBlockingDeque<>();
152 this.renderedTextureBuffer = new AtomicReference<>();
115 this.frameInfos = new LinkedBlockingDeque<>(); 153 this.frameInfos = new LinkedBlockingDeque<>();
116 } 154 }
117 155
118 @Override 156 @Override
119 public VideoCodecStatus initDecode(Settings settings, Callback callback) { 157 public VideoCodecStatus initDecode(Settings settings, Callback callback) {
120 this.decoderThreadChecker = new ThreadChecker(); 158 this.decoderThreadChecker = new ThreadChecker();
121 return initDecodeInternal(settings.width, settings.height, callback); 159 return initDecodeInternal(settings.width, settings.height, callback);
122 } 160 }
123 161
124 private VideoCodecStatus initDecodeInternal(int width, int height, Callback ca llback) { 162 private VideoCodecStatus initDecodeInternal(int width, int height, Callback ca llback) {
125 decoderThreadChecker.checkIsOnValidThread(); 163 decoderThreadChecker.checkIsOnValidThread();
126 if (outputThread != null) { 164 if (outputThread != null) {
127 Logging.e(TAG, "initDecodeInternal called while the codec is already running"); 165 Logging.e(TAG, "initDecodeInternal called while the codec is already running");
128 return VideoCodecStatus.ERROR; 166 return VideoCodecStatus.ERROR;
129 } 167 }
130 168
131 // Note: it is not necessary to initialize dimensions under the lock, since the output thread 169 // Note: it is not necessary to initialize dimensions under the lock, since the output thread
132 // is not running. 170 // is not running.
133 this.callback = callback; 171 this.callback = callback;
134 this.width = width; 172 this.width = width;
135 this.height = height; 173 this.height = height;
136 174
137 stride = width; 175 stride = width;
138 sliceHeight = height; 176 sliceHeight = height;
139 hasDecodedFirstFrame = false; 177 hasDecodedFirstFrame = false;
140 keyFrameRequired = true; 178 keyFrameRequired = true;
179 droppedFrames = 0;
141 180
142 try { 181 try {
143 codec = MediaCodec.createByCodecName(codecName); 182 codec = MediaCodec.createByCodecName(codecName);
144 } catch (IOException | IllegalArgumentException e) { 183 } catch (IOException | IllegalArgumentException e) {
145 Logging.e(TAG, "Cannot create media decoder " + codecName); 184 Logging.e(TAG, "Cannot create media decoder " + codecName);
146 return VideoCodecStatus.ERROR; 185 return VideoCodecStatus.ERROR;
147 } 186 }
148 try { 187 try {
149 MediaFormat format = MediaFormat.createVideoFormat(codecType.mimeType(), w idth, height); 188 MediaFormat format = MediaFormat.createVideoFormat(codecType.mimeType(), w idth, height);
150 format.setInteger(MediaFormat.KEY_COLOR_FORMAT, colorFormat); 189 if (surfaceTextureHelper == null) {
151 codec.configure(format, null, null, 0); 190 format.setInteger(MediaFormat.KEY_COLOR_FORMAT, colorFormat);
191 } else {
192 surface = new Surface(surfaceTextureHelper.getSurfaceTexture());
193 surfaceTextureHelper.startListening(this);
194 }
195 codec.configure(format, surface, null, 0);
152 codec.start(); 196 codec.start();
153 } catch (IllegalStateException e) { 197 } catch (IllegalStateException e) {
154 Logging.e(TAG, "initDecode failed", e); 198 Logging.e(TAG, "initDecode failed", e);
155 release(); 199 release();
156 return VideoCodecStatus.ERROR; 200 return VideoCodecStatus.ERROR;
157 } 201 }
158 202
159 running = true; 203 running = true;
160 outputThread = createOutputThread(); 204 outputThread = createOutputThread();
161 outputThread.start(); 205 outputThread.start();
(...skipping 40 matching lines...) Expand 10 before | Expand all | Expand 10 after
202 if (frame.frameType != EncodedImage.FrameType.VideoFrameKey) { 246 if (frame.frameType != EncodedImage.FrameType.VideoFrameKey) {
203 Logging.e(TAG, "decode() - key frame required first"); 247 Logging.e(TAG, "decode() - key frame required first");
204 return VideoCodecStatus.ERROR; 248 return VideoCodecStatus.ERROR;
205 } 249 }
206 if (!frame.completeFrame) { 250 if (!frame.completeFrame) {
207 Logging.e(TAG, "decode() - complete frame required first"); 251 Logging.e(TAG, "decode() - complete frame required first");
208 return VideoCodecStatus.ERROR; 252 return VideoCodecStatus.ERROR;
209 } 253 }
210 } 254 }
211 255
212 // TODO(mellem): Support textures.
213 int index; 256 int index;
214 try { 257 try {
215 index = codec.dequeueInputBuffer(DEQUEUE_INPUT_TIMEOUT_US); 258 index = codec.dequeueInputBuffer(DEQUEUE_INPUT_TIMEOUT_US);
216 } catch (IllegalStateException e) { 259 } catch (IllegalStateException e) {
217 Logging.e(TAG, "dequeueInputBuffer failed", e); 260 Logging.e(TAG, "dequeueInputBuffer failed", e);
218 return VideoCodecStatus.ERROR; 261 return VideoCodecStatus.ERROR;
219 } 262 }
220 if (index < 0) { 263 if (index < 0) {
221 // Decoder is falling behind. No input buffers available. 264 // Decoder is falling behind. No input buffers available.
222 // The decoder can't simply drop frames; it might lose a key frame. 265 // The decoder can't simply drop frames; it might lose a key frame.
(...skipping 58 matching lines...) Expand 10 before | Expand all | Expand 10 after
281 // capture both the output thread's stack trace and this thread's stack trace. 324 // capture both the output thread's stack trace and this thread's stack trace.
282 Logging.e(TAG, "Media encoder release error", new RuntimeException(shutd ownException)); 325 Logging.e(TAG, "Media encoder release error", new RuntimeException(shutd ownException));
283 shutdownException = null; 326 shutdownException = null;
284 return VideoCodecStatus.ERROR; 327 return VideoCodecStatus.ERROR;
285 } 328 }
286 } finally { 329 } finally {
287 codec = null; 330 codec = null;
288 callback = null; 331 callback = null;
289 outputThread = null; 332 outputThread = null;
290 frameInfos.clear(); 333 frameInfos.clear();
334 if (surface != null) {
335 surface.release();
336 surface = null;
337 surfaceTextureHelper.stopListening();
338 surfaceTextureHelper.returnTextureFrame();
sakal 2017/07/17 13:44:56 I think we should wait that all frames are release
mellem 2017/07/17 21:57:15 Now that the decoder owns the SurfaceTextureHelper
339 }
340 decodedTextureBuffers.clear();
341 renderedTextureBuffer.set(null);
291 } 342 }
292 return VideoCodecStatus.OK; 343 return VideoCodecStatus.OK;
293 } 344 }
294 345
295 private VideoCodecStatus reinitDecode(int newWidth, int newHeight) { 346 private VideoCodecStatus reinitDecode(int newWidth, int newHeight) {
296 decoderThreadChecker.checkIsOnValidThread(); 347 decoderThreadChecker.checkIsOnValidThread();
297 VideoCodecStatus status = release(); 348 VideoCodecStatus status = release();
298 if (status != VideoCodecStatus.OK) { 349 if (status != VideoCodecStatus.OK) {
299 return status; 350 return status;
300 } 351 }
301 return initDecodeInternal(newWidth, newHeight, callback); 352 return initDecodeInternal(newWidth, newHeight, callback);
302 } 353 }
303 354
304 private Thread createOutputThread() { 355 private Thread createOutputThread() {
305 return new Thread("HardwareVideoDecoder.outputThread") { 356 return new Thread("HardwareVideoDecoder.outputThread") {
306 @Override 357 @Override
307 public void run() { 358 public void run() {
308 outputThreadChecker = new ThreadChecker(); 359 outputThreadChecker = new ThreadChecker();
309 while (running) { 360 while (running) {
310 deliverDecodedFrame(); 361 deliverDecodedFrame();
362 maybeRenderTextureBuffers();
311 } 363 }
312 releaseCodecOnOutputThread(); 364 releaseCodecOnOutputThread();
313 } 365 }
314 }; 366 };
315 } 367 }
316 368
317 private void deliverDecodedFrame() { 369 private void deliverDecodedFrame() {
318 outputThreadChecker.checkIsOnValidThread(); 370 outputThreadChecker.checkIsOnValidThread();
319 try { 371 try {
320 MediaCodec.BufferInfo info = new MediaCodec.BufferInfo(); 372 MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
(...skipping 15 matching lines...) Expand all
336 FrameInfo frameInfo = frameInfos.poll(); 388 FrameInfo frameInfo = frameInfos.poll();
337 Integer decodeTimeMs = null; 389 Integer decodeTimeMs = null;
338 int rotation = 0; 390 int rotation = 0;
339 if (frameInfo != null) { 391 if (frameInfo != null) {
340 decodeTimeMs = (int) (SystemClock.elapsedRealtime() - frameInfo.decodeSt artTimeMs); 392 decodeTimeMs = (int) (SystemClock.elapsedRealtime() - frameInfo.decodeSt artTimeMs);
341 rotation = frameInfo.rotation; 393 rotation = frameInfo.rotation;
342 } 394 }
343 395
344 hasDecodedFirstFrame = true; 396 hasDecodedFirstFrame = true;
345 397
346 // Load dimensions from shared memory under the dimension lock. 398 if (surfaceTextureHelper != null) {
347 int width, height, stride, sliceHeight; 399 deliverTextureFrame(result, info, rotation, decodeTimeMs);
348 synchronized (dimensionLock) { 400 } else {
349 width = this.width; 401 deliverByteFrame(result, info, rotation, decodeTimeMs);
350 height = this.height;
351 stride = this.stride;
352 sliceHeight = this.sliceHeight;
353 } 402 }
354 403
355 // Output must be at least width * height bytes for Y channel, plus (width / 2) * (height / 2)
356 // bytes for each of the U and V channels.
357 if (info.size < width * height * 3 / 2) {
358 Logging.e(TAG, "Insufficient output buffer size: " + info.size);
359 return;
360 }
361
362 if (info.size < stride * height * 3 / 2 && sliceHeight == height && stride > width) {
363 // Some codecs (Exynos) report an incorrect stride. Correct it here.
364 // Expected size == stride * height * 3 / 2. A bit of algebra gives the correct stride as
365 // 2 * size / (3 * height).
366 stride = info.size * 2 / (height * 3);
367 }
368
369 ByteBuffer buffer = codec.getOutputBuffers()[result];
370 buffer.position(info.offset);
371 buffer.limit(info.size);
372
373 final VideoFrame.I420Buffer frameBuffer;
374
375 // TODO(mellem): As an optimization, use libyuv via JNI to copy/reformatt ing data.
376 if (colorFormat == CodecCapabilities.COLOR_FormatYUV420Planar) {
377 if (sliceHeight % 2 == 0) {
378 frameBuffer =
379 createBufferFromI420(buffer, result, info.offset, stride, sliceHei ght, width, height);
380 } else {
381 frameBuffer = new I420BufferImpl(width, height);
382 // Optimal path is not possible because we have to copy the last rows of U- and V-planes.
383 copyI420(buffer, info.offset, frameBuffer, stride, sliceHeight, width, height);
384 codec.releaseOutputBuffer(result, false);
385 }
386 } else {
387 frameBuffer = new I420BufferImpl(width, height);
388 // All other supported color formats are NV12.
389 nv12ToI420(buffer, info.offset, frameBuffer, stride, sliceHeight, width, height);
390 codec.releaseOutputBuffer(result, false);
391 }
392
393 long presentationTimeNs = info.presentationTimeUs * 1000;
394 VideoFrame frame = new VideoFrame(frameBuffer, rotation, presentationTimeN s, new Matrix());
395
396 // Note that qp is parsed on the C++ side.
397 callback.onDecodedFrame(frame, decodeTimeMs, null /* qp */);
398 frame.release();
399 } catch (IllegalStateException e) { 404 } catch (IllegalStateException e) {
400 Logging.e(TAG, "deliverDecodedFrame failed", e); 405 Logging.e(TAG, "deliverDecodedFrame failed", e);
401 } 406 }
402 } 407 }
403 408
409 private void deliverTextureFrame(
410 int index, MediaCodec.BufferInfo info, int rotation, Integer decodeTimeMs) {
411 // Load dimensions from shared memory under the dimension lock.
412 int width, height;
413 synchronized (dimensionLock) {
414 width = this.width;
415 height = this.height;
416 }
417
418 DecodedTextureBuffer buffer = new DecodedTextureBuffer(
419 index, width, height, rotation, info.presentationTimeUs, decodeTimeMs);
420 decodedTextureBuffers.offerLast(buffer);
421
422 maybeRenderTextureBuffers();
423 maybeDropTextureBuffer();
424 }
425
426 private void maybeRenderTextureBuffers() {
427 // Possibly render more than one buffer, to allow the callee to catch up.
428 while (!decodedTextureBuffers.isEmpty() && !surfaceTextureHelper.isTextureInUse()) {
sakal 2017/07/17 13:44:56 I don't think we are going to get more than one lo
mellem 2017/07/17 21:57:15 I need to call updateTexImage() whenever I get a t
sakal 2017/07/18 08:53:40 Yeah, we of course would have had to implement dra
429 DecodedTextureBuffer buffer = decodedTextureBuffers.pollFirst();
430 renderedTextureBuffer.set(buffer);
431 codec.releaseOutputBuffer(buffer.index, true);
432 }
433 }
434
435 private void maybeDropTextureBuffer() {
436 if (decodedTextureBuffers.size() > MAX_QUEUED_OUTPUTBUFFERS) {
437 ++droppedFrames;
438 DecodedTextureBuffer buffer = decodedTextureBuffers.pollFirst();
439 Logging.w(TAG,
440 "Draining the decoder. Dropped frame with timestamp : " + buffer.presentationTimestampUs
441 + "(us). Total dropped frames: " + droppedFrames);
442 codec.releaseOutputBuffer(buffer.index, false);
443 }
444 }
445
446 @Override
447 public void onTextureFrameAvailable(int oesTextureId, float[] transformMatrix, long timestampNs) {
448 DecodedTextureBuffer info = renderedTextureBuffer.getAndSet(null);
449
450 OesTextureBuffer oesBuffer = new OesTextureBuffer(
451 oesTextureId, info.width, info.height, transformMatrix, surfaceTextureHe lper);
452
453 // SurfaceTexture's matrix looks like this:
sakal 2017/07/17 13:44:56 Please copy the helper method from my other CL her
mellem 2017/07/17 21:57:15 Done.
454 // [x1 y1 0 w1]
455 // [x2 y2 0 w2]
456 // [ 0 0 1 0]
457 // [x3 y3 0 w3]
458 // The android.graphics.Matrix looks like this:
459 // [x1 y1 w1]
460 // [x2 y2 w2]
461 // [x3 y3 w3]
462 float[] matrix3x3 = new float[9];
463 matrix3x3[0 * 3 + 0] = transformMatrix[0 * 4 + 0];
464 matrix3x3[0 * 3 + 1] = transformMatrix[0 * 4 + 1];
465 matrix3x3[0 * 3 + 2] = transformMatrix[0 * 4 + 3];
466 matrix3x3[1 * 3 + 0] = transformMatrix[1 * 4 + 0];
467 matrix3x3[1 * 3 + 1] = transformMatrix[1 * 4 + 1];
468 matrix3x3[1 * 3 + 2] = transformMatrix[1 * 4 + 3];
469 matrix3x3[2 * 3 + 0] = transformMatrix[3 * 4 + 0];
470 matrix3x3[2 * 3 + 1] = transformMatrix[3 * 4 + 1];
471 matrix3x3[2 * 3 + 2] = transformMatrix[3 * 4 + 3];
472
473 Matrix matrix = new Matrix();
474 matrix.setValues(matrix3x3);
475
476 VideoFrame frame =
477 new VideoFrame(oesBuffer, info.rotation, info.presentationTimestampUs * 1000, matrix);
478 callback.onDecodedFrame(frame, info.decodeTimeMs, null /* qp */);
479 frame.release();
480 }
481
482 private void deliverByteFrame(
483 int result, MediaCodec.BufferInfo info, int rotation, Integer decodeTimeMs ) {
484 // Load dimensions from shared memory under the dimension lock.
485 int width, height, stride, sliceHeight;
486 synchronized (dimensionLock) {
487 width = this.width;
488 height = this.height;
489 stride = this.stride;
490 sliceHeight = this.sliceHeight;
491 }
492
493 // Output must be at least width * height bytes for Y channel, plus (width / 2) * (height / 2)
494 // bytes for each of the U and V channels.
495 if (info.size < width * height * 3 / 2) {
496 Logging.e(TAG, "Insufficient output buffer size: " + info.size);
497 return;
498 }
499
500 if (info.size < stride * height * 3 / 2 && sliceHeight == height && stride > width) {
501 // Some codecs (Exynos) report an incorrect stride. Correct it here.
502 // Expected size == stride * height * 3 / 2. A bit of algebra gives the c orrect stride as
503 // 2 * size / (3 * height).
504 stride = info.size * 2 / (height * 3);
505 }
506
507 ByteBuffer buffer = codec.getOutputBuffers()[result];
508 buffer.position(info.offset);
509 buffer.limit(info.size);
510
511 final VideoFrame.I420Buffer frameBuffer;
512
513 // TODO(mellem): As an optimization, use libyuv via JNI to copy/reformattin g data.
514 if (colorFormat == CodecCapabilities.COLOR_FormatYUV420Planar) {
515 if (sliceHeight % 2 == 0) {
516 frameBuffer =
517 createBufferFromI420(buffer, result, info.offset, stride, sliceHeigh t, width, height);
518 } else {
519 frameBuffer = new I420BufferImpl(width, height);
520 // Optimal path is not possible because we have to copy the last rows of U- and V-planes.
521 copyI420(buffer, info.offset, frameBuffer, stride, sliceHeight, width, h eight);
522 codec.releaseOutputBuffer(result, false);
523 }
524 } else {
525 frameBuffer = new I420BufferImpl(width, height);
526 // All other supported color formats are NV12.
527 nv12ToI420(buffer, info.offset, frameBuffer, stride, sliceHeight, width, h eight);
528 codec.releaseOutputBuffer(result, false);
529 }
530
531 long presentationTimeNs = info.presentationTimeUs * 1000;
532 VideoFrame frame = new VideoFrame(frameBuffer, rotation, presentationTimeNs, new Matrix());
533
534 // Note that qp is parsed on the C++ side.
535 callback.onDecodedFrame(frame, decodeTimeMs, null /* qp */);
536 frame.release();
537 }
538
404 private void reformat(MediaFormat format) { 539 private void reformat(MediaFormat format) {
405 outputThreadChecker.checkIsOnValidThread(); 540 outputThreadChecker.checkIsOnValidThread();
406 Logging.d(TAG, "Decoder format changed: " + format.toString()); 541 Logging.d(TAG, "Decoder format changed: " + format.toString());
407 final int newWidth; 542 final int newWidth;
408 final int newHeight; 543 final int newHeight;
409 if (format.containsKey(MEDIA_FORMAT_KEY_CROP_LEFT) 544 if (format.containsKey(MEDIA_FORMAT_KEY_CROP_LEFT)
410 && format.containsKey(MEDIA_FORMAT_KEY_CROP_RIGHT) 545 && format.containsKey(MEDIA_FORMAT_KEY_CROP_RIGHT)
411 && format.containsKey(MEDIA_FORMAT_KEY_CROP_BOTTOM) 546 && format.containsKey(MEDIA_FORMAT_KEY_CROP_BOTTOM)
412 && format.containsKey(MEDIA_FORMAT_KEY_CROP_TOP)) { 547 && format.containsKey(MEDIA_FORMAT_KEY_CROP_TOP)) {
413 newWidth = 1 + format.getInteger(MEDIA_FORMAT_KEY_CROP_RIGHT) 548 newWidth = 1 + format.getInteger(MEDIA_FORMAT_KEY_CROP_RIGHT)
414 - format.getInteger(MEDIA_FORMAT_KEY_CROP_LEFT); 549 - format.getInteger(MEDIA_FORMAT_KEY_CROP_LEFT);
415 newHeight = 1 + format.getInteger(MEDIA_FORMAT_KEY_CROP_BOTTOM) 550 newHeight = 1 + format.getInteger(MEDIA_FORMAT_KEY_CROP_BOTTOM)
416 - format.getInteger(MEDIA_FORMAT_KEY_CROP_TOP); 551 - format.getInteger(MEDIA_FORMAT_KEY_CROP_TOP);
417 } else { 552 } else {
418 newWidth = format.getInteger(MediaFormat.KEY_WIDTH); 553 newWidth = format.getInteger(MediaFormat.KEY_WIDTH);
419 newHeight = format.getInteger(MediaFormat.KEY_HEIGHT); 554 newHeight = format.getInteger(MediaFormat.KEY_HEIGHT);
420 } 555 }
421 // Compare to existing width, height, and save values under the dimension lo ck. 556 // Compare to existing width, height, and save values under the dimension lo ck.
422 synchronized (dimensionLock) { 557 synchronized (dimensionLock) {
423 if (hasDecodedFirstFrame && (width != newWidth || height != newHeight)) { 558 if (hasDecodedFirstFrame && (width != newWidth || height != newHeight)) {
424 stopOnOutputThread(new RuntimeException("Unexpected size change. Configu red " + width + "*" 559 stopOnOutputThread(new RuntimeException("Unexpected size change. Configu red " + width + "*"
425 + height + ". New " + newWidth + "*" + newHeight)); 560 + height + ". New " + newWidth + "*" + newHeight));
426 return; 561 return;
427 } 562 }
428 width = newWidth; 563 width = newWidth;
429 height = newHeight; 564 height = newHeight;
430 } 565 }
431 566
432 if (format.containsKey(MediaFormat.KEY_COLOR_FORMAT)) { 567 // Note: texture mode ignores colorFormat. Hence, if the texture helper is non-null, skip
568 // color format updates.
569 if (surfaceTextureHelper == null && format.containsKey(MediaFormat.KEY_COLOR _FORMAT)) {
433 colorFormat = format.getInteger(MediaFormat.KEY_COLOR_FORMAT); 570 colorFormat = format.getInteger(MediaFormat.KEY_COLOR_FORMAT);
434 Logging.d(TAG, "Color: 0x" + Integer.toHexString(colorFormat)); 571 Logging.d(TAG, "Color: 0x" + Integer.toHexString(colorFormat));
435 if (!isSupportedColorFormat(colorFormat)) { 572 if (!isSupportedColorFormat(colorFormat)) {
436 stopOnOutputThread(new IllegalStateException("Unsupported color format: " + colorFormat)); 573 stopOnOutputThread(new IllegalStateException("Unsupported color format: " + colorFormat));
437 return; 574 return;
438 } 575 }
439 } 576 }
440 577
441 // Save stride and sliceHeight under the dimension lock. 578 // Save stride and sliceHeight under the dimension lock.
442 synchronized (dimensionLock) { 579 synchronized (dimensionLock) {
(...skipping 69 matching lines...) Expand 10 before | Expand all | Expand 10 after
512 final int chromaWidth = (width + 1) / 2; 649 final int chromaWidth = (width + 1) / 2;
513 final int chromaHeight = (height + 1) / 2; 650 final int chromaHeight = (height + 1) / 2;
514 651
515 final int yPos = offset; 652 final int yPos = offset;
516 final int uPos = yPos + stride * sliceHeight; 653 final int uPos = yPos + stride * sliceHeight;
517 final int vPos = uPos + uvStride * sliceHeight / 2; 654 final int vPos = uPos + uvStride * sliceHeight / 2;
518 655
519 synchronized (activeOutputBuffersLock) { 656 synchronized (activeOutputBuffersLock) {
520 activeOutputBuffers++; 657 activeOutputBuffers++;
521 } 658 }
522 return new VideoFrame.I420Buffer() {
523 private int refCount = 1;
524 659
660 I420BufferImpl.ReleaseCallback callback = new I420BufferImpl.ReleaseCallback () {
525 @Override 661 @Override
526 public ByteBuffer getDataY() { 662 public void onRelease() {
527 ByteBuffer data = buffer.slice(); 663 codec.releaseOutputBuffer(outputBufferIndex, false);
528 data.position(yPos); 664 synchronized (activeOutputBuffersLock) {
529 data.limit(yPos + getStrideY() * height); 665 activeOutputBuffers--;
530 return data; 666 activeOutputBuffersLock.notifyAll();
531 }
532
533 @Override
534 public ByteBuffer getDataU() {
535 ByteBuffer data = buffer.slice();
536 data.position(uPos);
537 data.limit(uPos + getStrideU() * chromaHeight);
538 return data;
539 }
540
541 @Override
542 public ByteBuffer getDataV() {
543 ByteBuffer data = buffer.slice();
544 data.position(vPos);
545 data.limit(vPos + getStrideV() * chromaHeight);
546 return data;
547 }
548
549 @Override
550 public int getStrideY() {
551 return stride;
552 }
553
554 @Override
555 public int getStrideU() {
556 return uvStride;
557 }
558
559 @Override
560 public int getStrideV() {
561 return uvStride;
562 }
563
564 @Override
565 public int getWidth() {
566 return width;
567 }
568
569 @Override
570 public int getHeight() {
571 return height;
572 }
573
574 @Override
575 public VideoFrame.I420Buffer toI420() {
576 return this;
577 }
578
579 @Override
580 public void retain() {
581 refCount++;
582 }
583
584 @Override
585 public void release() {
586 refCount--;
587
588 if (refCount == 0) {
589 codec.releaseOutputBuffer(outputBufferIndex, false);
590 synchronized (activeOutputBuffersLock) {
591 activeOutputBuffers--;
592 activeOutputBuffersLock.notifyAll();
593 }
594 } 667 }
595 } 668 }
596 }; 669 };
670
671 return new I420BufferImpl(
672 buffer, width, height, yPos, stride, uPos, uvStride, vPos, uvStride, cal lback);
597 } 673 }
598 674
599 private static void copyI420(ByteBuffer src, int offset, VideoFrame.I420Buffer frameBuffer, 675 private static void copyI420(ByteBuffer src, int offset, VideoFrame.I420Buffer frameBuffer,
600 int stride, int sliceHeight, int width, int height) { 676 int stride, int sliceHeight, int width, int height) {
601 int uvStride = stride / 2; 677 int uvStride = stride / 2;
602 int chromaWidth = (width + 1) / 2; 678 int chromaWidth = (width + 1) / 2;
603 // Note that hardware truncates instead of rounding. WebRTC expects roundin g, so the last 679 // Note that hardware truncates instead of rounding. WebRTC expects roundin g, so the last
604 // row will be duplicated if the sliceHeight is odd. 680 // row will be duplicated if the sliceHeight is odd.
605 int chromaHeight = (sliceHeight % 2 == 0) ? (height + 1) / 2 : height / 2; 681 int chromaHeight = (sliceHeight % 2 == 0) ? (height + 1) / 2 : height / 2;
606 682
(...skipping 52 matching lines...) Expand 10 before | Expand all | Expand 10 after
659 dstPos += dstStride; 735 dstPos += dstStride;
660 } 736 }
661 } 737 }
662 738
663 private static void copyRow(ByteBuffer src, int srcPos, ByteBuffer dst, int ds tPos, int width) { 739 private static void copyRow(ByteBuffer src, int srcPos, ByteBuffer dst, int ds tPos, int width) {
664 for (int i = 0; i < width; ++i) { 740 for (int i = 0; i < width; ++i) {
665 dst.put(dstPos + i, src.get(srcPos + i)); 741 dst.put(dstPos + i, src.get(srcPos + i));
666 } 742 }
667 } 743 }
668 } 744 }
OLDNEW

Powered by Google App Engine
This is Rietveld 408576698