Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(238)

Side by Side Diff: webrtc/sdk/android/src/java/org/webrtc/HardwareVideoEncoder.java

Issue 3010623002: Change capture time format to nanoseconds in EncodedImage. (Closed)
Patch Set: Update decoder wrapper. Created 3 years, 3 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
OLDNEW
1 /* 1 /*
2 * Copyright 2017 The WebRTC project authors. All Rights Reserved. 2 * Copyright 2017 The WebRTC project authors. All Rights Reserved.
3 * 3 *
4 * Use of this source code is governed by a BSD-style license 4 * Use of this source code is governed by a BSD-style license
5 * that can be found in the LICENSE file in the root of the source 5 * that can be found in the LICENSE file in the root of the source
6 * tree. An additional intellectual property rights grant can be found 6 * tree. An additional intellectual property rights grant can be found
7 * in the file PATENTS. All contributing project authors may 7 * in the file PATENTS. All contributing project authors may
8 * be found in the AUTHORS file in the root of the source tree. 8 * be found in the AUTHORS file in the root of the source tree.
9 */ 9 */
10 10
11 package org.webrtc; 11 package org.webrtc;
12 12
13 import android.annotation.TargetApi; 13 import android.annotation.TargetApi;
14 import android.graphics.Matrix; 14 import android.graphics.Matrix;
15 import android.media.MediaCodec; 15 import android.media.MediaCodec;
16 import android.media.MediaCodecInfo; 16 import android.media.MediaCodecInfo;
17 import android.media.MediaFormat; 17 import android.media.MediaFormat;
18 import android.opengl.GLES20; 18 import android.opengl.GLES20;
19 import android.os.Bundle; 19 import android.os.Bundle;
20 import android.view.Surface; 20 import android.view.Surface;
21 import java.io.IOException; 21 import java.io.IOException;
22 import java.nio.ByteBuffer; 22 import java.nio.ByteBuffer;
23 import java.util.Arrays; 23 import java.util.Arrays;
24 import java.util.Deque; 24 import java.util.Deque;
25 import java.util.HashSet; 25 import java.util.HashSet;
26 import java.util.Set; 26 import java.util.Set;
27 import java.util.concurrent.LinkedBlockingDeque; 27 import java.util.concurrent.LinkedBlockingDeque;
28 import java.util.concurrent.TimeUnit;
28 29
29 /** Android hardware video encoder. */ 30 /** Android hardware video encoder. */
30 @TargetApi(19) 31 @TargetApi(19)
31 @SuppressWarnings("deprecation") // Cannot support API level 19 without using deprecated methods. 32 @SuppressWarnings("deprecation") // Cannot support API level 19 without using deprecated methods.
32 class HardwareVideoEncoder implements VideoEncoder { 33 class HardwareVideoEncoder implements VideoEncoder {
33 private static final String TAG = "HardwareVideoEncoder"; 34 private static final String TAG = "HardwareVideoEncoder";
34 35
35 // Bitrate modes - should be in sync with OMX_VIDEO_CONTROLRATETYPE defined 36 // Bitrate modes - should be in sync with OMX_VIDEO_CONTROLRATETYPE defined
36 // in OMX_Video.h 37 // in OMX_Video.h
37 private static final int VIDEO_ControlRateConstant = 2; 38 private static final int VIDEO_ControlRateConstant = 2;
(...skipping 10 matching lines...) Expand all
48 private static final int DEQUEUE_OUTPUT_BUFFER_TIMEOUT_US = 100000; 49 private static final int DEQUEUE_OUTPUT_BUFFER_TIMEOUT_US = 100000;
49 50
50 private final String codecName; 51 private final String codecName;
51 private final VideoCodecType codecType; 52 private final VideoCodecType codecType;
52 private final int colorFormat; 53 private final int colorFormat;
53 private final ColorFormat inputColorFormat; 54 private final ColorFormat inputColorFormat;
54 // Base interval for generating key frames. 55 // Base interval for generating key frames.
55 private final int keyFrameIntervalSec; 56 private final int keyFrameIntervalSec;
56 // Interval at which to force a key frame. Used to reduce color distortions caused by some 57 // Interval at which to force a key frame. Used to reduce color distortions caused by some
57 // Qualcomm video encoders. 58 // Qualcomm video encoders.
58 private final long forcedKeyFrameMs; 59 private final long forcedKeyFrameNs;
59 // Presentation timestamp of the last requested (or forced) key frame. 60 // Presentation timestamp of the last requested (or forced) key frame.
60 private long lastKeyFrameMs; 61 private long lastKeyFrameNs;
61 62
62 private final BitrateAdjuster bitrateAdjuster; 63 private final BitrateAdjuster bitrateAdjuster;
63 private int adjustedBitrate; 64 private int adjustedBitrate;
64 65
65 // A queue of EncodedImage.Builders that correspond to frames in the codec. These builders are 66 // A queue of EncodedImage.Builders that correspond to frames in the codec. These builders are
66 // pre-populated with all the information that can't be sent through MediaCodec. 67 // pre-populated with all the information that can't be sent through MediaCodec.
67 private final Deque<EncodedImage.Builder> outputBuilders; 68 private final Deque<EncodedImage.Builder> outputBuilders;
68 69
69 // Thread that delivers encoded frames to the user callback. 70 // Thread that delivers encoded frames to the user callback.
70 private Thread outputThread; 71 private Thread outputThread;
(...skipping 47 matching lines...) Expand 10 before | Expand all | Expand 10 after
118 this.codecName = codecName; 119 this.codecName = codecName;
119 this.codecType = codecType; 120 this.codecType = codecType;
120 this.colorFormat = colorFormat; 121 this.colorFormat = colorFormat;
121 if (textureContext == null) { 122 if (textureContext == null) {
122 this.inputColorFormat = ColorFormat.valueOf(colorFormat); 123 this.inputColorFormat = ColorFormat.valueOf(colorFormat);
123 } else { 124 } else {
124 // ColorFormat copies bytes between buffers. It is not used in texture mode. 125 // ColorFormat copies bytes between buffers. It is not used in texture mode.
125 this.inputColorFormat = null; 126 this.inputColorFormat = null;
126 } 127 }
127 this.keyFrameIntervalSec = keyFrameIntervalSec; 128 this.keyFrameIntervalSec = keyFrameIntervalSec;
128 this.forcedKeyFrameMs = forceKeyFrameIntervalMs; 129 this.forcedKeyFrameNs = TimeUnit.MILLISECONDS.toNanos(forceKeyFrameIntervalMs);
129 this.bitrateAdjuster = bitrateAdjuster; 130 this.bitrateAdjuster = bitrateAdjuster;
130 this.outputBuilders = new LinkedBlockingDeque<>(); 131 this.outputBuilders = new LinkedBlockingDeque<>();
131 this.textureContext = textureContext; 132 this.textureContext = textureContext;
132 } 133 }
133 134
134 @Override 135 @Override
135 public VideoCodecStatus initEncode(Settings settings, Callback callback) { 136 public VideoCodecStatus initEncode(Settings settings, Callback callback) {
136 return initEncodeInternal( 137 return initEncodeInternal(
137 settings.width, settings.height, settings.startBitrate, settings.maxFramerate, callback); 138 settings.width, settings.height, settings.startBitrate, settings.maxFramerate, callback);
138 } 139 }
139 140
140 private VideoCodecStatus initEncodeInternal( 141 private VideoCodecStatus initEncodeInternal(
141 int width, int height, int bitrateKbps, int fps, Callback callback) { 142 int width, int height, int bitrateKbps, int fps, Callback callback) {
142 Logging.d( 143 Logging.d(
143 TAG, "initEncode: " + width + " x " + height + ". @ " + bitrateKbps + "kbps. Fps: " + fps); 144 TAG, "initEncode: " + width + " x " + height + ". @ " + bitrateKbps + "kbps. Fps: " + fps);
144 this.width = width; 145 this.width = width;
145 this.height = height; 146 this.height = height;
146 if (bitrateKbps != 0 && fps != 0) { 147 if (bitrateKbps != 0 && fps != 0) {
147 bitrateAdjuster.setTargets(bitrateKbps * 1000, fps); 148 bitrateAdjuster.setTargets(bitrateKbps * 1000, fps);
148 } 149 }
149 adjustedBitrate = bitrateAdjuster.getAdjustedBitrateBps(); 150 adjustedBitrate = bitrateAdjuster.getAdjustedBitrateBps();
150 151
151 this.callback = callback; 152 this.callback = callback;
152 153
153 lastKeyFrameMs = -1; 154 lastKeyFrameNs = -1;
154 155
155 try { 156 try {
156 codec = MediaCodec.createByCodecName(codecName); 157 codec = MediaCodec.createByCodecName(codecName);
157 } catch (IOException | IllegalArgumentException e) { 158 } catch (IOException | IllegalArgumentException e) {
158 Logging.e(TAG, "Cannot create media encoder " + codecName); 159 Logging.e(TAG, "Cannot create media encoder " + codecName);
159 return VideoCodecStatus.ERROR; 160 return VideoCodecStatus.ERROR;
160 } 161 }
161 try { 162 try {
162 MediaFormat format = MediaFormat.createVideoFormat(codecType.mimeType(), width, height); 163 MediaFormat format = MediaFormat.createVideoFormat(codecType.mimeType(), width, height);
163 format.setInteger(MediaFormat.KEY_BIT_RATE, adjustedBitrate); 164 format.setInteger(MediaFormat.KEY_BIT_RATE, adjustedBitrate);
(...skipping 86 matching lines...) Expand 10 before | Expand all | Expand 10 after
250 return VideoCodecStatus.OK; // See webrtc bug 2887. 251 return VideoCodecStatus.OK; // See webrtc bug 2887.
251 } 252 }
252 253
253 boolean requestedKeyFrame = false; 254 boolean requestedKeyFrame = false;
254 for (EncodedImage.FrameType frameType : encodeInfo.frameTypes) { 255 for (EncodedImage.FrameType frameType : encodeInfo.frameTypes) {
255 if (frameType == EncodedImage.FrameType.VideoFrameKey) { 256 if (frameType == EncodedImage.FrameType.VideoFrameKey) {
256 requestedKeyFrame = true; 257 requestedKeyFrame = true;
257 } 258 }
258 } 259 }
259 260
260 // Frame timestamp rounded to the nearest microsecond and millisecond. 261 if (requestedKeyFrame || shouldForceKeyFrame(videoFrame.getTimestampNs())) {
261 long presentationTimestampUs = (videoFrame.getTimestampNs() + 500) / 1000; 262 requestKeyFrame(videoFrame.getTimestampNs());
262 long presentationTimestampMs = (presentationTimestampUs + 500) / 1000;
263 if (requestedKeyFrame || shouldForceKeyFrame(presentationTimestampMs)) {
264 requestKeyFrame(presentationTimestampMs);
265 } 263 }
266 264
267 VideoFrame.Buffer videoFrameBuffer = videoFrame.getBuffer(); 265 VideoFrame.Buffer videoFrameBuffer = videoFrame.getBuffer();
268 // Number of bytes in the video buffer. Y channel is sampled at one byte per pixel; U and V are 266 // Number of bytes in the video buffer. Y channel is sampled at one byte per pixel; U and V are
269 // subsampled at one byte per four pixels. 267 // subsampled at one byte per four pixels.
270 int bufferSize = videoFrameBuffer.getHeight() * videoFrameBuffer.getWidth() * 3 / 2; 268 int bufferSize = videoFrameBuffer.getHeight() * videoFrameBuffer.getWidth() * 3 / 2;
271 EncodedImage.Builder builder = EncodedImage.builder() 269 EncodedImage.Builder builder = EncodedImage.builder()
272 .setCaptureTimeMs(presentationTimestampMs ) 270 .setCaptureTimeNs(videoFrame.getTimestamp Ns())
273 .setCompleteFrame(true) 271 .setCompleteFrame(true)
274 .setEncodedWidth(videoFrame.getBuffer().getWidth()) 272 .setEncodedWidth(videoFrame.getBuffer().getWidth())
275 .setEncodedHeight(videoFrame.getBuffer().getHeight()) 273 .setEncodedHeight(videoFrame.getBuffer().getHeight())
276 .setRotation(videoFrame.getRotation()); 274 .setRotation(videoFrame.getRotation());
277 outputBuilders.offer(builder); 275 outputBuilders.offer(builder);
278 276
279 if (textureContext != null) { 277 if (textureContext != null) {
280 if (!(videoFrameBuffer instanceof VideoFrame.TextureBuffer)) { 278 if (!(videoFrameBuffer instanceof VideoFrame.TextureBuffer)) {
281 Logging.e(TAG, "Cannot encode non-texture buffer in texture mode"); 279 Logging.e(TAG, "Cannot encode non-texture buffer in texture mode");
282 return VideoCodecStatus.ERROR; 280 return VideoCodecStatus.ERROR;
283 } 281 }
284 VideoFrame.TextureBuffer textureBuffer = (VideoFrame.TextureBuffer) videoFrameBuffer; 282 VideoFrame.TextureBuffer textureBuffer = (VideoFrame.TextureBuffer) videoFrameBuffer;
285 return encodeTextureBuffer(videoFrame, textureBuffer); 283 return encodeTextureBuffer(videoFrame, textureBuffer);
286 } else { 284 } else {
287 if (videoFrameBuffer instanceof VideoFrame.TextureBuffer) { 285 if (videoFrameBuffer instanceof VideoFrame.TextureBuffer) {
288 Logging.w(TAG, "Encoding texture buffer in byte mode; this may be inefficient"); 286 Logging.w(TAG, "Encoding texture buffer in byte mode; this may be inefficient");
289 } 287 }
290 return encodeByteBuffer(videoFrame, videoFrameBuffer, bufferSize, presentationTimestampUs); 288 return encodeByteBuffer(videoFrame, videoFrameBuffer, bufferSize);
291 } 289 }
292 } 290 }
293 291
294 private VideoCodecStatus encodeTextureBuffer( 292 private VideoCodecStatus encodeTextureBuffer(
295 VideoFrame videoFrame, VideoFrame.TextureBuffer textureBuffer) { 293 VideoFrame videoFrame, VideoFrame.TextureBuffer textureBuffer) {
296 Matrix matrix = textureBuffer.getTransformMatrix(); 294 Matrix matrix = textureBuffer.getTransformMatrix();
297 float[] transformationMatrix = RendererCommon.convertMatrixFromAndroidGraphi csMatrix(matrix); 295 float[] transformationMatrix = RendererCommon.convertMatrixFromAndroidGraphi csMatrix(matrix);
298 296
299 try { 297 try {
300 textureEglBase.makeCurrent(); 298 textureEglBase.makeCurrent();
(...skipping 13 matching lines...) Expand all
314 textureEglBase.swapBuffers(videoFrame.getTimestampNs()); 312 textureEglBase.swapBuffers(videoFrame.getTimestampNs());
315 } catch (RuntimeException e) { 313 } catch (RuntimeException e) {
316 Logging.e(TAG, "encodeTexture failed", e); 314 Logging.e(TAG, "encodeTexture failed", e);
317 // Keep the output builders in sync with buffers in the codec. 315 // Keep the output builders in sync with buffers in the codec.
318 outputBuilders.pollLast(); 316 outputBuilders.pollLast();
319 return VideoCodecStatus.ERROR; 317 return VideoCodecStatus.ERROR;
320 } 318 }
321 return VideoCodecStatus.OK; 319 return VideoCodecStatus.OK;
322 } 320 }
323 321
324 private VideoCodecStatus encodeByteBuffer(VideoFrame videoFrame, 322 private VideoCodecStatus encodeByteBuffer(
325 VideoFrame.Buffer videoFrameBuffer, int bufferSize, long presentationTimestampUs) { 323 VideoFrame videoFrame, VideoFrame.Buffer videoFrameBuffer, int bufferSize) {
324 // Frame timestamp rounded to the nearest microsecond.
325 long presentationTimestampUs = (videoFrame.getTimestampNs() + 500) / 1000;
326
326 // No timeout. Don't block for an input buffer, drop frames if the encoder falls behind. 327 // No timeout. Don't block for an input buffer, drop frames if the encoder falls behind.
327 int index; 328 int index;
328 try { 329 try {
329 index = codec.dequeueInputBuffer(0 /* timeout */); 330 index = codec.dequeueInputBuffer(0 /* timeout */);
330 } catch (IllegalStateException e) { 331 } catch (IllegalStateException e) {
331 Logging.e(TAG, "dequeueInputBuffer failed", e); 332 Logging.e(TAG, "dequeueInputBuffer failed", e);
332 return VideoCodecStatus.FALLBACK_SOFTWARE; 333 return VideoCodecStatus.FALLBACK_SOFTWARE;
333 } 334 }
334 335
335 if (index == -1) { 336 if (index == -1) {
(...skipping 54 matching lines...) Expand 10 before | Expand all | Expand 10 after
390 391
391 private VideoCodecStatus resetCodec(int newWidth, int newHeight) { 392 private VideoCodecStatus resetCodec(int newWidth, int newHeight) {
392 VideoCodecStatus status = release(); 393 VideoCodecStatus status = release();
393 if (status != VideoCodecStatus.OK) { 394 if (status != VideoCodecStatus.OK) {
394 return status; 395 return status;
395 } 396 }
396 // Zero bitrate and framerate indicate not to change the targets. 397 // Zero bitrate and framerate indicate not to change the targets.
397 return initEncodeInternal(newWidth, newHeight, 0, 0, callback); 398 return initEncodeInternal(newWidth, newHeight, 0, 0, callback);
398 } 399 }
399 400
400 private boolean shouldForceKeyFrame(long presentationTimestampMs) { 401 private boolean shouldForceKeyFrame(long presentationTimestampNs) {
401 return forcedKeyFrameMs > 0 && presentationTimestampMs > lastKeyFrameMs + forcedKeyFrameMs; 402 return forcedKeyFrameNs > 0 && presentationTimestampNs > lastKeyFrameNs + forcedKeyFrameNs;
402 } 403 }
403 404
404 private void requestKeyFrame(long presentationTimestampMs) { 405 private void requestKeyFrame(long presentationTimestampNs) {
405 // Ideally MediaCodec would honor BUFFER_FLAG_SYNC_FRAME so we could 406 // Ideally MediaCodec would honor BUFFER_FLAG_SYNC_FRAME so we could
406 // indicate this in queueInputBuffer() below and guarantee _this_ frame 407 // indicate this in queueInputBuffer() below and guarantee _this_ frame
407 // be encoded as a key frame, but sadly that flag is ignored. Instead, 408 // be encoded as a key frame, but sadly that flag is ignored. Instead,
408 // we request a key frame "soon". 409 // we request a key frame "soon".
409 try { 410 try {
410 Bundle b = new Bundle(); 411 Bundle b = new Bundle();
411 b.putInt(MediaCodec.PARAMETER_KEY_REQUEST_SYNC_FRAME, 0); 412 b.putInt(MediaCodec.PARAMETER_KEY_REQUEST_SYNC_FRAME, 0);
412 codec.setParameters(b); 413 codec.setParameters(b);
413 } catch (IllegalStateException e) { 414 } catch (IllegalStateException e) {
414 Logging.e(TAG, "requestKeyFrame failed", e); 415 Logging.e(TAG, "requestKeyFrame failed", e);
415 return; 416 return;
416 } 417 }
417 lastKeyFrameMs = presentationTimestampMs; 418 lastKeyFrameNs = presentationTimestampNs;
418 } 419 }
419 420
420 private Thread createOutputThread() { 421 private Thread createOutputThread() {
421 return new Thread() { 422 return new Thread() {
422 @Override 423 @Override
423 public void run() { 424 public void run() {
424 while (running) { 425 while (running) {
425 deliverEncodedImage(); 426 deliverEncodedImage();
426 } 427 }
427 releaseCodecOnOutputThread(); 428 releaseCodecOnOutputThread();
(...skipping 122 matching lines...) Expand 10 before | Expand all | Expand 10 after
550 case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar: 551 case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar:
551 case MediaCodecInfo.CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar: 552 case MediaCodecInfo.CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar:
552 case MediaCodecUtils.COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m: 553 case MediaCodecUtils.COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m:
553 return NV12; 554 return NV12;
554 default: 555 default:
555 throw new IllegalArgumentException("Unsupported colorFormat: " + colorFormat); 556 throw new IllegalArgumentException("Unsupported colorFormat: " + colorFormat);
556 } 557 }
557 } 558 }
558 } 559 }
559 } 560 }
OLDNEW
« no previous file with comments | « webrtc/sdk/android/src/java/org/webrtc/HardwareVideoDecoder.java ('k') | webrtc/sdk/android/src/jni/videodecoderwrapper.h » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698