OLD | NEW |
---|---|
1 /* | 1 /* |
2 * Copyright 2017 The WebRTC project authors. All Rights Reserved. | 2 * Copyright 2017 The WebRTC project authors. All Rights Reserved. |
3 * | 3 * |
4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
9 */ | 9 */ |
10 | 10 |
11 package org.webrtc; | 11 package org.webrtc; |
12 | 12 |
13 import android.annotation.TargetApi; | 13 import android.annotation.TargetApi; |
14 import android.graphics.Matrix; | |
14 import android.media.MediaCodec; | 15 import android.media.MediaCodec; |
15 import android.media.MediaCodecInfo; | 16 import android.media.MediaCodecInfo; |
16 import android.media.MediaFormat; | 17 import android.media.MediaFormat; |
18 import android.opengl.GLES20; | |
17 import android.os.Bundle; | 19 import android.os.Bundle; |
20 import android.view.Surface; | |
18 import java.io.IOException; | 21 import java.io.IOException; |
19 import java.nio.ByteBuffer; | 22 import java.nio.ByteBuffer; |
20 import java.util.Arrays; | 23 import java.util.Arrays; |
21 import java.util.Deque; | 24 import java.util.Deque; |
22 import java.util.HashSet; | 25 import java.util.HashSet; |
23 import java.util.Set; | 26 import java.util.Set; |
24 import java.util.concurrent.LinkedBlockingDeque; | 27 import java.util.concurrent.LinkedBlockingDeque; |
25 | 28 |
26 /** Android hardware video encoder. */ | 29 /** Android hardware video encoder. */ |
27 @TargetApi(19) | 30 @TargetApi(19) |
(...skipping 38 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
66 // Thread that delivers encoded frames to the user callback. | 69 // Thread that delivers encoded frames to the user callback. |
67 private Thread outputThread; | 70 private Thread outputThread; |
68 | 71 |
70 // exit when the encoder stops. | 73 // exit when the encoder stops. |
70 // exit when the encoder stops. | 73 // exit when the encoder stops. |
71 private volatile boolean running = false; | 74 private volatile boolean running = false; |
72 // Any exception thrown during shutdown. The output thread releases the MediaCodec and uses this | 75 // Any exception thrown during shutdown. The output thread releases the MediaCodec and uses this |
73 // value to send exceptions thrown during release back to the encoder thread. | 76 // value to send exceptions thrown during release back to the encoder thread. |
74 private volatile Exception shutdownException = null; | 77 private volatile Exception shutdownException = null; |
75 | 78 |
79 // Surface objects for texture-mode encoding. | |
pthatcher1
2017/07/17 22:50:19
Would it make sense to write a comment explaining
mellem
2017/07/17 23:22:00
Done.
| |
80 private EglBase14.Context sharedContext; | |
81 private EglBase14 eglBase; | |
82 private Surface inputSurface; | |
83 private GlRectDrawer drawer; | |
pthatcher1
2017/07/17 22:50:19
It would be nice if these had a common name that m
mellem
2017/07/17 23:22:01
Done.
| |
84 | |
76 private MediaCodec codec; | 85 private MediaCodec codec; |
77 private Callback callback; | 86 private Callback callback; |
78 | 87 |
79 private int width; | 88 private int width; |
80 private int height; | 89 private int height; |
81 | 90 |
82 // Contents of the last observed config frame output by the MediaCodec. Used by H.264. | 91 // Contents of the last observed config frame output by the MediaCodec. Used by H.264. |
83 private ByteBuffer configBuffer = null; | 92 private ByteBuffer configBuffer = null; |
84 | 93 |
85 /** | 94 /** |
86 * Creates a new HardwareVideoEncoder with the given codecName, codecType, colorFormat, key frame | 95 * Creates a new HardwareVideoEncoder with the given codecName, codecType, colorFormat, key frame |
87 * intervals, and bitrateAdjuster. | 96 * intervals, and bitrateAdjuster. |
88 * | 97 * |
89 * @param codecName the hardware codec implementation to use | 98 * @param codecName the hardware codec implementation to use |
90 * @param codecType the type of the given video codec (eg. VP8, VP9, or H264) | 99 * @param codecType the type of the given video codec (eg. VP8, VP9, or H264) |
91 * @param colorFormat color format used by the input buffer | 100 * @param colorFormat color format used by the input buffer |
92 * @param keyFrameIntervalSec interval in seconds between key frames; used to initialize the codec | 101 * @param keyFrameIntervalSec interval in seconds between key frames; used to initialize the codec |
93 * @param forceKeyFrameIntervalMs interval at which to force a key frame if one is not requested; | 102 * @param forceKeyFrameIntervalMs interval at which to force a key frame if one is not requested; |
94 * used to reduce distortion caused by some codec implementations | 103 * used to reduce distortion caused by some codec implementations |
95 * @param bitrateAdjuster algorithm used to correct codec implementations that do not produce the | 104 * @param bitrateAdjuster algorithm used to correct codec implementations that do not produce the |
96 * desired bitrates | 105 * desired bitrates |
97 * @throws IllegalArgumentException if colorFormat is unsupported | 106 * @throws IllegalArgumentException if colorFormat is unsupported |
98 */ | 107 */ |
99 public HardwareVideoEncoder(String codecName, VideoCodecType codecType, int colorFormat, | 108 public HardwareVideoEncoder(String codecName, VideoCodecType codecType, int colorFormat, |
100 int keyFrameIntervalSec, int forceKeyFrameIntervalMs, BitrateAdjuster bitrateAdjuster) { | 109 int keyFrameIntervalSec, int forceKeyFrameIntervalMs, BitrateAdjuster bitrateAdjuster, |
110 EglBase14.Context sharedContext) { | |
101 this.codecName = codecName; | 111 this.codecName = codecName; |
102 this.codecType = codecType; | 112 this.codecType = codecType; |
103 this.colorFormat = colorFormat; | 113 this.colorFormat = colorFormat; |
104 this.inputColorFormat = ColorFormat.valueOf(colorFormat); | 114 if (sharedContext == null) { |
115 this.inputColorFormat = ColorFormat.valueOf(colorFormat); | |
116 } else { | |
117 this.inputColorFormat = null; | |
pthatcher1
2017/07/17 22:50:19
Can you write a comment explaining why the inputCo
mellem
2017/07/17 23:22:00
Done.
| |
118 } | |
105 this.keyFrameIntervalSec = keyFrameIntervalSec; | 119 this.keyFrameIntervalSec = keyFrameIntervalSec; |
106 this.forcedKeyFrameMs = forceKeyFrameIntervalMs; | 120 this.forcedKeyFrameMs = forceKeyFrameIntervalMs; |
107 this.bitrateAdjuster = bitrateAdjuster; | 121 this.bitrateAdjuster = bitrateAdjuster; |
108 this.outputBuilders = new LinkedBlockingDeque<>(); | 122 this.outputBuilders = new LinkedBlockingDeque<>(); |
123 this.sharedContext = sharedContext; | |
109 } | 124 } |
110 | 125 |
111 @Override | 126 @Override |
112 public VideoCodecStatus initEncode(Settings settings, Callback callback) { | 127 public VideoCodecStatus initEncode(Settings settings, Callback callback) { |
113 return initEncodeInternal( | 128 return initEncodeInternal( |
114 settings.width, settings.height, settings.startBitrate, settings.maxFramerate, callback); | 129 settings.width, settings.height, settings.startBitrate, settings.maxFramerate, callback); |
115 } | 130 } |
116 | 131 |
117 private VideoCodecStatus initEncodeInternal( | 132 private VideoCodecStatus initEncodeInternal( |
118 int width, int height, int bitrateKbps, int fps, Callback callback) { | 133 int width, int height, int bitrateKbps, int fps, Callback callback) { |
(...skipping 18 matching lines...) Expand all Loading... | |
137 } | 152 } |
138 try { | 153 try { |
139 MediaFormat format = MediaFormat.createVideoFormat(codecType.mimeType(), width, height); | 154 MediaFormat format = MediaFormat.createVideoFormat(codecType.mimeType(), width, height); |
140 format.setInteger(MediaFormat.KEY_BIT_RATE, adjustedBitrate); | 155 format.setInteger(MediaFormat.KEY_BIT_RATE, adjustedBitrate); |
141 format.setInteger(KEY_BITRATE_MODE, VIDEO_ControlRateConstant); | 156 format.setInteger(KEY_BITRATE_MODE, VIDEO_ControlRateConstant); |
142 format.setInteger(MediaFormat.KEY_COLOR_FORMAT, colorFormat); | 157 format.setInteger(MediaFormat.KEY_COLOR_FORMAT, colorFormat); |
143 format.setInteger(MediaFormat.KEY_FRAME_RATE, bitrateAdjuster.getAdjustedFramerate()); | 158 format.setInteger(MediaFormat.KEY_FRAME_RATE, bitrateAdjuster.getAdjustedFramerate()); |
144 format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, keyFrameIntervalSec); | 159 format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, keyFrameIntervalSec); |
145 Logging.d(TAG, "Format: " + format); | 160 Logging.d(TAG, "Format: " + format); |
146 codec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE); | 161 codec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE); |
162 | |
163 if (sharedContext != null) { | |
164 // Surface mode. | |
pthatcher1
2017/07/17 22:50:19
Above it was called "texture mode", not it says "s
mellem
2017/07/17 23:22:00
Done.
| |
165 eglBase = new EglBase14(sharedContext, EglBase.CONFIG_RECORDABLE); | |
166 inputSurface = codec.createInputSurface(); | |
167 eglBase.createSurface(inputSurface); | |
168 drawer = new GlRectDrawer(); | |
169 } | |
170 | |
147 codec.start(); | 171 codec.start(); |
148 } catch (IllegalStateException e) { | 172 } catch (IllegalStateException e) { |
149 Logging.e(TAG, "initEncode failed", e); | 173 Logging.e(TAG, "initEncode failed", e); |
150 release(); | 174 release(); |
151 return VideoCodecStatus.ERROR; | 175 return VideoCodecStatus.ERROR; |
152 } | 176 } |
153 | 177 |
154 running = true; | 178 running = true; |
155 outputThread = createOutputThread(); | 179 outputThread = createOutputThread(); |
156 outputThread.start(); | 180 outputThread.start(); |
157 | 181 |
158 return VideoCodecStatus.OK; | 182 return VideoCodecStatus.OK; |
159 } | 183 } |
160 | 184 |
161 @Override | 185 @Override |
162 public VideoCodecStatus release() { | 186 public VideoCodecStatus release() { |
163 try { | 187 try { |
188 if (outputThread == null) { | |
189 return VideoCodecStatus.OK; | |
190 } | |
pthatcher1
2017/07/17 22:50:19
Is this only true when we're in texture mode? If
mellem
2017/07/17 23:22:00
No, this is just a fix for a bug that I whacked.
| |
164 // The outputThread actually stops and releases the codec once running is false. | 191 // The outputThread actually stops and releases the codec once running is false. |
165 running = false; | 192 running = false; |
166 if (!ThreadUtils.joinUninterruptibly(outputThread, MEDIA_CODEC_RELEASE_TIMEOUT_MS)) { | 193 if (!ThreadUtils.joinUninterruptibly(outputThread, MEDIA_CODEC_RELEASE_TIMEOUT_MS)) { |
167 Logging.e(TAG, "Media encoder release timeout"); | 194 Logging.e(TAG, "Media encoder release timeout"); |
168 return VideoCodecStatus.TIMEOUT; | 195 return VideoCodecStatus.TIMEOUT; |
169 } | 196 } |
170 if (shutdownException != null) { | 197 if (shutdownException != null) { |
171 // Log the exception and turn it into an error. | 198 // Log the exception and turn it into an error. |
172 Logging.e(TAG, "Media encoder release exception", shutdownException); | 199 Logging.e(TAG, "Media encoder release exception", shutdownException); |
173 return VideoCodecStatus.ERROR; | 200 return VideoCodecStatus.ERROR; |
174 } | 201 } |
175 } finally { | 202 } finally { |
176 codec = null; | 203 codec = null; |
177 outputThread = null; | 204 outputThread = null; |
178 outputBuilders.clear(); | 205 outputBuilders.clear(); |
206 | |
207 if (drawer != null) { | |
208 drawer.release(); | |
209 drawer = null; | |
210 } | |
211 if (eglBase != null) { | |
212 eglBase.release(); | |
213 eglBase = null; | |
214 } | |
215 if (inputSurface != null) { | |
216 inputSurface.release(); | |
217 inputSurface = null; | |
218 } | |
pthatcher1
2017/07/17 22:50:19
Would it make sense to put the 4 texture things in
mellem
2017/07/17 23:22:01
It's possible to fail init partway through allocat
| |
179 } | 219 } |
180 return VideoCodecStatus.OK; | 220 return VideoCodecStatus.OK; |
181 } | 221 } |
182 | 222 |
183 @Override | 223 @Override |
184 public VideoCodecStatus encode(VideoFrame videoFrame, EncodeInfo encodeInfo) { | 224 public VideoCodecStatus encode(VideoFrame videoFrame, EncodeInfo encodeInfo) { |
185 if (codec == null) { | 225 if (codec == null) { |
186 return VideoCodecStatus.UNINITIALIZED; | 226 return VideoCodecStatus.UNINITIALIZED; |
187 } | 227 } |
188 | 228 |
189 // If input resolution changed, restart the codec with the new resolution. | 229 // If input resolution changed, restart the codec with the new resolution. |
190 int frameWidth = videoFrame.getWidth(); | 230 int frameWidth = videoFrame.getWidth(); |
191 int frameHeight = videoFrame.getHeight(); | 231 int frameHeight = videoFrame.getHeight(); |
192 if (frameWidth != width || frameHeight != height) { | 232 if (frameWidth != width || frameHeight != height) { |
193 VideoCodecStatus status = resetCodec(frameWidth, frameHeight); | 233 VideoCodecStatus status = resetCodec(frameWidth, frameHeight); |
194 if (status != VideoCodecStatus.OK) { | 234 if (status != VideoCodecStatus.OK) { |
195 return status; | 235 return status; |
196 } | 236 } |
197 } | 237 } |
198 | 238 |
199 // No timeout. Don't block for an input buffer, drop frames if the encoder falls behind. | |
200 int index; | |
201 try { | |
202 index = codec.dequeueInputBuffer(0 /* timeout */); | |
203 } catch (IllegalStateException e) { | |
204 Logging.e(TAG, "dequeueInputBuffer failed", e); | |
205 return VideoCodecStatus.FALLBACK_SOFTWARE; | |
206 } | |
207 | |
208 if (index == -1) { | |
209 // Encoder is falling behind. No input buffers available. Drop the frame. | |
210 Logging.e(TAG, "Dropped frame, no input buffers available"); | |
211 return VideoCodecStatus.OK; // See webrtc bug 2887. | |
212 } | |
213 if (outputBuilders.size() > MAX_ENCODER_Q_SIZE) { | 239 if (outputBuilders.size() > MAX_ENCODER_Q_SIZE) { |
214 // Too many frames in the encoder. Drop this frame. | 240 // Too many frames in the encoder. Drop this frame. |
215 Logging.e(TAG, "Dropped frame, encoder queue full"); | 241 Logging.e(TAG, "Dropped frame, encoder queue full"); |
216 return VideoCodecStatus.OK; // See webrtc bug 2887. | 242 return VideoCodecStatus.OK; // See webrtc bug 2887. |
217 } | 243 } |
218 | 244 |
219 // TODO(mellem): Add support for input surfaces and textures. | |
220 ByteBuffer buffer; | |
221 try { | |
222 buffer = codec.getInputBuffers()[index]; | |
223 } catch (IllegalStateException e) { | |
224 Logging.e(TAG, "getInputBuffers failed", e); | |
225 return VideoCodecStatus.FALLBACK_SOFTWARE; | |
226 } | |
227 VideoFrame.I420Buffer i420 = videoFrame.getBuffer().toI420(); | |
228 inputColorFormat.fillBufferFromI420(buffer, i420); | |
229 | |
230 boolean requestedKeyFrame = false; | 245 boolean requestedKeyFrame = false; |
231 for (EncodedImage.FrameType frameType : encodeInfo.frameTypes) { | 246 for (EncodedImage.FrameType frameType : encodeInfo.frameTypes) { |
232 if (frameType == EncodedImage.FrameType.VideoFrameKey) { | 247 if (frameType == EncodedImage.FrameType.VideoFrameKey) { |
233 requestedKeyFrame = true; | 248 requestedKeyFrame = true; |
234 } | 249 } |
235 } | 250 } |
236 | 251 |
237 // Frame timestamp rounded to the nearest microsecond and millisecond. | 252 // Frame timestamp rounded to the nearest microsecond and millisecond. |
238 long presentationTimestampUs = (videoFrame.getTimestampNs() + 500) / 1000; | 253 long presentationTimestampUs = (videoFrame.getTimestampNs() + 500) / 1000; |
239 long presentationTimestampMs = (presentationTimestampUs + 500) / 1000; | 254 long presentationTimestampMs = (presentationTimestampUs + 500) / 1000; |
240 if (requestedKeyFrame || shouldForceKeyFrame(presentationTimestampMs)) { | 255 if (requestedKeyFrame || shouldForceKeyFrame(presentationTimestampMs)) { |
241 requestKeyFrame(presentationTimestampMs); | 256 requestKeyFrame(presentationTimestampMs); |
242 } | 257 } |
243 | 258 |
259 VideoFrame.Buffer videoFrameBuffer = videoFrame.getBuffer(); | |
244 // Number of bytes in the video buffer. Y channel is sampled at one byte per pixel; U and V are | 260 // Number of bytes in the video buffer. Y channel is sampled at one byte per pixel; U and V are |
245 // subsampled at one byte per four pixels. | 261 // subsampled at one byte per four pixels. |
246 int bufferSize = videoFrame.getBuffer().getHeight() * videoFrame.getBuffer().getWidth() * 3 / 2; | 262 int bufferSize = videoFrameBuffer.getHeight() * videoFrameBuffer.getWidth() * 3 / 2; |
247 EncodedImage.Builder builder = EncodedImage.builder() | 263 EncodedImage.Builder builder = EncodedImage.builder() |
248 .setCaptureTimeMs(presentationTimestampMs ) | 264 .setCaptureTimeMs(presentationTimestampMs ) |
249 .setCompleteFrame(true) | 265 .setCompleteFrame(true) |
250 .setEncodedWidth(videoFrame.getWidth()) | 266 .setEncodedWidth(videoFrame.getWidth()) |
251 .setEncodedHeight(videoFrame.getHeight()) | 267 .setEncodedHeight(videoFrame.getHeight()) |
252 .setRotation(videoFrame.getRotation()); | 268 .setRotation(videoFrame.getRotation()); |
253 outputBuilders.offer(builder); | 269 outputBuilders.offer(builder); |
254 try { | 270 |
255 codec.queueInputBuffer( | 271 if (videoFrameBuffer instanceof VideoFrame.TextureBuffer) { |
sakal
2017/07/17 12:25:41
nit: I would prefer these cases in separate method
mellem
2017/07/17 17:49:29
Done.
| |
256 index, 0 /* offset */, bufferSize, presentationTimestampUs, 0 /* flags */); | 272 VideoFrame.TextureBuffer textureBuffer = (VideoFrame.TextureBuffer) videoFrameBuffer; |
257 } catch (IllegalStateException e) { | 273 |
258 Logging.e(TAG, "queueInputBuffer failed", e); | 274 // TODO(mellem): Put this matrix manipulation in a helper. |
sakal
2017/07/17 12:25:41
I have implemented such helper here https://chromi
mellem
2017/07/17 17:49:30
Done. Thanks for writing that.
| |
259 // Keep the output builders in sync with buffers in the codec. | 275 // The android.graphics.Matrix looks like this: |
260 outputBuilders.pollLast(); | 276 // [x1 y1 w1] |
261 // IllegalStateException thrown when the codec is in the wrong state. | 277 // [x2 y2 w2] |
262 return VideoCodecStatus.FALLBACK_SOFTWARE; | 278 // [x3 y3 w3] |
279 // We want to construct a matrix that looks like this: | |
280 // [x1 y1 0 w1] | |
281 // [x2 y2 0 w2] | |
282 // [ 0 0 1 0] | |
283 // [x3 y3 0 w3] | |
284 Matrix matrix = videoFrame.getTransformMatrix(); | |
285 float[] matrix3x3 = new float[9]; | |
286 matrix.getValues(matrix3x3); | |
287 | |
288 float[] transformationMatrix = new float[16]; | |
289 transformationMatrix[0 * 4 + 0] = matrix3x3[0 * 3 + 0]; | |
290 transformationMatrix[0 * 4 + 1] = matrix3x3[0 * 3 + 1]; | |
291 transformationMatrix[0 * 4 + 3] = matrix3x3[0 * 3 + 2]; | |
292 transformationMatrix[1 * 4 + 0] = matrix3x3[1 * 3 + 0]; | |
293 transformationMatrix[1 * 4 + 1] = matrix3x3[1 * 3 + 1]; | |
294 transformationMatrix[1 * 4 + 3] = matrix3x3[1 * 3 + 2]; | |
295 transformationMatrix[2 * 4 + 2] = 1; // Z-scale should be 1. | |
296 transformationMatrix[3 * 4 + 0] = matrix3x3[2 * 3 + 0]; | |
297 transformationMatrix[3 * 4 + 1] = matrix3x3[2 * 3 + 1]; | |
298 transformationMatrix[3 * 4 + 3] = matrix3x3[2 * 3 + 2]; | |
299 | |
300 try { | |
301 eglBase.makeCurrent(); | |
302 // TODO(perkj): glClear() shouldn't be necessary since every pixel is covered anyway, | |
303 // but it's a workaround for bug webrtc:5147. | |
304 GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT); | |
305 drawer.drawOes( | |
sakal
2017/07/17 12:25:42
nit: TextureFrame might be RGB frame in the future
mellem
2017/07/17 17:49:29
Done.
| |
306 textureBuffer.getTextureId(), transformationMatrix, width, height, 0, 0, width, height); | |
307 eglBase.swapBuffers(videoFrame.getTimestampNs()); | |
308 } catch (RuntimeException e) { | |
309 Logging.e(TAG, "encodeTexture failed", e); | |
310 // Keep the output builders in sync with buffers in the codec. | |
311 outputBuilders.pollLast(); | |
312 return VideoCodecStatus.ERROR; | |
313 } | |
314 return VideoCodecStatus.OK; | |
315 } else { | |
316 // No timeout. Don't block for an input buffer, drop frames if the encode r falls behind. | |
sakal
2017/07/17 12:25:42
Can we at least log a clear error if the mode does
mellem
2017/07/17 17:49:30
Logging added. We can actually process a texture
sakal
2017/07/18 08:46:13
We reconfigure the encoder before we hit that chec
mellem
2017/07/18 17:11:57
Acknowledged.
| |
317 int index; | |
318 try { | |
319 index = codec.dequeueInputBuffer(0 /* timeout */); | |
320 } catch (IllegalStateException e) { | |
321 Logging.e(TAG, "dequeueInputBuffer failed", e); | |
322 return VideoCodecStatus.FALLBACK_SOFTWARE; | |
sakal
2017/07/17 12:25:42
Can you just replace all FALLBACK_SOTWAREs with ER
mellem
2017/07/17 17:49:29
Done.
| |
323 } | |
324 | |
325 if (index == -1) { | |
326 // Encoder is falling behind. No input buffers available. Drop the frame. | |
327 Logging.e(TAG, "Dropped frame, no input buffers available"); | |
328 return VideoCodecStatus.OK; // See webrtc bug 2887. | |
329 } | |
330 | |
331 ByteBuffer buffer; | |
332 try { | |
333 buffer = codec.getInputBuffers()[index]; | |
334 } catch (IllegalStateException e) { | |
335 Logging.e(TAG, "getInputBuffers failed", e); | |
336 return VideoCodecStatus.FALLBACK_SOFTWARE; | |
337 } | |
338 VideoFrame.I420Buffer i420 = videoFrameBuffer.toI420(); | |
sakal
2017/07/17 12:25:41
toI420 will return "a new instance". Therefore, we
mellem
2017/07/17 17:49:29
Done.
| |
339 inputColorFormat.fillBufferFromI420(buffer, i420); | |
340 | |
341 try { | |
342 codec.queueInputBuffer( | |
343 index, 0 /* offset */, bufferSize, presentationTimestampUs, 0 /* flags */); | |
344 } catch (IllegalStateException e) { | |
345 Logging.e(TAG, "queueInputBuffer failed", e); | |
346 // Keep the output builders in sync with buffers in the codec. | |
347 outputBuilders.pollLast(); | |
348 // IllegalStateException thrown when the codec is in the wrong state. | |
349 return VideoCodecStatus.FALLBACK_SOFTWARE; | |
350 } | |
351 return VideoCodecStatus.OK; | |
263 } | 352 } |
264 return VideoCodecStatus.OK; | |
265 } | 353 } |
266 | 354 |
267 @Override | 355 @Override |
268 public VideoCodecStatus setChannelParameters(short packetLoss, long roundTripT imeMs) { | 356 public VideoCodecStatus setChannelParameters(short packetLoss, long roundTripT imeMs) { |
269 // No op. | 357 // No op. |
270 return VideoCodecStatus.OK; | 358 return VideoCodecStatus.OK; |
271 } | 359 } |
272 | 360 |
273 @Override | 361 @Override |
274 public VideoCodecStatus setRateAllocation(BitrateAllocation bitrateAllocation, int framerate) { | 362 public VideoCodecStatus setRateAllocation(BitrateAllocation bitrateAllocation, int framerate) { |
(...skipping 177 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
452 case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar: | 540 case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar: |
453 case MediaCodecInfo.CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar: | 541 case MediaCodecInfo.CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar: |
454 case MediaCodecUtils.COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m: | 542 case MediaCodecUtils.COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m: |
455 return NV12; | 543 return NV12; |
456 default: | 544 default: |
457 throw new IllegalArgumentException("Unsupported colorFormat: " + colorFormat); | 545 throw new IllegalArgumentException("Unsupported colorFormat: " + colorFormat); |
458 } | 546 } |
459 } | 547 } |
460 } | 548 } |
461 } | 549 } |
OLD | NEW |