Chromium Code Reviews

Unified Diff: webrtc/sdk/android/src/java/org/webrtc/HardwareVideoEncoder.java

Issue 2977153003: Add texture support to HardwareVideoEncoder. (Closed)
Patch Set: Fix logging and matrix helper (created 3 years, 5 months ago)
 /*
  * Copyright 2017 The WebRTC project authors. All Rights Reserved.
  *
  * Use of this source code is governed by a BSD-style license
  * that can be found in the LICENSE file in the root of the source
  * tree. An additional intellectual property rights grant can be found
  * in the file PATENTS. All contributing project authors may
  * be found in the AUTHORS file in the root of the source tree.
  */

 package org.webrtc;

 import android.annotation.TargetApi;
+import android.graphics.Matrix;
 import android.media.MediaCodec;
 import android.media.MediaCodecInfo;
 import android.media.MediaFormat;
+import android.opengl.GLES20;
 import android.os.Bundle;
+import android.view.Surface;
 import java.io.IOException;
 import java.nio.ByteBuffer;
 import java.util.Arrays;
 import java.util.Deque;
 import java.util.HashSet;
 import java.util.Set;
 import java.util.concurrent.LinkedBlockingDeque;

 /** Android hardware video encoder. */
 @TargetApi(19)
(...skipping 38 matching lines...)
   // Thread that delivers encoded frames to the user callback.
   private Thread outputThread;

   // Whether the encoder is running. Volatile so that the output thread can watch this value and
   // exit when the encoder stops.
   private volatile boolean running = false;
   // Any exception thrown during shutdown. The output thread releases the MediaCodec and uses this
   // value to send exceptions thrown during release back to the encoder thread.
   private volatile Exception shutdownException = null;

+  // Surface objects for texture-mode encoding.
+
+  // EGL context shared with the application. Used to access texture inputs.
+  private EglBase14.Context textureContext;
+  // EGL base wrapping the shared texture context. Holds hooks to both the shared context and the
+  // input surface. Making this base current allows textures from the context to be drawn onto the
+  // surface.
+  private EglBase14 textureEglBase;
+  // Input surface for the codec. The encoder will draw input textures onto this surface.
+  private Surface textureInputSurface;
+  // Drawer used to draw input textures onto the codec's input surface.
+  private GlRectDrawer textureDrawer;
+
   private MediaCodec codec;
   private Callback callback;

   private int width;
   private int height;

   // Contents of the last observed config frame output by the MediaCodec. Used by H.264.
   private ByteBuffer configBuffer = null;

   /**
    * Creates a new HardwareVideoEncoder with the given codecName, codecType, colorFormat, key frame
    * intervals, and bitrateAdjuster.
    *
    * @param codecName the hardware codec implementation to use
    * @param codecType the type of the given video codec (eg. VP8, VP9, or H264)
    * @param colorFormat color format used by the input buffer
    * @param keyFrameIntervalSec interval in seconds between key frames; used to initialize the codec
    * @param forceKeyFrameIntervalMs interval at which to force a key frame if one is not requested;
    *     used to reduce distortion caused by some codec implementations
    * @param bitrateAdjuster algorithm used to correct codec implementations that do not produce the
    *     desired bitrates
    * @throws IllegalArgumentException if colorFormat is unsupported
    */
   public HardwareVideoEncoder(String codecName, VideoCodecType codecType, int colorFormat,
-      int keyFrameIntervalSec, int forceKeyFrameIntervalMs, BitrateAdjuster bitrateAdjuster) {
+      int keyFrameIntervalSec, int forceKeyFrameIntervalMs, BitrateAdjuster bitrateAdjuster,
+      EglBase14.Context textureContext) {
     this.codecName = codecName;
     this.codecType = codecType;
     this.colorFormat = colorFormat;
-    this.inputColorFormat = ColorFormat.valueOf(colorFormat);
+    if (textureContext == null) {
+      this.inputColorFormat = ColorFormat.valueOf(colorFormat);
+    } else {
+      // ColorFormat copies bytes between buffers. It is not used in texture mode.
+      this.inputColorFormat = null;
+    }
     this.keyFrameIntervalSec = keyFrameIntervalSec;
     this.forcedKeyFrameMs = forceKeyFrameIntervalMs;
     this.bitrateAdjuster = bitrateAdjuster;
     this.outputBuilders = new LinkedBlockingDeque<>();
+    this.textureContext = textureContext;
   }

   @Override
   public VideoCodecStatus initEncode(Settings settings, Callback callback) {
     return initEncodeInternal(
         settings.width, settings.height, settings.startBitrate, settings.maxFramerate, callback);
   }

   private VideoCodecStatus initEncodeInternal(
       int width, int height, int bitrateKbps, int fps, Callback callback) {
(...skipping 18 matching lines...)
     }
     try {
       MediaFormat format = MediaFormat.createVideoFormat(codecType.mimeType(), width, height);
       format.setInteger(MediaFormat.KEY_BIT_RATE, adjustedBitrate);
       format.setInteger(KEY_BITRATE_MODE, VIDEO_ControlRateConstant);
       format.setInteger(MediaFormat.KEY_COLOR_FORMAT, colorFormat);
       format.setInteger(MediaFormat.KEY_FRAME_RATE, bitrateAdjuster.getAdjustedFramerate());
       format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, keyFrameIntervalSec);
       Logging.d(TAG, "Format: " + format);
       codec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
+
+      if (textureContext != null) {
+        // Texture mode.
+        textureEglBase = new EglBase14(textureContext, EglBase.CONFIG_RECORDABLE);
+        textureInputSurface = codec.createInputSurface();
+        textureEglBase.createSurface(textureInputSurface);
+        textureDrawer = new GlRectDrawer();
+      }
+
       codec.start();
     } catch (IllegalStateException e) {
       Logging.e(TAG, "initEncode failed", e);
       release();
       return VideoCodecStatus.ERROR;
     }

     running = true;
     outputThread = createOutputThread();
     outputThread.start();

     return VideoCodecStatus.OK;
   }

   @Override
   public VideoCodecStatus release() {
     try {
+      if (outputThread == null) {
+        return VideoCodecStatus.OK;
+      }
       // The outputThread actually stops and releases the codec once running is false.
       running = false;
       if (!ThreadUtils.joinUninterruptibly(outputThread, MEDIA_CODEC_RELEASE_TIMEOUT_MS)) {
         Logging.e(TAG, "Media encoder release timeout");
         return VideoCodecStatus.TIMEOUT;
       }
       if (shutdownException != null) {
         // Log the exception and turn it into an error.
         Logging.e(TAG, "Media encoder release exception", shutdownException);
         return VideoCodecStatus.ERROR;
       }
     } finally {
       codec = null;
       outputThread = null;
       outputBuilders.clear();
+
+      if (textureDrawer != null) {
+        textureDrawer.release();
+        textureDrawer = null;
+      }
+      if (textureEglBase != null) {
+        textureEglBase.release();
+        textureEglBase = null;
+      }
+      if (textureInputSurface != null) {
+        textureInputSurface.release();
+        textureInputSurface = null;
+      }
     }
     return VideoCodecStatus.OK;
   }

   @Override
   public VideoCodecStatus encode(VideoFrame videoFrame, EncodeInfo encodeInfo) {
     if (codec == null) {
       return VideoCodecStatus.UNINITIALIZED;
     }

     // If input resolution changed, restart the codec with the new resolution.
     int frameWidth = videoFrame.getWidth();
     int frameHeight = videoFrame.getHeight();
     if (frameWidth != width || frameHeight != height) {
       VideoCodecStatus status = resetCodec(frameWidth, frameHeight);
       if (status != VideoCodecStatus.OK) {
         return status;
       }
     }

-    // No timeout.  Don't block for an input buffer, drop frames if the encoder falls behind.
-    int index;
-    try {
-      index = codec.dequeueInputBuffer(0 /* timeout */);
-    } catch (IllegalStateException e) {
-      Logging.e(TAG, "dequeueInputBuffer failed", e);
-      return VideoCodecStatus.FALLBACK_SOFTWARE;
-    }
-
-    if (index == -1) {
-      // Encoder is falling behind.  No input buffers available.  Drop the frame.
-      Logging.e(TAG, "Dropped frame, no input buffers available");
-      return VideoCodecStatus.OK; // See webrtc bug 2887.
-    }
     if (outputBuilders.size() > MAX_ENCODER_Q_SIZE) {
       // Too many frames in the encoder.  Drop this frame.
       Logging.e(TAG, "Dropped frame, encoder queue full");
       return VideoCodecStatus.OK; // See webrtc bug 2887.
     }

-    // TODO(mellem):  Add support for input surfaces and textures.
-    ByteBuffer buffer;
-    try {
-      buffer = codec.getInputBuffers()[index];
-    } catch (IllegalStateException e) {
-      Logging.e(TAG, "getInputBuffers failed", e);
-      return VideoCodecStatus.FALLBACK_SOFTWARE;
-    }
-    VideoFrame.I420Buffer i420 = videoFrame.getBuffer().toI420();
-    inputColorFormat.fillBufferFromI420(buffer, i420);
-
     boolean requestedKeyFrame = false;
     for (EncodedImage.FrameType frameType : encodeInfo.frameTypes) {
       if (frameType == EncodedImage.FrameType.VideoFrameKey) {
         requestedKeyFrame = true;
       }
     }

     // Frame timestamp rounded to the nearest microsecond and millisecond.
     long presentationTimestampUs = (videoFrame.getTimestampNs() + 500) / 1000;
     long presentationTimestampMs = (presentationTimestampUs + 500) / 1000;
     if (requestedKeyFrame || shouldForceKeyFrame(presentationTimestampMs)) {
       requestKeyFrame(presentationTimestampMs);
     }

+    VideoFrame.Buffer videoFrameBuffer = videoFrame.getBuffer();
     // Number of bytes in the video buffer. Y channel is sampled at one byte per pixel; U and V are
     // subsampled at one byte per four pixels.
-    int bufferSize = videoFrame.getBuffer().getHeight() * videoFrame.getBuffer().getWidth() * 3 / 2;
+    int bufferSize = videoFrameBuffer.getHeight() * videoFrameBuffer.getWidth() * 3 / 2;
     EncodedImage.Builder builder = EncodedImage.builder()
                                        .setCaptureTimeMs(presentationTimestampMs)
                                        .setCompleteFrame(true)
                                        .setEncodedWidth(videoFrame.getWidth())
                                        .setEncodedHeight(videoFrame.getHeight())
                                        .setRotation(videoFrame.getRotation());
     outputBuilders.offer(builder);
+
+    if (textureContext != null) {
+      if (!(videoFrameBuffer instanceof VideoFrame.TextureBuffer)) {
+        Logging.e(TAG, "Cannot encode non-texture buffer in texture mode");
+        return VideoCodecStatus.ERROR;
+      }
+      VideoFrame.TextureBuffer textureBuffer = (VideoFrame.TextureBuffer) videoFrameBuffer;
+      return encodeTextureBuffer(videoFrame, textureBuffer);
+    } else {
+      if (videoFrameBuffer instanceof VideoFrame.TextureBuffer) {
+        Logging.w(TAG, "Encoding texture buffer in byte mode; this may be inefficient");
+      }
+      return encodeByteBuffer(videoFrame, videoFrameBuffer, bufferSize, presentationTimestampUs);
+    }
+  }
+
+  private VideoCodecStatus encodeTextureBuffer(
+      VideoFrame videoFrame, VideoFrame.TextureBuffer textureBuffer) {
+    Matrix matrix = videoFrame.getTransformMatrix();
+    float[] transformationMatrix = RendererCommon.convertMatrixFromAndroidGraphicsMatrix(matrix);
+
+    try {
+      textureEglBase.makeCurrent();
+      // TODO(perkj): glClear() shouldn't be necessary since every pixel is covered anyway,
+      // but it's a workaround for bug webrtc:5147.
+      GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
+      switch (textureBuffer.getType()) {
+        case OES:
+          textureDrawer.drawOes(textureBuffer.getTextureId(), transformationMatrix, width, height,
+              0, 0, width, height);
+          break;
+        case RGB:
+          textureDrawer.drawRgb(textureBuffer.getTextureId(), transformationMatrix, width, height,
+              0, 0, width, height);
+          break;
+      }
+      textureEglBase.swapBuffers(videoFrame.getTimestampNs());
+    } catch (RuntimeException e) {
+      Logging.e(TAG, "encodeTexture failed", e);
+      // Keep the output builders in sync with buffers in the codec.
+      outputBuilders.pollLast();
+      return VideoCodecStatus.ERROR;
+    }
+    return VideoCodecStatus.OK;
+  }
+
+  private VideoCodecStatus encodeByteBuffer(VideoFrame videoFrame,
+      VideoFrame.Buffer videoFrameBuffer, int bufferSize, long presentationTimestampUs) {
+    // No timeout.  Don't block for an input buffer, drop frames if the encoder falls behind.
+    int index;
+    try {
+      index = codec.dequeueInputBuffer(0 /* timeout */);
+    } catch (IllegalStateException e) {
+      Logging.e(TAG, "dequeueInputBuffer failed", e);
+      return VideoCodecStatus.FALLBACK_SOFTWARE;
+    }
+
+    if (index == -1) {
+      // Encoder is falling behind.  No input buffers available.  Drop the frame.
+      Logging.e(TAG, "Dropped frame, no input buffers available");
+      return VideoCodecStatus.OK; // See webrtc bug 2887.
+    }
+
+    ByteBuffer buffer;
+    try {
+      buffer = codec.getInputBuffers()[index];
+    } catch (IllegalStateException e) {
+      Logging.e(TAG, "getInputBuffers failed", e);
+      return VideoCodecStatus.ERROR;
+    }
+    VideoFrame.I420Buffer i420 = videoFrameBuffer.toI420();
+    inputColorFormat.fillBufferFromI420(buffer, i420);
+    i420.release();
+
     try {
       codec.queueInputBuffer(
           index, 0 /* offset */, bufferSize, presentationTimestampUs, 0 /* flags */);
     } catch (IllegalStateException e) {
       Logging.e(TAG, "queueInputBuffer failed", e);
       // Keep the output builders in sync with buffers in the codec.
       outputBuilders.pollLast();
       // IllegalStateException thrown when the codec is in the wrong state.
-      return VideoCodecStatus.FALLBACK_SOFTWARE;
+      return VideoCodecStatus.ERROR;
     }
     return VideoCodecStatus.OK;
   }

   @Override
   public VideoCodecStatus setChannelParameters(short packetLoss, long roundTripTimeMs) {
     // No op.
     return VideoCodecStatus.OK;
   }

(...skipping 179 matching lines...)
         case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar:
         case MediaCodecInfo.CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar:
         case MediaCodecUtils.COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m:
           return NV12;
         default:
           throw new IllegalArgumentException("Unsupported colorFormat: " + colorFormat);
       }
     }
   }
 }
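
For reference, a minimal caller-side sketch of the new constructor parameter, assuming typical values: a non-null EglBase14.Context selects the texture path added in this CL, while null keeps the original byte-buffer path. The codec name, color formats, key-frame intervals, and the FramerateBitrateAdjuster used below are illustrative assumptions, not values taken from this patch.

// Hypothetical caller-side sketch (not part of this CL). All concrete values are assumed.
package org.webrtc;

import android.media.MediaCodecInfo;

class HardwareVideoEncoderTextureExample {
  static HardwareVideoEncoder createTextureEncoder(EglBase14.Context sharedContext) {
    // Texture mode: the encoder creates an input Surface via MediaCodec.createInputSurface()
    // and draws incoming VideoFrame.TextureBuffers onto it; inputColorFormat stays null.
    return new HardwareVideoEncoder("OMX.qcom.video.encoder.vp8" /* assumed codec name */,
        VideoCodecType.VP8, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface /* assumed */,
        20 /* keyFrameIntervalSec */, 3000 /* forceKeyFrameIntervalMs */,
        new FramerateBitrateAdjuster() /* assumed adjuster implementation */, sharedContext);
  }

  static HardwareVideoEncoder createByteBufferEncoder() {
    // Byte-buffer mode: frames are converted to I420 and copied into MediaCodec input buffers
    // through ColorFormat.valueOf(colorFormat), as before this CL.
    return new HardwareVideoEncoder("OMX.qcom.video.encoder.vp8" /* assumed codec name */,
        VideoCodecType.VP8, MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar,
        20 /* keyFrameIntervalSec */, 3000 /* forceKeyFrameIntervalMs */,
        new FramerateBitrateAdjuster() /* assumed adjuster implementation */,
        null /* textureContext: byte-buffer mode */);
  }
}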