Chromium Code Reviews
Unified Diff: webrtc/sdk/android/src/java/org/webrtc/HardwareVideoDecoder.java

Issue 2977643002: Add texture support to HardwareVideoDecoder. (Closed)
Patch Set: Remove unused variables, add comments, and fix the matrix helper (created 3 years, 5 months ago)
 /*
  * Copyright 2017 The WebRTC project authors. All Rights Reserved.
  *
  * Use of this source code is governed by a BSD-style license
  * that can be found in the LICENSE file in the root of the source
  * tree. An additional intellectual property rights grant can be found
  * in the file PATENTS. All contributing project authors may
  * be found in the AUTHORS file in the root of the source tree.
  */

 package org.webrtc;

 import android.annotation.TargetApi;
 import android.graphics.Matrix;
 import android.media.MediaCodec;
 import android.media.MediaCodecInfo.CodecCapabilities;
 import android.media.MediaFormat;
 import android.os.SystemClock;
+import android.view.Surface;
 import java.io.IOException;
 import java.nio.ByteBuffer;
 import java.util.Arrays;
 import java.util.Deque;
 import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.LinkedBlockingDeque;
 import org.webrtc.ThreadUtils.ThreadChecker;

 /** Android hardware video decoder. */
 @TargetApi(16)
 @SuppressWarnings("deprecation") // Cannot support API 16 without using deprecated methods.
-class HardwareVideoDecoder implements VideoDecoder {
+class HardwareVideoDecoder
+    implements VideoDecoder, SurfaceTextureHelper.OnTextureFrameAvailableListener {
   private static final String TAG = "HardwareVideoDecoder";

   // TODO(magjed): Use MediaFormat.KEY_* constants when part of the public API.
   private static final String MEDIA_FORMAT_KEY_STRIDE = "stride";
   private static final String MEDIA_FORMAT_KEY_SLICE_HEIGHT = "slice-height";
   private static final String MEDIA_FORMAT_KEY_CROP_LEFT = "crop-left";
   private static final String MEDIA_FORMAT_KEY_CROP_RIGHT = "crop-right";
   private static final String MEDIA_FORMAT_KEY_CROP_TOP = "crop-top";
   private static final String MEDIA_FORMAT_KEY_CROP_BOTTOM = "crop-bottom";

(...skipping 52 matching lines...)
   private int height;
   private int stride;
   private int sliceHeight;

   // Whether the decoder has finished the first frame. The codec may not change output dimensions
   // after delivering the first frame.
   private boolean hasDecodedFirstFrame;
   // Whether the decoder has seen a key frame. The first frame must be a key frame.
   private boolean keyFrameRequired;

+  private final EglBase.Context sharedContext;
+  private SurfaceTextureHelper surfaceTextureHelper;
+  private Surface surface = null;
+
+  private static class DecodedTextureMetadata {
+    final int width;
+    final int height;
+    final int rotation;
+    final long presentationTimestampUs;
+    final Integer decodeTimeMs;
+
+    DecodedTextureMetadata(
+        int width, int height, int rotation, long presentationTimestampUs, Integer decodeTimeMs) {
+      this.width = width;
+      this.height = height;
+      this.rotation = rotation;
+      this.presentationTimestampUs = presentationTimestampUs;
+      this.decodeTimeMs = decodeTimeMs;
+    }
+  }
+
+  // Metadata for the last frame rendered to the texture. Only accessed on the texture helper's
+  // thread.
+  private DecodedTextureMetadata renderedTextureMetadata;
+
   // Decoding proceeds asynchronously. This callback returns decoded frames to the caller.
   private Callback callback;

   private MediaCodec codec = null;

-  HardwareVideoDecoder(String codecName, VideoCodecType codecType, int colorFormat) {
+  HardwareVideoDecoder(
+      String codecName, VideoCodecType codecType, int colorFormat, EglBase.Context sharedContext) {
     if (!isSupportedColorFormat(colorFormat)) {
       throw new IllegalArgumentException("Unsupported color format: " + colorFormat);
     }
     this.codecName = codecName;
     this.codecType = codecType;
     this.colorFormat = colorFormat;
+    this.sharedContext = sharedContext;
     this.frameInfos = new LinkedBlockingDeque<>();
   }

   @Override
   public VideoCodecStatus initDecode(Settings settings, Callback callback) {
     this.decoderThreadChecker = new ThreadChecker();
     return initDecodeInternal(settings.width, settings.height, callback);
   }

   private VideoCodecStatus initDecodeInternal(int width, int height, Callback callback) {
(...skipping 15 matching lines...)
     keyFrameRequired = true;

     try {
       codec = MediaCodec.createByCodecName(codecName);
     } catch (IOException | IllegalArgumentException e) {
       Logging.e(TAG, "Cannot create media decoder " + codecName);
       return VideoCodecStatus.ERROR;
     }
     try {
       MediaFormat format = MediaFormat.createVideoFormat(codecType.mimeType(), width, height);
-      format.setInteger(MediaFormat.KEY_COLOR_FORMAT, colorFormat);
-      codec.configure(format, null, null, 0);
+      if (sharedContext == null) {
+        format.setInteger(MediaFormat.KEY_COLOR_FORMAT, colorFormat);
+      } else {
+        surfaceTextureHelper = SurfaceTextureHelper.create("decoder-texture-thread", sharedContext);
+        surface = new Surface(surfaceTextureHelper.getSurfaceTexture());
+        surfaceTextureHelper.startListening(this);
+      }
+      codec.configure(format, surface, null, 0);
       codec.start();
     } catch (IllegalStateException e) {
       Logging.e(TAG, "initDecode failed", e);
       release();
       return VideoCodecStatus.ERROR;
     }

     running = true;
     outputThread = createOutputThread();
     outputThread.start();
(...skipping 40 matching lines...)
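Note (reviewer sketch, not part of the patch): the new sharedContext argument selects which of the two configure branches above is taken. With a null context the codec is configured without a Surface and keeps emitting byte buffers in colorFormat; with a non-null context it renders into a Surface backed by the SurfaceTextureHelper. A minimal construction sketch, assuming package-private access inside org.webrtc, a hypothetical codec name, and that the VideoCodecType enum exposes H264:

// Byte-buffer mode: no Surface; decoded frames go through the deliverByteFrame() path.
VideoDecoder byteBufferDecoder = new HardwareVideoDecoder("OMX.example.h264.decoder" /* hypothetical */,
    VideoCodecType.H264, CodecCapabilities.COLOR_FormatYUV420Planar, null /* sharedContext */);

// Texture mode: initDecodeInternal() creates a SurfaceTextureHelper and Surface, and frames
// arrive as OES textures through onTextureFrameAvailable().
EglBase eglBase = EglBase.create();
VideoDecoder textureDecoder = new HardwareVideoDecoder("OMX.example.h264.decoder" /* hypothetical */,
    VideoCodecType.H264, CodecCapabilities.COLOR_FormatYUV420Planar, eglBase.getEglBaseContext());
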
       if (frame.frameType != EncodedImage.FrameType.VideoFrameKey) {
         Logging.e(TAG, "decode() - key frame required first");
         return VideoCodecStatus.ERROR;
       }
       if (!frame.completeFrame) {
         Logging.e(TAG, "decode() - complete frame required first");
         return VideoCodecStatus.ERROR;
       }
     }

-    // TODO(mellem): Support textures.
     int index;
     try {
       index = codec.dequeueInputBuffer(DEQUEUE_INPUT_TIMEOUT_US);
     } catch (IllegalStateException e) {
       Logging.e(TAG, "dequeueInputBuffer failed", e);
       return VideoCodecStatus.ERROR;
     }
     if (index < 0) {
       // Decoder is falling behind. No input buffers available.
       // The decoder can't simply drop frames; it might lose a key frame.
(...skipping 58 matching lines...)
         // capture both the output thread's stack trace and this thread's stack trace.
         Logging.e(TAG, "Media encoder release error", new RuntimeException(shutdownException));
         shutdownException = null;
         return VideoCodecStatus.ERROR;
       }
     } finally {
       codec = null;
       callback = null;
       outputThread = null;
       frameInfos.clear();
+      if (surface != null) {
+        surface.release();
+        surface = null;
+        surfaceTextureHelper.stopListening();
+        surfaceTextureHelper.dispose();
+        surfaceTextureHelper = null;
+      }
     }
     return VideoCodecStatus.OK;
   }

   private VideoCodecStatus reinitDecode(int newWidth, int newHeight) {
     decoderThreadChecker.checkIsOnValidThread();
     VideoCodecStatus status = release();
     if (status != VideoCodecStatus.OK) {
       return status;
     }
(...skipping 35 matching lines...)
       FrameInfo frameInfo = frameInfos.poll();
       Integer decodeTimeMs = null;
       int rotation = 0;
       if (frameInfo != null) {
         decodeTimeMs = (int) (SystemClock.elapsedRealtime() - frameInfo.decodeStartTimeMs);
         rotation = frameInfo.rotation;
       }

       hasDecodedFirstFrame = true;

-      // Load dimensions from shared memory under the dimension lock.
-      int width, height, stride, sliceHeight;
-      synchronized (dimensionLock) {
-        width = this.width;
-        height = this.height;
-        stride = this.stride;
-        sliceHeight = this.sliceHeight;
+      if (surfaceTextureHelper != null) {
+        deliverTextureFrame(result, info, rotation, decodeTimeMs);
+      } else {
+        deliverByteFrame(result, info, rotation, decodeTimeMs);
       }

-      // Output must be at least width * height bytes for Y channel, plus (width / 2) * (height / 2)
-      // bytes for each of the U and V channels.
-      if (info.size < width * height * 3 / 2) {
-        Logging.e(TAG, "Insufficient output buffer size: " + info.size);
-        return;
-      }
-
-      if (info.size < stride * height * 3 / 2 && sliceHeight == height && stride > width) {
-        // Some codecs (Exynos) report an incorrect stride. Correct it here.
-        // Expected size == stride * height * 3 / 2. A bit of algebra gives the correct stride as
-        // 2 * size / (3 * height).
-        stride = info.size * 2 / (height * 3);
-      }
-
-      ByteBuffer buffer = codec.getOutputBuffers()[result];
-      buffer.position(info.offset);
-      buffer.limit(info.size);
-
-      final VideoFrame.I420Buffer frameBuffer;
-
-      // TODO(mellem): As an optimization, use libyuv via JNI to copy/reformatting data.
-      if (colorFormat == CodecCapabilities.COLOR_FormatYUV420Planar) {
-        if (sliceHeight % 2 == 0) {
-          frameBuffer =
-              createBufferFromI420(buffer, result, info.offset, stride, sliceHeight, width, height);
-        } else {
-          frameBuffer = new I420BufferImpl(width, height);
-          // Optimal path is not possible because we have to copy the last rows of U- and V-planes.
-          copyI420(buffer, info.offset, frameBuffer, stride, sliceHeight, width, height);
-          codec.releaseOutputBuffer(result, false);
-        }
-      } else {
-        frameBuffer = new I420BufferImpl(width, height);
-        // All other supported color formats are NV12.
-        nv12ToI420(buffer, info.offset, frameBuffer, stride, sliceHeight, width, height);
-        codec.releaseOutputBuffer(result, false);
-      }
-
-      long presentationTimeNs = info.presentationTimeUs * 1000;
-      VideoFrame frame = new VideoFrame(frameBuffer, rotation, presentationTimeNs, new Matrix());
-
-      // Note that qp is parsed on the C++ side.
-      callback.onDecodedFrame(frame, decodeTimeMs, null /* qp */);
-      frame.release();
     } catch (IllegalStateException e) {
       Logging.e(TAG, "deliverDecodedFrame failed", e);
     }
   }

+  private void deliverTextureFrame(final int index, final MediaCodec.BufferInfo info,
+      final int rotation, final Integer decodeTimeMs) {
+    // Load dimensions from shared memory under the dimension lock.
+    final int width, height;
+    synchronized (dimensionLock) {
+      width = this.width;
+      height = this.height;
+    }
+
+    surfaceTextureHelper.getHandler().post(new Runnable() {
+      @Override
+      public void run() {
+        renderedTextureMetadata = new DecodedTextureMetadata(
+            width, height, rotation, info.presentationTimeUs, decodeTimeMs);
+        codec.releaseOutputBuffer(index, true);
+      }
+    });
+  }
+
+  @Override
+  public void onTextureFrameAvailable(int oesTextureId, float[] transformMatrix, long timestampNs) {
+    VideoFrame.TextureBuffer oesBuffer = surfaceTextureHelper.createTextureBuffer(
+        renderedTextureMetadata.width, renderedTextureMetadata.height, transformMatrix);
+
+    Matrix matrix = RendererCommon.convertMatrixToAndroidGraphicsMatrix(transformMatrix);
+
+    VideoFrame frame = new VideoFrame(oesBuffer, renderedTextureMetadata.rotation,
+        renderedTextureMetadata.presentationTimestampUs * 1000, matrix);
+    callback.onDecodedFrame(frame, renderedTextureMetadata.decodeTimeMs, null /* qp */);
+    frame.release();
+  }
+
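Note (reviewer sketch, not part of the patch): the texture path above is a two-step asynchronous handoff; the SurfaceTextureHelper threading behaviour described below is an assumption based on how it is used in this CL:

// 1. Output thread: deliverTextureFrame() reads width/height under dimensionLock and posts a
//    Runnable to the texture helper's handler.
// 2. Texture helper thread: the Runnable stores the frame metadata in renderedTextureMetadata and
//    calls codec.releaseOutputBuffer(index, true), which renders the image into the Surface.
// 3. Texture helper thread: once the SurfaceTexture has the image, onTextureFrameAvailable() fires
//    on the same thread, wraps the OES texture plus the stored metadata into a VideoFrame, and
//    hands it to callback.onDecodedFrame(). Because every access to renderedTextureMetadata happens
//    on that one thread, no extra lock is needed (matching the field comment above).
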
+  private void deliverByteFrame(
+      int result, MediaCodec.BufferInfo info, int rotation, Integer decodeTimeMs) {
+    // Load dimensions from shared memory under the dimension lock.
+    int width, height, stride, sliceHeight;
+    synchronized (dimensionLock) {
+      width = this.width;
+      height = this.height;
+      stride = this.stride;
+      sliceHeight = this.sliceHeight;
+    }
+
+    // Output must be at least width * height bytes for Y channel, plus (width / 2) * (height / 2)
+    // bytes for each of the U and V channels.
+    if (info.size < width * height * 3 / 2) {
+      Logging.e(TAG, "Insufficient output buffer size: " + info.size);
+      return;
+    }
+
+    if (info.size < stride * height * 3 / 2 && sliceHeight == height && stride > width) {
+      // Some codecs (Exynos) report an incorrect stride. Correct it here.
+      // Expected size == stride * height * 3 / 2. A bit of algebra gives the correct stride as
+      // 2 * size / (3 * height).
+      stride = info.size * 2 / (height * 3);
+    }
+
+    ByteBuffer buffer = codec.getOutputBuffers()[result];
+    buffer.position(info.offset);
+    buffer.limit(info.size);
+
+    final VideoFrame.I420Buffer frameBuffer;
+
+    // TODO(mellem): As an optimization, use libyuv via JNI to copy/reformatting data.
+    if (colorFormat == CodecCapabilities.COLOR_FormatYUV420Planar) {
+      if (sliceHeight % 2 == 0) {
+        frameBuffer =
+            createBufferFromI420(buffer, result, info.offset, stride, sliceHeight, width, height);
+      } else {
+        frameBuffer = I420BufferImpl.allocate(width, height);
+        // Optimal path is not possible because we have to copy the last rows of U- and V-planes.
+        copyI420(buffer, info.offset, frameBuffer, stride, sliceHeight, width, height);
+        codec.releaseOutputBuffer(result, false);
+      }
+    } else {
+      frameBuffer = I420BufferImpl.allocate(width, height);
+      // All other supported color formats are NV12.
+      nv12ToI420(buffer, info.offset, frameBuffer, stride, sliceHeight, width, height);
+      codec.releaseOutputBuffer(result, false);
+    }
+
+    long presentationTimeNs = info.presentationTimeUs * 1000;
+    VideoFrame frame = new VideoFrame(frameBuffer, rotation, presentationTimeNs, new Matrix());
+
+    // Note that qp is parsed on the C++ side.
+    callback.onDecodedFrame(frame, decodeTimeMs, null /* qp */);
+    frame.release();
+  }
+
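Note (reviewer sketch, not part of the patch): a quick numeric check of the stride recovery above, using assumed 1280x720 values:

// Illustrative only: the codec reports an inflated stride, but info.size still describes a
// tightly packed buffer whose real row stride equals the width.
int height = 720;
int size = 1280 * height * 3 / 2;              // 1382400 bytes in the output buffer.
int recoveredStride = size * 2 / (height * 3); // == 1280, i.e. the real row stride.
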
   private void reformat(MediaFormat format) {
     outputThreadChecker.checkIsOnValidThread();
     Logging.d(TAG, "Decoder format changed: " + format.toString());
     final int newWidth;
     final int newHeight;
     if (format.containsKey(MEDIA_FORMAT_KEY_CROP_LEFT)
         && format.containsKey(MEDIA_FORMAT_KEY_CROP_RIGHT)
         && format.containsKey(MEDIA_FORMAT_KEY_CROP_BOTTOM)
         && format.containsKey(MEDIA_FORMAT_KEY_CROP_TOP)) {
       newWidth = 1 + format.getInteger(MEDIA_FORMAT_KEY_CROP_RIGHT)
           - format.getInteger(MEDIA_FORMAT_KEY_CROP_LEFT);
       newHeight = 1 + format.getInteger(MEDIA_FORMAT_KEY_CROP_BOTTOM)
           - format.getInteger(MEDIA_FORMAT_KEY_CROP_TOP);
     } else {
       newWidth = format.getInteger(MediaFormat.KEY_WIDTH);
       newHeight = format.getInteger(MediaFormat.KEY_HEIGHT);
     }
     // Compare to existing width, height, and save values under the dimension lock.
     synchronized (dimensionLock) {
       if (hasDecodedFirstFrame && (width != newWidth || height != newHeight)) {
         stopOnOutputThread(new RuntimeException("Unexpected size change. Configured " + width + "*"
             + height + ". New " + newWidth + "*" + newHeight));
         return;
       }
       width = newWidth;
       height = newHeight;
     }

-    if (format.containsKey(MediaFormat.KEY_COLOR_FORMAT)) {
+    // Note: texture mode ignores colorFormat. Hence, if the texture helper is non-null, skip
+    // color format updates.
+    if (surfaceTextureHelper == null && format.containsKey(MediaFormat.KEY_COLOR_FORMAT)) {
       colorFormat = format.getInteger(MediaFormat.KEY_COLOR_FORMAT);
       Logging.d(TAG, "Color: 0x" + Integer.toHexString(colorFormat));
       if (!isSupportedColorFormat(colorFormat)) {
         stopOnOutputThread(new IllegalStateException("Unsupported color format: " + colorFormat));
         return;
       }
     }

     // Save stride and sliceHeight under the dimension lock.
     synchronized (dimensionLock) {
(...skipping 69 matching lines...)
     final int chromaWidth = (width + 1) / 2;
     final int chromaHeight = (height + 1) / 2;

     final int yPos = offset;
     final int uPos = yPos + stride * sliceHeight;
     final int vPos = uPos + uvStride * sliceHeight / 2;

     synchronized (activeOutputBuffersLock) {
       activeOutputBuffers++;
     }
-    return new VideoFrame.I420Buffer() {
-      private int refCount = 1;

+    I420BufferImpl.ReleaseCallback callback = new I420BufferImpl.ReleaseCallback() {
       @Override
-      public ByteBuffer getDataY() {
-        ByteBuffer data = buffer.slice();
-        data.position(yPos);
-        data.limit(yPos + getStrideY() * height);
-        return data;
-      }
-
-      @Override
-      public ByteBuffer getDataU() {
-        ByteBuffer data = buffer.slice();
-        data.position(uPos);
-        data.limit(uPos + getStrideU() * chromaHeight);
-        return data;
-      }
-
-      @Override
-      public ByteBuffer getDataV() {
-        ByteBuffer data = buffer.slice();
-        data.position(vPos);
-        data.limit(vPos + getStrideV() * chromaHeight);
-        return data;
-      }
-
-      @Override
-      public int getStrideY() {
-        return stride;
-      }
-
-      @Override
-      public int getStrideU() {
-        return uvStride;
-      }
-
-      @Override
-      public int getStrideV() {
-        return uvStride;
-      }
-
-      @Override
-      public int getWidth() {
-        return width;
-      }
-
-      @Override
-      public int getHeight() {
-        return height;
-      }
-
-      @Override
-      public VideoFrame.I420Buffer toI420() {
-        return this;
-      }
-
-      @Override
-      public void retain() {
-        refCount++;
-      }
-
-      @Override
-      public void release() {
-        refCount--;
-
-        if (refCount == 0) {
-          codec.releaseOutputBuffer(outputBufferIndex, false);
-          synchronized (activeOutputBuffersLock) {
-            activeOutputBuffers--;
-            activeOutputBuffersLock.notifyAll();
-          }
+      public void onRelease() {
+        codec.releaseOutputBuffer(outputBufferIndex, false);
+        synchronized (activeOutputBuffersLock) {
+          activeOutputBuffers--;
+          activeOutputBuffersLock.notifyAll();
         }
       }
     };
+
+    return new I420BufferImpl(
+        buffer, width, height, yPos, stride, uPos, uvStride, vPos, uvStride, callback);
   }

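Note (reviewer sketch, not part of the patch): the I420BufferImpl returned above stays backed by a MediaCodec output buffer until its last reference is released, at which point the ReleaseCallback returns the buffer to the codec. A consumer sketch, assuming a frame.getBuffer() accessor and the retain()/release() reference counting used elsewhere in this CL:

// Hypothetical consumer inside Callback.onDecodedFrame(): keep the pixel data past the callback.
VideoFrame.Buffer buffer = frame.getBuffer();
buffer.retain();   // The MediaCodec output buffer stays unavailable to the codec while retained.
// ... read the planes on another thread ...
buffer.release();  // Last release fires ReleaseCallback.onRelease(): codec.releaseOutputBuffer()
                   // plus the activeOutputBuffers bookkeeping shown above.
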
   private static void copyI420(ByteBuffer src, int offset, VideoFrame.I420Buffer frameBuffer,
       int stride, int sliceHeight, int width, int height) {
     int uvStride = stride / 2;
     int chromaWidth = (width + 1) / 2;
     // Note that hardware truncates instead of rounding. WebRTC expects rounding, so the last
     // row will be duplicated if the sliceHeight is odd.
     int chromaHeight = (sliceHeight % 2 == 0) ? (height + 1) / 2 : height / 2;

(...skipping 52 matching lines...)
       dstPos += dstStride;
     }
   }

   private static void copyRow(ByteBuffer src, int srcPos, ByteBuffer dst, int dstPos, int width) {
     for (int i = 0; i < width; ++i) {
       dst.put(dstPos + i, src.get(srcPos + i));
     }
   }
 }