Bug 1578073 - Use updated camera capture code; r=ng

Differential Revision: https://phabricator.services.mozilla.com/D61861

--HG--
extra : moz-landing-system : lando
Dan Minor 2020-02-10 17:12:52 +00:00
Parent f84c5a8d02
Commit 3a77f0eeb3
3 changed files with 188 additions and 520 deletions


@@ -12,92 +12,64 @@ package org.webrtc.videoengine;
import java.io.IOException;
import java.util.List;
import java.util.concurrent.Exchanger;
import android.content.Context;
import android.graphics.ImageFormat;
import android.graphics.SurfaceTexture;
import android.hardware.Camera.Parameters;
import android.hardware.Camera.PreviewCallback;
import android.hardware.Camera;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.os.Handler;
import android.os.Looper;
import android.os.SystemClock;
import android.util.Log;
import android.view.Surface;
import android.view.SurfaceHolder.Callback;
import android.view.SurfaceHolder;
import android.view.WindowManager;
import java.util.concurrent.CountDownLatch;
import org.mozilla.gecko.annotation.WebRTCJNITarget;
// Wrapper for android Camera, with support for direct local preview rendering.
// Threading notes: this class is called from ViE C++ code, and from Camera &
// SurfaceHolder Java callbacks. Since these calls happen on different threads,
// the entry points to this class are all synchronized. This shouldn't present
// a performance bottleneck because only onPreviewFrame() is called more than
// once (and is called serially on a single thread), so the lock should be
// uncontended. Note that each of these synchronized methods must check
// |camera| for null to account for having possibly waited for stopCapture() to
// complete.
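As an aside, a minimal sketch of the locking discipline the comment above describes, using a hypothetical entry point (illustration only, not part of the patch):

    // Every native-facing entry point takes the object lock and re-checks
    // |camera|, because a racing stopCapture() may already have nulled it.
    private synchronized boolean exampleEntryPoint() {
      if (camera == null) {
        return false; // stopCapture() completed first; nothing to do.
      }
      // ... safe to use |camera| while holding the lock ...
      return true;
    }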
public class VideoCaptureAndroid implements PreviewCallback, Callback {
import org.webrtc.CameraEnumerator;
import org.webrtc.Camera1Enumerator;
import org.webrtc.Camera2Enumerator;
import org.webrtc.CameraVideoCapturer;
import org.webrtc.CapturerObserver;
import org.webrtc.EglBase;
import org.webrtc.SurfaceTextureHelper;
import org.webrtc.VideoFrame;
import org.webrtc.VideoFrame.I420Buffer;
public class VideoCaptureAndroid implements CameraVideoCapturer.CameraEventsHandler, CapturerObserver {
private final static String TAG = "WEBRTC-JC";
private static SurfaceHolder localPreview;
private Camera camera; // Only non-null while capturing.
private CameraThread cameraThread;
private Handler cameraThreadHandler;
private Context context;
private final int id;
private final Camera.CameraInfo info;
private final String deviceName;
private volatile long native_capturer; // |VideoCaptureAndroid*| in C++.
private SurfaceTexture cameraSurfaceTexture;
private int[] cameraGlTextures = null;
// Arbitrary queue depth. Higher number means more memory allocated & held,
// lower number means more sensitivity to processing time in the client (and
// potentially stalling the capturer if it runs out of buffers to write to).
private final int numCaptureBuffers = 3;
private double averageDurationMs;
private long lastCaptureTimeMs;
private int frameCount;
private int frameDropRatio;
private Context context;
private CameraVideoCapturer cameraVideoCapturer;
private EglBase eglBase;
private SurfaceTextureHelper surfaceTextureHelper;
// Requests future capturers to send their frames to |localPreview| directly.
public static void setLocalPreview(SurfaceHolder localPreview) {
// It is a gross hack that this is a class-static. Doing it right would
// mean plumbing this through the C++ API and using it from
// webrtc/examples/android/media_demo's MediaEngine class.
VideoCaptureAndroid.localPreview = localPreview;
}
// This class is recreated every time we start/stop capture, so we
// can safely create the CountDownLatches here.
private final CountDownLatch capturerStarted = new CountDownLatch(1);
private boolean capturerStartedSucceeded = false;
private final CountDownLatch capturerStopped = new CountDownLatch(1);
@WebRTCJNITarget
public VideoCaptureAndroid(int id, long native_capturer) {
this.id = id;
public VideoCaptureAndroid(String deviceName, long native_capturer) {
this.deviceName = deviceName;
this.native_capturer = native_capturer;
this.context = GetContext();
this.info = new Camera.CameraInfo();
Camera.getCameraInfo(id, info);
CameraEnumerator enumerator;
if (Camera2Enumerator.isSupported(context)) {
enumerator = new Camera2Enumerator(context);
} else {
enumerator = new Camera1Enumerator();
}
cameraVideoCapturer = enumerator.createCapturer(deviceName, this);
eglBase = EglBase.create();
surfaceTextureHelper = SurfaceTextureHelper.create("VideoCaptureAndroidSurfaceTextureHelper", eglBase.getEglBaseContext());
cameraVideoCapturer.initialize(surfaceTextureHelper, context, this);
}
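For context, the constructor above is the standard org.webrtc capture bring-up. A self-contained sketch of the same sequence outside this wrapper (assuming the stock upstream classes; appContext, observer, and the resolution are placeholders):

    import android.content.Context;
    import org.webrtc.*;

    class CaptureExample {
      static CameraVideoCapturer startFirstCamera(Context appContext, CapturerObserver observer) {
        // Prefer the Camera2 path when the device supports it.
        CameraEnumerator enumerator = Camera2Enumerator.isSupported(appContext)
            ? new Camera2Enumerator(appContext)
            : new Camera1Enumerator();
        // Device names come from the enumerator; picking the first is arbitrary here.
        String name = enumerator.getDeviceNames()[0];
        CameraVideoCapturer capturer = enumerator.createCapturer(name, null /* eventsHandler */);
        // Frames are delivered through a SurfaceTextureHelper bound to an EGL context.
        EglBase eglBase = EglBase.create();
        SurfaceTextureHelper helper =
            SurfaceTextureHelper.create("CaptureThread", eglBase.getEglBaseContext());
        capturer.initialize(helper, appContext, observer);
        capturer.startCapture(640, 480, 30); // width, height, fps
        return capturer;
      }
    }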
// Return the global application context.
@WebRTCJNITarget
private static native Context GetContext();
private static final class CameraThread extends Thread {
private Exchanger<Handler> handlerExchanger;
public CameraThread(Exchanger<Handler> handlerExchanger) {
this.handlerExchanger = handlerExchanger;
}
@Override public void run() {
Looper.prepare();
exchange(handlerExchanger, new Handler());
Looper.loop();
}
}
// Called by native code. Returns true if capturer is started.
//
// Note that this actually opens the camera, and Camera callbacks run on the
@@ -109,39 +81,14 @@ public class VideoCaptureAndroid implements PreviewCallback, Callback {
final int min_mfps, final int max_mfps) {
Log.d(TAG, "startCapture: " + width + "x" + height + "@" +
min_mfps + ":" + max_mfps);
if (cameraThread == null && cameraThreadHandler == null) {
Exchanger<Handler> handlerExchanger = new Exchanger<Handler>();
cameraThread = new CameraThread(handlerExchanger);
cameraThread.start();
cameraThreadHandler = exchange(handlerExchanger, null);
cameraVideoCapturer.startCapture(width, height, max_mfps);
try {
capturerStarted.await();
} catch (InterruptedException e) {
return false;
}
final Exchanger<Boolean> result = new Exchanger<Boolean>();
cameraThreadHandler.post(new Runnable() {
@Override public void run() {
boolean startResult =
startCaptureOnCameraThread(width, height, min_mfps, max_mfps);
if (!startResult) {
Looper.myLooper().quit();
}
exchange(result, startResult);
}
});
boolean startResult = exchange(result, false); // |false| is a dummy value.
if (!startResult) {
// Starting failed on the camera thread. The looper has now quit and the
// camera thread is dead.
try {
cameraThread.join();
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
cameraThreadHandler = null;
cameraThread = null;
}
return startResult;
return capturerStartedSucceeded;
}
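A design note on the latch handshake above: CountDownLatch.await() blocks the native caller indefinitely if the capturer never reports back. A bounded variant (purely illustrative; the patch itself waits without a timeout) would be:

    try {
      // Hypothetical guard: give the capturer a few seconds to report,
      // rather than blocking the native thread forever.
      if (!capturerStarted.await(5, java.util.concurrent.TimeUnit.SECONDS)) {
        return false; // no callback arrived; treat as a failed start
      }
    } catch (InterruptedException e) {
      return false;
    }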
@WebRTCJNITarget
@@ -153,213 +100,18 @@ public class VideoCaptureAndroid implements PreviewCallback, Callback {
native_capturer = 0;
}
private boolean startCaptureOnCameraThread(
int width, int height, int min_mfps, int max_mfps) {
Throwable error = null;
try {
boolean isRunning = camera != null;
if (!isRunning) {
camera = Camera.open(id);
if (localPreview != null) {
localPreview.addCallback(this);
if (localPreview.getSurface() != null &&
localPreview.getSurface().isValid()) {
try {
camera.setPreviewDisplay(localPreview);
} catch (IOException e) {
throw new RuntimeException(e);
}
}
} else {
// No local renderer (we only care about onPreviewFrame() buffers, not a
// directly-displayed UI element). Camera won't capture without
// setPreview{Texture,Display}, so we create a SurfaceTexture and hand
// it over to Camera, but never listen for frame-ready callbacks,
// and never call updateTexImage on it.
try {
cameraGlTextures = new int[1];
// Generate one texture pointer and bind it as an external texture.
GLES20.glGenTextures(1, cameraGlTextures, 0);
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
cameraGlTextures[0]);
GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
cameraSurfaceTexture = new SurfaceTexture(cameraGlTextures[0]);
cameraSurfaceTexture.setOnFrameAvailableListener(null);
camera.setPreviewTexture(cameraSurfaceTexture);
} catch (IOException e) {
throw new RuntimeException(e);
}
}
}
Log.d(TAG, "Camera orientation: " + info.orientation +
" .Device orientation: " + getDeviceOrientation());
Camera.Parameters parameters = camera.getParameters();
if (isRunning) {
Camera.Size size = parameters.getPreviewSize();
int[] fpsRange = new int[2];
parameters.getPreviewFpsRange(fpsRange);
int minFps = fpsRange[Parameters.PREVIEW_FPS_MIN_INDEX] / frameDropRatio;
int maxFps = fpsRange[Parameters.PREVIEW_FPS_MAX_INDEX] / frameDropRatio;
if (size.width == width && size.height == height &&
minFps == min_mfps && maxFps == max_mfps) {
return true;
} else {
if (!stopCaptureOnCameraThread()) {
throw new RuntimeException("Stopping on reconfig failed");
}
return startCaptureOnCameraThread(width, height, min_mfps, max_mfps);
}
}
Log.d(TAG, "isVideoStabilizationSupported: " +
parameters.isVideoStabilizationSupported());
if (parameters.isVideoStabilizationSupported()) {
parameters.setVideoStabilization(true);
}
List<String> focusModes = parameters.getSupportedFocusModes();
if (focusModes.contains(android.hardware.Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)) {
Log.d(TAG, "Enable continuous auto focus mode.");
parameters.setFocusMode(android.hardware.Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
}
// (width,height) is a valid preview size. It might not be a valid picture
// size.
parameters.setPreviewSize(width, height);
List<Camera.Size> supportedPictureSizes =
parameters.getSupportedPictureSizes();
Camera.Size pictureSize = supportedPictureSizes.get(0);
for (Camera.Size size : supportedPictureSizes) {
if (size.width < width || size.height < height) {
// We want a picture size larger than the preview size
continue;
}
if (pictureSize.width < width || pictureSize.height < height) {
// The so-far chosen pictureSize is smaller than the preview size.
// `size` is a better fit.
pictureSize = size;
continue;
}
if (size.width <= pictureSize.width &&
size.height <= pictureSize.height) {
// Both the so-far chosen pictureSize and `size` are larger than the
// preview size, but `size` is closest, so it's preferred.
pictureSize = size;
}
}
parameters.setPictureSize(pictureSize.width, pictureSize.height);
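To make the selection policy above concrete: with a 640x480 preview and supported picture sizes {320x240, 800x600, 1600x1200}, the loop skips 320x240 (smaller than the preview), keeps 800x600 (first size that covers the preview), and rejects 1600x1200 (also covers the preview, but 800x600 is the tighter fit). The same policy restated as a standalone sketch, using hypothetical int[] {width, height} pairs since Camera.Size has no public constructor:

    // Pick the smallest supported picture size that still covers the preview,
    // falling back to the first entry if none does.
    static int[] pickPictureSize(int width, int height, int[][] supported) {
      int[] picture = supported[0];
      for (int[] size : supported) {
        if (size[0] < width || size[1] < height) {
          continue; // smaller than the preview; never acceptable
        }
        if (picture[0] < width || picture[1] < height) {
          picture = size; // first candidate that actually covers the preview
          continue;
        }
        if (size[0] <= picture[0] && size[1] <= picture[1]) {
          picture = size; // both cover the preview; prefer the tighter fit
        }
      }
      return picture;
    }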
// Check if requested fps range is supported by camera,
// otherwise calculate frame drop ratio.
List<int[]> supportedFpsRanges = parameters.getSupportedPreviewFpsRange();
frameDropRatio = Integer.MAX_VALUE;
for (int i = 0; i < supportedFpsRanges.size(); i++) {
int[] range = supportedFpsRanges.get(i);
if (range[Parameters.PREVIEW_FPS_MIN_INDEX] == min_mfps &&
range[Parameters.PREVIEW_FPS_MAX_INDEX] == max_mfps) {
frameDropRatio = 1;
break;
}
if (range[Parameters.PREVIEW_FPS_MIN_INDEX] % min_mfps == 0 &&
range[Parameters.PREVIEW_FPS_MAX_INDEX] % max_mfps == 0) {
int dropRatio = range[Parameters.PREVIEW_FPS_MAX_INDEX] / max_mfps;
frameDropRatio = Math.min(dropRatio, frameDropRatio);
}
}
if (frameDropRatio == Integer.MAX_VALUE) {
Log.e(TAG, "Can not find camera fps range");
throw new RuntimeException("Can not find camera fps range");
}
if (frameDropRatio > 1) {
Log.d(TAG, "Frame dropper is enabled. Ratio: " + frameDropRatio);
}
min_mfps *= frameDropRatio;
max_mfps *= frameDropRatio;
Log.d(TAG, "Camera preview mfps range: " + min_mfps + " - " + max_mfps);
parameters.setPreviewFpsRange(min_mfps, max_mfps);
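A worked example of the drop-ratio arithmetic above (mfps values are frames per second scaled by 1000, matching Android's getSupportedPreviewFpsRange() units): requesting 15 fps (min_mfps = max_mfps = 15000) from a camera whose only supported range is [30000, 30000] finds no exact match, but 30000 % 15000 == 0 on both ends, so:

    // dropRatio = 30000 / 15000 = 2, so frameDropRatio = 2.
    // min_mfps and max_mfps are scaled by 2: the camera runs at [30000, 30000].
    // onPreviewFrame() then forwards only frames where frameCount % 2 == 0,
    // i.e. every second frame, for an effective 15 fps.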
int format = ImageFormat.NV21;
parameters.setPreviewFormat(format);
camera.setParameters(parameters);
int bufSize = width * height * ImageFormat.getBitsPerPixel(format) / 8;
for (int i = 0; i < numCaptureBuffers; i++) {
camera.addCallbackBuffer(new byte[bufSize]);
}
camera.setPreviewCallbackWithBuffer(this);
frameCount = 0;
averageDurationMs = 1000000.0f / (max_mfps / frameDropRatio);
camera.startPreview();
return true;
} catch (RuntimeException e) {
error = e;
}
Log.e(TAG, "startCapture failed", error);
stopCaptureOnCameraThread();
return false;
}
// Called by native code. Returns true when camera is known to be stopped.
@WebRTCJNITarget
private synchronized boolean stopCapture() {
Log.d(TAG, "stopCapture");
final Exchanger<Boolean> result = new Exchanger<Boolean>();
cameraThreadHandler.post(new Runnable() {
@Override public void run() {
boolean stopResult = stopCaptureOnCameraThread();
Looper.myLooper().quit();
exchange(result, stopResult);
}
});
boolean status = exchange(result, false); // |false| is a dummy value here.
try {
cameraThread.join();
cameraVideoCapturer.stopCapture();
capturerStopped.await();
} catch (InterruptedException e) {
throw new RuntimeException(e);
return false;
}
cameraThreadHandler = null;
cameraThread = null;
Log.d(TAG, "stopCapture done");
return status;
}
private boolean stopCaptureOnCameraThread() {
Throwable error = null;
try {
if (camera == null) {
Log.e(TAG, "Camera is already stopped!");
throw new RuntimeException("Camera is already stopped!");
}
camera.stopPreview();
camera.setPreviewCallbackWithBuffer(null);
camera.setPreviewTexture(null);
cameraSurfaceTexture = null;
if (cameraGlTextures != null) {
GLES20.glDeleteTextures(1, cameraGlTextures, 0);
cameraGlTextures = null;
}
camera.release();
camera = null;
return true;
} catch (IOException e) {
error = e;
} catch (RuntimeException e) {
error = e;
}
Log.e(TAG, "Failed to stop camera", error);
return false;
return true;
}
@WebRTCJNITarget
@@ -389,151 +141,61 @@ public class VideoCaptureAndroid implements PreviewCallback, Callback {
@WebRTCJNITarget
private native void ProvideCameraFrame(
byte[] data, int length, int rotation, long timeStamp, long captureObject);
int width, int height,
java.nio.ByteBuffer dataY, int strideY,
java.nio.ByteBuffer dataU, int strideU,
java.nio.ByteBuffer dataV, int strideV,
int rotation, long timeStamp, long captureObject);
// Called on cameraThread so must not be "synchronized".
@WebRTCJNITarget
@Override
public void onPreviewFrame(byte[] data, Camera callbackCamera) {
if (Thread.currentThread() != cameraThread) {
throw new RuntimeException("Camera callback not on camera thread?!?");
}
if (camera == null) {
return;
}
if (camera != callbackCamera) {
throw new RuntimeException("Unexpected camera in callback!");
}
frameCount++;
// Check if frame needs to be dropped.
if ((frameDropRatio > 1) && (frameCount % frameDropRatio) > 0) {
camera.addCallbackBuffer(data);
return;
}
long captureTimeMs = SystemClock.elapsedRealtime();
if (frameCount > frameDropRatio) {
double durationMs = captureTimeMs - lastCaptureTimeMs;
averageDurationMs = 0.9 * averageDurationMs + 0.1 * durationMs;
if ((frameCount % 30) == 0) {
Log.d(TAG, "Camera TS " + captureTimeMs +
". Duration: " + (int)durationMs + " ms. FPS: " +
(int) (1000 / averageDurationMs + 0.5));
}
}
lastCaptureTimeMs = captureTimeMs;
//
// CameraVideoCapturer.CameraEventsHandler interface
//
int rotation = getDeviceOrientation();
if (info.facing == Camera.CameraInfo.CAMERA_FACING_BACK) {
rotation = 360 - rotation;
}
rotation = (info.orientation + rotation) % 360;
// Camera error handler - invoked when camera can not be opened
// or any camera exception happens on camera thread.
public void onCameraError(String errorDescription) {}
ProvideCameraFrame(data, data.length, rotation,
captureTimeMs, native_capturer);
camera.addCallbackBuffer(data);
// Called when camera is disconnected.
public void onCameraDisconnected() {}
// Invoked when camera stops receiving frames.
public void onCameraFreezed(String errorDescription) {}
// Callback invoked when camera is opening.
public void onCameraOpening(String cameraName) {}
// Callback invoked when first camera frame is available after camera is started.
public void onFirstFrameAvailable() {}
// Callback invoked when camera is closed.
public void onCameraClosed() {}
//
// CapturerObserver interface
//
// Notify whether the capturer has been started successfully or not.
public void onCapturerStarted(boolean success) {
capturerStartedSucceeded = success;
capturerStarted.countDown();
}
// Sets the rotation of the preview render window.
// Does not affect the captured video image.
// Called by native code.
private synchronized void setPreviewRotation(final int rotation) {
if (camera == null || cameraThreadHandler == null) {
return;
}
final Exchanger<IOException> result = new Exchanger<IOException>();
cameraThreadHandler.post(new Runnable() {
@Override public void run() {
setPreviewRotationOnCameraThread(rotation, result);
}
});
// Use the exchanger below to block this function until
// setPreviewRotationOnCameraThread() completes, holding the synchronized
// lock for the duration. The exchanged value itself is ignored.
exchange(result, null);
// Notify that the capturer has been stopped.
public void onCapturerStopped() {
capturerStopped.countDown();
}
private void setPreviewRotationOnCameraThread(
int rotation, Exchanger<IOException> result) {
Log.v(TAG, "setPreviewRotation:" + rotation);
// Delivers a captured frame.
public void onFrameCaptured(VideoFrame frame) {
I420Buffer i420Buffer = frame.getBuffer().toI420();
int resultRotation = 0;
if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
// This is a front facing camera. SetDisplayOrientation will flip
// the image horizontally before doing the rotation.
resultRotation = ( 360 - rotation ) % 360; // Compensate for the mirror.
} else {
// Back-facing camera.
resultRotation = rotation;
}
camera.setDisplayOrientation(resultRotation);
exchange(result, null);
}
ProvideCameraFrame(i420Buffer.getWidth(), i420Buffer.getHeight(),
i420Buffer.getDataY(), i420Buffer.getStrideY(),
i420Buffer.getDataU(), i420Buffer.getStrideU(),
i420Buffer.getDataV(), i420Buffer.getStrideV(),
frame.getRotation(),
frame.getTimestampNs() / 1000000, native_capturer);
@WebRTCJNITarget
@Override
public synchronized void surfaceChanged(
SurfaceHolder holder, int format, int width, int height) {
Log.d(TAG, "VideoCaptureAndroid::surfaceChanged ignored: " +
format + ": " + width + "x" + height);
}
@WebRTCJNITarget
@Override
public synchronized void surfaceCreated(final SurfaceHolder holder) {
Log.d(TAG, "VideoCaptureAndroid::surfaceCreated");
if (camera == null || cameraThreadHandler == null) {
return;
}
final Exchanger<IOException> result = new Exchanger<IOException>();
cameraThreadHandler.post(new Runnable() {
@Override public void run() {
setPreviewDisplayOnCameraThread(holder, result);
}
});
IOException e = exchange(result, null); // |null| is a dummy value here.
if (e != null) {
throw new RuntimeException(e);
}
}
@WebRTCJNITarget
@Override
public synchronized void surfaceDestroyed(SurfaceHolder holder) {
Log.d(TAG, "VideoCaptureAndroid::surfaceDestroyed");
if (camera == null || cameraThreadHandler == null) {
return;
}
final Exchanger<IOException> result = new Exchanger<IOException>();
cameraThreadHandler.post(new Runnable() {
@Override public void run() {
setPreviewDisplayOnCameraThread(null, result);
}
});
IOException e = exchange(result, null); // |null| is a dummy value here.
if (e != null) {
throw new RuntimeException(e);
}
}
private void setPreviewDisplayOnCameraThread(
SurfaceHolder holder, Exchanger<IOException> result) {
try {
camera.setPreviewDisplay(holder);
} catch (IOException e) {
exchange(result, e);
return;
}
exchange(result, null);
return;
}
// Exchanges |value| with |exchanger|, converting InterruptedExceptions to
// RuntimeExceptions (since we expect never to see these).
private static <T> T exchange(Exchanger<T> exchanger, T value) {
try {
return exchanger.exchange(value);
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
i420Buffer.release();
}
}
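One contract worth noting in onFrameCaptured() above: VideoFrame buffers are reference counted, and toI420() returns a buffer the callee owns, so the release() call is what lets WebRTC recycle the underlying memory. A minimal observer following the same pattern (hypothetical class, for illustration only):

    import org.webrtc.CapturerObserver;
    import org.webrtc.VideoFrame;

    class FrameCounter implements CapturerObserver {
      private int frames;

      @Override public void onCapturerStarted(boolean success) {}
      @Override public void onCapturerStopped() {}

      @Override public void onFrameCaptured(VideoFrame frame) {
        // toI420() converts if needed and returns a buffer we now own...
        VideoFrame.I420Buffer i420 = frame.getBuffer().toI420();
        frames++;
        // ...so it must be released, or the capturer eventually stalls
        // waiting for its buffers to come back.
        i420.release();
      }
    }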


@@ -20,10 +20,21 @@
static JavaVM* g_jvm_capture = NULL;
static jclass g_java_capturer_class = NULL; // VideoCaptureAndroid.class.
static jobject g_context = NULL; // Owned android.content.Context.
namespace webrtc {
jobject JniCommon_allocateNativeByteBuffer(JNIEnv* env, jclass, jint size) {
void* new_data = ::operator new(size);
jobject byte_buffer = env->NewDirectByteBuffer(new_data, size);
return byte_buffer;
}
void JniCommon_freeNativeByteBuffer(JNIEnv* env, jclass, jobject byte_buffer) {
void* data = env->GetDirectBufferAddress(byte_buffer);
::operator delete(data);
}
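These two functions back the allocator that org.webrtc.JniCommon declares on the Java side; the corresponding native declarations (matching the descriptors registered later in this file) look like:

    package org.webrtc;
    import java.nio.ByteBuffer;

    class JniCommon {
      // "(I)Ljava/nio/ByteBuffer;" -> JniCommon_allocateNativeByteBuffer
      public static native ByteBuffer nativeAllocateByteBuffer(int size);
      // "(Ljava/nio/ByteBuffer;)V" -> JniCommon_freeNativeByteBuffer
      public static native void nativeFreeByteBuffer(ByteBuffer buffer);
    }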
// Called by Java to get the global application context.
jobject JNICALL GetContext(JNIEnv* env, jclass) {
assert(g_context);
@@ -31,14 +42,11 @@ jobject JNICALL GetContext(JNIEnv* env, jclass) {
}
// Called by Java when the camera has a new frame to deliver.
void JNICALL ProvideCameraFrame(
JNIEnv* env,
jobject,
jbyteArray javaCameraFrame,
jint length,
jint rotation,
jlong timeStamp,
jlong context) {
void JNICALL ProvideCameraFrame(JNIEnv* env, jobject, jint width, jint height,
jobject javaDataY, jint strideY,
jobject javaDataU, jint strideU,
jobject javaDataV, jint strideV, jint rotation,
jlong timeStamp, jlong context) {
if (!context) {
return;
}
@@ -46,10 +54,17 @@ void JNICALL ProvideCameraFrame(
webrtc::videocapturemodule::VideoCaptureAndroid* captureModule =
reinterpret_cast<webrtc::videocapturemodule::VideoCaptureAndroid*>(
context);
jbyte* cameraFrame = env->GetByteArrayElements(javaCameraFrame, NULL);
captureModule->OnIncomingFrame(
reinterpret_cast<uint8_t*>(cameraFrame), length, rotation, 0);
env->ReleaseByteArrayElements(javaCameraFrame, cameraFrame, JNI_ABORT);
uint8_t* dataY =
reinterpret_cast<uint8_t*>(env->GetDirectBufferAddress(javaDataY));
uint8_t* dataU =
reinterpret_cast<uint8_t*>(env->GetDirectBufferAddress(javaDataU));
uint8_t* dataV =
reinterpret_cast<uint8_t*>(env->GetDirectBufferAddress(javaDataV));
rtc::scoped_refptr<I420Buffer> i420Buffer = I420Buffer::Copy(
width, height, dataY, strideY, dataU, strideU, dataV, strideV);
captureModule->OnIncomingFrame(i420Buffer, rotation, timeStamp);
}
int32_t SetCaptureAndroidVM(JavaVM* javaVM) {
@@ -66,23 +81,38 @@ int32_t SetCaptureAndroidVM(JavaVM* javaVM) {
videocapturemodule::DeviceInfoAndroid::Initialize(g_jvm_capture);
jclass clsRef = mozilla::jni::GetClassRef(
ats.env(), "org/webrtc/videoengine/VideoCaptureAndroid");
g_java_capturer_class =
static_cast<jclass>(ats.env()->NewGlobalRef(clsRef));
ats.env()->DeleteLocalRef(clsRef);
assert(g_java_capturer_class);
{
jclass clsRef = mozilla::jni::GetClassRef(
ats.env(), "org/webrtc/videoengine/VideoCaptureAndroid");
g_java_capturer_class =
static_cast<jclass>(ats.env()->NewGlobalRef(clsRef));
ats.env()->DeleteLocalRef(clsRef);
assert(g_java_capturer_class);
JNINativeMethod native_methods[] = {
{"GetContext",
"()Landroid/content/Context;",
reinterpret_cast<void*>(&GetContext)},
{"ProvideCameraFrame",
"([BIIJJ)V",
reinterpret_cast<void*>(&ProvideCameraFrame)}};
if (ats.env()->RegisterNatives(g_java_capturer_class,
native_methods, 2) != 0)
assert(false);
JNINativeMethod native_methods[] = {
{"GetContext", "()Landroid/content/Context;",
reinterpret_cast<void*>(&GetContext)},
{"ProvideCameraFrame",
"(IILjava/nio/ByteBuffer;ILjava/nio/ByteBuffer;ILjava/nio/"
"ByteBuffer;IIJJ)V",
reinterpret_cast<void*>(&ProvideCameraFrame)}};
if (ats.env()->RegisterNatives(g_java_capturer_class, native_methods,
2) != 0)
assert(false);
}
{
jclass clsRef =
mozilla::jni::GetClassRef(ats.env(), "org/webrtc/JniCommon");
JNINativeMethod native_methods[] = {
{"nativeAllocateByteBuffer", "(I)Ljava/nio/ByteBuffer;",
reinterpret_cast<void*>(&JniCommon_allocateNativeByteBuffer)},
{"nativeFreeByteBuffer", "(Ljava/nio/ByteBuffer;)V",
reinterpret_cast<void*>(&JniCommon_freeNativeByteBuffer)}};
if (ats.env()->RegisterNatives(clsRef, native_methods, 2) != 0)
assert(false);
}
} else {
if (g_jvm_capture) {
AttachThreadScoped ats(g_jvm_capture);
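For reference, the ProvideCameraFrame descriptor registered in the hunk above decodes as follows against the Java-side declaration (annotation only, no new behavior):

    // (IILjava/nio/ByteBuffer;ILjava/nio/ByteBuffer;ILjava/nio/ByteBuffer;IIJJ)V
    //   II                       width, height
    //   Ljava/nio/ByteBuffer;I   dataY, strideY (then the same for U and V)
    //   I                        rotation
    //   JJ                       timeStamp, captureObject (the C++ VideoCaptureAndroid*)
    private native void ProvideCameraFrame(
        int width, int height,
        java.nio.ByteBuffer dataY, int strideY,
        java.nio.ByteBuffer dataU, int strideU,
        java.nio.ByteBuffer dataV, int strideV,
        int rotation, long timeStamp, long captureObject);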
@@ -110,75 +140,64 @@ rtc::scoped_refptr<VideoCaptureModule> VideoCaptureImpl::Create(
return implementation;
}
int32_t VideoCaptureAndroid::OnIncomingFrame(uint8_t* videoFrame,
size_t videoFrameLength,
int32_t degrees,
int64_t captureTime) {
VideoCaptureCapability capability;
{
rtc::CritScope cs(&_apiCs);
if (!_captureStarted) return 0;
capability = _captureCapability;
}
void VideoCaptureAndroid::OnIncomingFrame(rtc::scoped_refptr<I420Buffer> buffer,
int32_t degrees,
int64_t captureTime) {
VideoRotation rotation =
(degrees <= 45 || degrees > 315)
? kVideoRotation_0
: (degrees > 45 && degrees <= 135)
? kVideoRotation_90
: (degrees > 135 && degrees <= 225)
? kVideoRotation_180
: (degrees > 225 && degrees <= 315)
? kVideoRotation_270
: kVideoRotation_0; // Impossible.
VideoRotation current_rotation =
(degrees <= 45 || degrees > 315) ? kVideoRotation_0 :
(degrees > 45 && degrees <= 135) ? kVideoRotation_90 :
(degrees > 135 && degrees <= 225) ? kVideoRotation_180 :
(degrees > 225 && degrees <= 315) ? kVideoRotation_270 :
kVideoRotation_0; // Impossible.
if (_rotation != current_rotation) {
RTC_LOG(LS_INFO) << "New camera rotation: " << degrees;
_rotation = current_rotation;
int32_t status = VideoCaptureImpl::SetCaptureRotation(_rotation);
if (status != 0)
return status;
}
return IncomingFrame(videoFrame, videoFrameLength, capability, captureTime);
// Historically, we have ignored captureTime. Why?
VideoFrame captureFrame(I420Buffer::Rotate(*buffer, rotation), 0,
rtc::TimeMillis(), rotation);
DeliverCapturedFrame(captureFrame);
}
VideoCaptureAndroid::VideoCaptureAndroid()
: VideoCaptureImpl(),
_deviceInfo(),
_jCapturer(NULL),
_captureStarted(false) {
}
_captureStarted(false) {}
int32_t VideoCaptureAndroid::Init(const char* deviceUniqueIdUTF8) {
const int nameLength = strlen(deviceUniqueIdUTF8);
if (nameLength >= kVideoCaptureUniqueNameSize)
return -1;
if (nameLength >= kVideoCaptureUniqueNameSize) return -1;
// Store the device name
RTC_LOG(LS_INFO) << "VideoCaptureAndroid::Init: " << deviceUniqueIdUTF8;
size_t camera_id = 0;
if (!_deviceInfo.FindCameraIndex(deviceUniqueIdUTF8, &camera_id))
return -1;
_deviceUniqueId = new char[nameLength + 1];
memcpy(_deviceUniqueId, deviceUniqueIdUTF8, nameLength + 1);
AttachThreadScoped ats(g_jvm_capture);
JNIEnv* env = ats.env();
jmethodID ctor = env->GetMethodID(g_java_capturer_class, "<init>", "(IJ)V");
jmethodID ctor = env->GetMethodID(g_java_capturer_class, "<init>",
"(Ljava/lang/String;J)V");
assert(ctor);
jstring j_deviceName = env->NewStringUTF(_deviceUniqueId);
jlong j_this = reinterpret_cast<intptr_t>(this);
_jCapturer = env->NewGlobalRef(
env->NewObject(g_java_capturer_class, ctor, camera_id, j_this));
env->NewObject(g_java_capturer_class, ctor, j_deviceName, j_this));
assert(_jCapturer);
_rotation = kVideoRotation_0;
return 0;
}
VideoCaptureAndroid::~VideoCaptureAndroid() {
// Ensure Java camera is released even if our caller didn't explicitly Stop.
if (_captureStarted)
StopCapture();
if (_captureStarted) StopCapture();
AttachThreadScoped ats(g_jvm_capture);
JNIEnv* env = ats.env();
// Avoid callbacks into ourself even if the above stopCapture fails.
jmethodID j_unlink =
env->GetMethodID(g_java_capturer_class, "unlinkCapturer", "()V");
env->GetMethodID(g_java_capturer_class, "unlinkCapturer", "()V");
env->CallVoidMethod(_jCapturer, j_unlink);
env->DeleteGlobalRef(_jCapturer);
@@ -190,11 +209,10 @@ int32_t VideoCaptureAndroid::StartCapture(
AttachThreadScoped ats(g_jvm_capture);
JNIEnv* env = ats.env();
if (_deviceInfo.GetBestMatchedCapability(
_deviceUniqueId, capability, _captureCapability) < 0) {
RTC_LOG(LS_ERROR) << __FUNCTION__ <<
"s: GetBestMatchedCapability failed: " <<
capability.width << "x" << capability.height;
if (_deviceInfo.GetBestMatchedCapability(_deviceUniqueId, capability,
_captureCapability) < 0) {
RTC_LOG(LS_ERROR) << __FUNCTION__ << "s: GetBestMatchedCapability failed: "
<< capability.width << "x" << capability.height;
// Manual exit of critical section
_apiCs.Leave();
return -1;
@@ -214,8 +232,7 @@ int32_t VideoCaptureAndroid::StartCapture(
jmethodID j_start =
env->GetMethodID(g_java_capturer_class, "startCapture", "(IIII)Z");
assert(j_start);
bool started = env->CallBooleanMethod(_jCapturer, j_start,
width, height,
bool started = env->CallBooleanMethod(_jCapturer, j_start, width, height,
min_mfps, max_mfps);
if (started) {
rtc::CritScope cs(&_apiCs);
@@ -248,21 +265,11 @@ bool VideoCaptureAndroid::CaptureStarted() {
return _captureStarted;
}
int32_t VideoCaptureAndroid::CaptureSettings(
VideoCaptureCapability& settings) {
int32_t VideoCaptureAndroid::CaptureSettings(VideoCaptureCapability& settings) {
rtc::CritScope cs(&_apiCs);
settings = _requestedCapability;
return 0;
}
int32_t VideoCaptureAndroid::SetCaptureRotation(VideoRotation rotation) {
// Our only caller is ProvideCameraFrame, which is called
// from a synchronized Java method. If we'd take this lock,
// any call going from C++ to Java will deadlock.
// CriticalSectionScoped cs(&_apiCs);
VideoCaptureImpl::SetCaptureRotation(rotation);
return 0;
}
} // namespace videocapturemodule
} // namespace webrtc


@@ -14,6 +14,7 @@
#include <jni.h>
#include "device_info_android.h"
#include "api/video/i420_buffer.h"
#include "modules/video_capture/video_capture_impl.h"
namespace webrtc {
@@ -28,10 +29,9 @@ class VideoCaptureAndroid : public VideoCaptureImpl {
virtual int32_t StopCapture();
virtual bool CaptureStarted();
virtual int32_t CaptureSettings(VideoCaptureCapability& settings);
virtual int32_t SetCaptureRotation(VideoRotation rotation);
int32_t OnIncomingFrame(uint8_t* videoFrame, size_t videoFrameLength,
int32_t degrees, int64_t captureTime = 0);
void OnIncomingFrame(rtc::scoped_refptr<I420Buffer> buffer, int32_t degrees,
int64_t captureTime = 0);
protected:
virtual ~VideoCaptureAndroid();
@@ -39,7 +39,6 @@ class VideoCaptureAndroid : public VideoCaptureImpl {
DeviceInfoAndroid _deviceInfo;
jobject _jCapturer; // Global ref to Java VideoCaptureAndroid object.
VideoCaptureCapability _captureCapability;
VideoRotation _rotation;
bool _captureStarted;
};