Mirror of https://github.com/mozilla/gecko-dev.git
Bug 1578073 - Add newer webrtc.org android camera code; r=ng
This is an import of the Android camera code as of upstream revision
26762d0425ffd15af9ddc3ae669373668827ea00 (Dec 20, 2019). This takes just the
files required to build the camera-related classes.

Differential Revision: https://phabricator.services.mozilla.com/D61849

--HG--
extra : moz-landing-system : lando
Parent: 7e7e71c48d
Commit: a5ea832cbb
@@ -0,0 +1,34 @@
/*
 * Copyright 2016 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

package org.webrtc;

import android.content.Context;

public class Camera1Capturer extends CameraCapturer {
  private final boolean captureToTexture;

  public Camera1Capturer(
      String cameraName, CameraEventsHandler eventsHandler, boolean captureToTexture) {
    super(cameraName, eventsHandler, new Camera1Enumerator(captureToTexture));

    this.captureToTexture = captureToTexture;
  }

  @Override
  protected void createCameraSession(CameraSession.CreateSessionCallback createSessionCallback,
      CameraSession.Events events, Context applicationContext,
      SurfaceTextureHelper surfaceTextureHelper, String cameraName, int width, int height,
      int framerate) {
    Camera1Session.create(createSessionCallback, events, captureToTexture, applicationContext,
        surfaceTextureHelper, Camera1Enumerator.getCameraIndex(cameraName), width, height,
        framerate);
  }
}
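As a rough usage sketch (not part of the imported file): Camera1Capturer can be constructed directly with a device-name string in the format produced by Camera1Enumerator.getDeviceName(); the name below is only an example, and the null events handler is an assumption.

// Illustrative only: captureToTexture=true delivers frames as OES textures via
// SurfaceTextureHelper, false delivers NV21 byte buffers.
CameraVideoCapturer capturer = new Camera1Capturer(
    "Camera 0, Facing back, Orientation 90", /* eventsHandler= */ null,
    /* captureToTexture= */ true);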
@@ -0,0 +1,185 @@
/*
 * Copyright 2015 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

package org.webrtc;

import android.os.SystemClock;
import android.support.annotation.Nullable;
import java.util.ArrayList;
import java.util.List;
import org.webrtc.CameraEnumerationAndroid.CaptureFormat;

@SuppressWarnings("deprecation")
public class Camera1Enumerator implements CameraEnumerator {
  private final static String TAG = "Camera1Enumerator";
  // Each entry contains the supported formats for corresponding camera index. The formats for all
  // cameras are enumerated on the first call to getSupportedFormats(), and cached for future
  // reference.
  private static List<List<CaptureFormat>> cachedSupportedFormats;

  private final boolean captureToTexture;

  public Camera1Enumerator() {
    this(true /* captureToTexture */);
  }

  public Camera1Enumerator(boolean captureToTexture) {
    this.captureToTexture = captureToTexture;
  }

  // Returns device names that can be used to create a new VideoCapturerAndroid.
  @Override
  public String[] getDeviceNames() {
    ArrayList<String> namesList = new ArrayList<>();
    for (int i = 0; i < android.hardware.Camera.getNumberOfCameras(); ++i) {
      String name = getDeviceName(i);
      if (name != null) {
        namesList.add(name);
        Logging.d(TAG, "Index: " + i + ". " + name);
      } else {
        Logging.e(TAG, "Index: " + i + ". Failed to query camera name.");
      }
    }
    String[] namesArray = new String[namesList.size()];
    return namesList.toArray(namesArray);
  }

  @Override
  public boolean isFrontFacing(String deviceName) {
    android.hardware.Camera.CameraInfo info = getCameraInfo(getCameraIndex(deviceName));
    return info != null && info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_FRONT;
  }

  @Override
  public boolean isBackFacing(String deviceName) {
    android.hardware.Camera.CameraInfo info = getCameraInfo(getCameraIndex(deviceName));
    return info != null && info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_BACK;
  }

  @Override
  public List<CaptureFormat> getSupportedFormats(String deviceName) {
    return getSupportedFormats(getCameraIndex(deviceName));
  }

  @Override
  public CameraVideoCapturer createCapturer(
      String deviceName, CameraVideoCapturer.CameraEventsHandler eventsHandler) {
    return new Camera1Capturer(deviceName, eventsHandler, captureToTexture);
  }

  private static @Nullable android.hardware.Camera.CameraInfo getCameraInfo(int index) {
    android.hardware.Camera.CameraInfo info = new android.hardware.Camera.CameraInfo();
    try {
      android.hardware.Camera.getCameraInfo(index, info);
    } catch (Exception e) {
      Logging.e(TAG, "getCameraInfo failed on index " + index, e);
      return null;
    }
    return info;
  }

  static synchronized List<CaptureFormat> getSupportedFormats(int cameraId) {
    if (cachedSupportedFormats == null) {
      cachedSupportedFormats = new ArrayList<List<CaptureFormat>>();
      for (int i = 0; i < android.hardware.Camera.getNumberOfCameras(); ++i) {
        cachedSupportedFormats.add(enumerateFormats(i));
      }
    }
    return cachedSupportedFormats.get(cameraId);
  }

  private static List<CaptureFormat> enumerateFormats(int cameraId) {
    Logging.d(TAG, "Get supported formats for camera index " + cameraId + ".");
    final long startTimeMs = SystemClock.elapsedRealtime();
    final android.hardware.Camera.Parameters parameters;
    android.hardware.Camera camera = null;
    try {
      Logging.d(TAG, "Opening camera with index " + cameraId);
      camera = android.hardware.Camera.open(cameraId);
      parameters = camera.getParameters();
    } catch (RuntimeException e) {
      Logging.e(TAG, "Open camera failed on camera index " + cameraId, e);
      return new ArrayList<CaptureFormat>();
    } finally {
      if (camera != null) {
        camera.release();
      }
    }

    final List<CaptureFormat> formatList = new ArrayList<CaptureFormat>();
    try {
      int minFps = 0;
      int maxFps = 0;
      final List<int[]> listFpsRange = parameters.getSupportedPreviewFpsRange();
      if (listFpsRange != null) {
        // getSupportedPreviewFpsRange() returns a sorted list. Take the fps range
        // corresponding to the highest fps.
        final int[] range = listFpsRange.get(listFpsRange.size() - 1);
        minFps = range[android.hardware.Camera.Parameters.PREVIEW_FPS_MIN_INDEX];
        maxFps = range[android.hardware.Camera.Parameters.PREVIEW_FPS_MAX_INDEX];
      }
      for (android.hardware.Camera.Size size : parameters.getSupportedPreviewSizes()) {
        formatList.add(new CaptureFormat(size.width, size.height, minFps, maxFps));
      }
    } catch (Exception e) {
      Logging.e(TAG, "getSupportedFormats() failed on camera index " + cameraId, e);
    }

    final long endTimeMs = SystemClock.elapsedRealtime();
    Logging.d(TAG, "Get supported formats for camera index " + cameraId + " done."
            + " Time spent: " + (endTimeMs - startTimeMs) + " ms.");
    return formatList;
  }

  // Convert from android.hardware.Camera.Size to Size.
  static List<Size> convertSizes(List<android.hardware.Camera.Size> cameraSizes) {
    final List<Size> sizes = new ArrayList<Size>();
    for (android.hardware.Camera.Size size : cameraSizes) {
      sizes.add(new Size(size.width, size.height));
    }
    return sizes;
  }

  // Convert from int[2] to CaptureFormat.FramerateRange.
  static List<CaptureFormat.FramerateRange> convertFramerates(List<int[]> arrayRanges) {
    final List<CaptureFormat.FramerateRange> ranges = new ArrayList<CaptureFormat.FramerateRange>();
    for (int[] range : arrayRanges) {
      ranges.add(new CaptureFormat.FramerateRange(
          range[android.hardware.Camera.Parameters.PREVIEW_FPS_MIN_INDEX],
          range[android.hardware.Camera.Parameters.PREVIEW_FPS_MAX_INDEX]));
    }
    return ranges;
  }

  // Returns the camera index for camera with name |deviceName|, or throws IllegalArgumentException
  // if no such camera can be found.
  static int getCameraIndex(String deviceName) {
    Logging.d(TAG, "getCameraIndex: " + deviceName);
    for (int i = 0; i < android.hardware.Camera.getNumberOfCameras(); ++i) {
      if (deviceName.equals(getDeviceName(i))) {
        return i;
      }
    }
    throw new IllegalArgumentException("No such camera: " + deviceName);
  }

  // Returns the name of the camera with camera index. Returns null if the
  // camera can not be used.
  static @Nullable String getDeviceName(int index) {
    android.hardware.Camera.CameraInfo info = getCameraInfo(index);
    if (info == null) {
      return null;
    }

    String facing =
        (info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_FRONT) ? "front" : "back";
    return "Camera " + index + ", Facing " + facing + ", Orientation " + info.orientation;
  }
}
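A minimal usage sketch of the enumerator above (not part of the imported file); it assumes a device with at least one front-facing camera and uses only the methods shown in this diff.

// Illustrative only: list Camera1 devices and log the capture formats of the
// first front-facing one.
CameraEnumerator enumerator = new Camera1Enumerator(/* captureToTexture= */ true);
for (String deviceName : enumerator.getDeviceNames()) {
  if (enumerator.isFrontFacing(deviceName)) {
    for (CameraEnumerationAndroid.CaptureFormat format :
        enumerator.getSupportedFormats(deviceName)) {
      Logging.d("Example", deviceName + " supports " + format);
    }
    break;
  }
}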
@@ -0,0 +1,38 @@
/*
 * Copyright 2016 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

package org.webrtc;

import android.annotation.TargetApi;
import android.content.Context;
import android.hardware.camera2.CameraManager;
import android.support.annotation.Nullable;

@TargetApi(21)
public class Camera2Capturer extends CameraCapturer {
  private final Context context;
  @Nullable private final CameraManager cameraManager;

  public Camera2Capturer(Context context, String cameraName, CameraEventsHandler eventsHandler) {
    super(cameraName, eventsHandler, new Camera2Enumerator(context));

    this.context = context;
    cameraManager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
  }

  @Override
  protected void createCameraSession(CameraSession.CreateSessionCallback createSessionCallback,
      CameraSession.Events events, Context applicationContext,
      SurfaceTextureHelper surfaceTextureHelper, String cameraName, int width, int height,
      int framerate) {
    Camera2Session.create(createSessionCallback, events, applicationContext, cameraManager,
        surfaceTextureHelper, cameraName, width, height, framerate);
  }
}
@@ -0,0 +1,250 @@
/*
 * Copyright 2015 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

package org.webrtc;

import android.annotation.TargetApi;
import android.content.Context;
import android.graphics.Rect;
import android.graphics.SurfaceTexture;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CameraMetadata;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.os.Build;
import android.os.SystemClock;
import android.support.annotation.Nullable;
import android.util.AndroidException;
import android.util.Range;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.webrtc.CameraEnumerationAndroid.CaptureFormat;

@TargetApi(21)
public class Camera2Enumerator implements CameraEnumerator {
  private final static String TAG = "Camera2Enumerator";
  private final static double NANO_SECONDS_PER_SECOND = 1.0e9;

  // Each entry contains the supported formats for a given camera index. The formats are enumerated
  // lazily in getSupportedFormats(), and cached for future reference.
  private static final Map<String, List<CaptureFormat>> cachedSupportedFormats =
      new HashMap<String, List<CaptureFormat>>();

  final Context context;
  @Nullable final CameraManager cameraManager;

  public Camera2Enumerator(Context context) {
    this.context = context;
    this.cameraManager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
  }

  @Override
  public String[] getDeviceNames() {
    try {
      return cameraManager.getCameraIdList();
      // On Android OS pre 4.4.2, a class will not load because of VerifyError if it contains a
      // catch statement with an Exception from a newer API, even if the code is never executed.
      // https://code.google.com/p/android/issues/detail?id=209129
    } catch (/* CameraAccessException */ AndroidException e) {
      Logging.e(TAG, "Camera access exception: " + e);
      return new String[] {};
    }
  }

  @Override
  public boolean isFrontFacing(String deviceName) {
    CameraCharacteristics characteristics = getCameraCharacteristics(deviceName);

    return characteristics != null
        && characteristics.get(CameraCharacteristics.LENS_FACING)
        == CameraMetadata.LENS_FACING_FRONT;
  }

  @Override
  public boolean isBackFacing(String deviceName) {
    CameraCharacteristics characteristics = getCameraCharacteristics(deviceName);

    return characteristics != null
        && characteristics.get(CameraCharacteristics.LENS_FACING)
        == CameraMetadata.LENS_FACING_BACK;
  }

  @Nullable
  @Override
  public List<CaptureFormat> getSupportedFormats(String deviceName) {
    return getSupportedFormats(context, deviceName);
  }

  @Override
  public CameraVideoCapturer createCapturer(
      String deviceName, CameraVideoCapturer.CameraEventsHandler eventsHandler) {
    return new Camera2Capturer(context, deviceName, eventsHandler);
  }

  private @Nullable CameraCharacteristics getCameraCharacteristics(String deviceName) {
    try {
      return cameraManager.getCameraCharacteristics(deviceName);
      // On Android OS pre 4.4.2, a class will not load because of VerifyError if it contains a
      // catch statement with an Exception from a newer API, even if the code is never executed.
      // https://code.google.com/p/android/issues/detail?id=209129
    } catch (/* CameraAccessException */ AndroidException e) {
      Logging.e(TAG, "Camera access exception: " + e);
      return null;
    }
  }

  /**
   * Checks if API is supported and all cameras have better than legacy support.
   */
  public static boolean isSupported(Context context) {
    if (Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP) {
      return false;
    }

    CameraManager cameraManager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
    try {
      String[] cameraIds = cameraManager.getCameraIdList();
      for (String id : cameraIds) {
        CameraCharacteristics characteristics = cameraManager.getCameraCharacteristics(id);
        if (characteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL)
            == CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY) {
          return false;
        }
      }
      // On Android OS pre 4.4.2, a class will not load because of VerifyError if it contains a
      // catch statement with an Exception from a newer API, even if the code is never executed.
      // https://code.google.com/p/android/issues/detail?id=209129
    } catch (/* CameraAccessException */ AndroidException e) {
      Logging.e(TAG, "Camera access exception: " + e);
      return false;
    }
    return true;
  }

  static int getFpsUnitFactor(Range<Integer>[] fpsRanges) {
    if (fpsRanges.length == 0) {
      return 1000;
    }
    return fpsRanges[0].getUpper() < 1000 ? 1000 : 1;
  }

  static List<Size> getSupportedSizes(CameraCharacteristics cameraCharacteristics) {
    final StreamConfigurationMap streamMap =
        cameraCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
    final int supportLevel =
        cameraCharacteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL);

    final android.util.Size[] nativeSizes = streamMap.getOutputSizes(SurfaceTexture.class);
    final List<Size> sizes = convertSizes(nativeSizes);

    // Video may be stretched pre LMR1 on legacy implementations.
    // Filter out formats that have different aspect ratio than the sensor array.
    if (Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP_MR1
        && supportLevel == CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY) {
      final Rect activeArraySize =
          cameraCharacteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
      final ArrayList<Size> filteredSizes = new ArrayList<Size>();

      for (Size size : sizes) {
        if (activeArraySize.width() * size.height == activeArraySize.height() * size.width) {
          filteredSizes.add(size);
        }
      }

      return filteredSizes;
    } else {
      return sizes;
    }
  }

  @Nullable
  static List<CaptureFormat> getSupportedFormats(Context context, String cameraId) {
    return getSupportedFormats(
        (CameraManager) context.getSystemService(Context.CAMERA_SERVICE), cameraId);
  }

  @Nullable
  static List<CaptureFormat> getSupportedFormats(CameraManager cameraManager, String cameraId) {
    synchronized (cachedSupportedFormats) {
      if (cachedSupportedFormats.containsKey(cameraId)) {
        return cachedSupportedFormats.get(cameraId);
      }

      Logging.d(TAG, "Get supported formats for camera index " + cameraId + ".");
      final long startTimeMs = SystemClock.elapsedRealtime();

      final CameraCharacteristics cameraCharacteristics;
      try {
        cameraCharacteristics = cameraManager.getCameraCharacteristics(cameraId);
      } catch (Exception ex) {
        Logging.e(TAG, "getCameraCharacteristics(): " + ex);
        return new ArrayList<CaptureFormat>();
      }

      final StreamConfigurationMap streamMap =
          cameraCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);

      Range<Integer>[] fpsRanges =
          cameraCharacteristics.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES);
      List<CaptureFormat.FramerateRange> framerateRanges =
          convertFramerates(fpsRanges, getFpsUnitFactor(fpsRanges));
      List<Size> sizes = getSupportedSizes(cameraCharacteristics);

      int defaultMaxFps = 0;
      for (CaptureFormat.FramerateRange framerateRange : framerateRanges) {
        defaultMaxFps = Math.max(defaultMaxFps, framerateRange.max);
      }

      final List<CaptureFormat> formatList = new ArrayList<CaptureFormat>();
      for (Size size : sizes) {
        long minFrameDurationNs = 0;
        try {
          minFrameDurationNs = streamMap.getOutputMinFrameDuration(
              SurfaceTexture.class, new android.util.Size(size.width, size.height));
        } catch (Exception e) {
          // getOutputMinFrameDuration() is not supported on all devices. Ignore silently.
        }
        final int maxFps = (minFrameDurationNs == 0)
            ? defaultMaxFps
            : (int) Math.round(NANO_SECONDS_PER_SECOND / minFrameDurationNs) * 1000;
        formatList.add(new CaptureFormat(size.width, size.height, 0, maxFps));
        Logging.d(TAG, "Format: " + size.width + "x" + size.height + "@" + maxFps);
      }

      cachedSupportedFormats.put(cameraId, formatList);
      final long endTimeMs = SystemClock.elapsedRealtime();
      Logging.d(TAG, "Get supported formats for camera index " + cameraId + " done."
              + " Time spent: " + (endTimeMs - startTimeMs) + " ms.");
      return formatList;
    }
  }

  // Convert from android.util.Size to Size.
  private static List<Size> convertSizes(android.util.Size[] cameraSizes) {
    final List<Size> sizes = new ArrayList<Size>();
    for (android.util.Size size : cameraSizes) {
      sizes.add(new Size(size.getWidth(), size.getHeight()));
    }
    return sizes;
  }

  // Convert from android.util.Range<Integer> to CaptureFormat.FramerateRange.
  static List<CaptureFormat.FramerateRange> convertFramerates(
      Range<Integer>[] arrayRanges, int unitFactor) {
    final List<CaptureFormat.FramerateRange> ranges = new ArrayList<CaptureFormat.FramerateRange>();
    for (Range<Integer> range : arrayRanges) {
      ranges.add(new CaptureFormat.FramerateRange(
          range.getLower() * unitFactor, range.getUpper() * unitFactor));
    }
    return ranges;
  }
}
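A hedged selection sketch (not part of the imported file): the static isSupported() check above is the usual way to decide between the Camera2 and Camera1 paths. `appContext` is an assumed application Context and the device is assumed to report at least one camera.

// Illustrative only: prefer Camera2 when every camera offers better-than-LEGACY
// support, otherwise fall back to Camera1.
CameraEnumerator enumerator = Camera2Enumerator.isSupported(appContext)
    ? new Camera2Enumerator(appContext)
    : new Camera1Enumerator(/* captureToTexture= */ true);
CameraVideoCapturer capturer =
    enumerator.createCapturer(enumerator.getDeviceNames()[0], /* eventsHandler= */ null);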
@@ -0,0 +1,206 @@
/*
 * Copyright 2015 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

package org.webrtc;

import static java.lang.Math.abs;

import android.graphics.ImageFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;

@SuppressWarnings("deprecation")
public class CameraEnumerationAndroid {
  private final static String TAG = "CameraEnumerationAndroid";

  static final ArrayList<Size> COMMON_RESOLUTIONS = new ArrayList<Size>(Arrays.asList(
      // 0, Unknown resolution
      new Size(160, 120), // 1, QQVGA
      new Size(240, 160), // 2, HQVGA
      new Size(320, 240), // 3, QVGA
      new Size(400, 240), // 4, WQVGA
      new Size(480, 320), // 5, HVGA
      new Size(640, 360), // 6, nHD
      new Size(640, 480), // 7, VGA
      new Size(768, 480), // 8, WVGA
      new Size(854, 480), // 9, FWVGA
      new Size(800, 600), // 10, SVGA
      new Size(960, 540), // 11, qHD
      new Size(960, 640), // 12, DVGA
      new Size(1024, 576), // 13, WSVGA
      new Size(1024, 600), // 14, WVSGA
      new Size(1280, 720), // 15, HD
      new Size(1280, 1024), // 16, SXGA
      new Size(1920, 1080), // 17, Full HD
      new Size(1920, 1440), // 18, Full HD 4:3
      new Size(2560, 1440), // 19, QHD
      new Size(3840, 2160) // 20, UHD
      ));

  public static class CaptureFormat {
    // Class to represent a framerate range. The framerate varies because of lightning conditions.
    // The values are multiplied by 1000, so 1000 represents one frame per second.
    public static class FramerateRange {
      public int min;
      public int max;

      public FramerateRange(int min, int max) {
        this.min = min;
        this.max = max;
      }

      @Override
      public String toString() {
        return "[" + (min / 1000.0f) + ":" + (max / 1000.0f) + "]";
      }

      @Override
      public boolean equals(Object other) {
        if (!(other instanceof FramerateRange)) {
          return false;
        }
        final FramerateRange otherFramerate = (FramerateRange) other;
        return min == otherFramerate.min && max == otherFramerate.max;
      }

      @Override
      public int hashCode() {
        // Use prime close to 2^16 to avoid collisions for normal values less than 2^16.
        return 1 + 65537 * min + max;
      }
    }

    public final int width;
    public final int height;
    public final FramerateRange framerate;

    // TODO(hbos): If VideoCapturer.startCapture is updated to support other image formats then this
    // needs to be updated and VideoCapturer.getSupportedFormats need to return CaptureFormats of
    // all imageFormats.
    public final int imageFormat = ImageFormat.NV21;

    public CaptureFormat(int width, int height, int minFramerate, int maxFramerate) {
      this.width = width;
      this.height = height;
      this.framerate = new FramerateRange(minFramerate, maxFramerate);
    }

    public CaptureFormat(int width, int height, FramerateRange framerate) {
      this.width = width;
      this.height = height;
      this.framerate = framerate;
    }

    // Calculates the frame size of this capture format.
    public int frameSize() {
      return frameSize(width, height, imageFormat);
    }

    // Calculates the frame size of the specified image format. Currently only
    // supporting ImageFormat.NV21.
    // The size is width * height * number of bytes per pixel.
    // http://developer.android.com/reference/android/hardware/Camera.html#addCallbackBuffer(byte[])
    public static int frameSize(int width, int height, int imageFormat) {
      if (imageFormat != ImageFormat.NV21) {
        throw new UnsupportedOperationException("Don't know how to calculate "
            + "the frame size of non-NV21 image formats.");
      }
      return (width * height * ImageFormat.getBitsPerPixel(imageFormat)) / 8;
    }

    @Override
    public String toString() {
      return width + "x" + height + "@" + framerate;
    }

    @Override
    public boolean equals(Object other) {
      if (!(other instanceof CaptureFormat)) {
        return false;
      }
      final CaptureFormat otherFormat = (CaptureFormat) other;
      return width == otherFormat.width && height == otherFormat.height
          && framerate.equals(otherFormat.framerate);
    }

    @Override
    public int hashCode() {
      return 1 + (width * 65497 + height) * 251 + framerate.hashCode();
    }
  }

  // Helper class for finding the closest supported format for the two functions below. It creates a
  // comparator based on the difference to some requested parameters, where the element with the
  // minimum difference is the element that is closest to the requested parameters.
  private static abstract class ClosestComparator<T> implements Comparator<T> {
    // Difference between supported and requested parameter.
    abstract int diff(T supportedParameter);

    @Override
    public int compare(T t1, T t2) {
      return diff(t1) - diff(t2);
    }
  }

  // Prefer a fps range with an upper bound close to |framerate|. Also prefer a fps range with a low
  // lower bound, to allow the framerate to fluctuate based on lightning conditions.
  public static CaptureFormat.FramerateRange getClosestSupportedFramerateRange(
      List<CaptureFormat.FramerateRange> supportedFramerates, final int requestedFps) {
    return Collections.min(
        supportedFramerates, new ClosestComparator<CaptureFormat.FramerateRange>() {
          // Progressive penalty if the upper bound is further away than |MAX_FPS_DIFF_THRESHOLD|
          // from requested.
          private static final int MAX_FPS_DIFF_THRESHOLD = 5000;
          private static final int MAX_FPS_LOW_DIFF_WEIGHT = 1;
          private static final int MAX_FPS_HIGH_DIFF_WEIGHT = 3;

          // Progressive penalty if the lower bound is bigger than |MIN_FPS_THRESHOLD|.
          private static final int MIN_FPS_THRESHOLD = 8000;
          private static final int MIN_FPS_LOW_VALUE_WEIGHT = 1;
          private static final int MIN_FPS_HIGH_VALUE_WEIGHT = 4;

          // Use one weight for small |value| less than |threshold|, and another weight above.
          private int progressivePenalty(int value, int threshold, int lowWeight, int highWeight) {
            return (value < threshold) ? value * lowWeight
                                       : threshold * lowWeight + (value - threshold) * highWeight;
          }

          @Override
          int diff(CaptureFormat.FramerateRange range) {
            final int minFpsError = progressivePenalty(
                range.min, MIN_FPS_THRESHOLD, MIN_FPS_LOW_VALUE_WEIGHT, MIN_FPS_HIGH_VALUE_WEIGHT);
            final int maxFpsError = progressivePenalty(Math.abs(requestedFps * 1000 - range.max),
                MAX_FPS_DIFF_THRESHOLD, MAX_FPS_LOW_DIFF_WEIGHT, MAX_FPS_HIGH_DIFF_WEIGHT);
            return minFpsError + maxFpsError;
          }
        });
  }

  public static Size getClosestSupportedSize(
      List<Size> supportedSizes, final int requestedWidth, final int requestedHeight) {
    return Collections.min(supportedSizes, new ClosestComparator<Size>() {
      @Override
      int diff(Size size) {
        return abs(requestedWidth - size.width) + abs(requestedHeight - size.height);
      }
    });
  }

  // Helper method for camera classes.
  static void reportCameraResolution(Histogram histogram, Size resolution) {
    int index = COMMON_RESOLUTIONS.indexOf(resolution);
    // 0 is reserved for unknown resolution, so add 1.
    // indexOf returns -1 for unknown resolutions so it becomes 0 automatically.
    histogram.addSample(index + 1);
  }
}
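A usage sketch of the two "closest" helpers above (not part of the imported file). It assumes an `enumerator` and `deviceName` obtained as in the earlier sketches; note that FramerateRange values are scaled by 1000 internally, while the requested fps argument is in plain frames per second.

// Illustrative only: pick the size and framerate range closest to 640x480 @ 30 fps.
List<CameraEnumerationAndroid.CaptureFormat> formats =
    enumerator.getSupportedFormats(deviceName);
List<CameraEnumerationAndroid.CaptureFormat.FramerateRange> fpsRanges = new ArrayList<>();
List<Size> sizes = new ArrayList<>();
for (CameraEnumerationAndroid.CaptureFormat format : formats) {
  fpsRanges.add(format.framerate);
  sizes.add(new Size(format.width, format.height));
}
Size closestSize = CameraEnumerationAndroid.getClosestSupportedSize(sizes, 640, 480);
CameraEnumerationAndroid.CaptureFormat.FramerateRange closestFps =
    CameraEnumerationAndroid.getClosestSupportedFramerateRange(fpsRanges, 30);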
@@ -0,0 +1,25 @@
/*
 * Copyright 2016 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

package org.webrtc;

import org.webrtc.CameraEnumerationAndroid.CaptureFormat;

import java.util.List;

public interface CameraEnumerator {
  public String[] getDeviceNames();
  public boolean isFrontFacing(String deviceName);
  public boolean isBackFacing(String deviceName);
  public List<CaptureFormat> getSupportedFormats(String deviceName);

  public CameraVideoCapturer createCapturer(
      String deviceName, CameraVideoCapturer.CameraEventsHandler eventsHandler);
}
@@ -0,0 +1,166 @@
/*
 * Copyright 2016 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

package org.webrtc;

import android.media.MediaRecorder;

/**
 * Base interface for camera1 and camera2 implementations. Extends VideoCapturer with a
 * switchCamera() function. Also provides subinterfaces for handling camera events, and a helper
 * class for detecting camera freezes.
 */
public interface CameraVideoCapturer extends VideoCapturer {
  /**
   * Camera events handler - can be used to be notifed about camera events. The callbacks are
   * executed from an arbitrary thread.
   */
  public interface CameraEventsHandler {
    // Camera error handler - invoked when camera can not be opened
    // or any camera exception happens on camera thread.
    void onCameraError(String errorDescription);

    // Called when camera is disconnected.
    void onCameraDisconnected();

    // Invoked when camera stops receiving frames.
    void onCameraFreezed(String errorDescription);

    // Callback invoked when camera is opening.
    void onCameraOpening(String cameraName);

    // Callback invoked when first camera frame is available after camera is started.
    void onFirstFrameAvailable();

    // Callback invoked when camera is closed.
    void onCameraClosed();
  }

  /**
   * Camera switch handler - one of these functions are invoked with the result of switchCamera().
   * The callback may be called on an arbitrary thread.
   */
  public interface CameraSwitchHandler {
    // Invoked on success. |isFrontCamera| is true if the new camera is front facing.
    void onCameraSwitchDone(boolean isFrontCamera);

    // Invoked on failure, e.g. camera is stopped or only one camera available.
    void onCameraSwitchError(String errorDescription);
  }

  /**
   * Switch camera to the next valid camera id. This can only be called while the camera is running.
   * This function can be called from any thread.
   */
  void switchCamera(CameraSwitchHandler switchEventsHandler);

  /**
   * MediaRecorder add/remove handler - one of these functions are invoked with the result of
   * addMediaRecorderToCamera() or removeMediaRecorderFromCamera calls.
   * The callback may be called on an arbitrary thread.
   */
  @Deprecated
  public interface MediaRecorderHandler {
    // Invoked on success.
    void onMediaRecorderSuccess();

    // Invoked on failure, e.g. camera is stopped or any exception happens.
    void onMediaRecorderError(String errorDescription);
  }

  /**
   * Add MediaRecorder to camera pipeline. This can only be called while the camera is running.
   * Once MediaRecorder is added to camera pipeline camera switch is not allowed.
   * This function can be called from any thread.
   */
  @Deprecated
  default void addMediaRecorderToCamera(
      MediaRecorder mediaRecorder, MediaRecorderHandler resultHandler) {
    throw new UnsupportedOperationException("Deprecated and not implemented.");
  }

  /**
   * Remove MediaRecorder from camera pipeline. This can only be called while the camera is running.
   * This function can be called from any thread.
   */
  @Deprecated
  default void removeMediaRecorderFromCamera(MediaRecorderHandler resultHandler) {
    throw new UnsupportedOperationException("Deprecated and not implemented.");
  }

  /**
   * Helper class to log framerate and detect if the camera freezes. It will run periodic callbacks
   * on the SurfaceTextureHelper thread passed in the ctor, and should only be operated from that
   * thread.
   */
  public static class CameraStatistics {
    private final static String TAG = "CameraStatistics";
    private final static int CAMERA_OBSERVER_PERIOD_MS = 2000;
    private final static int CAMERA_FREEZE_REPORT_TIMOUT_MS = 4000;

    private final SurfaceTextureHelper surfaceTextureHelper;
    private final CameraEventsHandler eventsHandler;
    private int frameCount;
    private int freezePeriodCount;
    // Camera observer - monitors camera framerate. Observer is executed on camera thread.
    private final Runnable cameraObserver = new Runnable() {
      @Override
      public void run() {
        final int cameraFps = Math.round(frameCount * 1000.0f / CAMERA_OBSERVER_PERIOD_MS);
        Logging.d(TAG, "Camera fps: " + cameraFps + ".");
        if (frameCount == 0) {
          ++freezePeriodCount;
          if (CAMERA_OBSERVER_PERIOD_MS * freezePeriodCount >= CAMERA_FREEZE_REPORT_TIMOUT_MS
              && eventsHandler != null) {
            Logging.e(TAG, "Camera freezed.");
            if (surfaceTextureHelper.isTextureInUse()) {
              // This can only happen if we are capturing to textures.
              eventsHandler.onCameraFreezed("Camera failure. Client must return video buffers.");
            } else {
              eventsHandler.onCameraFreezed("Camera failure.");
            }
            return;
          }
        } else {
          freezePeriodCount = 0;
        }
        frameCount = 0;
        surfaceTextureHelper.getHandler().postDelayed(this, CAMERA_OBSERVER_PERIOD_MS);
      }
    };

    public CameraStatistics(
        SurfaceTextureHelper surfaceTextureHelper, CameraEventsHandler eventsHandler) {
      if (surfaceTextureHelper == null) {
        throw new IllegalArgumentException("SurfaceTextureHelper is null");
      }
      this.surfaceTextureHelper = surfaceTextureHelper;
      this.eventsHandler = eventsHandler;
      this.frameCount = 0;
      this.freezePeriodCount = 0;
      surfaceTextureHelper.getHandler().postDelayed(cameraObserver, CAMERA_OBSERVER_PERIOD_MS);
    }

    private void checkThread() {
      if (Thread.currentThread() != surfaceTextureHelper.getHandler().getLooper().getThread()) {
        throw new IllegalStateException("Wrong thread");
      }
    }

    public void addFrame() {
      checkThread();
      ++frameCount;
    }

    public void release() {
      surfaceTextureHelper.getHandler().removeCallbacks(cameraObserver);
    }
  }
}
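A short usage sketch of switchCamera() (not part of the imported file); `capturer` is an assumed, already running CameraVideoCapturer, and the handler may be invoked on an arbitrary thread as documented above.

// Illustrative only: switch to the next camera and log the outcome.
capturer.switchCamera(new CameraVideoCapturer.CameraSwitchHandler() {
  @Override
  public void onCameraSwitchDone(boolean isFrontCamera) {
    Logging.d("Example", "Now using the " + (isFrontCamera ? "front" : "back") + " camera");
  }

  @Override
  public void onCameraSwitchError(String errorDescription) {
    Logging.e("Example", "Camera switch failed: " + errorDescription);
  }
});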
@@ -0,0 +1,27 @@
/*
 * Copyright 2018 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

package org.webrtc;

/**
 * Interface for observering a capturer. Passed to {@link VideoCapturer#initialize}. Provided by
 * {@link VideoSource#getCapturerObserver}.
 *
 * All callbacks must be executed on a single thread.
 */
public interface CapturerObserver {
  /** Notify if the capturer have been started successfully or not. */
  void onCapturerStarted(boolean success);
  /** Notify that the capturer has been stopped. */
  void onCapturerStopped();

  /** Delivers a captured frame. */
  void onFrameCaptured(VideoFrame frame);
}
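A minimal sketch of an observer implementation (not part of the imported file); in real use the observer normally comes from VideoSource.getCapturerObserver(), a class that is not part of this diff, so the counting observer below is purely illustrative.

// Illustrative only: a trivial CapturerObserver that counts delivered frames.
CapturerObserver countingObserver = new CapturerObserver() {
  private int frames;

  @Override
  public void onCapturerStarted(boolean success) {
    Logging.d("Example", "Capturer started: " + success);
  }

  @Override
  public void onCapturerStopped() {
    Logging.d("Example", "Capturer stopped after " + frames + " frames");
  }

  @Override
  public void onFrameCaptured(VideoFrame frame) {
    ++frames;
  }
};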
@@ -0,0 +1,257 @@
/*
 * Copyright 2015 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

package org.webrtc;

import android.graphics.SurfaceTexture;
import android.support.annotation.Nullable;
import android.view.Surface;
import java.util.ArrayList;
import javax.microedition.khronos.egl.EGL10;

/**
 * Holds EGL state and utility methods for handling an egl 1.0 EGLContext, an EGLDisplay,
 * and an EGLSurface.
 */
public interface EglBase {
  // EGL wrapper for an actual EGLContext.
  public interface Context {
    public final static long NO_CONTEXT = 0;

    /**
     * Returns an EGL context that can be used by native code. Returns NO_CONTEXT if the method is
     * unsupported.
     *
     * @note This is currently only supported for EGL 1.4 and not for EGL 1.0.
     */
    long getNativeEglContext();
  }

  // According to the documentation, EGL can be used from multiple threads at the same time if each
  // thread has its own EGLContext, but in practice it deadlocks on some devices when doing this.
  // Therefore, synchronize on this global lock before calling dangerous EGL functions that might
  // deadlock. See https://bugs.chromium.org/p/webrtc/issues/detail?id=5702 for more info.
  public static final Object lock = new Object();

  // These constants are taken from EGL14.EGL_OPENGL_ES2_BIT and EGL14.EGL_CONTEXT_CLIENT_VERSION.
  // https://android.googlesource.com/platform/frameworks/base/+/master/opengl/java/android/opengl/EGL14.java
  // This is similar to how GlSurfaceView does:
  // http://grepcode.com/file/repository.grepcode.com/java/ext/com.google.android/android/5.1.1_r1/android/opengl/GLSurfaceView.java#760
  public static final int EGL_OPENGL_ES2_BIT = 4;
  public static final int EGL_OPENGL_ES3_BIT = 0x40;
  // Android-specific extension.
  public static final int EGL_RECORDABLE_ANDROID = 0x3142;

  public static ConfigBuilder configBuilder() {
    return new ConfigBuilder();
  }

  public static class ConfigBuilder {
    private int openGlesVersion = 2;
    private boolean hasAlphaChannel;
    private boolean supportsPixelBuffer;
    private boolean isRecordable;

    public ConfigBuilder setOpenGlesVersion(int version) {
      if (version < 1 || version > 3) {
        throw new IllegalArgumentException("OpenGL ES version " + version + " not supported");
      }
      this.openGlesVersion = version;
      return this;
    }

    public ConfigBuilder setHasAlphaChannel(boolean hasAlphaChannel) {
      this.hasAlphaChannel = hasAlphaChannel;
      return this;
    }

    public ConfigBuilder setSupportsPixelBuffer(boolean supportsPixelBuffer) {
      this.supportsPixelBuffer = supportsPixelBuffer;
      return this;
    }

    public ConfigBuilder setIsRecordable(boolean isRecordable) {
      this.isRecordable = isRecordable;
      return this;
    }

    public int[] createConfigAttributes() {
      ArrayList<Integer> list = new ArrayList<>();
      list.add(EGL10.EGL_RED_SIZE);
      list.add(8);
      list.add(EGL10.EGL_GREEN_SIZE);
      list.add(8);
      list.add(EGL10.EGL_BLUE_SIZE);
      list.add(8);
      if (hasAlphaChannel) {
        list.add(EGL10.EGL_ALPHA_SIZE);
        list.add(8);
      }
      if (openGlesVersion == 2 || openGlesVersion == 3) {
        list.add(EGL10.EGL_RENDERABLE_TYPE);
        list.add(openGlesVersion == 3 ? EGL_OPENGL_ES3_BIT : EGL_OPENGL_ES2_BIT);
      }
      if (supportsPixelBuffer) {
        list.add(EGL10.EGL_SURFACE_TYPE);
        list.add(EGL10.EGL_PBUFFER_BIT);
      }
      if (isRecordable) {
        list.add(EGL_RECORDABLE_ANDROID);
        list.add(1);
      }
      list.add(EGL10.EGL_NONE);

      final int[] res = new int[list.size()];
      for (int i = 0; i < list.size(); ++i) {
        res[i] = list.get(i);
      }
      return res;
    }
  }

  public static final int[] CONFIG_PLAIN = configBuilder().createConfigAttributes();
  public static final int[] CONFIG_RGBA =
      configBuilder().setHasAlphaChannel(true).createConfigAttributes();
  public static final int[] CONFIG_PIXEL_BUFFER =
      configBuilder().setSupportsPixelBuffer(true).createConfigAttributes();
  public static final int[] CONFIG_PIXEL_RGBA_BUFFER = configBuilder()
                                                           .setHasAlphaChannel(true)
                                                           .setSupportsPixelBuffer(true)
                                                           .createConfigAttributes();
  public static final int[] CONFIG_RECORDABLE =
      configBuilder().setIsRecordable(true).createConfigAttributes();

  static int getOpenGlesVersionFromConfig(int[] configAttributes) {
    for (int i = 0; i < configAttributes.length - 1; ++i) {
      if (configAttributes[i] == EGL10.EGL_RENDERABLE_TYPE) {
        switch (configAttributes[i + 1]) {
          case EGL_OPENGL_ES2_BIT:
            return 2;
          case EGL_OPENGL_ES3_BIT:
            return 3;
          default:
            return 1;
        }
      }
    }
    // Default to V1 if no renderable type is specified.
    return 1;
  }

  /**
   * Create a new context with the specified config attributes, sharing data with |sharedContext|.
   * If |sharedContext| is null, a root context is created. This function will try to create an EGL
   * 1.4 context if possible, and an EGL 1.0 context otherwise.
   */
  public static EglBase create(@Nullable Context sharedContext, int[] configAttributes) {
    if (sharedContext == null) {
      return EglBase14Impl.isEGL14Supported() ? createEgl14(configAttributes)
                                              : createEgl10(configAttributes);
    } else if (sharedContext instanceof EglBase14.Context) {
      return createEgl14((EglBase14.Context) sharedContext, configAttributes);
    } else if (sharedContext instanceof EglBase10.Context) {
      return createEgl10((EglBase10.Context) sharedContext, configAttributes);
    }
    throw new IllegalArgumentException("Unrecognized Context");
  }

  /**
   * Helper function for creating a plain root context. This function will try to create an EGL 1.4
   * context if possible, and an EGL 1.0 context otherwise.
   */
  public static EglBase create() {
    return create(null /* shaderContext */, CONFIG_PLAIN);
  }

  /**
   * Helper function for creating a plain context, sharing data with |sharedContext|. This function
   * will try to create an EGL 1.4 context if possible, and an EGL 1.0 context otherwise.
   */
  public static EglBase create(Context sharedContext) {
    return create(sharedContext, CONFIG_PLAIN);
  }

  /** Explicitly create a root EGl 1.0 context with the specified config attributes. */
  public static EglBase10 createEgl10(int[] configAttributes) {
    return new EglBase10Impl(/* sharedContext= */ null, configAttributes);
  }

  /**
   * Explicitly create a root EGl 1.0 context with the specified config attributes and shared
   * context.
   */
  public static EglBase10 createEgl10(EglBase10.Context sharedContext, int[] configAttributes) {
    return new EglBase10Impl(
        sharedContext == null ? null : sharedContext.getRawContext(), configAttributes);
  }

  /**
   * Explicitly create a root EGl 1.0 context with the specified config attributes
   * and shared context.
   */
  public static EglBase10 createEgl10(
      javax.microedition.khronos.egl.EGLContext sharedContext, int[] configAttributes) {
    return new EglBase10Impl(sharedContext, configAttributes);
  }

  /** Explicitly create a root EGl 1.4 context with the specified config attributes. */
  public static EglBase14 createEgl14(int[] configAttributes) {
    return new EglBase14Impl(/* sharedContext= */ null, configAttributes);
  }

  /**
   * Explicitly create a root EGl 1.4 context with the specified config attributes and shared
   * context.
   */
  public static EglBase14 createEgl14(EglBase14.Context sharedContext, int[] configAttributes) {
    return new EglBase14Impl(
        sharedContext == null ? null : sharedContext.getRawContext(), configAttributes);
  }

  /**
   * Explicitly create a root EGl 1.4 context with the specified config attributes
   * and shared context.
   */
  public static EglBase14 createEgl14(
      android.opengl.EGLContext sharedContext, int[] configAttributes) {
    return new EglBase14Impl(sharedContext, configAttributes);
  }

  void createSurface(Surface surface);

  // Create EGLSurface from the Android SurfaceTexture.
  void createSurface(SurfaceTexture surfaceTexture);

  // Create dummy 1x1 pixel buffer surface so the context can be made current.
  void createDummyPbufferSurface();

  void createPbufferSurface(int width, int height);

  Context getEglBaseContext();

  boolean hasSurface();

  int surfaceWidth();

  int surfaceHeight();

  void releaseSurface();

  void release();

  void makeCurrent();

  // Detach the current EGL context, so that it can be made current on another thread.
  void detachCurrent();

  void swapBuffers();

  void swapBuffers(long presentationTimeStampNs);
}
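A minimal lifecycle sketch for the interface above (not part of the imported file), using only the factory methods and surface calls declared in EglBase.

// Illustrative only: create a root context (EGL 1.4 when supported, EGL 1.0
// otherwise), bind a 1x1 pbuffer surface so GL calls are legal on this thread,
// then tear everything down.
EglBase eglBase = EglBase.create();
eglBase.createDummyPbufferSurface();
eglBase.makeCurrent();
// ... issue GLES20 calls here ...
eglBase.detachCurrent();
eglBase.releaseSurface();
eglBase.release();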
@@ -0,0 +1,20 @@
/*
 * Copyright 2019 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

package org.webrtc;

import javax.microedition.khronos.egl.EGLContext;

/** EGL 1.0 implementation of EglBase. */
public interface EglBase10 extends EglBase {
  interface Context extends EglBase.Context {
    EGLContext getRawContext();
  }
}
@@ -0,0 +1,20 @@
/*
 * Copyright 2019 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

package org.webrtc;

import android.opengl.EGLContext;

/** EGL 1.4 implementation of EglBase. */
public interface EglBase14 extends EglBase {
  interface Context extends EglBase.Context {
    EGLContext getRawContext();
  }
}
@ -0,0 +1,129 @@
|
|||
/*
|
||||
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.webrtc;
|
||||
|
||||
import android.opengl.GLES20;
|
||||
|
||||
import java.nio.FloatBuffer;
|
||||
|
||||
// Helper class for handling OpenGL shaders and shader programs.
|
||||
public class GlShader {
|
||||
private static final String TAG = "GlShader";
|
||||
|
||||
private static int compileShader(int shaderType, String source) {
|
||||
final int shader = GLES20.glCreateShader(shaderType);
|
||||
if (shader == 0) {
|
||||
throw new RuntimeException("glCreateShader() failed. GLES20 error: " + GLES20.glGetError());
|
||||
}
|
||||
GLES20.glShaderSource(shader, source);
|
||||
GLES20.glCompileShader(shader);
|
||||
int[] compileStatus = new int[] {GLES20.GL_FALSE};
|
||||
GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compileStatus, 0);
|
||||
if (compileStatus[0] != GLES20.GL_TRUE) {
|
||||
Logging.e(
|
||||
TAG, "Compile error " + GLES20.glGetShaderInfoLog(shader) + " in shader:\n" + source);
|
||||
throw new RuntimeException(GLES20.glGetShaderInfoLog(shader));
|
||||
}
|
||||
GlUtil.checkNoGLES2Error("compileShader");
|
||||
return shader;
|
||||
}
|
||||
|
||||
private int program;
|
||||
|
||||
public GlShader(String vertexSource, String fragmentSource) {
|
||||
final int vertexShader = compileShader(GLES20.GL_VERTEX_SHADER, vertexSource);
|
||||
final int fragmentShader = compileShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
|
||||
program = GLES20.glCreateProgram();
|
||||
if (program == 0) {
|
||||
throw new RuntimeException("glCreateProgram() failed. GLES20 error: " + GLES20.glGetError());
|
||||
}
|
||||
GLES20.glAttachShader(program, vertexShader);
|
||||
GLES20.glAttachShader(program, fragmentShader);
|
||||
GLES20.glLinkProgram(program);
|
||||
int[] linkStatus = new int[] {GLES20.GL_FALSE};
|
||||
GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
|
||||
if (linkStatus[0] != GLES20.GL_TRUE) {
|
||||
Logging.e(TAG, "Could not link program: " + GLES20.glGetProgramInfoLog(program));
|
||||
throw new RuntimeException(GLES20.glGetProgramInfoLog(program));
|
||||
}
|
||||
// According to the documentation of glLinkProgram():
|
||||
// "After the link operation, applications are free to modify attached shader objects, compile
|
||||
// attached shader objects, detach shader objects, delete shader objects, and attach additional
|
||||
// shader objects. None of these operations affects the information log or the program that is
|
||||
// part of the program object."
|
||||
// But in practice, detaching shaders from the program seems to break some devices. Deleting the
|
||||
// shaders are fine however - it will delete them when they are no longer attached to a program.
|
||||
GLES20.glDeleteShader(vertexShader);
|
||||
GLES20.glDeleteShader(fragmentShader);
|
||||
GlUtil.checkNoGLES2Error("Creating GlShader");
|
||||
}
|
||||
|
||||
public int getAttribLocation(String label) {
|
||||
if (program == -1) {
|
||||
throw new RuntimeException("The program has been released");
|
||||
}
|
||||
int location = GLES20.glGetAttribLocation(program, label);
|
||||
if (location < 0) {
|
||||
throw new RuntimeException("Could not locate '" + label + "' in program");
|
||||
}
|
||||
return location;
|
||||
}
|
||||
|
||||
/**
|
||||
* Enable and upload a vertex array for attribute |label|. The vertex data is specified in
|
||||
* |buffer| with |dimension| number of components per vertex.
|
||||
*/
|
||||
public void setVertexAttribArray(String label, int dimension, FloatBuffer buffer) {
|
||||
setVertexAttribArray(label, dimension, 0 /* stride */, buffer);
|
||||
}
|
||||
|
||||
/**
|
||||
* Enable and upload a vertex array for attribute |label|. The vertex data is specified in
|
||||
* |buffer| with |dimension| number of components per vertex and specified |stride|.
|
||||
*/
|
||||
public void setVertexAttribArray(String label, int dimension, int stride, FloatBuffer buffer) {
|
||||
if (program == -1) {
|
||||
throw new RuntimeException("The program has been released");
|
||||
}
|
||||
int location = getAttribLocation(label);
|
||||
GLES20.glEnableVertexAttribArray(location);
|
||||
GLES20.glVertexAttribPointer(location, dimension, GLES20.GL_FLOAT, false, stride, buffer);
|
||||
GlUtil.checkNoGLES2Error("setVertexAttribArray");
|
||||
}
|
||||
|
||||
public int getUniformLocation(String label) {
|
||||
if (program == -1) {
|
||||
throw new RuntimeException("The program has been released");
|
||||
}
|
||||
int location = GLES20.glGetUniformLocation(program, label);
|
||||
if (location < 0) {
|
||||
throw new RuntimeException("Could not locate uniform '" + label + "' in program");
|
||||
}
|
||||
return location;
|
||||
}
|
||||
|
||||
public void useProgram() {
|
||||
if (program == -1) {
|
||||
throw new RuntimeException("The program has been released");
|
||||
}
|
||||
GLES20.glUseProgram(program);
|
||||
GlUtil.checkNoGLES2Error("glUseProgram");
|
||||
}
|
||||
|
||||
public void release() {
|
||||
Logging.d(TAG, "Deleting shader.");
|
||||
// Delete program, automatically detaching any shaders from it.
|
||||
if (program != -1) {
|
||||
GLES20.glDeleteProgram(program);
|
||||
program = -1;
|
||||
}
|
||||
}
|
||||
}
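A minimal usage sketch of GlShader (editorial illustration, not part of the imported file). It assumes the constructor takes vertex and fragment shader sources, as in upstream webrtc.org, and that a GL context is current on the calling thread; the attribute and uniform names ("in_pos", "tex") and the shader source variables are placeholders.

// Illustrative sketch; assumes a current GL context and a GlShader(vertexSource, fragmentSource) constructor.
GlShader shader = new GlShader(vertexShaderSource, fragmentShaderSource);
shader.useProgram();
// Upload a full-screen quad (two components per vertex) to a hypothetical "in_pos" attribute.
java.nio.FloatBuffer quad = GlUtil.createFloatBuffer(
    new float[] {-1.0f, -1.0f, 1.0f, -1.0f, -1.0f, 1.0f, 1.0f, 1.0f});
shader.setVertexAttribArray("in_pos", /* dimension= */ 2, quad);
// Bind texture unit 0 to a hypothetical "tex" sampler uniform and draw.
GLES20.glUniform1i(shader.getUniformLocation("tex"), 0);
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
shader.release(); // Deletes the program when it is no longer needed.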
|
|
@ -0,0 +1,122 @@
|
|||
/*
|
||||
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.webrtc;
|
||||
|
||||
import android.opengl.GLES20;
|
||||
|
||||
/**
|
||||
* Helper class for handling OpenGL framebuffer with only color attachment and no depth or stencil
|
||||
* buffer. Intended for simple tasks such as texture copy, texture downscaling, and texture color
|
||||
* conversion. This class is not thread safe and must be used by a thread with an active GL context.
|
||||
*/
|
||||
// TODO(magjed): Add unittests for this class.
|
||||
public class GlTextureFrameBuffer {
|
||||
private final int pixelFormat;
|
||||
private int frameBufferId;
|
||||
private int textureId;
|
||||
private int width;
|
||||
private int height;
|
||||
|
||||
/**
|
||||
* Generate texture and framebuffer resources. An EGLContext must be bound on the current thread
|
||||
* when calling this function. The framebuffer is not complete until setSize() is called.
|
||||
*/
|
||||
public GlTextureFrameBuffer(int pixelFormat) {
|
||||
switch (pixelFormat) {
|
||||
case GLES20.GL_LUMINANCE:
|
||||
case GLES20.GL_RGB:
|
||||
case GLES20.GL_RGBA:
|
||||
this.pixelFormat = pixelFormat;
|
||||
break;
|
||||
default:
|
||||
throw new IllegalArgumentException("Invalid pixel format: " + pixelFormat);
|
||||
}
|
||||
this.width = 0;
|
||||
this.height = 0;
|
||||
}
|
||||
|
||||
/**
|
||||
* (Re)allocate texture. Will do nothing if the requested size equals the current size. An
|
||||
* EGLContext must be bound on the current thread when calling this function. Must be called at
|
||||
* least once before using the framebuffer. May be called multiple times to change size.
|
||||
*/
|
||||
public void setSize(int width, int height) {
|
||||
if (width <= 0 || height <= 0) {
|
||||
throw new IllegalArgumentException("Invalid size: " + width + "x" + height);
|
||||
}
|
||||
if (width == this.width && height == this.height) {
|
||||
return;
|
||||
}
|
||||
this.width = width;
|
||||
this.height = height;
|
||||
// Lazily allocate the texture and framebuffer the first time setSize() is called.
|
||||
if (textureId == 0) {
|
||||
textureId = GlUtil.generateTexture(GLES20.GL_TEXTURE_2D);
|
||||
}
|
||||
if (frameBufferId == 0) {
|
||||
final int[] frameBuffers = new int[1];
|
||||
GLES20.glGenFramebuffers(1, frameBuffers, 0);
|
||||
frameBufferId = frameBuffers[0];
|
||||
}
|
||||
|
||||
// Allocate texture.
|
||||
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
|
||||
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId);
|
||||
GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, pixelFormat, width, height, 0, pixelFormat,
|
||||
GLES20.GL_UNSIGNED_BYTE, null);
|
||||
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
|
||||
GlUtil.checkNoGLES2Error("GlTextureFrameBuffer setSize");
|
||||
|
||||
// Attach the texture to the framebuffer as color attachment.
|
||||
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, frameBufferId);
|
||||
GLES20.glFramebufferTexture2D(
|
||||
GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0, GLES20.GL_TEXTURE_2D, textureId, 0);
|
||||
|
||||
// Check that the framebuffer is in a good state.
|
||||
final int status = GLES20.glCheckFramebufferStatus(GLES20.GL_FRAMEBUFFER);
|
||||
if (status != GLES20.GL_FRAMEBUFFER_COMPLETE) {
|
||||
throw new IllegalStateException("Framebuffer not complete, status: " + status);
|
||||
}
|
||||
|
||||
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
|
||||
}
|
||||
|
||||
public int getWidth() {
|
||||
return width;
|
||||
}
|
||||
|
||||
public int getHeight() {
|
||||
return height;
|
||||
}
|
||||
|
||||
/** Gets the OpenGL frame buffer id. This value is only valid after setSize() has been called. */
|
||||
public int getFrameBufferId() {
|
||||
return frameBufferId;
|
||||
}
|
||||
|
||||
/** Gets the OpenGL texture id. This value is only valid after setSize() has been called. */
|
||||
public int getTextureId() {
|
||||
return textureId;
|
||||
}
|
||||
|
||||
/**
|
||||
* Release texture and framebuffer. An EGLContext must be bound on the current thread when calling
|
||||
* this function. This object should not be used after this call.
|
||||
*/
|
||||
public void release() {
|
||||
GLES20.glDeleteTextures(1, new int[] {textureId}, 0);
|
||||
textureId = 0;
|
||||
GLES20.glDeleteFramebuffers(1, new int[] {frameBufferId}, 0);
|
||||
frameBufferId = 0;
|
||||
width = 0;
|
||||
height = 0;
|
||||
}
|
||||
}
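A minimal usage sketch of GlTextureFrameBuffer (editorial illustration, not part of the imported file). It only uses the methods declared above and assumes a GL context is current on the calling thread; the 640x480 size is an arbitrary example.

// Illustrative sketch; requires a current GL context.
GlTextureFrameBuffer textureFrameBuffer = new GlTextureFrameBuffer(GLES20.GL_RGBA);
textureFrameBuffer.setSize(640, 480); // Allocates the texture and completes the framebuffer.
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, textureFrameBuffer.getFrameBufferId());
GLES20.glViewport(0, 0, textureFrameBuffer.getWidth(), textureFrameBuffer.getHeight());
// ... issue draw calls here; the result ends up in textureFrameBuffer.getTextureId() ...
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
textureFrameBuffer.release(); // Frees the texture and framebuffer when done.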
|
|
@ -0,0 +1,66 @@
|
|||
/*
|
||||
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.webrtc;
|
||||
|
||||
import android.opengl.GLES20;
|
||||
|
||||
import java.nio.ByteBuffer;
|
||||
import java.nio.ByteOrder;
|
||||
import java.nio.FloatBuffer;
|
||||
|
||||
/**
|
||||
* Some OpenGL static utility functions.
|
||||
*/
|
||||
public class GlUtil {
|
||||
private GlUtil() {}
|
||||
|
||||
public static class GlOutOfMemoryException extends RuntimeException {
|
||||
public GlOutOfMemoryException(String msg) {
|
||||
super(msg);
|
||||
}
|
||||
}
|
||||
|
||||
// Assert that no OpenGL ES 2.0 error has been raised.
|
||||
public static void checkNoGLES2Error(String msg) {
|
||||
int error = GLES20.glGetError();
|
||||
if (error != GLES20.GL_NO_ERROR) {
|
||||
throw error == GLES20.GL_OUT_OF_MEMORY
|
||||
? new GlOutOfMemoryException(msg)
|
||||
: new RuntimeException(msg + ": GLES20 error: " + error);
|
||||
}
|
||||
}
|
||||
|
||||
public static FloatBuffer createFloatBuffer(float[] coords) {
|
||||
// Allocate a direct ByteBuffer, using 4 bytes per float, and copy coords into it.
|
||||
ByteBuffer bb = ByteBuffer.allocateDirect(coords.length * 4);
|
||||
bb.order(ByteOrder.nativeOrder());
|
||||
FloatBuffer fb = bb.asFloatBuffer();
|
||||
fb.put(coords);
|
||||
fb.position(0);
|
||||
return fb;
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate texture with standard parameters.
|
||||
*/
|
||||
public static int generateTexture(int target) {
|
||||
final int[] textureArray = new int[1];
|
||||
GLES20.glGenTextures(1, textureArray, 0);
|
||||
final int textureId = textureArray[0];
|
||||
GLES20.glBindTexture(target, textureId);
|
||||
GLES20.glTexParameterf(target, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
|
||||
GLES20.glTexParameterf(target, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
|
||||
GLES20.glTexParameterf(target, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
|
||||
GLES20.glTexParameterf(target, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
|
||||
checkNoGLES2Error("generateTexture");
|
||||
return textureId;
|
||||
}
|
||||
}
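A short usage sketch of the GlUtil helpers above (editorial illustration, not part of the imported file). It assumes a GL context is current; the texture coordinate values are arbitrary.

// Illustrative sketch; requires a current GL context for generateTexture().
java.nio.FloatBuffer texCoords =
    GlUtil.createFloatBuffer(new float[] {0f, 0f, 1f, 0f, 0f, 1f, 1f, 1f});
int rgbTextureId = GlUtil.generateTexture(GLES20.GL_TEXTURE_2D);
GlUtil.checkNoGLES2Error("texture setup"); // Throws if any GLES20 call above failed.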
|
|
@ -0,0 +1,200 @@
|
|||
/*
|
||||
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.webrtc;
|
||||
|
||||
import android.support.annotation.Nullable;
|
||||
import java.nio.ByteBuffer;
|
||||
import org.webrtc.VideoFrame.I420Buffer;
|
||||
|
||||
/** Implementation of VideoFrame.I420Buffer backed by Java direct byte buffers. */
|
||||
public class JavaI420Buffer implements VideoFrame.I420Buffer {
|
||||
private final int width;
|
||||
private final int height;
|
||||
private final ByteBuffer dataY;
|
||||
private final ByteBuffer dataU;
|
||||
private final ByteBuffer dataV;
|
||||
private final int strideY;
|
||||
private final int strideU;
|
||||
private final int strideV;
|
||||
private final RefCountDelegate refCountDelegate;
|
||||
|
||||
private JavaI420Buffer(int width, int height, ByteBuffer dataY, int strideY, ByteBuffer dataU,
|
||||
int strideU, ByteBuffer dataV, int strideV, @Nullable Runnable releaseCallback) {
|
||||
this.width = width;
|
||||
this.height = height;
|
||||
this.dataY = dataY;
|
||||
this.dataU = dataU;
|
||||
this.dataV = dataV;
|
||||
this.strideY = strideY;
|
||||
this.strideU = strideU;
|
||||
this.strideV = strideV;
|
||||
this.refCountDelegate = new RefCountDelegate(releaseCallback);
|
||||
}
|
||||
|
||||
private static void checkCapacity(ByteBuffer data, int width, int height, int stride) {
|
||||
// The last row does not necessarily need padding.
|
||||
final int minCapacity = stride * (height - 1) + width;
|
||||
if (data.capacity() < minCapacity) {
|
||||
throw new IllegalArgumentException(
|
||||
"Buffer must be at least " + minCapacity + " bytes, but was " + data.capacity());
|
||||
}
|
||||
}
|
||||
|
||||
/** Wraps existing ByteBuffers into JavaI420Buffer object without copying the contents. */
|
||||
public static JavaI420Buffer wrap(int width, int height, ByteBuffer dataY, int strideY,
|
||||
ByteBuffer dataU, int strideU, ByteBuffer dataV, int strideV,
|
||||
@Nullable Runnable releaseCallback) {
|
||||
if (dataY == null || dataU == null || dataV == null) {
|
||||
throw new IllegalArgumentException("Data buffers cannot be null.");
|
||||
}
|
||||
if (!dataY.isDirect() || !dataU.isDirect() || !dataV.isDirect()) {
|
||||
throw new IllegalArgumentException("Data buffers must be direct byte buffers.");
|
||||
}
|
||||
|
||||
// Slice the buffers to prevent external modifications to the position / limit of the buffer.
|
||||
// Note that this doesn't protect the contents of the buffers from modifications.
|
||||
dataY = dataY.slice();
|
||||
dataU = dataU.slice();
|
||||
dataV = dataV.slice();
|
||||
|
||||
final int chromaWidth = (width + 1) / 2;
|
||||
final int chromaHeight = (height + 1) / 2;
|
||||
checkCapacity(dataY, width, height, strideY);
|
||||
checkCapacity(dataU, chromaWidth, chromaHeight, strideU);
|
||||
checkCapacity(dataV, chromaWidth, chromaHeight, strideV);
|
||||
|
||||
return new JavaI420Buffer(
|
||||
width, height, dataY, strideY, dataU, strideU, dataV, strideV, releaseCallback);
|
||||
}
|
||||
|
||||
/** Allocates an empty I420Buffer suitable for an image of the given dimensions. */
|
||||
public static JavaI420Buffer allocate(int width, int height) {
|
||||
int chromaHeight = (height + 1) / 2;
|
||||
int strideUV = (width + 1) / 2;
|
||||
int yPos = 0;
|
||||
int uPos = yPos + width * height;
|
||||
int vPos = uPos + strideUV * chromaHeight;
|
||||
|
||||
ByteBuffer buffer =
|
||||
JniCommon.nativeAllocateByteBuffer(width * height + 2 * strideUV * chromaHeight);
|
||||
|
||||
buffer.position(yPos);
|
||||
buffer.limit(uPos);
|
||||
ByteBuffer dataY = buffer.slice();
|
||||
|
||||
buffer.position(uPos);
|
||||
buffer.limit(vPos);
|
||||
ByteBuffer dataU = buffer.slice();
|
||||
|
||||
buffer.position(vPos);
|
||||
buffer.limit(vPos + strideUV * chromaHeight);
|
||||
ByteBuffer dataV = buffer.slice();
|
||||
|
||||
return new JavaI420Buffer(width, height, dataY, width, dataU, strideUV, dataV, strideUV,
|
||||
() -> { JniCommon.nativeFreeByteBuffer(buffer); });
|
||||
}
|
||||
|
||||
@Override
|
||||
public int getWidth() {
|
||||
return width;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int getHeight() {
|
||||
return height;
|
||||
}
|
||||
|
||||
@Override
|
||||
public ByteBuffer getDataY() {
|
||||
// Return a slice to prevent relative reads from changing the position.
|
||||
return dataY.slice();
|
||||
}
|
||||
|
||||
@Override
|
||||
public ByteBuffer getDataU() {
|
||||
// Return a slice to prevent relative reads from changing the position.
|
||||
return dataU.slice();
|
||||
}
|
||||
|
||||
@Override
|
||||
public ByteBuffer getDataV() {
|
||||
// Return a slice to prevent relative reads from changing the position.
|
||||
return dataV.slice();
|
||||
}
|
||||
|
||||
@Override
|
||||
public int getStrideY() {
|
||||
return strideY;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int getStrideU() {
|
||||
return strideU;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int getStrideV() {
|
||||
return strideV;
|
||||
}
|
||||
|
||||
@Override
|
||||
public I420Buffer toI420() {
|
||||
retain();
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void retain() {
|
||||
refCountDelegate.retain();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void release() {
|
||||
refCountDelegate.release();
|
||||
}
|
||||
|
||||
@Override
|
||||
public VideoFrame.Buffer cropAndScale(
|
||||
int cropX, int cropY, int cropWidth, int cropHeight, int scaleWidth, int scaleHeight) {
|
||||
return cropAndScaleI420(this, cropX, cropY, cropWidth, cropHeight, scaleWidth, scaleHeight);
|
||||
}
|
||||
|
||||
public static VideoFrame.Buffer cropAndScaleI420(final I420Buffer buffer, int cropX, int cropY,
|
||||
int cropWidth, int cropHeight, int scaleWidth, int scaleHeight) {
|
||||
if (cropWidth == scaleWidth && cropHeight == scaleHeight) {
|
||||
// No scaling.
|
||||
ByteBuffer dataY = buffer.getDataY();
|
||||
ByteBuffer dataU = buffer.getDataU();
|
||||
ByteBuffer dataV = buffer.getDataV();
|
||||
|
||||
dataY.position(cropX + cropY * buffer.getStrideY());
|
||||
dataU.position(cropX / 2 + cropY / 2 * buffer.getStrideU());
|
||||
dataV.position(cropX / 2 + cropY / 2 * buffer.getStrideV());
|
||||
|
||||
buffer.retain();
|
||||
return JavaI420Buffer.wrap(scaleWidth, scaleHeight, dataY.slice(), buffer.getStrideY(),
|
||||
dataU.slice(), buffer.getStrideU(), dataV.slice(), buffer.getStrideV(), buffer::release);
|
||||
}
|
||||
|
||||
JavaI420Buffer newBuffer = JavaI420Buffer.allocate(scaleWidth, scaleHeight);
|
||||
nativeCropAndScaleI420(buffer.getDataY(), buffer.getStrideY(), buffer.getDataU(),
|
||||
buffer.getStrideU(), buffer.getDataV(), buffer.getStrideV(), cropX, cropY, cropWidth,
|
||||
cropHeight, newBuffer.getDataY(), newBuffer.getStrideY(), newBuffer.getDataU(),
|
||||
newBuffer.getStrideU(), newBuffer.getDataV(), newBuffer.getStrideV(), scaleWidth,
|
||||
scaleHeight);
|
||||
return newBuffer;
|
||||
}
|
||||
|
||||
private static native void nativeCropAndScaleI420(ByteBuffer srcY, int srcStrideY,
|
||||
ByteBuffer srcU, int srcStrideU, ByteBuffer srcV, int srcStrideV, int cropX, int cropY,
|
||||
int cropWidth, int cropHeight, ByteBuffer dstY, int dstStrideY, ByteBuffer dstU,
|
||||
int dstStrideU, ByteBuffer dstV, int dstStrideV, int scaleWidth, int scaleHeight);
|
||||
}
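A minimal usage sketch of JavaI420Buffer (editorial illustration, not part of the imported file). It allocates a buffer, fills the Y plane with a constant value, and releases it; note that the scaling path of cropAndScale() calls into native code, so it needs the WebRTC native library loaded.

// Illustrative sketch; the 320x240 size and the mid-gray fill value are arbitrary.
JavaI420Buffer i420 = JavaI420Buffer.allocate(320, 240);
java.nio.ByteBuffer y = i420.getDataY();
for (int row = 0; row < i420.getHeight(); row++) {
  for (int col = 0; col < i420.getWidth(); col++) {
    y.put(row * i420.getStrideY() + col, (byte) 128); // Absolute put; position is unchanged.
  }
}
i420.release(); // Frees the native byte buffer via the release callback.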
|
|
@ -0,0 +1,28 @@
|
|||
/*
|
||||
* Copyright 2018 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.webrtc;
|
||||
|
||||
/**
|
||||
* Interface for ref counted objects in WebRTC. These objects have significant resources that need
|
||||
 * to be freed when they are no longer in use. Each object starts with a ref count of one when
|
||||
 * created. If a reference is passed as a parameter to a method, the caller has ownership of the
|
||||
* object by default - calling release is not necessary unless retain is called.
|
||||
*/
|
||||
public interface RefCounted {
|
||||
/** Increases ref count by one. */
|
||||
@CalledByNative void retain();
|
||||
|
||||
/**
|
||||
* Decreases ref count by one. When the ref count reaches zero, resources related to the object
|
||||
* will be freed.
|
||||
*/
|
||||
@CalledByNative void release();
|
||||
}
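An illustrative sketch of the ownership rule described in the interface comment (not part of the imported file): a callee that only uses a reference during the call does nothing, while a callee that keeps the reference must retain() it and later release() it. The field and method names here (lastBuffer, storeBuffer) are hypothetical.

private VideoFrame.Buffer lastBuffer;

void storeBuffer(VideoFrame.Buffer buffer) {
  if (lastBuffer != null) {
    lastBuffer.release(); // Drop our reference to the previously stored buffer.
  }
  buffer.retain(); // Keep the new buffer alive beyond this call.
  lastBuffer = buffer;
}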
|
|
@ -0,0 +1,259 @@
|
|||
/*
|
||||
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.webrtc;
|
||||
|
||||
import android.graphics.Point;
|
||||
import android.opengl.Matrix;
|
||||
import android.view.View;
|
||||
|
||||
/**
|
||||
* Static helper functions for renderer implementations.
|
||||
*/
|
||||
public class RendererCommon {
|
||||
/** Interface for reporting rendering events. */
|
||||
public static interface RendererEvents {
|
||||
/**
|
||||
* Callback fired once first frame is rendered.
|
||||
*/
|
||||
public void onFirstFrameRendered();
|
||||
|
||||
/**
|
||||
* Callback fired when rendered frame resolution or rotation has changed.
|
||||
*/
|
||||
public void onFrameResolutionChanged(int videoWidth, int videoHeight, int rotation);
|
||||
}
|
||||
|
||||
/**
|
||||
* Interface for rendering frames on an EGLSurface with specified viewport location. Rotation,
|
||||
* mirror, and cropping is specified using a 4x4 texture coordinate transform matrix. The frame
|
||||
* input can either be an OES texture, RGB texture, or YUV textures in I420 format. The function
|
||||
* release() must be called manually to free the resources held by this object.
|
||||
*/
|
||||
public static interface GlDrawer {
|
||||
/**
|
||||
* Functions for drawing frames with different sources. The rendering surface target is
|
||||
* implied by the current EGL context of the calling thread and requires no explicit argument.
|
||||
* The coordinates specify the viewport location on the surface target.
|
||||
*/
|
||||
void drawOes(int oesTextureId, float[] texMatrix, int frameWidth, int frameHeight,
|
||||
int viewportX, int viewportY, int viewportWidth, int viewportHeight);
|
||||
void drawRgb(int textureId, float[] texMatrix, int frameWidth, int frameHeight, int viewportX,
|
||||
int viewportY, int viewportWidth, int viewportHeight);
|
||||
void drawYuv(int[] yuvTextures, float[] texMatrix, int frameWidth, int frameHeight,
|
||||
int viewportX, int viewportY, int viewportWidth, int viewportHeight);
|
||||
|
||||
/**
|
||||
* Release all GL resources. This needs to be done manually, otherwise resources may leak.
|
||||
*/
|
||||
void release();
|
||||
}
|
||||
|
||||
/**
|
||||
* Helper class for determining layout size based on layout requirements, scaling type, and video
|
||||
* aspect ratio.
|
||||
*/
|
||||
public static class VideoLayoutMeasure {
|
||||
// The scaling type determines how the video will fill the allowed layout area in measure(). It
|
||||
// can be specified separately for the case when video has matched orientation with layout size
|
||||
// and when there is an orientation mismatch.
|
||||
private float visibleFractionMatchOrientation =
|
||||
convertScalingTypeToVisibleFraction(ScalingType.SCALE_ASPECT_BALANCED);
|
||||
private float visibleFractionMismatchOrientation =
|
||||
convertScalingTypeToVisibleFraction(ScalingType.SCALE_ASPECT_BALANCED);
|
||||
|
||||
public void setScalingType(ScalingType scalingType) {
|
||||
setScalingType(/* scalingTypeMatchOrientation= */ scalingType,
|
||||
/* scalingTypeMismatchOrientation= */ scalingType);
|
||||
}
|
||||
|
||||
public void setScalingType(
|
||||
ScalingType scalingTypeMatchOrientation, ScalingType scalingTypeMismatchOrientation) {
|
||||
this.visibleFractionMatchOrientation =
|
||||
convertScalingTypeToVisibleFraction(scalingTypeMatchOrientation);
|
||||
this.visibleFractionMismatchOrientation =
|
||||
convertScalingTypeToVisibleFraction(scalingTypeMismatchOrientation);
|
||||
}
|
||||
|
||||
public void setVisibleFraction(
|
||||
float visibleFractionMatchOrientation, float visibleFractionMismatchOrientation) {
|
||||
this.visibleFractionMatchOrientation = visibleFractionMatchOrientation;
|
||||
this.visibleFractionMismatchOrientation = visibleFractionMismatchOrientation;
|
||||
}
|
||||
|
||||
public Point measure(int widthSpec, int heightSpec, int frameWidth, int frameHeight) {
|
||||
// Calculate max allowed layout size.
|
||||
final int maxWidth = View.getDefaultSize(Integer.MAX_VALUE, widthSpec);
|
||||
final int maxHeight = View.getDefaultSize(Integer.MAX_VALUE, heightSpec);
|
||||
if (frameWidth == 0 || frameHeight == 0 || maxWidth == 0 || maxHeight == 0) {
|
||||
return new Point(maxWidth, maxHeight);
|
||||
}
|
||||
// Calculate desired display size based on scaling type, video aspect ratio,
|
||||
// and maximum layout size.
|
||||
final float frameAspect = frameWidth / (float) frameHeight;
|
||||
final float displayAspect = maxWidth / (float) maxHeight;
|
||||
final float visibleFraction = (frameAspect > 1.0f) == (displayAspect > 1.0f)
|
||||
? visibleFractionMatchOrientation
|
||||
: visibleFractionMismatchOrientation;
|
||||
final Point layoutSize = getDisplaySize(visibleFraction, frameAspect, maxWidth, maxHeight);
|
||||
|
||||
// If the measure specification is forcing a specific size - yield.
|
||||
if (View.MeasureSpec.getMode(widthSpec) == View.MeasureSpec.EXACTLY) {
|
||||
layoutSize.x = maxWidth;
|
||||
}
|
||||
if (View.MeasureSpec.getMode(heightSpec) == View.MeasureSpec.EXACTLY) {
|
||||
layoutSize.y = maxHeight;
|
||||
}
|
||||
return layoutSize;
|
||||
}
|
||||
}
|
||||
|
||||
// Types of video scaling:
|
||||
// SCALE_ASPECT_FIT - video frame is scaled to fit the size of the view by
|
||||
// maintaining the aspect ratio (black borders may be displayed).
|
||||
// SCALE_ASPECT_FILL - video frame is scaled to fill the size of the view by
|
||||
// maintaining the aspect ratio. Some portion of the video frame may be
|
||||
// clipped.
|
||||
// SCALE_ASPECT_BALANCED - Compromise between FIT and FILL. Video frame will fill as much as
|
||||
// possible of the view while maintaining aspect ratio, under the constraint that at least
|
||||
// |BALANCED_VISIBLE_FRACTION| of the frame content will be shown.
|
||||
public static enum ScalingType { SCALE_ASPECT_FIT, SCALE_ASPECT_FILL, SCALE_ASPECT_BALANCED }
|
||||
// The minimum fraction of the frame content that will be shown for |SCALE_ASPECT_BALANCED|.
|
||||
// This limits excessive cropping when adjusting display size.
|
||||
private static float BALANCED_VISIBLE_FRACTION = 0.5625f;
|
||||
|
||||
/**
|
||||
* Returns layout transformation matrix that applies an optional mirror effect and compensates
|
||||
* for video vs display aspect ratio.
|
||||
*/
|
||||
public static float[] getLayoutMatrix(
|
||||
boolean mirror, float videoAspectRatio, float displayAspectRatio) {
|
||||
float scaleX = 1;
|
||||
float scaleY = 1;
|
||||
// Scale X or Y dimension so that video and display size have same aspect ratio.
|
||||
if (displayAspectRatio > videoAspectRatio) {
|
||||
scaleY = videoAspectRatio / displayAspectRatio;
|
||||
} else {
|
||||
scaleX = displayAspectRatio / videoAspectRatio;
|
||||
}
|
||||
// Apply optional horizontal flip.
|
||||
if (mirror) {
|
||||
scaleX *= -1;
|
||||
}
|
||||
final float[] matrix = new float[16];
|
||||
Matrix.setIdentityM(matrix, 0);
|
||||
Matrix.scaleM(matrix, 0, scaleX, scaleY, 1);
|
||||
adjustOrigin(matrix);
|
||||
return matrix;
|
||||
}
|
||||
|
||||
/** Converts a float[16] matrix array to android.graphics.Matrix. */
|
||||
public static android.graphics.Matrix convertMatrixToAndroidGraphicsMatrix(float[] matrix4x4) {
|
||||
// clang-format off
|
||||
float[] values = {
|
||||
matrix4x4[0 * 4 + 0], matrix4x4[1 * 4 + 0], matrix4x4[3 * 4 + 0],
|
||||
matrix4x4[0 * 4 + 1], matrix4x4[1 * 4 + 1], matrix4x4[3 * 4 + 1],
|
||||
matrix4x4[0 * 4 + 3], matrix4x4[1 * 4 + 3], matrix4x4[3 * 4 + 3],
|
||||
};
|
||||
// clang-format on
|
||||
|
||||
android.graphics.Matrix matrix = new android.graphics.Matrix();
|
||||
matrix.setValues(values);
|
||||
return matrix;
|
||||
}
|
||||
|
||||
/** Converts android.graphics.Matrix to a float[16] matrix array. */
|
||||
public static float[] convertMatrixFromAndroidGraphicsMatrix(android.graphics.Matrix matrix) {
|
||||
float[] values = new float[9];
|
||||
matrix.getValues(values);
|
||||
|
||||
// The android.graphics.Matrix looks like this:
|
||||
// [x1 y1 w1]
|
||||
// [x2 y2 w2]
|
||||
// [x3 y3 w3]
|
||||
// We want to construct a matrix that looks like this:
|
||||
// [x1 y1 0 w1]
|
||||
// [x2 y2 0 w2]
|
||||
// [ 0 0 1 0]
|
||||
// [x3 y3 0 w3]
|
||||
// Since it is stored in column-major order, it looks like this:
|
||||
// [x1 x2 0 x3
|
||||
// y1 y2 0 y3
|
||||
// 0 0 1 0
|
||||
// w1 w2 0 w3]
|
||||
// clang-format off
|
||||
float[] matrix4x4 = {
|
||||
values[0 * 3 + 0], values[1 * 3 + 0], 0, values[2 * 3 + 0],
|
||||
values[0 * 3 + 1], values[1 * 3 + 1], 0, values[2 * 3 + 1],
|
||||
0, 0, 1, 0,
|
||||
values[0 * 3 + 2], values[1 * 3 + 2], 0, values[2 * 3 + 2],
|
||||
};
|
||||
// clang-format on
|
||||
return matrix4x4;
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate display size based on scaling type, video aspect ratio, and maximum display size.
|
||||
*/
|
||||
public static Point getDisplaySize(
|
||||
ScalingType scalingType, float videoAspectRatio, int maxDisplayWidth, int maxDisplayHeight) {
|
||||
return getDisplaySize(convertScalingTypeToVisibleFraction(scalingType), videoAspectRatio,
|
||||
maxDisplayWidth, maxDisplayHeight);
|
||||
}
|
||||
|
||||
/**
|
||||
* Move |matrix| transformation origin to (0.5, 0.5). This is the origin for texture coordinates
|
||||
* that are in the range 0 to 1.
|
||||
*/
|
||||
private static void adjustOrigin(float[] matrix) {
|
||||
// Note that OpenGL is using column-major order.
|
||||
// Pre translate with -0.5 to move coordinates to range [-0.5, 0.5].
|
||||
matrix[12] -= 0.5f * (matrix[0] + matrix[4]);
|
||||
matrix[13] -= 0.5f * (matrix[1] + matrix[5]);
|
||||
// Post translate with 0.5 to move coordinates to range [0, 1].
|
||||
matrix[12] += 0.5f;
|
||||
matrix[13] += 0.5f;
|
||||
}
|
||||
|
||||
/**
|
||||
* Each scaling type has a one-to-one correspondence to a numeric minimum fraction of the video
|
||||
* that must remain visible.
|
||||
*/
|
||||
private static float convertScalingTypeToVisibleFraction(ScalingType scalingType) {
|
||||
switch (scalingType) {
|
||||
case SCALE_ASPECT_FIT:
|
||||
return 1.0f;
|
||||
case SCALE_ASPECT_FILL:
|
||||
return 0.0f;
|
||||
case SCALE_ASPECT_BALANCED:
|
||||
return BALANCED_VISIBLE_FRACTION;
|
||||
default:
|
||||
throw new IllegalArgumentException();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate display size based on minimum fraction of the video that must remain visible,
|
||||
* video aspect ratio, and maximum display size.
|
||||
*/
|
||||
public static Point getDisplaySize(
|
||||
float minVisibleFraction, float videoAspectRatio, int maxDisplayWidth, int maxDisplayHeight) {
|
||||
// If there is no constraint on the amount of cropping, fill the allowed display area.
|
||||
if (minVisibleFraction == 0 || videoAspectRatio == 0) {
|
||||
return new Point(maxDisplayWidth, maxDisplayHeight);
|
||||
}
|
||||
// Each dimension is constrained on max display size and how much we are allowed to crop.
|
||||
final int width = Math.min(
|
||||
maxDisplayWidth, Math.round(maxDisplayHeight / minVisibleFraction * videoAspectRatio));
|
||||
final int height = Math.min(
|
||||
maxDisplayHeight, Math.round(maxDisplayWidth / minVisibleFraction / videoAspectRatio));
|
||||
return new Point(width, height);
|
||||
}
|
||||
}
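A minimal usage sketch of RendererCommon.VideoLayoutMeasure inside a custom video View (editorial illustration, not part of the imported file). frameWidth and frameHeight are assumed to be fields updated from onFrameResolutionChanged(); the scaling type would normally be configured once rather than per measure pass.

private final RendererCommon.VideoLayoutMeasure videoLayoutMeasure =
    new RendererCommon.VideoLayoutMeasure();

@Override
protected void onMeasure(int widthSpec, int heightSpec) {
  // Fit the whole frame inside the layout bounds; black borders may be shown.
  videoLayoutMeasure.setScalingType(RendererCommon.ScalingType.SCALE_ASPECT_FIT);
  Point size = videoLayoutMeasure.measure(widthSpec, heightSpec, frameWidth, frameHeight);
  setMeasuredDimension(size.x, size.y);
}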
|
|
@ -0,0 +1,389 @@
|
|||
/*
|
||||
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.webrtc;
|
||||
|
||||
import android.annotation.TargetApi;
|
||||
import android.graphics.SurfaceTexture;
|
||||
import android.opengl.GLES11Ext;
|
||||
import android.opengl.GLES20;
|
||||
import android.os.Build;
|
||||
import android.os.Handler;
|
||||
import android.os.HandlerThread;
|
||||
import android.support.annotation.Nullable;
|
||||
import java.util.concurrent.Callable;
|
||||
import org.webrtc.EglBase.Context;
|
||||
import org.webrtc.TextureBufferImpl.RefCountMonitor;
|
||||
import org.webrtc.VideoFrame.TextureBuffer;
|
||||
|
||||
/**
|
||||
* Helper class for using a SurfaceTexture to create WebRTC VideoFrames. In order to create WebRTC
|
||||
* VideoFrames, render onto the SurfaceTexture. The frames will be delivered to the listener. Only
|
||||
* one texture frame can be in flight at once, so the frame must be released in order to receive a
|
||||
 * new frame. Call stopListening() to stop receiving new frames. Call dispose() to release all
|
||||
* resources once the texture frame is released.
|
||||
*/
|
||||
public class SurfaceTextureHelper {
|
||||
/**
|
||||
* Interface for monitoring texture buffers created from this SurfaceTexture. Since only one
|
||||
* texture buffer can exist at a time, this can be used to monitor for stuck frames.
|
||||
*/
|
||||
public interface FrameRefMonitor {
|
||||
/** A new frame was created. New frames start with ref count of 1. */
|
||||
void onNewBuffer(TextureBuffer textureBuffer);
|
||||
/** Ref count of the frame was incremented by the calling thread. */
|
||||
void onRetainBuffer(TextureBuffer textureBuffer);
|
||||
/** Ref count of the frame was decremented by the calling thread. */
|
||||
void onReleaseBuffer(TextureBuffer textureBuffer);
|
||||
/** Frame was destroyed (ref count reached 0). */
|
||||
void onDestroyBuffer(TextureBuffer textureBuffer);
|
||||
}
|
||||
|
||||
private static final String TAG = "SurfaceTextureHelper";
|
||||
/**
|
||||
* Construct a new SurfaceTextureHelper sharing OpenGL resources with |sharedContext|. A dedicated
|
||||
 * thread and handler are created for handling the SurfaceTexture. May return null if EGL fails to
|
||||
* initialize a pixel buffer surface and make it current. If alignTimestamps is true, the frame
|
||||
* timestamps will be aligned to rtc::TimeNanos(). If frame timestamps are aligned to
|
||||
* rtc::TimeNanos() there is no need for aligning timestamps again in
|
||||
* PeerConnectionFactory.createVideoSource(). This makes the timestamps more accurate and
|
||||
* closer to actual creation time.
|
||||
*/
|
||||
public static SurfaceTextureHelper create(final String threadName,
|
||||
final EglBase.Context sharedContext, boolean alignTimestamps, final YuvConverter yuvConverter,
|
||||
FrameRefMonitor frameRefMonitor) {
|
||||
final HandlerThread thread = new HandlerThread(threadName);
|
||||
thread.start();
|
||||
final Handler handler = new Handler(thread.getLooper());
|
||||
|
||||
// The onFrameAvailable() callback will be executed on the SurfaceTexture ctor thread. See:
|
||||
// http://grepcode.com/file/repository.grepcode.com/java/ext/com.google.android/android/5.1.1_r1/android/graphics/SurfaceTexture.java#195.
|
||||
// Therefore, in order to control the callback thread on API lvl < 21, the SurfaceTextureHelper
|
||||
// is constructed on the |handler| thread.
|
||||
return ThreadUtils.invokeAtFrontUninterruptibly(handler, new Callable<SurfaceTextureHelper>() {
|
||||
@Nullable
|
||||
@Override
|
||||
public SurfaceTextureHelper call() {
|
||||
try {
|
||||
return new SurfaceTextureHelper(
|
||||
sharedContext, handler, alignTimestamps, yuvConverter, frameRefMonitor);
|
||||
} catch (RuntimeException e) {
|
||||
Logging.e(TAG, threadName + " create failure", e);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Same as above with alignTimestamps set to false and yuvConverter set to new YuvConverter.
|
||||
*
|
||||
* @see #create(String, EglBase.Context, boolean, YuvConverter, FrameRefMonitor)
|
||||
*/
|
||||
public static SurfaceTextureHelper create(
|
||||
final String threadName, final EglBase.Context sharedContext) {
|
||||
return create(threadName, sharedContext, /* alignTimestamps= */ false, new YuvConverter(),
|
||||
/*frameRefMonitor=*/null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Same as above with yuvConverter set to new YuvConverter.
|
||||
*
|
||||
* @see #create(String, EglBase.Context, boolean, YuvConverter, FrameRefMonitor)
|
||||
*/
|
||||
public static SurfaceTextureHelper create(
|
||||
final String threadName, final EglBase.Context sharedContext, boolean alignTimestamps) {
|
||||
return create(
|
||||
threadName, sharedContext, alignTimestamps, new YuvConverter(), /*frameRefMonitor=*/null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a SurfaceTextureHelper without frame ref monitor.
|
||||
*
|
||||
* @see #create(String, EglBase.Context, boolean, YuvConverter, FrameRefMonitor)
|
||||
*/
|
||||
public static SurfaceTextureHelper create(final String threadName,
|
||||
final EglBase.Context sharedContext, boolean alignTimestamps, YuvConverter yuvConverter) {
|
||||
return create(
|
||||
threadName, sharedContext, alignTimestamps, yuvConverter, /*frameRefMonitor=*/null);
|
||||
}
|
||||
|
||||
private final RefCountMonitor textureRefCountMonitor = new RefCountMonitor() {
|
||||
@Override
|
||||
public void onRetain(TextureBufferImpl textureBuffer) {
|
||||
if (frameRefMonitor != null) {
|
||||
frameRefMonitor.onRetainBuffer(textureBuffer);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onRelease(TextureBufferImpl textureBuffer) {
|
||||
if (frameRefMonitor != null) {
|
||||
frameRefMonitor.onReleaseBuffer(textureBuffer);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onDestroy(TextureBufferImpl textureBuffer) {
|
||||
returnTextureFrame();
|
||||
if (frameRefMonitor != null) {
|
||||
frameRefMonitor.onDestroyBuffer(textureBuffer);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
private final Handler handler;
|
||||
private final EglBase eglBase;
|
||||
private final SurfaceTexture surfaceTexture;
|
||||
private final int oesTextureId;
|
||||
private final YuvConverter yuvConverter;
|
||||
@Nullable private final TimestampAligner timestampAligner;
|
||||
private final FrameRefMonitor frameRefMonitor;
|
||||
|
||||
// These variables are only accessed from the |handler| thread.
|
||||
@Nullable private VideoSink listener;
|
||||
// The possible states of this class.
|
||||
private boolean hasPendingTexture;
|
||||
private volatile boolean isTextureInUse;
|
||||
private boolean isQuitting;
|
||||
private int frameRotation;
|
||||
private int textureWidth;
|
||||
private int textureHeight;
|
||||
// |pendingListener| is set in setListener() and the runnable is posted to the handler thread.
|
||||
// setListener() is not allowed to be called again before stopListening(), so this is thread safe.
|
||||
@Nullable private VideoSink pendingListener;
|
||||
final Runnable setListenerRunnable = new Runnable() {
|
||||
@Override
|
||||
public void run() {
|
||||
Logging.d(TAG, "Setting listener to " + pendingListener);
|
||||
listener = pendingListener;
|
||||
pendingListener = null;
|
||||
// May have a pending frame from the previous capture session - drop it.
|
||||
if (hasPendingTexture) {
|
||||
// Calling updateTexImage() is necessary in order to receive new frames.
|
||||
updateTexImage();
|
||||
hasPendingTexture = false;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
private SurfaceTextureHelper(Context sharedContext, Handler handler, boolean alignTimestamps,
|
||||
YuvConverter yuvConverter, FrameRefMonitor frameRefMonitor) {
|
||||
if (handler.getLooper().getThread() != Thread.currentThread()) {
|
||||
throw new IllegalStateException("SurfaceTextureHelper must be created on the handler thread");
|
||||
}
|
||||
this.handler = handler;
|
||||
this.timestampAligner = alignTimestamps ? new TimestampAligner() : null;
|
||||
this.yuvConverter = yuvConverter;
|
||||
this.frameRefMonitor = frameRefMonitor;
|
||||
|
||||
eglBase = EglBase.create(sharedContext, EglBase.CONFIG_PIXEL_BUFFER);
|
||||
try {
|
||||
// Both these statements have been observed to fail on rare occasions, see BUG=webrtc:5682.
|
||||
eglBase.createDummyPbufferSurface();
|
||||
eglBase.makeCurrent();
|
||||
} catch (RuntimeException e) {
|
||||
// Clean up before rethrowing the exception.
|
||||
eglBase.release();
|
||||
handler.getLooper().quit();
|
||||
throw e;
|
||||
}
|
||||
|
||||
oesTextureId = GlUtil.generateTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
|
||||
surfaceTexture = new SurfaceTexture(oesTextureId);
|
||||
setOnFrameAvailableListener(surfaceTexture, (SurfaceTexture st) -> {
|
||||
hasPendingTexture = true;
|
||||
tryDeliverTextureFrame();
|
||||
}, handler);
|
||||
}
|
||||
|
||||
@TargetApi(21)
|
||||
private static void setOnFrameAvailableListener(SurfaceTexture surfaceTexture,
|
||||
SurfaceTexture.OnFrameAvailableListener listener, Handler handler) {
|
||||
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
|
||||
surfaceTexture.setOnFrameAvailableListener(listener, handler);
|
||||
} else {
|
||||
// The documentation states that the listener will be called on an arbitrary thread, but in
|
||||
// practice, it is always the thread on which the SurfaceTexture was constructed. There are
|
||||
// assertions in place in case this ever changes. For API >= 21, we use the new API to
|
||||
// explicitly specify the handler.
|
||||
surfaceTexture.setOnFrameAvailableListener(listener);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Start to stream textures to the given |listener|. If you need to change listener, you need to
|
||||
* call stopListening() first.
|
||||
*/
|
||||
public void startListening(final VideoSink listener) {
|
||||
if (this.listener != null || this.pendingListener != null) {
|
||||
throw new IllegalStateException("SurfaceTextureHelper listener has already been set.");
|
||||
}
|
||||
this.pendingListener = listener;
|
||||
handler.post(setListenerRunnable);
|
||||
}
|
||||
|
||||
/**
|
||||
 * Stop listening. The listener set in startListening() is guaranteed to not receive any more
|
||||
* onFrame() callbacks after this function returns.
|
||||
*/
|
||||
public void stopListening() {
|
||||
Logging.d(TAG, "stopListening()");
|
||||
handler.removeCallbacks(setListenerRunnable);
|
||||
ThreadUtils.invokeAtFrontUninterruptibly(handler, () -> {
|
||||
listener = null;
|
||||
pendingListener = null;
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Use this function to set the texture size. Note, do not call setDefaultBufferSize() yourself
|
||||
* since this class needs to be aware of the texture size.
|
||||
*/
|
||||
public void setTextureSize(int textureWidth, int textureHeight) {
|
||||
if (textureWidth <= 0) {
|
||||
throw new IllegalArgumentException("Texture width must be positive, but was " + textureWidth);
|
||||
}
|
||||
if (textureHeight <= 0) {
|
||||
throw new IllegalArgumentException(
|
||||
"Texture height must be positive, but was " + textureHeight);
|
||||
}
|
||||
surfaceTexture.setDefaultBufferSize(textureWidth, textureHeight);
|
||||
handler.post(() -> {
|
||||
this.textureWidth = textureWidth;
|
||||
this.textureHeight = textureHeight;
|
||||
tryDeliverTextureFrame();
|
||||
});
|
||||
}
|
||||
|
||||
/** Set the rotation of the delivered frames. */
|
||||
public void setFrameRotation(int rotation) {
|
||||
handler.post(() -> this.frameRotation = rotation);
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieve the underlying SurfaceTexture. The SurfaceTexture should be passed in to a video
|
||||
* producer such as a camera or decoder.
|
||||
*/
|
||||
public SurfaceTexture getSurfaceTexture() {
|
||||
return surfaceTexture;
|
||||
}
|
||||
|
||||
/** Retrieve the handler that calls onFrame(). This handler is valid until dispose() is called. */
|
||||
public Handler getHandler() {
|
||||
return handler;
|
||||
}
|
||||
|
||||
/**
|
||||
* This function is called when the texture frame is released. Only one texture frame can be in
|
||||
* flight at once, so this function must be called before a new frame is delivered.
|
||||
*/
|
||||
private void returnTextureFrame() {
|
||||
handler.post(() -> {
|
||||
isTextureInUse = false;
|
||||
if (isQuitting) {
|
||||
release();
|
||||
} else {
|
||||
tryDeliverTextureFrame();
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
public boolean isTextureInUse() {
|
||||
return isTextureInUse;
|
||||
}
|
||||
|
||||
/**
|
||||
 * Call dispose() to stop receiving frames. OpenGL resources are released and the handler is
|
||||
* stopped when the texture frame has been released. You are guaranteed to not receive any more
|
||||
* onFrame() after this function returns.
|
||||
*/
|
||||
public void dispose() {
|
||||
Logging.d(TAG, "dispose()");
|
||||
ThreadUtils.invokeAtFrontUninterruptibly(handler, () -> {
|
||||
isQuitting = true;
|
||||
if (!isTextureInUse) {
|
||||
release();
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Posts to the correct thread to convert |textureBuffer| to I420.
|
||||
*
|
||||
* @deprecated Use toI420() instead.
|
||||
*/
|
||||
@Deprecated
|
||||
public VideoFrame.I420Buffer textureToYuv(final TextureBuffer textureBuffer) {
|
||||
return textureBuffer.toI420();
|
||||
}
|
||||
|
||||
private void updateTexImage() {
|
||||
// SurfaceTexture.updateTexImage apparently can compete and deadlock with eglSwapBuffers,
|
||||
// as observed on Nexus 5. Therefore, synchronize it with the EGL functions.
|
||||
// See https://bugs.chromium.org/p/webrtc/issues/detail?id=5702 for more info.
|
||||
synchronized (EglBase.lock) {
|
||||
surfaceTexture.updateTexImage();
|
||||
}
|
||||
}
|
||||
|
||||
private void tryDeliverTextureFrame() {
|
||||
if (handler.getLooper().getThread() != Thread.currentThread()) {
|
||||
throw new IllegalStateException("Wrong thread.");
|
||||
}
|
||||
if (isQuitting || !hasPendingTexture || isTextureInUse || listener == null) {
|
||||
return;
|
||||
}
|
||||
if (textureWidth == 0 || textureHeight == 0) {
|
||||
// Information about the resolution needs to be provided by a call to setTextureSize() before
|
||||
// frames are produced.
|
||||
Logging.w(TAG, "Texture size has not been set.");
|
||||
return;
|
||||
}
|
||||
isTextureInUse = true;
|
||||
hasPendingTexture = false;
|
||||
|
||||
updateTexImage();
|
||||
|
||||
final float[] transformMatrix = new float[16];
|
||||
surfaceTexture.getTransformMatrix(transformMatrix);
|
||||
long timestampNs = surfaceTexture.getTimestamp();
|
||||
if (timestampAligner != null) {
|
||||
timestampNs = timestampAligner.translateTimestamp(timestampNs);
|
||||
}
|
||||
final VideoFrame.TextureBuffer buffer =
|
||||
new TextureBufferImpl(textureWidth, textureHeight, TextureBuffer.Type.OES, oesTextureId,
|
||||
RendererCommon.convertMatrixToAndroidGraphicsMatrix(transformMatrix), handler,
|
||||
yuvConverter, textureRefCountMonitor);
|
||||
if (frameRefMonitor != null) {
|
||||
frameRefMonitor.onNewBuffer(buffer);
|
||||
}
|
||||
final VideoFrame frame = new VideoFrame(buffer, frameRotation, timestampNs);
|
||||
listener.onFrame(frame);
|
||||
frame.release();
|
||||
}
|
||||
|
||||
private void release() {
|
||||
if (handler.getLooper().getThread() != Thread.currentThread()) {
|
||||
throw new IllegalStateException("Wrong thread.");
|
||||
}
|
||||
if (isTextureInUse || !isQuitting) {
|
||||
throw new IllegalStateException("Unexpected release.");
|
||||
}
|
||||
yuvConverter.release();
|
||||
GLES20.glDeleteTextures(1, new int[] {oesTextureId}, 0);
|
||||
surfaceTexture.release();
|
||||
eglBase.release();
|
||||
handler.getLooper().quit();
|
||||
if (timestampAligner != null) {
|
||||
timestampAligner.dispose();
|
||||
}
|
||||
}
|
||||
}
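A minimal usage sketch of SurfaceTextureHelper (editorial illustration, not part of the imported file). eglContext and videoSink are assumed to be provided by the caller; create() may return null if EGL initialization fails, and setTextureSize() must be called before frames can be delivered.

// Illustrative sketch; the thread name and texture size are arbitrary.
SurfaceTextureHelper helper = SurfaceTextureHelper.create("STHelperThread", eglContext);
helper.setTextureSize(1280, 720);
helper.startListening(frame -> {
  // The frame is only valid for the duration of this callback unless it is retained.
  videoSink.onFrame(frame);
});
// Hand helper.getSurfaceTexture() to the producer, e.g. a camera or a decoder output surface.
// When the capture session ends:
helper.stopListening();
helper.dispose();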
|
|
@ -0,0 +1,202 @@
|
|||
/*
|
||||
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.webrtc;
|
||||
|
||||
import android.graphics.Matrix;
|
||||
import android.os.Handler;
|
||||
import android.support.annotation.Nullable;
|
||||
|
||||
/**
|
||||
 * Android texture buffer that glues the necessary information together with a generic
|
||||
 * release callback. toI420() is implemented by providing a Handler and a YuvConverter.
|
||||
*/
|
||||
public class TextureBufferImpl implements VideoFrame.TextureBuffer {
|
||||
interface RefCountMonitor {
|
||||
void onRetain(TextureBufferImpl textureBuffer);
|
||||
void onRelease(TextureBufferImpl textureBuffer);
|
||||
void onDestroy(TextureBufferImpl textureBuffer);
|
||||
}
|
||||
|
||||
// This is the full resolution the texture has in memory after applying the transformation matrix
|
||||
// that might include cropping. This resolution is useful to know when sampling the texture to
|
||||
// avoid downscaling artifacts.
|
||||
private final int unscaledWidth;
|
||||
private final int unscaledHeight;
|
||||
// This is the resolution that has been applied after cropAndScale().
|
||||
private final int width;
|
||||
private final int height;
|
||||
private final Type type;
|
||||
private final int id;
|
||||
private final Matrix transformMatrix;
|
||||
private final Handler toI420Handler;
|
||||
private final YuvConverter yuvConverter;
|
||||
private final RefCountDelegate refCountDelegate;
|
||||
private final RefCountMonitor refCountMonitor;
|
||||
|
||||
public TextureBufferImpl(int width, int height, Type type, int id, Matrix transformMatrix,
|
||||
Handler toI420Handler, YuvConverter yuvConverter, @Nullable Runnable releaseCallback) {
|
||||
this(width, height, width, height, type, id, transformMatrix, toI420Handler, yuvConverter,
|
||||
new RefCountMonitor() {
|
||||
@Override
|
||||
public void onRetain(TextureBufferImpl textureBuffer) {}
|
||||
|
||||
@Override
|
||||
public void onRelease(TextureBufferImpl textureBuffer) {}
|
||||
|
||||
@Override
|
||||
public void onDestroy(TextureBufferImpl textureBuffer) {
|
||||
if (releaseCallback != null) {
|
||||
releaseCallback.run();
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
TextureBufferImpl(int width, int height, Type type, int id, Matrix transformMatrix,
|
||||
Handler toI420Handler, YuvConverter yuvConverter, RefCountMonitor refCountMonitor) {
|
||||
this(width, height, width, height, type, id, transformMatrix, toI420Handler, yuvConverter,
|
||||
refCountMonitor);
|
||||
}
|
||||
|
||||
private TextureBufferImpl(int unscaledWidth, int unscaledHeight, int width, int height, Type type,
|
||||
int id, Matrix transformMatrix, Handler toI420Handler, YuvConverter yuvConverter,
|
||||
RefCountMonitor refCountMonitor) {
|
||||
this.unscaledWidth = unscaledWidth;
|
||||
this.unscaledHeight = unscaledHeight;
|
||||
this.width = width;
|
||||
this.height = height;
|
||||
this.type = type;
|
||||
this.id = id;
|
||||
this.transformMatrix = transformMatrix;
|
||||
this.toI420Handler = toI420Handler;
|
||||
this.yuvConverter = yuvConverter;
|
||||
this.refCountDelegate = new RefCountDelegate(() -> refCountMonitor.onDestroy(this));
|
||||
this.refCountMonitor = refCountMonitor;
|
||||
}
|
||||
|
||||
@Override
|
||||
public VideoFrame.TextureBuffer.Type getType() {
|
||||
return type;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int getTextureId() {
|
||||
return id;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Matrix getTransformMatrix() {
|
||||
return transformMatrix;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int getWidth() {
|
||||
return width;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int getHeight() {
|
||||
return height;
|
||||
}
|
||||
|
||||
@Override
|
||||
public VideoFrame.I420Buffer toI420() {
|
||||
return ThreadUtils.invokeAtFrontUninterruptibly(
|
||||
toI420Handler, () -> yuvConverter.convert(this));
|
||||
}
|
||||
|
||||
@Override
|
||||
public void retain() {
|
||||
refCountMonitor.onRetain(this);
|
||||
refCountDelegate.retain();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void release() {
|
||||
refCountMonitor.onRelease(this);
|
||||
refCountDelegate.release();
|
||||
}
|
||||
|
||||
@Override
|
||||
public VideoFrame.Buffer cropAndScale(
|
||||
int cropX, int cropY, int cropWidth, int cropHeight, int scaleWidth, int scaleHeight) {
|
||||
final Matrix cropAndScaleMatrix = new Matrix();
|
||||
// In WebRTC, Y=0 is the top row, while in OpenGL Y=0 is the bottom row. This means that the Y
|
||||
// direction is effectively reversed.
|
||||
final int cropYFromBottom = height - (cropY + cropHeight);
|
||||
cropAndScaleMatrix.preTranslate(cropX / (float) width, cropYFromBottom / (float) height);
|
||||
cropAndScaleMatrix.preScale(cropWidth / (float) width, cropHeight / (float) height);
|
||||
|
||||
return applyTransformMatrix(cropAndScaleMatrix,
|
||||
(int) Math.round(unscaledWidth * cropWidth / (float) width),
|
||||
(int) Math.round(unscaledHeight * cropHeight / (float) height), scaleWidth, scaleHeight);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the width of the texture in memory. This should only be used for downscaling, and you
|
||||
* should still respect the width from getWidth().
|
||||
*/
|
||||
public int getUnscaledWidth() {
|
||||
return unscaledWidth;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the height of the texture in memory. This should only be used for downscaling, and you
|
||||
* should still respect the height from getHeight().
|
||||
*/
|
||||
public int getUnscaledHeight() {
|
||||
return unscaledHeight;
|
||||
}
|
||||
|
||||
public Handler getToI420Handler() {
|
||||
return toI420Handler;
|
||||
}
|
||||
|
||||
public YuvConverter getYuvConverter() {
|
||||
return yuvConverter;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new TextureBufferImpl with an applied transform matrix and a new size. The
|
||||
* existing buffer is unchanged. The given transform matrix is applied first when texture
|
||||
* coordinates are still in the unmodified [0, 1] range.
|
||||
*/
|
||||
public TextureBufferImpl applyTransformMatrix(
|
||||
Matrix transformMatrix, int newWidth, int newHeight) {
|
||||
return applyTransformMatrix(transformMatrix, /* unscaledWidth= */ newWidth,
|
||||
/* unscaledHeight= */ newHeight, /* scaledWidth= */ newWidth,
|
||||
/* scaledHeight= */ newHeight);
|
||||
}
|
||||
|
||||
private TextureBufferImpl applyTransformMatrix(Matrix transformMatrix, int unscaledWidth,
|
||||
int unscaledHeight, int scaledWidth, int scaledHeight) {
|
||||
final Matrix newMatrix = new Matrix(this.transformMatrix);
|
||||
newMatrix.preConcat(transformMatrix);
|
||||
retain();
|
||||
return new TextureBufferImpl(unscaledWidth, unscaledHeight, scaledWidth, scaledHeight, type, id,
|
||||
newMatrix, toI420Handler, yuvConverter, new RefCountMonitor() {
|
||||
@Override
|
||||
public void onRetain(TextureBufferImpl textureBuffer) {
|
||||
refCountMonitor.onRetain(TextureBufferImpl.this);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onRelease(TextureBufferImpl textureBuffer) {
|
||||
refCountMonitor.onRelease(TextureBufferImpl.this);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onDestroy(TextureBufferImpl textureBuffer) {
|
||||
release();
|
||||
}
|
||||
});
|
||||
}
|
||||
}
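A minimal usage sketch of TextureBufferImpl (editorial illustration, not part of the imported file). oesTextureId, width, height, helperHandler and yuvConverter are assumed to exist, and returnTextureToProducer() is a hypothetical helper that gives the texture back to whatever produced it.

// Illustrative sketch; wraps an OES texture id into a ref counted VideoFrame buffer.
TextureBufferImpl buffer = new TextureBufferImpl(width, height,
    VideoFrame.TextureBuffer.Type.OES, oesTextureId, new Matrix(), helperHandler, yuvConverter,
    () -> returnTextureToProducer(oesTextureId));
VideoFrame frame = new VideoFrame(buffer, /* rotation= */ 0, /* timestampNs= */ 0);
// ... deliver the frame to a VideoSink ...
frame.release(); // Dropping the last reference triggers the release callback above.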
|
|
@ -0,0 +1,59 @@
|
|||
/*
|
||||
* Copyright 2018 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.webrtc;
|
||||
|
||||
/**
|
||||
* The TimestampAligner class helps translating camera timestamps into the same timescale as is
|
||||
* used by rtc::TimeNanos(). Some cameras have built in timestamping which is more accurate than
|
||||
* reading the system clock, but using a different epoch and unknown clock drift. Frame timestamps
|
||||
* in webrtc should use rtc::TimeNanos (system monotonic time), and this class provides a filter
|
||||
* which lets us use the rtc::TimeNanos timescale, and at the same time take advantage of higher
|
||||
* accuracy of the camera clock. This class is a wrapper on top of rtc::TimestampAligner.
|
||||
*/
|
||||
public class TimestampAligner {
|
||||
/**
|
||||
* Wrapper around rtc::TimeNanos(). This is normally same as System.nanoTime(), but call this
|
||||
* function to be safe.
|
||||
*/
|
||||
public static long getRtcTimeNanos() {
|
||||
return nativeRtcTimeNanos();
|
||||
}
|
||||
|
||||
private volatile long nativeTimestampAligner = nativeCreateTimestampAligner();
|
||||
|
||||
/**
|
||||
* Translates camera timestamps to the same timescale as is used by rtc::TimeNanos().
|
||||
* |cameraTimeNs| is assumed to be accurate, but with an unknown epoch and clock drift. Returns
|
||||
* the translated timestamp.
|
||||
*/
|
||||
public long translateTimestamp(long cameraTimeNs) {
|
||||
checkNativeAlignerExists();
|
||||
return nativeTranslateTimestamp(nativeTimestampAligner, cameraTimeNs);
|
||||
}
|
||||
|
||||
/** Dispose native timestamp aligner. */
|
||||
public void dispose() {
|
||||
checkNativeAlignerExists();
|
||||
nativeReleaseTimestampAligner(nativeTimestampAligner);
|
||||
nativeTimestampAligner = 0;
|
||||
}
|
||||
|
||||
private void checkNativeAlignerExists() {
|
||||
if (nativeTimestampAligner == 0) {
|
||||
throw new IllegalStateException("TimestampAligner has been disposed.");
|
||||
}
|
||||
}
|
||||
|
||||
private static native long nativeRtcTimeNanos();
|
||||
private static native long nativeCreateTimestampAligner();
|
||||
private static native void nativeReleaseTimestampAligner(long timestampAligner);
|
||||
private static native long nativeTranslateTimestamp(long timestampAligner, long cameraTimeNs);
|
||||
}
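A minimal usage sketch of TimestampAligner (editorial illustration, not part of the imported file). cameraTimestampNs is assumed to come from the camera's own clock; all methods call into native code, so the WebRTC native library must be loaded.

// Illustrative sketch; translate a camera timestamp into the rtc::TimeNanos() timescale.
TimestampAligner aligner = new TimestampAligner();
long alignedTimestampNs = aligner.translateTimestamp(cameraTimestampNs);
// ... use alignedTimestampNs when constructing VideoFrames ...
aligner.dispose(); // Release the native aligner when the capture session ends.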
|
|
@ -0,0 +1,53 @@
/*
 *  Copyright 2013 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

package org.webrtc;

import android.content.Context;

// Base interface for all VideoCapturers to implement.
public interface VideoCapturer {
  /**
   * This function is used to initialize the camera thread, the android application context, and the
   * capture observer. It will be called only once and before any startCapture() request. The
   * camera thread is guaranteed to be valid until dispose() is called. If the VideoCapturer wants
   * to deliver texture frames, it should do this by rendering on the SurfaceTexture in
   * {@code surfaceTextureHelper}, register itself as a listener, and forward the frames to
   * CapturerObserver.onFrameCaptured(). The caller still has ownership of {@code
   * surfaceTextureHelper} and is responsible for making sure surfaceTextureHelper.dispose() is
   * called. This also means that the caller can reuse the SurfaceTextureHelper to initialize a new
   * VideoCapturer once the previous VideoCapturer has been disposed.
   */
  void initialize(SurfaceTextureHelper surfaceTextureHelper, Context applicationContext,
      CapturerObserver capturerObserver);

  /**
   * Start capturing frames in a format that is as close as possible to {@code width x height} and
   * {@code framerate}.
   */
  void startCapture(int width, int height, int framerate);

  /**
   * Stop capturing. This function should block until capture is actually stopped.
   */
  void stopCapture() throws InterruptedException;

  void changeCaptureFormat(int width, int height, int framerate);

  /**
   * Perform any final cleanup here. No more capturing will be done after this call.
   */
  void dispose();

  /**
   * @return true if-and-only-if this is a screen capturer.
   */
  boolean isScreencast();
}
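A lifecycle sketch for this interface (editor's illustration, not part of the imported files; eglBase, appContext and videoSource are assumed to exist, e.g. from a PeerConnectionFactory setup, and exception handling is omitted):

CameraEnumerator enumerator = new Camera1Enumerator(/* captureToTexture= */ true);
VideoCapturer capturer =
    enumerator.createCapturer(enumerator.getDeviceNames()[0], /* eventsHandler= */ null);
SurfaceTextureHelper helper =
    SurfaceTextureHelper.create("CaptureThread", eglBase.getEglBaseContext());
capturer.initialize(helper, appContext, videoSource.getCapturerObserver());
capturer.startCapture(/* width= */ 640, /* height= */ 480, /* framerate= */ 30);
// ...
capturer.stopCapture(); // declared to throw InterruptedException
capturer.dispose();
helper.dispose(); // the caller keeps ownership of the SurfaceTextureHelper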
@ -0,0 +1,190 @@
|
|||
/*
|
||||
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.webrtc;
|
||||
|
||||
import android.graphics.Matrix;
|
||||
import android.opengl.GLES11Ext;
|
||||
import android.opengl.GLES20;
|
||||
import java.nio.ByteBuffer;
|
||||
|
||||
/**
|
||||
* Java version of webrtc::VideoFrame and webrtc::VideoFrameBuffer. A difference from the C++
|
||||
* version is that no explicit tag is used, and clients are expected to use 'instanceof' to find the
|
||||
* right subclass of the buffer. This allows clients to create custom VideoFrame.Buffer in
|
||||
* arbitrary format in their custom VideoSources, and then cast it back to the correct subclass in
|
||||
* their custom VideoSinks. All implementations must also implement the toI420() function,
|
||||
* converting from the underlying representation if necessary. I420 is the most widely accepted
|
||||
* format and serves as a fallback for video sinks that can only handle I420, e.g. the internal
|
||||
* WebRTC software encoders.
|
||||
*/
|
||||
public class VideoFrame implements RefCounted {
|
||||
/**
|
||||
* Implements image storage medium. Might be, for example, an OpenGL texture or a memory region
|
||||
* containing I420-data.
|
||||
*
|
||||
* <p>Reference counting is needed since a video buffer can be shared between multiple VideoSinks,
|
||||
* and the buffer needs to be returned to the VideoSource as soon as all references are gone.
|
||||
*/
|
||||
public interface Buffer extends RefCounted {
|
||||
/**
|
||||
* Resolution of the buffer in pixels.
|
||||
*/
|
||||
@CalledByNative("Buffer") int getWidth();
|
||||
@CalledByNative("Buffer") int getHeight();
|
||||
|
||||
/**
|
||||
* Returns a memory-backed frame in I420 format. If the pixel data is in another format, a
|
||||
* conversion will take place. All implementations must provide a fallback to I420 for
|
||||
* compatibility with e.g. the internal WebRTC software encoders.
|
||||
*/
|
||||
@CalledByNative("Buffer") I420Buffer toI420();
|
||||
|
||||
@Override @CalledByNative("Buffer") void retain();
|
||||
@Override @CalledByNative("Buffer") void release();
|
||||
|
||||
/**
|
||||
* Crops a region defined by |cropX|, |cropY|, |cropWidth| and |cropHeight|. Scales it to size
|
||||
* |scaleWidth| x |scaleHeight|.
|
||||
*/
|
||||
@CalledByNative("Buffer")
|
||||
Buffer cropAndScale(
|
||||
int cropX, int cropY, int cropWidth, int cropHeight, int scaleWidth, int scaleHeight);
|
||||
}
|
||||
|
||||
/**
|
||||
* Interface for I420 buffers.
|
||||
*/
|
||||
public interface I420Buffer extends Buffer {
|
||||
/**
|
||||
* Returns a direct ByteBuffer containing Y-plane data. The buffer capacity is at least
|
||||
* getStrideY() * getHeight() bytes. The position of the returned buffer is ignored and must
|
||||
* be 0. Callers may mutate the ByteBuffer (e.g. through relative-read operations), so
|
||||
* implementations must return a new ByteBuffer or slice for each call.
|
||||
*/
|
||||
@CalledByNative("I420Buffer") ByteBuffer getDataY();
|
||||
/**
|
||||
* Returns a direct ByteBuffer containing U-plane data. The buffer capacity is at least
|
||||
* getStrideU() * ((getHeight() + 1) / 2) bytes. The position of the returned buffer is ignored
|
||||
* and must be 0. Callers may mutate the ByteBuffer (e.g. through relative-read operations), so
|
||||
* implementations must return a new ByteBuffer or slice for each call.
|
||||
*/
|
||||
@CalledByNative("I420Buffer") ByteBuffer getDataU();
|
||||
/**
|
||||
* Returns a direct ByteBuffer containing V-plane data. The buffer capacity is at least
|
||||
* getStrideV() * ((getHeight() + 1) / 2) bytes. The position of the returned buffer is ignored
|
||||
* and must be 0. Callers may mutate the ByteBuffer (e.g. through relative-read operations), so
|
||||
* implementations must return a new ByteBuffer or slice for each call.
|
||||
*/
|
||||
@CalledByNative("I420Buffer") ByteBuffer getDataV();
|
||||
|
||||
@CalledByNative("I420Buffer") int getStrideY();
|
||||
@CalledByNative("I420Buffer") int getStrideU();
|
||||
@CalledByNative("I420Buffer") int getStrideV();
|
||||
}
|
||||
|
||||
/**
|
||||
* Interface for buffers that are stored as a single texture, either in OES or RGB format.
|
||||
*/
|
||||
public interface TextureBuffer extends Buffer {
|
||||
enum Type {
|
||||
OES(GLES11Ext.GL_TEXTURE_EXTERNAL_OES),
|
||||
RGB(GLES20.GL_TEXTURE_2D);
|
||||
|
||||
private final int glTarget;
|
||||
|
||||
private Type(final int glTarget) {
|
||||
this.glTarget = glTarget;
|
||||
}
|
||||
|
||||
public int getGlTarget() {
|
||||
return glTarget;
|
||||
}
|
||||
}
|
||||
|
||||
Type getType();
|
||||
int getTextureId();
|
||||
|
||||
/**
|
||||
* Retrieve the transform matrix associated with the frame. This transform matrix maps 2D
|
||||
* homogeneous coordinates of the form (s, t, 1) with s and t in the inclusive range [0, 1] to
|
||||
* the coordinate that should be used to sample that location from the buffer.
|
||||
*/
|
||||
Matrix getTransformMatrix();
|
||||
}
|
||||
|
||||
private final Buffer buffer;
|
||||
private final int rotation;
|
||||
private final long timestampNs;
|
||||
|
||||
/**
|
||||
* Constructs a new VideoFrame backed by the given {@code buffer}.
|
||||
*
|
||||
* @note Ownership of the buffer object is transferred to the new VideoFrame.
|
||||
*/
|
||||
@CalledByNative
|
||||
public VideoFrame(Buffer buffer, int rotation, long timestampNs) {
|
||||
if (buffer == null) {
|
||||
throw new IllegalArgumentException("buffer not allowed to be null");
|
||||
}
|
||||
if (rotation % 90 != 0) {
|
||||
throw new IllegalArgumentException("rotation must be a multiple of 90");
|
||||
}
|
||||
this.buffer = buffer;
|
||||
this.rotation = rotation;
|
||||
this.timestampNs = timestampNs;
|
||||
}
|
||||
|
||||
@CalledByNative
|
||||
public Buffer getBuffer() {
|
||||
return buffer;
|
||||
}
|
||||
|
||||
/**
|
||||
* Rotation of the frame in degrees.
|
||||
*/
|
||||
@CalledByNative
|
||||
public int getRotation() {
|
||||
return rotation;
|
||||
}
|
||||
|
||||
/**
|
||||
* Timestamp of the frame in nano seconds.
|
||||
*/
|
||||
@CalledByNative
|
||||
public long getTimestampNs() {
|
||||
return timestampNs;
|
||||
}
|
||||
|
||||
public int getRotatedWidth() {
|
||||
if (rotation % 180 == 0) {
|
||||
return buffer.getWidth();
|
||||
}
|
||||
return buffer.getHeight();
|
||||
}
|
||||
|
||||
public int getRotatedHeight() {
|
||||
if (rotation % 180 == 0) {
|
||||
return buffer.getHeight();
|
||||
}
|
||||
return buffer.getWidth();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void retain() {
|
||||
buffer.retain();
|
||||
}
|
||||
|
||||
@Override
|
||||
@CalledByNative
|
||||
public void release() {
|
||||
buffer.release();
|
||||
}
|
||||
}
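A construction sketch for this class (editor's illustration, not part of the imported file), using the JavaI420Buffer helper that the YuvConverter file below also relies on:

VideoFrame.I420Buffer i420 = JavaI420Buffer.allocate(/* width= */ 640, /* height= */ 480);
// ... fill i420.getDataY() / getDataU() / getDataV(), honoring the reported strides ...
VideoFrame frame = new VideoFrame(i420, /* rotation= */ 90, /* timestampNs= */ System.nanoTime());
// getRotatedWidth() x getRotatedHeight() is 480x640 here, since 90 % 180 != 0.
frame.release(); // releases the wrapped buffer once the last reference is gone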
@ -0,0 +1,241 @@
|
|||
/*
|
||||
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.webrtc;
|
||||
|
||||
import android.graphics.Matrix;
|
||||
import android.graphics.Point;
|
||||
import android.opengl.GLES20;
|
||||
import android.support.annotation.Nullable;
|
||||
import java.nio.ByteBuffer;
|
||||
|
||||
/**
|
||||
* Helper class to draw VideoFrames. Calls either drawer.drawOes, drawer.drawRgb, or
|
||||
* drawer.drawYuv depending on the type of the buffer. The frame will be rendered with rotation
|
||||
* taken into account. You can supply an additional render matrix for custom transformations.
|
||||
*/
|
||||
public class VideoFrameDrawer {
|
||||
public static final String TAG = "VideoFrameDrawer";
|
||||
/**
|
||||
* Draws a VideoFrame.TextureBuffer. Calls either drawer.drawOes or drawer.drawRgb
|
||||
* depending on the type of the buffer. You can supply an additional render matrix. This is
|
||||
* used multiplied together with the transformation matrix of the frame. (M = renderMatrix *
|
||||
* transformationMatrix)
|
||||
*/
|
||||
public static void drawTexture(RendererCommon.GlDrawer drawer, VideoFrame.TextureBuffer buffer,
|
||||
Matrix renderMatrix, int frameWidth, int frameHeight, int viewportX, int viewportY,
|
||||
int viewportWidth, int viewportHeight) {
|
||||
Matrix finalMatrix = new Matrix(buffer.getTransformMatrix());
|
||||
finalMatrix.preConcat(renderMatrix);
|
||||
float[] finalGlMatrix = RendererCommon.convertMatrixFromAndroidGraphicsMatrix(finalMatrix);
|
||||
switch (buffer.getType()) {
|
||||
case OES:
|
||||
drawer.drawOes(buffer.getTextureId(), finalGlMatrix, frameWidth, frameHeight, viewportX,
|
||||
viewportY, viewportWidth, viewportHeight);
|
||||
break;
|
||||
case RGB:
|
||||
drawer.drawRgb(buffer.getTextureId(), finalGlMatrix, frameWidth, frameHeight, viewportX,
|
||||
viewportY, viewportWidth, viewportHeight);
|
||||
break;
|
||||
default:
|
||||
throw new RuntimeException("Unknown texture type.");
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Helper class for uploading YUV bytebuffer frames to textures that handles stride > width. This
|
||||
* class keeps an internal ByteBuffer to avoid unnecessary allocations for intermediate copies.
|
||||
*/
|
||||
private static class YuvUploader {
|
||||
// Intermediate copy buffer for uploading yuv frames that are not packed, i.e. stride > width.
|
||||
// TODO(magjed): Investigate when GL_UNPACK_ROW_LENGTH is available, or make a custom shader
|
||||
// that handles stride and compare performance with intermediate copy.
|
||||
@Nullable private ByteBuffer copyBuffer;
|
||||
@Nullable private int[] yuvTextures;
|
||||
|
||||
/**
|
||||
* Upload |planes| into OpenGL textures, taking stride into consideration.
|
||||
*
|
||||
* @return Array of three texture indices corresponding to Y-, U-, and V-plane respectively.
|
||||
*/
|
||||
@Nullable
|
||||
public int[] uploadYuvData(int width, int height, int[] strides, ByteBuffer[] planes) {
|
||||
final int[] planeWidths = new int[] {width, width / 2, width / 2};
|
||||
final int[] planeHeights = new int[] {height, height / 2, height / 2};
|
||||
// Make a first pass to see if we need a temporary copy buffer.
|
||||
int copyCapacityNeeded = 0;
|
||||
for (int i = 0; i < 3; ++i) {
|
||||
if (strides[i] > planeWidths[i]) {
|
||||
copyCapacityNeeded = Math.max(copyCapacityNeeded, planeWidths[i] * planeHeights[i]);
|
||||
}
|
||||
}
|
||||
// Allocate copy buffer if necessary.
|
||||
if (copyCapacityNeeded > 0
|
||||
&& (copyBuffer == null || copyBuffer.capacity() < copyCapacityNeeded)) {
|
||||
copyBuffer = ByteBuffer.allocateDirect(copyCapacityNeeded);
|
||||
}
|
||||
// Make sure YUV textures are allocated.
|
||||
if (yuvTextures == null) {
|
||||
yuvTextures = new int[3];
|
||||
for (int i = 0; i < 3; i++) {
|
||||
yuvTextures[i] = GlUtil.generateTexture(GLES20.GL_TEXTURE_2D);
|
||||
}
|
||||
}
|
||||
// Upload each plane.
|
||||
for (int i = 0; i < 3; ++i) {
|
||||
GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
|
||||
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, yuvTextures[i]);
|
||||
// GLES only accepts packed data, i.e. stride == planeWidth.
|
||||
final ByteBuffer packedByteBuffer;
|
||||
if (strides[i] == planeWidths[i]) {
|
||||
// Input is packed already.
|
||||
packedByteBuffer = planes[i];
|
||||
} else {
|
||||
YuvHelper.copyPlane(
|
||||
planes[i], strides[i], copyBuffer, planeWidths[i], planeWidths[i], planeHeights[i]);
|
||||
packedByteBuffer = copyBuffer;
|
||||
}
|
||||
GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE, planeWidths[i],
|
||||
planeHeights[i], 0, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, packedByteBuffer);
|
||||
}
|
||||
return yuvTextures;
|
||||
}
|
||||
|
||||
@Nullable
|
||||
public int[] uploadFromBuffer(VideoFrame.I420Buffer buffer) {
|
||||
int[] strides = {buffer.getStrideY(), buffer.getStrideU(), buffer.getStrideV()};
|
||||
ByteBuffer[] planes = {buffer.getDataY(), buffer.getDataU(), buffer.getDataV()};
|
||||
return uploadYuvData(buffer.getWidth(), buffer.getHeight(), strides, planes);
|
||||
}
|
||||
|
||||
@Nullable
|
||||
public int[] getYuvTextures() {
|
||||
return yuvTextures;
|
||||
}
|
||||
|
||||
/**
|
||||
* Releases cached resources. Uploader can still be used and the resources will be reallocated
|
||||
* on first use.
|
||||
*/
|
||||
public void release() {
|
||||
copyBuffer = null;
|
||||
if (yuvTextures != null) {
|
||||
GLES20.glDeleteTextures(3, yuvTextures, 0);
|
||||
yuvTextures = null;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private static int distance(float x0, float y0, float x1, float y1) {
|
||||
return (int) Math.round(Math.hypot(x1 - x0, y1 - y0));
|
||||
}
|
||||
|
||||
// These points are used to calculate the size of the part of the frame we are rendering.
|
||||
final static float[] srcPoints =
|
||||
new float[] {0f /* x0 */, 0f /* y0 */, 1f /* x1 */, 0f /* y1 */, 0f /* x2 */, 1f /* y2 */};
|
||||
private final float[] dstPoints = new float[6];
|
||||
private final Point renderSize = new Point();
|
||||
private int renderWidth;
|
||||
private int renderHeight;
|
||||
|
||||
// Calculate the frame size after |renderMatrix| is applied. Stores the output in member variables
|
||||
// |renderWidth| and |renderHeight| to avoid allocations since this function is called for every
|
||||
// frame.
|
||||
private void calculateTransformedRenderSize(
|
||||
int frameWidth, int frameHeight, @Nullable Matrix renderMatrix) {
|
||||
if (renderMatrix == null) {
|
||||
renderWidth = frameWidth;
|
||||
renderHeight = frameHeight;
|
||||
return;
|
||||
}
|
||||
// Transform the texture coordinates (in the range [0, 1]) according to |renderMatrix|.
|
||||
renderMatrix.mapPoints(dstPoints, srcPoints);
|
||||
|
||||
// Multiply with the width and height to get the positions in terms of pixels.
|
||||
for (int i = 0; i < 3; ++i) {
|
||||
dstPoints[i * 2 + 0] *= frameWidth;
|
||||
dstPoints[i * 2 + 1] *= frameHeight;
|
||||
}
|
||||
|
||||
// Get the length of the sides of the transformed rectangle in terms of pixels.
|
||||
renderWidth = distance(dstPoints[0], dstPoints[1], dstPoints[2], dstPoints[3]);
|
||||
renderHeight = distance(dstPoints[0], dstPoints[1], dstPoints[4], dstPoints[5]);
|
||||
}
|
||||
|
||||
private final YuvUploader yuvUploader = new YuvUploader();
|
||||
// This variable will only be used for checking reference equality and is used for caching I420
|
||||
// textures.
|
||||
@Nullable private VideoFrame lastI420Frame;
|
||||
private final Matrix renderMatrix = new Matrix();
|
||||
|
||||
public void drawFrame(VideoFrame frame, RendererCommon.GlDrawer drawer) {
|
||||
drawFrame(frame, drawer, null /* additionalRenderMatrix */);
|
||||
}
|
||||
|
||||
public void drawFrame(
|
||||
VideoFrame frame, RendererCommon.GlDrawer drawer, Matrix additionalRenderMatrix) {
|
||||
drawFrame(frame, drawer, additionalRenderMatrix, 0 /* viewportX */, 0 /* viewportY */,
|
||||
frame.getRotatedWidth(), frame.getRotatedHeight());
|
||||
}
|
||||
|
||||
public void drawFrame(VideoFrame frame, RendererCommon.GlDrawer drawer,
|
||||
@Nullable Matrix additionalRenderMatrix, int viewportX, int viewportY, int viewportWidth,
|
||||
int viewportHeight) {
|
||||
final int width = frame.getRotatedWidth();
|
||||
final int height = frame.getRotatedHeight();
|
||||
calculateTransformedRenderSize(width, height, additionalRenderMatrix);
|
||||
if (renderWidth <= 0 || renderHeight <= 0) {
|
||||
Logging.w(TAG, "Illegal frame size: " + renderWidth + "x" + renderHeight);
|
||||
return;
|
||||
}
|
||||
|
||||
final boolean isTextureFrame = frame.getBuffer() instanceof VideoFrame.TextureBuffer;
|
||||
renderMatrix.reset();
|
||||
renderMatrix.preTranslate(0.5f, 0.5f);
|
||||
if (!isTextureFrame) {
|
||||
renderMatrix.preScale(1f, -1f); // I420-frames are upside down
|
||||
}
|
||||
renderMatrix.preRotate(frame.getRotation());
|
||||
renderMatrix.preTranslate(-0.5f, -0.5f);
|
||||
if (additionalRenderMatrix != null) {
|
||||
renderMatrix.preConcat(additionalRenderMatrix);
|
||||
}
|
||||
|
||||
if (isTextureFrame) {
|
||||
lastI420Frame = null;
|
||||
drawTexture(drawer, (VideoFrame.TextureBuffer) frame.getBuffer(), renderMatrix, renderWidth,
|
||||
renderHeight, viewportX, viewportY, viewportWidth, viewportHeight);
|
||||
} else {
|
||||
// Only upload the I420 data to textures once per frame, if we are called multiple times
|
||||
// with the same frame.
|
||||
if (frame != lastI420Frame) {
|
||||
lastI420Frame = frame;
|
||||
final VideoFrame.I420Buffer i420Buffer = frame.getBuffer().toI420();
|
||||
yuvUploader.uploadFromBuffer(i420Buffer);
|
||||
i420Buffer.release();
|
||||
}
|
||||
|
||||
drawer.drawYuv(yuvUploader.getYuvTextures(),
|
||||
RendererCommon.convertMatrixFromAndroidGraphicsMatrix(renderMatrix), renderWidth,
|
||||
renderHeight, viewportX, viewportY, viewportWidth, viewportHeight);
|
||||
}
|
||||
}
|
||||
|
||||
public VideoFrame.Buffer prepareBufferForViewportSize(
|
||||
VideoFrame.Buffer buffer, int width, int height) {
|
||||
buffer.retain();
|
||||
return buffer;
|
||||
}
|
||||
|
||||
public void release() {
|
||||
yuvUploader.release();
|
||||
lastI420Frame = null;
|
||||
}
|
||||
}
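A usage sketch for the drawer above (editor's illustration, not part of the imported file); it assumes a current EGL context on the calling thread and that the stock GlRectDrawer implementation of RendererCommon.GlDrawer is available:

VideoFrameDrawer frameDrawer = new VideoFrameDrawer();
RendererCommon.GlDrawer glDrawer = new GlRectDrawer();
frameDrawer.drawFrame(frame, glDrawer); // renders into the currently bound framebuffer
// ...
frameDrawer.release();
glDrawer.release();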
@ -0,0 +1,23 @@
/*
 *  Copyright 2017 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

package org.webrtc;

/**
 * Java version of rtc::VideoSinkInterface.
 */
public interface VideoSink {
  /**
   * Implementations should call frame.retain() if they need to hold a reference to the frame after
   * this function returns. Each call to retain() should be followed by a call to frame.release()
   * when the reference is no longer needed.
   */
  @CalledByNative void onFrame(VideoFrame frame);
}
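A conforming-sink sketch (editor's illustration, not part of the imported file), showing the retain()/release() contract described above together with the instanceof dispatch documented in VideoFrame.java:

class QueueingSink implements VideoSink {
  private final java.util.concurrent.BlockingQueue<VideoFrame> queue =
      new java.util.concurrent.LinkedBlockingQueue<>();

  @Override
  public void onFrame(VideoFrame frame) {
    frame.retain(); // keep the frame alive past the end of this callback
    queue.offer(frame);
  }

  void drainOne() throws InterruptedException {
    VideoFrame frame = queue.take();
    VideoFrame.Buffer buffer = frame.getBuffer();
    if (buffer instanceof VideoFrame.TextureBuffer) {
      // GPU path, e.g. hand the texture id to a renderer.
    } else {
      VideoFrame.I420Buffer i420 = buffer.toI420(); // CPU fallback, always available
      // ... consume the I420 planes ...
      i420.release();
    }
    frame.release(); // balances the retain() in onFrame()
  }
}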
@ -0,0 +1,242 @@
|
|||
/*
|
||||
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.webrtc;
|
||||
|
||||
import android.graphics.Matrix;
|
||||
import android.opengl.GLES20;
|
||||
import java.nio.ByteBuffer;
|
||||
import org.webrtc.VideoFrame.I420Buffer;
|
||||
import org.webrtc.VideoFrame.TextureBuffer;
|
||||
|
||||
/**
|
||||
* Class for converting OES textures to a YUV ByteBuffer. It can be constructed on any thread, but
|
||||
* should only be operated from a single thread with an active EGL context.
|
||||
*/
|
||||
public class YuvConverter {
|
||||
private static final String FRAGMENT_SHADER =
|
||||
// Difference in texture coordinate corresponding to one
|
||||
// sub-pixel in the x direction.
|
||||
"uniform vec2 xUnit;\n"
|
||||
// Color conversion coefficients, including constant term
|
||||
+ "uniform vec4 coeffs;\n"
|
||||
+ "\n"
|
||||
+ "void main() {\n"
|
||||
// Since the alpha read from the texture is always 1, this could
|
||||
// be written as a mat4 x vec4 multiply. However, that seems to
|
||||
// give a worse framerate, possibly because the additional
|
||||
// multiplies by 1.0 consume resources. TODO(nisse): Could also
|
||||
// try to do it as a vec3 x mat3x4, followed by an add in of a
|
||||
// constant vector.
|
||||
+ " gl_FragColor.r = coeffs.a + dot(coeffs.rgb,\n"
|
||||
+ " sample(tc - 1.5 * xUnit).rgb);\n"
|
||||
+ " gl_FragColor.g = coeffs.a + dot(coeffs.rgb,\n"
|
||||
+ " sample(tc - 0.5 * xUnit).rgb);\n"
|
||||
+ " gl_FragColor.b = coeffs.a + dot(coeffs.rgb,\n"
|
||||
+ " sample(tc + 0.5 * xUnit).rgb);\n"
|
||||
+ " gl_FragColor.a = coeffs.a + dot(coeffs.rgb,\n"
|
||||
+ " sample(tc + 1.5 * xUnit).rgb);\n"
|
||||
+ "}\n";
|
||||
|
||||
private static class ShaderCallbacks implements GlGenericDrawer.ShaderCallbacks {
|
||||
// Y'UV444 to RGB888, see https://en.wikipedia.org/wiki/YUV#Y%E2%80%B2UV444_to_RGB888_conversion
|
||||
// We use the ITU-R BT.601 coefficients for Y, U and V.
|
||||
// The values in Wikipedia are inaccurate; the accurate values derived from the spec are:
|
||||
// Y = 0.299 * R + 0.587 * G + 0.114 * B
|
||||
// U = -0.168736 * R - 0.331264 * G + 0.5 * B + 0.5
|
||||
// V = 0.5 * R - 0.418688 * G - 0.0813124 * B + 0.5
|
||||
// To map the Y-values to range [16-235] and U- and V-values to range [16-240], the matrix has
|
||||
// been multiplied with matrix:
|
||||
// {{219 / 255, 0, 0, 16 / 255},
|
||||
// {0, 224 / 255, 0, 16 / 255},
|
||||
// {0, 0, 224 / 255, 16 / 255},
|
||||
// {0, 0, 0, 1}}
|
||||
private static final float[] yCoeffs =
|
||||
new float[] {0.256788f, 0.504129f, 0.0979059f, 0.0627451f};
|
||||
private static final float[] uCoeffs =
|
||||
new float[] {-0.148223f, -0.290993f, 0.439216f, 0.501961f};
|
||||
private static final float[] vCoeffs =
|
||||
new float[] {0.439216f, -0.367788f, -0.0714274f, 0.501961f};
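// Editor's note (illustrative check, not part of the upstream file): the yCoeffs row is the
// R/G/B weights for Y scaled by 219/255 plus the 16/255 offset, e.g.
//   0.299 * 219 / 255 = 0.256788, 0.587 * 219 / 255 = 0.504129, 0.114 * 219 / 255 = 0.097906,
//   16 / 255 = 0.062745.
// The U and V rows are scaled by 224/255 instead, and their constant term is
//   0.5 * 224 / 255 + 16 / 255 = 0.439216 + 0.062745 = 0.501961.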
|
||||
|
||||
private int xUnitLoc;
|
||||
private int coeffsLoc;
|
||||
|
||||
private float[] coeffs;
|
||||
private float stepSize;
|
||||
|
||||
public void setPlaneY() {
|
||||
coeffs = yCoeffs;
|
||||
stepSize = 1.0f;
|
||||
}
|
||||
|
||||
public void setPlaneU() {
|
||||
coeffs = uCoeffs;
|
||||
stepSize = 2.0f;
|
||||
}
|
||||
|
||||
public void setPlaneV() {
|
||||
coeffs = vCoeffs;
|
||||
stepSize = 2.0f;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onNewShader(GlShader shader) {
|
||||
xUnitLoc = shader.getUniformLocation("xUnit");
|
||||
coeffsLoc = shader.getUniformLocation("coeffs");
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onPrepareShader(GlShader shader, float[] texMatrix, int frameWidth, int frameHeight,
|
||||
int viewportWidth, int viewportHeight) {
|
||||
GLES20.glUniform4fv(coeffsLoc, /* count= */ 1, coeffs, /* offset= */ 0);
|
||||
// Matrix * (1;0;0;0) / (width / stepSize). Note that OpenGL uses column major order.
|
||||
GLES20.glUniform2f(
|
||||
xUnitLoc, stepSize * texMatrix[0] / frameWidth, stepSize * texMatrix[1] / frameWidth);
|
||||
}
|
||||
}
|
||||
|
||||
private final ThreadUtils.ThreadChecker threadChecker = new ThreadUtils.ThreadChecker();
|
||||
private final GlTextureFrameBuffer i420TextureFrameBuffer =
|
||||
new GlTextureFrameBuffer(GLES20.GL_RGBA);
|
||||
private final ShaderCallbacks shaderCallbacks = new ShaderCallbacks();
|
||||
private final GlGenericDrawer drawer = new GlGenericDrawer(FRAGMENT_SHADER, shaderCallbacks);
|
||||
private final VideoFrameDrawer videoFrameDrawer;
|
||||
|
||||
/**
|
||||
* This class should be constructed on a thread that has an active EGL context.
|
||||
*/
|
||||
public YuvConverter() {
|
||||
this(new VideoFrameDrawer());
|
||||
}
|
||||
|
||||
public YuvConverter(VideoFrameDrawer videoFrameDrawer) {
|
||||
this.videoFrameDrawer = videoFrameDrawer;
|
||||
threadChecker.detachThread();
|
||||
}
|
||||
|
||||
/** Converts the texture buffer to I420. */
|
||||
public I420Buffer convert(TextureBuffer inputTextureBuffer) {
|
||||
threadChecker.checkIsOnValidThread();
|
||||
|
||||
TextureBuffer preparedBuffer = (TextureBuffer) videoFrameDrawer.prepareBufferForViewportSize(
|
||||
inputTextureBuffer, inputTextureBuffer.getWidth(), inputTextureBuffer.getHeight());
|
||||
|
||||
// We draw into a buffer laid out like
|
||||
//
|
||||
// +---------+
|
||||
// | |
|
||||
// | Y |
|
||||
// | |
|
||||
// | |
|
||||
// +----+----+
|
||||
// | U | V |
|
||||
// | | |
|
||||
// +----+----+
|
||||
//
|
||||
// In memory, we use the same stride for all of Y, U and V. The
|
||||
// U data starts at offset |height| * |stride| from the Y data,
|
||||
// and the V data starts at offset |stride/2| from the U
|
||||
// data, with rows of U and V data alternating.
|
||||
//
|
||||
// Now, it would have made sense to allocate a pixel buffer with
|
||||
// a single byte per pixel (EGL10.EGL_COLOR_BUFFER_TYPE,
|
||||
// EGL10.EGL_LUMINANCE_BUFFER,), but that seems to be
|
||||
// unsupported by devices. So do the following hack: Allocate an
|
||||
// RGBA buffer, of width |stride|/4. To render each of these
|
||||
// large pixels, sample the texture at 4 different x coordinates
|
||||
// and store the results in the four components.
|
||||
//
|
||||
// Since the V data needs to start on a boundary of such a
|
||||
// larger pixel, it is not sufficient that |stride| is even, it
|
||||
// has to be a multiple of 8 pixels.
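// Editor's note (worked numbers, not part of the upstream file): for a 637x480 input,
//   stride = ((637 + 7) / 8) * 8 = 640, uvHeight = (480 + 1) / 2 = 240, totalHeight = 720,
//   i420ByteBuffer capacity = 640 * 720 = 460800 bytes,
//   U data at offset 640 * 480 = 307200, V data at offset 307200 + 640 / 2 = 307520,
// and the RGBA render target is 640 / 4 = 160 "wide" pixels by 720 rows.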
|
||||
final int frameWidth = preparedBuffer.getWidth();
|
||||
final int frameHeight = preparedBuffer.getHeight();
|
||||
final int stride = ((frameWidth + 7) / 8) * 8;
|
||||
final int uvHeight = (frameHeight + 1) / 2;
|
||||
// Total height of the combined memory layout.
|
||||
final int totalHeight = frameHeight + uvHeight;
|
||||
final ByteBuffer i420ByteBuffer = JniCommon.nativeAllocateByteBuffer(stride * totalHeight);
|
||||
// Viewport width is divided by four since we are squeezing in four color bytes in each RGBA
|
||||
// pixel.
|
||||
final int viewportWidth = stride / 4;
|
||||
|
||||
// Produce a frame buffer starting at top-left corner, not bottom-left.
|
||||
final Matrix renderMatrix = new Matrix();
|
||||
renderMatrix.preTranslate(0.5f, 0.5f);
|
||||
renderMatrix.preScale(1f, -1f);
|
||||
renderMatrix.preTranslate(-0.5f, -0.5f);
|
||||
|
||||
i420TextureFrameBuffer.setSize(viewportWidth, totalHeight);
|
||||
|
||||
// Bind our framebuffer.
|
||||
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, i420TextureFrameBuffer.getFrameBufferId());
|
||||
GlUtil.checkNoGLES2Error("glBindFramebuffer");
|
||||
|
||||
// Draw Y.
|
||||
shaderCallbacks.setPlaneY();
|
||||
VideoFrameDrawer.drawTexture(drawer, preparedBuffer, renderMatrix, frameWidth, frameHeight,
|
||||
/* viewportX= */ 0, /* viewportY= */ 0, viewportWidth,
|
||||
/* viewportHeight= */ frameHeight);
|
||||
|
||||
// Draw U.
|
||||
shaderCallbacks.setPlaneU();
|
||||
VideoFrameDrawer.drawTexture(drawer, preparedBuffer, renderMatrix, frameWidth, frameHeight,
|
||||
/* viewportX= */ 0, /* viewportY= */ frameHeight, viewportWidth / 2,
|
||||
/* viewportHeight= */ uvHeight);
|
||||
|
||||
// Draw V.
|
||||
shaderCallbacks.setPlaneV();
|
||||
VideoFrameDrawer.drawTexture(drawer, preparedBuffer, renderMatrix, frameWidth, frameHeight,
|
||||
/* viewportX= */ viewportWidth / 2, /* viewportY= */ frameHeight, viewportWidth / 2,
|
||||
/* viewportHeight= */ uvHeight);
|
||||
|
||||
GLES20.glReadPixels(0, 0, i420TextureFrameBuffer.getWidth(), i420TextureFrameBuffer.getHeight(),
|
||||
GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, i420ByteBuffer);
|
||||
|
||||
GlUtil.checkNoGLES2Error("YuvConverter.convert");
|
||||
|
||||
// Restore normal framebuffer.
|
||||
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
|
||||
|
||||
// Prepare Y, U, and V ByteBuffer slices.
|
||||
final int yPos = 0;
|
||||
final int uPos = yPos + stride * frameHeight;
|
||||
// Rows of U and V alternate in the buffer, so V data starts after the first row of U.
|
||||
final int vPos = uPos + stride / 2;
|
||||
|
||||
i420ByteBuffer.position(yPos);
|
||||
i420ByteBuffer.limit(yPos + stride * frameHeight);
|
||||
final ByteBuffer dataY = i420ByteBuffer.slice();
|
||||
|
||||
i420ByteBuffer.position(uPos);
|
||||
// The last row does not have padding.
|
||||
final int uvSize = stride * (uvHeight - 1) + stride / 2;
|
||||
i420ByteBuffer.limit(uPos + uvSize);
|
||||
final ByteBuffer dataU = i420ByteBuffer.slice();
|
||||
|
||||
i420ByteBuffer.position(vPos);
|
||||
i420ByteBuffer.limit(vPos + uvSize);
|
||||
final ByteBuffer dataV = i420ByteBuffer.slice();
|
||||
|
||||
preparedBuffer.release();
|
||||
|
||||
return JavaI420Buffer.wrap(frameWidth, frameHeight, dataY, stride, dataU, stride, dataV, stride,
|
||||
() -> { JniCommon.nativeFreeByteBuffer(i420ByteBuffer); });
|
||||
}
|
||||
|
||||
public void release() {
|
||||
threadChecker.checkIsOnValidThread();
|
||||
drawer.release();
|
||||
i420TextureFrameBuffer.release();
|
||||
videoFrameDrawer.release();
|
||||
// Allow this class to be reused.
|
||||
threadChecker.detachThread();
|
||||
}
|
||||
}
|
|
@ -0,0 +1,149 @@
|
|||
/*
|
||||
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.webrtc;
|
||||
|
||||
import java.nio.ByteBuffer;
|
||||
|
||||
/** Wraps libyuv methods to Java. All passed byte buffers must be direct byte buffers. */
|
||||
public class YuvHelper {
|
||||
/** Helper method for copying I420 to tightly packed destination buffer. */
|
||||
public static void I420Copy(ByteBuffer srcY, int srcStrideY, ByteBuffer srcU, int srcStrideU,
|
||||
ByteBuffer srcV, int srcStrideV, ByteBuffer dst, int width, int height) {
|
||||
final int chromaHeight = (height + 1) / 2;
|
||||
final int chromaWidth = (width + 1) / 2;
|
||||
|
||||
final int minSize = width * height + chromaWidth * chromaHeight * 2;
|
||||
if (dst.capacity() < minSize) {
|
||||
throw new IllegalArgumentException("Expected destination buffer capacity to be at least "
|
||||
+ minSize + " was " + dst.capacity());
|
||||
}
|
||||
|
||||
final int startY = 0;
|
||||
final int startU = height * width;
|
||||
final int startV = startU + chromaHeight * chromaWidth;
|
||||
|
||||
dst.position(startY);
|
||||
final ByteBuffer dstY = dst.slice();
|
||||
dst.position(startU);
|
||||
final ByteBuffer dstU = dst.slice();
|
||||
dst.position(startV);
|
||||
final ByteBuffer dstV = dst.slice();
|
||||
|
||||
nativeI420Copy(srcY, srcStrideY, srcU, srcStrideU, srcV, srcStrideV, dstY, width, dstU,
|
||||
chromaWidth, dstV, chromaWidth, width, height);
|
||||
}
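// Editor's note: a usage sketch (illustrative, not part of the upstream file), assuming an
// org.webrtc.VideoFrame.I420Buffer named |src| is available:
//
//   int width = src.getWidth(), height = src.getHeight();
//   int chromaWidth = (width + 1) / 2, chromaHeight = (height + 1) / 2;
//   ByteBuffer packed =
//       ByteBuffer.allocateDirect(width * height + 2 * chromaWidth * chromaHeight);
//   YuvHelper.I420Copy(src.getDataY(), src.getStrideY(), src.getDataU(), src.getStrideU(),
//       src.getDataV(), src.getStrideV(), packed, width, height);
//
// Afterwards |packed| holds tightly packed Y, U and V planes at the offsets computed above
// (Y at 0, U at width * height, V at width * height + chromaWidth * chromaHeight).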
|
||||
|
||||
/** Helper method for copying I420 to tightly packed NV12 destination buffer. */
|
||||
public static void I420ToNV12(ByteBuffer srcY, int srcStrideY, ByteBuffer srcU, int srcStrideU,
|
||||
ByteBuffer srcV, int srcStrideV, ByteBuffer dst, int width, int height) {
|
||||
final int chromaWidth = (width + 1) / 2;
|
||||
final int chromaHeight = (height + 1) / 2;
|
||||
|
||||
final int minSize = width * height + chromaWidth * chromaHeight * 2;
|
||||
if (dst.capacity() < minSize) {
|
||||
throw new IllegalArgumentException("Expected destination buffer capacity to be at least "
|
||||
+ minSize + " was " + dst.capacity());
|
||||
}
|
||||
|
||||
final int startY = 0;
|
||||
final int startUV = height * width;
|
||||
|
||||
dst.position(startY);
|
||||
final ByteBuffer dstY = dst.slice();
|
||||
dst.position(startUV);
|
||||
final ByteBuffer dstUV = dst.slice();
|
||||
|
||||
nativeI420ToNV12(srcY, srcStrideY, srcU, srcStrideU, srcV, srcStrideV, dstY, width, dstUV,
|
||||
chromaWidth * 2, width, height);
|
||||
}
|
||||
|
||||
/** Helper method for rotating I420 to tightly packed destination buffer. */
|
||||
public static void I420Rotate(ByteBuffer srcY, int srcStrideY, ByteBuffer srcU, int srcStrideU,
|
||||
ByteBuffer srcV, int srcStrideV, ByteBuffer dst, int srcWidth, int srcHeight,
|
||||
int rotationMode) {
|
||||
final int dstWidth = rotationMode % 180 == 0 ? srcWidth : srcHeight;
|
||||
final int dstHeight = rotationMode % 180 == 0 ? srcHeight : srcWidth;
|
||||
|
||||
final int dstChromaHeight = (dstHeight + 1) / 2;
|
||||
final int dstChromaWidth = (dstWidth + 1) / 2;
|
||||
|
||||
final int minSize = dstWidth * dstHeight + dstChromaWidth * dstChromaHeight * 2;
|
||||
if (dst.capacity() < minSize) {
|
||||
throw new IllegalArgumentException("Expected destination buffer capacity to be at least "
|
||||
+ minSize + " was " + dst.capacity());
|
||||
}
|
||||
|
||||
final int startY = 0;
|
||||
final int startU = dstHeight * dstWidth;
|
||||
final int startV = startU + dstChromaHeight * dstChromaWidth;
|
||||
|
||||
dst.position(startY);
|
||||
final ByteBuffer dstY = dst.slice();
|
||||
dst.position(startU);
|
||||
final ByteBuffer dstU = dst.slice();
|
||||
dst.position(startV);
|
||||
final ByteBuffer dstV = dst.slice();
|
||||
|
||||
nativeI420Rotate(srcY, srcStrideY, srcU, srcStrideU, srcV, srcStrideV, dstY, dstWidth, dstU,
|
||||
dstChromaWidth, dstV, dstChromaWidth, srcWidth, srcHeight, rotationMode);
|
||||
}
|
||||
|
||||
/** Helper method for copying a single colour plane. */
|
||||
public static void copyPlane(
|
||||
ByteBuffer src, int srcStride, ByteBuffer dst, int dstStride, int width, int height) {
|
||||
nativeCopyPlane(src, srcStride, dst, dstStride, width, height);
|
||||
}
|
||||
|
||||
/** Converts ABGR little endian (rgba in memory) to I420. */
|
||||
public static void ABGRToI420(ByteBuffer src, int srcStride, ByteBuffer dstY, int dstStrideY,
|
||||
ByteBuffer dstU, int dstStrideU, ByteBuffer dstV, int dstStrideV, int width, int height) {
|
||||
nativeABGRToI420(
|
||||
src, srcStride, dstY, dstStrideY, dstU, dstStrideU, dstV, dstStrideV, width, height);
|
||||
}
|
||||
|
||||
public static void I420Copy(ByteBuffer srcY, int srcStrideY, ByteBuffer srcU, int srcStrideU,
|
||||
ByteBuffer srcV, int srcStrideV, ByteBuffer dstY, int dstStrideY, ByteBuffer dstU,
|
||||
int dstStrideU, ByteBuffer dstV, int dstStrideV, int width, int height) {
|
||||
nativeI420Copy(srcY, srcStrideY, srcU, srcStrideU, srcV, srcStrideV, dstY, dstStrideY, dstU,
|
||||
dstStrideU, dstV, dstStrideV, width, height);
|
||||
}
|
||||
|
||||
public static void I420ToNV12(ByteBuffer srcY, int srcStrideY, ByteBuffer srcU, int srcStrideU,
|
||||
ByteBuffer srcV, int srcStrideV, ByteBuffer dstY, int dstStrideY, ByteBuffer dstUV,
|
||||
int dstStrideUV, int width, int height) {
|
||||
nativeI420ToNV12(srcY, srcStrideY, srcU, srcStrideU, srcV, srcStrideV, dstY, dstStrideY, dstUV,
|
||||
dstStrideUV, width, height);
|
||||
}
|
||||
|
||||
public static void I420Rotate(ByteBuffer srcY, int srcStrideY, ByteBuffer srcU, int srcStrideU,
|
||||
ByteBuffer srcV, int srcStrideV, ByteBuffer dstY, int dstStrideY, ByteBuffer dstU,
|
||||
int dstStrideU, ByteBuffer dstV, int dstStrideV, int srcWidth, int srcHeight,
|
||||
int rotationMode) {
|
||||
nativeI420Rotate(srcY, srcStrideY, srcU, srcStrideU, srcV, srcStrideV, dstY, dstStrideY, dstU,
|
||||
dstStrideU, dstV, dstStrideV, srcWidth, srcHeight, rotationMode);
|
||||
}
|
||||
|
||||
private static native void nativeCopyPlane(
|
||||
ByteBuffer src, int srcStride, ByteBuffer dst, int dstStride, int width, int height);
|
||||
private static native void nativeI420Copy(ByteBuffer srcY, int srcStrideY, ByteBuffer srcU,
|
||||
int srcStrideU, ByteBuffer srcV, int srcStrideV, ByteBuffer dstY, int dstStrideY,
|
||||
ByteBuffer dstU, int dstStrideU, ByteBuffer dstV, int dstStrideV, int width, int height);
|
||||
private static native void nativeI420ToNV12(ByteBuffer srcY, int srcStrideY, ByteBuffer srcU,
|
||||
int srcStrideU, ByteBuffer srcV, int srcStrideV, ByteBuffer dstY, int dstStrideY,
|
||||
ByteBuffer dstUV, int dstStrideUV, int width, int height);
|
||||
private static native void nativeI420Rotate(ByteBuffer srcY, int srcStrideY, ByteBuffer srcU,
|
||||
int srcStrideU, ByteBuffer srcV, int srcStrideV, ByteBuffer dstY, int dstStrideY,
|
||||
ByteBuffer dstU, int dstStrideU, ByteBuffer dstV, int dstStrideV, int srcWidth, int srcHeight,
|
||||
int rotationMode);
|
||||
private static native void nativeABGRToI420(ByteBuffer src, int srcStride, ByteBuffer dstY,
|
||||
int dstStrideY, ByteBuffer dstU, int dstStrideU, ByteBuffer dstV, int dstStrideV, int width,
|
||||
int height);
|
||||
}
|
|
@ -0,0 +1,29 @@
/*
 *  Copyright 2017 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

package org.webrtc;

import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;

/**
 * @CalledByNative is used by the JNI generator to create the necessary JNI
 * bindings and expose this method to native code.
 */
@Target({ElementType.CONSTRUCTOR, ElementType.METHOD})
@Retention(RetentionPolicy.CLASS)
public @interface CalledByNative {
  /*
   * If present, tells which inner class the method belongs to.
   */
  public String value() default "";
}
@ -0,0 +1,328 @@
|
|||
/*
|
||||
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.webrtc;
|
||||
|
||||
import android.content.Context;
|
||||
import android.os.Handler;
|
||||
import android.os.SystemClock;
|
||||
import java.io.IOException;
|
||||
import java.nio.ByteBuffer;
|
||||
import java.util.List;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
|
||||
|
||||
@SuppressWarnings("deprecation")
|
||||
class Camera1Session implements CameraSession {
|
||||
private static final String TAG = "Camera1Session";
|
||||
private static final int NUMBER_OF_CAPTURE_BUFFERS = 3;
|
||||
|
||||
private static final Histogram camera1StartTimeMsHistogram =
|
||||
Histogram.createCounts("WebRTC.Android.Camera1.StartTimeMs", 1, 10000, 50);
|
||||
private static final Histogram camera1StopTimeMsHistogram =
|
||||
Histogram.createCounts("WebRTC.Android.Camera1.StopTimeMs", 1, 10000, 50);
|
||||
private static final Histogram camera1ResolutionHistogram = Histogram.createEnumeration(
|
||||
"WebRTC.Android.Camera1.Resolution", CameraEnumerationAndroid.COMMON_RESOLUTIONS.size());
|
||||
|
||||
private static enum SessionState { RUNNING, STOPPED }
|
||||
|
||||
private final Handler cameraThreadHandler;
|
||||
private final Events events;
|
||||
private final boolean captureToTexture;
|
||||
private final Context applicationContext;
|
||||
private final SurfaceTextureHelper surfaceTextureHelper;
|
||||
private final int cameraId;
|
||||
private final android.hardware.Camera camera;
|
||||
private final android.hardware.Camera.CameraInfo info;
|
||||
private final CaptureFormat captureFormat;
|
||||
// Used only for stats. Only used on the camera thread.
|
||||
private final long constructionTimeNs; // Construction time of this class.
|
||||
|
||||
private SessionState state;
|
||||
private boolean firstFrameReported;
|
||||
|
||||
// TODO(titovartem) make correct fix during webrtc:9175
|
||||
@SuppressWarnings("ByteBufferBackingArray")
|
||||
public static void create(final CreateSessionCallback callback, final Events events,
|
||||
final boolean captureToTexture, final Context applicationContext,
|
||||
final SurfaceTextureHelper surfaceTextureHelper, final int cameraId, final int width,
|
||||
final int height, final int framerate) {
|
||||
final long constructionTimeNs = System.nanoTime();
|
||||
Logging.d(TAG, "Open camera " + cameraId);
|
||||
events.onCameraOpening();
|
||||
|
||||
final android.hardware.Camera camera;
|
||||
try {
|
||||
camera = android.hardware.Camera.open(cameraId);
|
||||
} catch (RuntimeException e) {
|
||||
callback.onFailure(FailureType.ERROR, e.getMessage());
|
||||
return;
|
||||
}
|
||||
|
||||
if (camera == null) {
|
||||
callback.onFailure(FailureType.ERROR,
|
||||
"android.hardware.Camera.open returned null for camera id = " + cameraId);
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
camera.setPreviewTexture(surfaceTextureHelper.getSurfaceTexture());
|
||||
} catch (IOException | RuntimeException e) {
|
||||
camera.release();
|
||||
callback.onFailure(FailureType.ERROR, e.getMessage());
|
||||
return;
|
||||
}
|
||||
|
||||
final android.hardware.Camera.CameraInfo info = new android.hardware.Camera.CameraInfo();
|
||||
android.hardware.Camera.getCameraInfo(cameraId, info);
|
||||
|
||||
final CaptureFormat captureFormat;
|
||||
try {
|
||||
final android.hardware.Camera.Parameters parameters = camera.getParameters();
|
||||
captureFormat = findClosestCaptureFormat(parameters, width, height, framerate);
|
||||
final Size pictureSize = findClosestPictureSize(parameters, width, height);
|
||||
updateCameraParameters(camera, parameters, captureFormat, pictureSize, captureToTexture);
|
||||
} catch (RuntimeException e) {
|
||||
camera.release();
|
||||
callback.onFailure(FailureType.ERROR, e.getMessage());
|
||||
return;
|
||||
}
|
||||
|
||||
if (!captureToTexture) {
|
||||
final int frameSize = captureFormat.frameSize();
|
||||
for (int i = 0; i < NUMBER_OF_CAPTURE_BUFFERS; ++i) {
|
||||
final ByteBuffer buffer = ByteBuffer.allocateDirect(frameSize);
|
||||
camera.addCallbackBuffer(buffer.array());
|
||||
}
|
||||
}
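// Editor's note (illustrative, not part of the upstream file): with the default NV21 preview
// format at 640x480, captureFormat.frameSize() is 640 * 480 * 12 / 8 = 460800 bytes, so the
// three callback buffers above pre-allocate roughly 1.3 MB in total.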
|
||||
|
||||
// Calculate orientation manually and send it as CVO instead.
|
||||
camera.setDisplayOrientation(0 /* degrees */);
|
||||
|
||||
callback.onDone(new Camera1Session(events, captureToTexture, applicationContext,
|
||||
surfaceTextureHelper, cameraId, camera, info, captureFormat, constructionTimeNs));
|
||||
}
|
||||
|
||||
private static void updateCameraParameters(android.hardware.Camera camera,
|
||||
android.hardware.Camera.Parameters parameters, CaptureFormat captureFormat, Size pictureSize,
|
||||
boolean captureToTexture) {
|
||||
final List<String> focusModes = parameters.getSupportedFocusModes();
|
||||
|
||||
parameters.setPreviewFpsRange(captureFormat.framerate.min, captureFormat.framerate.max);
|
||||
parameters.setPreviewSize(captureFormat.width, captureFormat.height);
|
||||
parameters.setPictureSize(pictureSize.width, pictureSize.height);
|
||||
if (!captureToTexture) {
|
||||
parameters.setPreviewFormat(captureFormat.imageFormat);
|
||||
}
|
||||
|
||||
if (parameters.isVideoStabilizationSupported()) {
|
||||
parameters.setVideoStabilization(true);
|
||||
}
|
||||
if (focusModes.contains(android.hardware.Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)) {
|
||||
parameters.setFocusMode(android.hardware.Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
|
||||
}
|
||||
camera.setParameters(parameters);
|
||||
}
|
||||
|
||||
private static CaptureFormat findClosestCaptureFormat(
|
||||
android.hardware.Camera.Parameters parameters, int width, int height, int framerate) {
|
||||
// Find closest supported format for |width| x |height| @ |framerate|.
|
||||
final List<CaptureFormat.FramerateRange> supportedFramerates =
|
||||
Camera1Enumerator.convertFramerates(parameters.getSupportedPreviewFpsRange());
|
||||
Logging.d(TAG, "Available fps ranges: " + supportedFramerates);
|
||||
|
||||
final CaptureFormat.FramerateRange fpsRange =
|
||||
CameraEnumerationAndroid.getClosestSupportedFramerateRange(supportedFramerates, framerate);
|
||||
|
||||
final Size previewSize = CameraEnumerationAndroid.getClosestSupportedSize(
|
||||
Camera1Enumerator.convertSizes(parameters.getSupportedPreviewSizes()), width, height);
|
||||
CameraEnumerationAndroid.reportCameraResolution(camera1ResolutionHistogram, previewSize);
|
||||
|
||||
return new CaptureFormat(previewSize.width, previewSize.height, fpsRange);
|
||||
}
|
||||
|
||||
private static Size findClosestPictureSize(
|
||||
android.hardware.Camera.Parameters parameters, int width, int height) {
|
||||
return CameraEnumerationAndroid.getClosestSupportedSize(
|
||||
Camera1Enumerator.convertSizes(parameters.getSupportedPictureSizes()), width, height);
|
||||
}
|
||||
|
||||
private Camera1Session(Events events, boolean captureToTexture, Context applicationContext,
|
||||
SurfaceTextureHelper surfaceTextureHelper, int cameraId, android.hardware.Camera camera,
|
||||
android.hardware.Camera.CameraInfo info, CaptureFormat captureFormat,
|
||||
long constructionTimeNs) {
|
||||
Logging.d(TAG, "Create new camera1 session on camera " + cameraId);
|
||||
|
||||
this.cameraThreadHandler = new Handler();
|
||||
this.events = events;
|
||||
this.captureToTexture = captureToTexture;
|
||||
this.applicationContext = applicationContext;
|
||||
this.surfaceTextureHelper = surfaceTextureHelper;
|
||||
this.cameraId = cameraId;
|
||||
this.camera = camera;
|
||||
this.info = info;
|
||||
this.captureFormat = captureFormat;
|
||||
this.constructionTimeNs = constructionTimeNs;
|
||||
|
||||
surfaceTextureHelper.setTextureSize(captureFormat.width, captureFormat.height);
|
||||
|
||||
startCapturing();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void stop() {
|
||||
Logging.d(TAG, "Stop camera1 session on camera " + cameraId);
|
||||
checkIsOnCameraThread();
|
||||
if (state != SessionState.STOPPED) {
|
||||
final long stopStartTime = System.nanoTime();
|
||||
stopInternal();
|
||||
final int stopTimeMs = (int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - stopStartTime);
|
||||
camera1StopTimeMsHistogram.addSample(stopTimeMs);
|
||||
}
|
||||
}
|
||||
|
||||
private void startCapturing() {
|
||||
Logging.d(TAG, "Start capturing");
|
||||
checkIsOnCameraThread();
|
||||
|
||||
state = SessionState.RUNNING;
|
||||
|
||||
camera.setErrorCallback(new android.hardware.Camera.ErrorCallback() {
|
||||
@Override
|
||||
public void onError(int error, android.hardware.Camera camera) {
|
||||
String errorMessage;
|
||||
if (error == android.hardware.Camera.CAMERA_ERROR_SERVER_DIED) {
|
||||
errorMessage = "Camera server died!";
|
||||
} else {
|
||||
errorMessage = "Camera error: " + error;
|
||||
}
|
||||
Logging.e(TAG, errorMessage);
|
||||
stopInternal();
|
||||
if (error == android.hardware.Camera.CAMERA_ERROR_EVICTED) {
|
||||
events.onCameraDisconnected(Camera1Session.this);
|
||||
} else {
|
||||
events.onCameraError(Camera1Session.this, errorMessage);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
if (captureToTexture) {
|
||||
listenForTextureFrames();
|
||||
} else {
|
||||
listenForBytebufferFrames();
|
||||
}
|
||||
try {
|
||||
camera.startPreview();
|
||||
} catch (RuntimeException e) {
|
||||
stopInternal();
|
||||
events.onCameraError(this, e.getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
private void stopInternal() {
|
||||
Logging.d(TAG, "Stop internal");
|
||||
checkIsOnCameraThread();
|
||||
if (state == SessionState.STOPPED) {
|
||||
Logging.d(TAG, "Camera is already stopped");
|
||||
return;
|
||||
}
|
||||
|
||||
state = SessionState.STOPPED;
|
||||
surfaceTextureHelper.stopListening();
|
||||
// Note: stopPreview or other driver code might deadlock. Deadlock in
|
||||
// android.hardware.Camera._stopPreview(Native Method) has been observed on
|
||||
// Nexus 5 (hammerhead), OS version LMY48I.
|
||||
camera.stopPreview();
|
||||
camera.release();
|
||||
events.onCameraClosed(this);
|
||||
Logging.d(TAG, "Stop done");
|
||||
}
|
||||
|
||||
private void listenForTextureFrames() {
|
||||
surfaceTextureHelper.startListening((VideoFrame frame) -> {
|
||||
checkIsOnCameraThread();
|
||||
|
||||
if (state != SessionState.RUNNING) {
|
||||
Logging.d(TAG, "Texture frame captured but camera is no longer running.");
|
||||
return;
|
||||
}
|
||||
|
||||
if (!firstFrameReported) {
|
||||
final int startTimeMs =
|
||||
(int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - constructionTimeNs);
|
||||
camera1StartTimeMsHistogram.addSample(startTimeMs);
|
||||
firstFrameReported = true;
|
||||
}
|
||||
|
||||
// Undo the mirror that the OS "helps" us with.
|
||||
// http://developer.android.com/reference/android/hardware/Camera.html#setDisplayOrientation(int)
|
||||
final VideoFrame modifiedFrame = new VideoFrame(
|
||||
CameraSession.createTextureBufferWithModifiedTransformMatrix(
|
||||
(TextureBufferImpl) frame.getBuffer(),
|
||||
/* mirror= */ info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_FRONT,
|
||||
/* rotation= */ 0),
|
||||
/* rotation= */ getFrameOrientation(), frame.getTimestampNs());
|
||||
events.onFrameCaptured(Camera1Session.this, modifiedFrame);
|
||||
modifiedFrame.release();
|
||||
});
|
||||
}
|
||||
|
||||
private void listenForBytebufferFrames() {
|
||||
camera.setPreviewCallbackWithBuffer(new android.hardware.Camera.PreviewCallback() {
|
||||
@Override
|
||||
public void onPreviewFrame(final byte[] data, android.hardware.Camera callbackCamera) {
|
||||
checkIsOnCameraThread();
|
||||
|
||||
if (callbackCamera != camera) {
|
||||
Logging.e(TAG, "Callback from a different camera. This should never happen.");
|
||||
return;
|
||||
}
|
||||
|
||||
if (state != SessionState.RUNNING) {
|
||||
Logging.d(TAG, "Bytebuffer frame captured but camera is no longer running.");
|
||||
return;
|
||||
}
|
||||
|
||||
final long captureTimeNs = TimeUnit.MILLISECONDS.toNanos(SystemClock.elapsedRealtime());
|
||||
|
||||
if (!firstFrameReported) {
|
||||
final int startTimeMs =
|
||||
(int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - constructionTimeNs);
|
||||
camera1StartTimeMsHistogram.addSample(startTimeMs);
|
||||
firstFrameReported = true;
|
||||
}
|
||||
|
||||
VideoFrame.Buffer frameBuffer = new NV21Buffer(
|
||||
data, captureFormat.width, captureFormat.height, () -> cameraThreadHandler.post(() -> {
|
||||
if (state == SessionState.RUNNING) {
|
||||
camera.addCallbackBuffer(data);
|
||||
}
|
||||
}));
|
||||
final VideoFrame frame = new VideoFrame(frameBuffer, getFrameOrientation(), captureTimeNs);
|
||||
events.onFrameCaptured(Camera1Session.this, frame);
|
||||
frame.release();
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
private int getFrameOrientation() {
|
||||
int rotation = CameraSession.getDeviceOrientation(applicationContext);
|
||||
if (info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_BACK) {
|
||||
rotation = 360 - rotation;
|
||||
}
|
||||
return (info.orientation + rotation) % 360;
|
||||
}
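// Editor's note (worked example, not part of the upstream file): for a back-facing sensor with
// info.orientation = 90 and the device rotated to 270 degrees, the method above computes
// rotation = 360 - 270 = 90 and returns (90 + 90) % 360 = 180.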
|
||||
|
||||
private void checkIsOnCameraThread() {
|
||||
if (Thread.currentThread() != cameraThreadHandler.getLooper().getThread()) {
|
||||
throw new IllegalStateException("Wrong thread");
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,420 @@
|
|||
/*
|
||||
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.webrtc;
|
||||
|
||||
import android.annotation.TargetApi;
|
||||
import android.content.Context;
|
||||
import android.hardware.camera2.CameraAccessException;
|
||||
import android.hardware.camera2.CameraCaptureSession;
|
||||
import android.hardware.camera2.CameraCharacteristics;
|
||||
import android.hardware.camera2.CameraDevice;
|
||||
import android.hardware.camera2.CameraManager;
|
||||
import android.hardware.camera2.CameraMetadata;
|
||||
import android.hardware.camera2.CaptureFailure;
|
||||
import android.hardware.camera2.CaptureRequest;
|
||||
import android.os.Handler;
|
||||
import android.support.annotation.Nullable;
|
||||
import android.util.Range;
|
||||
import android.view.Surface;
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
|
||||
|
||||
@TargetApi(21)
|
||||
class Camera2Session implements CameraSession {
|
||||
private static final String TAG = "Camera2Session";
|
||||
|
||||
private static final Histogram camera2StartTimeMsHistogram =
|
||||
Histogram.createCounts("WebRTC.Android.Camera2.StartTimeMs", 1, 10000, 50);
|
||||
private static final Histogram camera2StopTimeMsHistogram =
|
||||
Histogram.createCounts("WebRTC.Android.Camera2.StopTimeMs", 1, 10000, 50);
|
||||
private static final Histogram camera2ResolutionHistogram = Histogram.createEnumeration(
|
||||
"WebRTC.Android.Camera2.Resolution", CameraEnumerationAndroid.COMMON_RESOLUTIONS.size());
|
||||
|
||||
private static enum SessionState { RUNNING, STOPPED }
|
||||
|
||||
private final Handler cameraThreadHandler;
|
||||
private final CreateSessionCallback callback;
|
||||
private final Events events;
|
||||
private final Context applicationContext;
|
||||
private final CameraManager cameraManager;
|
||||
private final SurfaceTextureHelper surfaceTextureHelper;
|
||||
private final String cameraId;
|
||||
private final int width;
|
||||
private final int height;
|
||||
private final int framerate;
|
||||
|
||||
// Initialized at start
|
||||
private CameraCharacteristics cameraCharacteristics;
|
||||
private int cameraOrientation;
|
||||
private boolean isCameraFrontFacing;
|
||||
private int fpsUnitFactor;
|
||||
private CaptureFormat captureFormat;
|
||||
|
||||
// Initialized when camera opens
|
||||
@Nullable private CameraDevice cameraDevice;
|
||||
@Nullable private Surface surface;
|
||||
|
||||
// Initialized when capture session is created
|
||||
@Nullable private CameraCaptureSession captureSession;
|
||||
|
||||
// State
|
||||
private SessionState state = SessionState.RUNNING;
|
||||
private boolean firstFrameReported;
|
||||
|
||||
// Used only for stats. Only used on the camera thread.
|
||||
private final long constructionTimeNs; // Construction time of this class.
|
||||
|
||||
private class CameraStateCallback extends CameraDevice.StateCallback {
|
||||
private String getErrorDescription(int errorCode) {
|
||||
switch (errorCode) {
|
||||
case CameraDevice.StateCallback.ERROR_CAMERA_DEVICE:
|
||||
return "Camera device has encountered a fatal error.";
|
||||
case CameraDevice.StateCallback.ERROR_CAMERA_DISABLED:
|
||||
return "Camera device could not be opened due to a device policy.";
|
||||
case CameraDevice.StateCallback.ERROR_CAMERA_IN_USE:
|
||||
return "Camera device is in use already.";
|
||||
case CameraDevice.StateCallback.ERROR_CAMERA_SERVICE:
|
||||
return "Camera service has encountered a fatal error.";
|
||||
case CameraDevice.StateCallback.ERROR_MAX_CAMERAS_IN_USE:
|
||||
return "Camera device could not be opened because"
|
||||
+ " there are too many other open camera devices.";
|
||||
default:
|
||||
return "Unknown camera error: " + errorCode;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onDisconnected(CameraDevice camera) {
|
||||
checkIsOnCameraThread();
|
||||
final boolean startFailure = (captureSession == null) && (state != SessionState.STOPPED);
|
||||
state = SessionState.STOPPED;
|
||||
stopInternal();
|
||||
if (startFailure) {
|
||||
callback.onFailure(FailureType.DISCONNECTED, "Camera disconnected / evicted.");
|
||||
} else {
|
||||
events.onCameraDisconnected(Camera2Session.this);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onError(CameraDevice camera, int errorCode) {
|
||||
checkIsOnCameraThread();
|
||||
reportError(getErrorDescription(errorCode));
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onOpened(CameraDevice camera) {
|
||||
checkIsOnCameraThread();
|
||||
|
||||
Logging.d(TAG, "Camera opened.");
|
||||
cameraDevice = camera;
|
||||
|
||||
surfaceTextureHelper.setTextureSize(captureFormat.width, captureFormat.height);
|
||||
surface = new Surface(surfaceTextureHelper.getSurfaceTexture());
|
||||
try {
|
||||
camera.createCaptureSession(
|
||||
Arrays.asList(surface), new CaptureSessionCallback(), cameraThreadHandler);
|
||||
} catch (CameraAccessException e) {
|
||||
reportError("Failed to create capture session. " + e);
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onClosed(CameraDevice camera) {
|
||||
checkIsOnCameraThread();
|
||||
|
||||
Logging.d(TAG, "Camera device closed.");
|
||||
events.onCameraClosed(Camera2Session.this);
|
||||
}
|
||||
}
|
||||
|
||||
private class CaptureSessionCallback extends CameraCaptureSession.StateCallback {
|
||||
@Override
|
||||
public void onConfigureFailed(CameraCaptureSession session) {
|
||||
checkIsOnCameraThread();
|
||||
session.close();
|
||||
reportError("Failed to configure capture session.");
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onConfigured(CameraCaptureSession session) {
|
||||
checkIsOnCameraThread();
|
||||
Logging.d(TAG, "Camera capture session configured.");
|
||||
captureSession = session;
|
||||
try {
|
||||
/*
|
||||
* The viable options for video capture requests are:
|
||||
* TEMPLATE_PREVIEW: High frame rate is given priority over the highest-quality
|
||||
* post-processing.
|
||||
* TEMPLATE_RECORD: Stable frame rate is used, and post-processing is set for recording
|
||||
* quality.
|
||||
*/
|
||||
final CaptureRequest.Builder captureRequestBuilder =
|
||||
cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
|
||||
// Set auto exposure fps range.
|
||||
captureRequestBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE,
|
||||
new Range<Integer>(captureFormat.framerate.min / fpsUnitFactor,
|
||||
captureFormat.framerate.max / fpsUnitFactor));
|
||||
captureRequestBuilder.set(
|
||||
CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON);
|
||||
captureRequestBuilder.set(CaptureRequest.CONTROL_AE_LOCK, false);
|
||||
chooseStabilizationMode(captureRequestBuilder);
|
||||
chooseFocusMode(captureRequestBuilder);
|
||||
|
||||
captureRequestBuilder.addTarget(surface);
|
||||
session.setRepeatingRequest(
|
||||
captureRequestBuilder.build(), new CameraCaptureCallback(), cameraThreadHandler);
|
||||
} catch (CameraAccessException e) {
|
||||
reportError("Failed to start capture request. " + e);
|
||||
return;
|
||||
}
|
||||
|
||||
surfaceTextureHelper.startListening((VideoFrame frame) -> {
|
||||
checkIsOnCameraThread();
|
||||
|
||||
if (state != SessionState.RUNNING) {
|
||||
Logging.d(TAG, "Texture frame captured but camera is no longer running.");
|
||||
return;
|
||||
}
|
||||
|
||||
if (!firstFrameReported) {
|
||||
firstFrameReported = true;
|
||||
final int startTimeMs =
|
||||
(int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - constructionTimeNs);
|
||||
camera2StartTimeMsHistogram.addSample(startTimeMs);
|
||||
}
|
||||
|
||||
// Undo the mirror that the OS "helps" us with.
|
||||
// http://developer.android.com/reference/android/hardware/Camera.html#setDisplayOrientation(int)
|
||||
// Also undo the camera orientation; we report it as rotation instead.
|
||||
final VideoFrame modifiedFrame =
|
||||
new VideoFrame(CameraSession.createTextureBufferWithModifiedTransformMatrix(
|
||||
(TextureBufferImpl) frame.getBuffer(),
|
||||
/* mirror= */ isCameraFrontFacing,
|
||||
/* rotation= */ -cameraOrientation),
|
||||
/* rotation= */ getFrameOrientation(), frame.getTimestampNs());
|
||||
events.onFrameCaptured(Camera2Session.this, modifiedFrame);
|
||||
modifiedFrame.release();
|
||||
});
|
||||
Logging.d(TAG, "Camera device successfully started.");
|
||||
callback.onDone(Camera2Session.this);
|
||||
}
|
||||
|
||||
// Prefers optical stabilization over software stabilization if available. Only enables one of
|
||||
// the stabilization modes at a time because having both enabled can cause strange results.
|
||||
private void chooseStabilizationMode(CaptureRequest.Builder captureRequestBuilder) {
|
||||
final int[] availableOpticalStabilization = cameraCharacteristics.get(
|
||||
CameraCharacteristics.LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION);
|
||||
if (availableOpticalStabilization != null) {
|
||||
for (int mode : availableOpticalStabilization) {
|
||||
if (mode == CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE_ON) {
|
||||
captureRequestBuilder.set(CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE,
|
||||
CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE_ON);
|
||||
captureRequestBuilder.set(CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE,
|
||||
CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE_OFF);
|
||||
Logging.d(TAG, "Using optical stabilization.");
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
// If no optical mode is available, try software.
|
||||
final int[] availableVideoStabilization = cameraCharacteristics.get(
|
||||
CameraCharacteristics.CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES);
|
||||
for (int mode : availableVideoStabilization) {
|
||||
if (mode == CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE_ON) {
|
||||
captureRequestBuilder.set(CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE,
|
||||
CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE_ON);
|
||||
captureRequestBuilder.set(CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE,
|
||||
CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE_OFF);
|
||||
Logging.d(TAG, "Using video stabilization.");
|
||||
return;
|
||||
}
|
||||
}
|
||||
Logging.d(TAG, "Stabilization not available.");
|
||||
}
|
||||
|
||||
private void chooseFocusMode(CaptureRequest.Builder captureRequestBuilder) {
|
||||
final int[] availableFocusModes =
|
||||
cameraCharacteristics.get(CameraCharacteristics.CONTROL_AF_AVAILABLE_MODES);
|
||||
for (int mode : availableFocusModes) {
|
||||
if (mode == CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_VIDEO) {
|
||||
captureRequestBuilder.set(
|
||||
CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_VIDEO);
|
||||
Logging.d(TAG, "Using continuous video auto-focus.");
|
||||
return;
|
||||
}
|
||||
}
|
||||
Logging.d(TAG, "Auto-focus is not available.");
|
||||
}
|
||||
}
|
||||
|
||||
private static class CameraCaptureCallback extends CameraCaptureSession.CaptureCallback {
|
||||
@Override
|
||||
public void onCaptureFailed(
|
||||
CameraCaptureSession session, CaptureRequest request, CaptureFailure failure) {
|
||||
Logging.d(TAG, "Capture failed: " + failure);
|
||||
}
|
||||
}
|
||||
|
||||
public static void create(CreateSessionCallback callback, Events events,
|
||||
Context applicationContext, CameraManager cameraManager,
|
||||
SurfaceTextureHelper surfaceTextureHelper, String cameraId, int width, int height,
|
||||
int framerate) {
|
||||
new Camera2Session(callback, events, applicationContext, cameraManager, surfaceTextureHelper,
|
||||
cameraId, width, height, framerate);
|
||||
}
|
||||
|
||||
private Camera2Session(CreateSessionCallback callback, Events events, Context applicationContext,
|
||||
CameraManager cameraManager, SurfaceTextureHelper surfaceTextureHelper, String cameraId,
|
||||
int width, int height, int framerate) {
|
||||
Logging.d(TAG, "Create new camera2 session on camera " + cameraId);
|
||||
|
||||
constructionTimeNs = System.nanoTime();
|
||||
|
||||
this.cameraThreadHandler = new Handler();
|
||||
this.callback = callback;
|
||||
this.events = events;
|
||||
this.applicationContext = applicationContext;
|
||||
this.cameraManager = cameraManager;
|
||||
this.surfaceTextureHelper = surfaceTextureHelper;
|
||||
this.cameraId = cameraId;
|
||||
this.width = width;
|
||||
this.height = height;
|
||||
this.framerate = framerate;
|
||||
|
||||
start();
|
||||
}
|
||||
|
||||
private void start() {
|
||||
checkIsOnCameraThread();
|
||||
Logging.d(TAG, "start");
|
||||
|
||||
try {
|
||||
cameraCharacteristics = cameraManager.getCameraCharacteristics(cameraId);
|
||||
} catch (final CameraAccessException e) {
|
||||
reportError("getCameraCharacteristics(): " + e.getMessage());
|
||||
return;
|
||||
}
|
||||
cameraOrientation = cameraCharacteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
|
||||
isCameraFrontFacing = cameraCharacteristics.get(CameraCharacteristics.LENS_FACING)
|
||||
== CameraMetadata.LENS_FACING_FRONT;
|
||||
|
||||
findCaptureFormat();
|
||||
openCamera();
|
||||
}
|
||||
|
||||
private void findCaptureFormat() {
|
||||
checkIsOnCameraThread();
|
||||
|
||||
Range<Integer>[] fpsRanges =
|
||||
cameraCharacteristics.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES);
|
||||
fpsUnitFactor = Camera2Enumerator.getFpsUnitFactor(fpsRanges);
|
||||
List<CaptureFormat.FramerateRange> framerateRanges =
|
||||
Camera2Enumerator.convertFramerates(fpsRanges, fpsUnitFactor);
|
||||
List<Size> sizes = Camera2Enumerator.getSupportedSizes(cameraCharacteristics);
|
||||
Logging.d(TAG, "Available preview sizes: " + sizes);
|
||||
Logging.d(TAG, "Available fps ranges: " + framerateRanges);
|
||||
|
||||
if (framerateRanges.isEmpty() || sizes.isEmpty()) {
|
||||
reportError("No supported capture formats.");
|
||||
return;
|
||||
}
|
||||
|
||||
final CaptureFormat.FramerateRange bestFpsRange =
|
||||
CameraEnumerationAndroid.getClosestSupportedFramerateRange(framerateRanges, framerate);
|
||||
|
||||
final Size bestSize = CameraEnumerationAndroid.getClosestSupportedSize(sizes, width, height);
|
||||
CameraEnumerationAndroid.reportCameraResolution(camera2ResolutionHistogram, bestSize);
|
||||
|
||||
captureFormat = new CaptureFormat(bestSize.width, bestSize.height, bestFpsRange);
|
||||
Logging.d(TAG, "Using capture format: " + captureFormat);
|
||||
}
|
||||
|
||||
private void openCamera() {
|
||||
checkIsOnCameraThread();
|
||||
|
||||
Logging.d(TAG, "Opening camera " + cameraId);
|
||||
events.onCameraOpening();
|
||||
|
||||
try {
|
||||
cameraManager.openCamera(cameraId, new CameraStateCallback(), cameraThreadHandler);
|
||||
} catch (CameraAccessException e) {
|
||||
reportError("Failed to open camera: " + e);
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void stop() {
|
||||
Logging.d(TAG, "Stop camera2 session on camera " + cameraId);
|
||||
checkIsOnCameraThread();
|
||||
if (state != SessionState.STOPPED) {
|
||||
final long stopStartTime = System.nanoTime();
|
||||
state = SessionState.STOPPED;
|
||||
stopInternal();
|
||||
final int stopTimeMs = (int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - stopStartTime);
|
||||
camera2StopTimeMsHistogram.addSample(stopTimeMs);
|
||||
}
|
||||
}
|
||||
|
||||
private void stopInternal() {
|
||||
Logging.d(TAG, "Stop internal");
|
||||
checkIsOnCameraThread();
|
||||
|
||||
surfaceTextureHelper.stopListening();
|
||||
|
||||
if (captureSession != null) {
|
||||
captureSession.close();
|
||||
captureSession = null;
|
||||
}
|
||||
if (surface != null) {
|
||||
surface.release();
|
||||
surface = null;
|
||||
}
|
||||
if (cameraDevice != null) {
|
||||
cameraDevice.close();
|
||||
cameraDevice = null;
|
||||
}
|
||||
|
||||
Logging.d(TAG, "Stop done");
|
||||
}
|
||||
|
||||
private void reportError(String error) {
|
||||
checkIsOnCameraThread();
|
||||
Logging.e(TAG, "Error: " + error);
|
||||
|
||||
final boolean startFailure = (captureSession == null) && (state != SessionState.STOPPED);
|
||||
state = SessionState.STOPPED;
|
||||
stopInternal();
|
||||
if (startFailure) {
|
||||
callback.onFailure(FailureType.ERROR, error);
|
||||
} else {
|
||||
events.onCameraError(this, error);
|
||||
}
|
||||
}
|
||||
|
||||
private int getFrameOrientation() {
|
||||
int rotation = CameraSession.getDeviceOrientation(applicationContext);
|
||||
if (!isCameraFrontFacing) {
|
||||
rotation = 360 - rotation;
|
||||
}
|
||||
return (cameraOrientation + rotation) % 360;
|
||||
}
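// Worked example (hypothetical values, for illustration): for a back-facing camera with a
// sensor orientation of 90 degrees and the device rotated to ROTATION_270,
// getDeviceOrientation() returns 270, the back-facing branch inverts it to 360 - 270 = 90, and
// the reported frame orientation is (90 + 90) % 360 = 180 degrees.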
|
||||
|
||||
private void checkIsOnCameraThread() {
|
||||
if (Thread.currentThread() != cameraThreadHandler.getLooper().getThread()) {
|
||||
throw new IllegalStateException("Wrong thread");
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,436 @@
|
|||
/*
|
||||
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.webrtc;
|
||||
|
||||
import android.content.Context;
|
||||
import android.os.Handler;
|
||||
import android.os.Looper;
|
||||
import android.support.annotation.Nullable;
|
||||
import java.util.Arrays;
|
||||
|
||||
@SuppressWarnings("deprecation")
|
||||
abstract class CameraCapturer implements CameraVideoCapturer {
|
||||
enum SwitchState {
|
||||
IDLE, // No switch requested.
|
||||
PENDING, // Waiting for previous capture session to open.
|
||||
IN_PROGRESS, // Waiting for new switched capture session to start.
|
||||
}
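// Illustrative transition sketch (derived from the handlers below, not exhaustive):
// switchCamera() while a session is still opening moves IDLE -> PENDING; when that session
// reports onDone(), the pending switch is re-issued and the state becomes IN_PROGRESS; when the
// switched session reports onDone(), the state returns to IDLE and onCameraSwitchDone() fires.
// An open failure reports onCameraSwitchError() and resets the state to IDLE.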
|
||||
|
||||
private static final String TAG = "CameraCapturer";
|
||||
private static final int MAX_OPEN_CAMERA_ATTEMPTS = 3;
private static final int OPEN_CAMERA_DELAY_MS = 500;
private static final int OPEN_CAMERA_TIMEOUT = 10000;
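// With these values, a failed open is retried after 500 ms, up to 3 attempts in total, and each
// attempt is guarded by a 10 second timeout posted on the UI thread handler.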
|
||||
|
||||
private final CameraEnumerator cameraEnumerator;
|
||||
private final CameraEventsHandler eventsHandler;
|
||||
private final Handler uiThreadHandler;
|
||||
|
||||
@Nullable
|
||||
private final CameraSession.CreateSessionCallback createSessionCallback =
|
||||
new CameraSession.CreateSessionCallback() {
|
||||
@Override
|
||||
public void onDone(CameraSession session) {
|
||||
checkIsOnCameraThread();
|
||||
Logging.d(TAG, "Create session done. Switch state: " + switchState);
|
||||
uiThreadHandler.removeCallbacks(openCameraTimeoutRunnable);
|
||||
synchronized (stateLock) {
|
||||
capturerObserver.onCapturerStarted(true /* success */);
|
||||
sessionOpening = false;
|
||||
currentSession = session;
|
||||
cameraStatistics = new CameraStatistics(surfaceHelper, eventsHandler);
|
||||
firstFrameObserved = false;
|
||||
stateLock.notifyAll();
|
||||
|
||||
if (switchState == SwitchState.IN_PROGRESS) {
|
||||
switchState = SwitchState.IDLE;
|
||||
if (switchEventsHandler != null) {
|
||||
switchEventsHandler.onCameraSwitchDone(cameraEnumerator.isFrontFacing(cameraName));
|
||||
switchEventsHandler = null;
|
||||
}
|
||||
} else if (switchState == SwitchState.PENDING) {
|
||||
switchState = SwitchState.IDLE;
|
||||
switchCameraInternal(switchEventsHandler);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onFailure(CameraSession.FailureType failureType, String error) {
|
||||
checkIsOnCameraThread();
|
||||
uiThreadHandler.removeCallbacks(openCameraTimeoutRunnable);
|
||||
synchronized (stateLock) {
|
||||
capturerObserver.onCapturerStarted(false /* success */);
|
||||
openAttemptsRemaining--;
|
||||
|
||||
if (openAttemptsRemaining <= 0) {
|
||||
Logging.w(TAG, "Opening camera failed, passing: " + error);
|
||||
sessionOpening = false;
|
||||
stateLock.notifyAll();
|
||||
|
||||
if (switchState != SwitchState.IDLE) {
|
||||
if (switchEventsHandler != null) {
|
||||
switchEventsHandler.onCameraSwitchError(error);
|
||||
switchEventsHandler = null;
|
||||
}
|
||||
switchState = SwitchState.IDLE;
|
||||
}
|
||||
|
||||
if (failureType == CameraSession.FailureType.DISCONNECTED) {
|
||||
eventsHandler.onCameraDisconnected();
|
||||
} else {
|
||||
eventsHandler.onCameraError(error);
|
||||
}
|
||||
} else {
|
||||
Logging.w(TAG, "Opening camera failed, retry: " + error);
|
||||
createSessionInternal(OPEN_CAMERA_DELAY_MS);
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
@Nullable
|
||||
private final CameraSession.Events cameraSessionEventsHandler = new CameraSession.Events() {
|
||||
@Override
|
||||
public void onCameraOpening() {
|
||||
checkIsOnCameraThread();
|
||||
synchronized (stateLock) {
|
||||
if (currentSession != null) {
|
||||
Logging.w(TAG, "onCameraOpening while session was open.");
|
||||
return;
|
||||
}
|
||||
eventsHandler.onCameraOpening(cameraName);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onCameraError(CameraSession session, String error) {
|
||||
checkIsOnCameraThread();
|
||||
synchronized (stateLock) {
|
||||
if (session != currentSession) {
|
||||
Logging.w(TAG, "onCameraError from another session: " + error);
|
||||
return;
|
||||
}
|
||||
eventsHandler.onCameraError(error);
|
||||
stopCapture();
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onCameraDisconnected(CameraSession session) {
|
||||
checkIsOnCameraThread();
|
||||
synchronized (stateLock) {
|
||||
if (session != currentSession) {
|
||||
Logging.w(TAG, "onCameraDisconnected from another session.");
|
||||
return;
|
||||
}
|
||||
eventsHandler.onCameraDisconnected();
|
||||
stopCapture();
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onCameraClosed(CameraSession session) {
|
||||
checkIsOnCameraThread();
|
||||
synchronized (stateLock) {
|
||||
if (session != currentSession && currentSession != null) {
|
||||
Logging.d(TAG, "onCameraClosed from another session.");
|
||||
return;
|
||||
}
|
||||
eventsHandler.onCameraClosed();
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onFrameCaptured(CameraSession session, VideoFrame frame) {
|
||||
checkIsOnCameraThread();
|
||||
synchronized (stateLock) {
|
||||
if (session != currentSession) {
|
||||
Logging.w(TAG, "onFrameCaptured from another session.");
|
||||
return;
|
||||
}
|
||||
if (!firstFrameObserved) {
|
||||
eventsHandler.onFirstFrameAvailable();
|
||||
firstFrameObserved = true;
|
||||
}
|
||||
cameraStatistics.addFrame();
|
||||
capturerObserver.onFrameCaptured(frame);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
private final Runnable openCameraTimeoutRunnable = new Runnable() {
|
||||
@Override
|
||||
public void run() {
|
||||
eventsHandler.onCameraError("Camera failed to start within timeout.");
|
||||
}
|
||||
};
|
||||
|
||||
// Initialized in initialize()
|
||||
// -------------------------
|
||||
private Handler cameraThreadHandler;
|
||||
private Context applicationContext;
|
||||
private org.webrtc.CapturerObserver capturerObserver;
|
||||
private SurfaceTextureHelper surfaceHelper;
|
||||
|
||||
private final Object stateLock = new Object();
|
||||
private boolean sessionOpening; /* guarded by stateLock */
|
||||
@Nullable private CameraSession currentSession; /* guarded by stateLock */
|
||||
private String cameraName; /* guarded by stateLock */
|
||||
private int width; /* guarded by stateLock */
|
||||
private int height; /* guarded by stateLock */
|
||||
private int framerate; /* guarded by stateLock */
|
||||
private int openAttemptsRemaining; /* guarded by stateLock */
|
||||
private SwitchState switchState = SwitchState.IDLE; /* guarded by stateLock */
|
||||
@Nullable private CameraSwitchHandler switchEventsHandler; /* guarded by stateLock */
|
||||
// Valid from onDone call until stopCapture, otherwise null.
|
||||
@Nullable private CameraStatistics cameraStatistics; /* guarded by stateLock */
|
||||
private boolean firstFrameObserved; /* guarded by stateLock */
|
||||
|
||||
public CameraCapturer(String cameraName, @Nullable CameraEventsHandler eventsHandler,
|
||||
CameraEnumerator cameraEnumerator) {
|
||||
if (eventsHandler == null) {
|
||||
eventsHandler = new CameraEventsHandler() {
|
||||
@Override
|
||||
public void onCameraError(String errorDescription) {}
|
||||
@Override
|
||||
public void onCameraDisconnected() {}
|
||||
@Override
|
||||
public void onCameraFreezed(String errorDescription) {}
|
||||
@Override
|
||||
public void onCameraOpening(String cameraName) {}
|
||||
@Override
|
||||
public void onFirstFrameAvailable() {}
|
||||
@Override
|
||||
public void onCameraClosed() {}
|
||||
};
|
||||
}
|
||||
|
||||
this.eventsHandler = eventsHandler;
|
||||
this.cameraEnumerator = cameraEnumerator;
|
||||
this.cameraName = cameraName;
|
||||
uiThreadHandler = new Handler(Looper.getMainLooper());
|
||||
|
||||
final String[] deviceNames = cameraEnumerator.getDeviceNames();
|
||||
|
||||
if (deviceNames.length == 0) {
|
||||
throw new RuntimeException("No cameras attached.");
|
||||
}
|
||||
if (!Arrays.asList(deviceNames).contains(this.cameraName)) {
|
||||
throw new IllegalArgumentException(
|
||||
"Camera name " + this.cameraName + " does not match any known camera device.");
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void initialize(SurfaceTextureHelper surfaceTextureHelper, Context applicationContext,
|
||||
org.webrtc.CapturerObserver capturerObserver) {
|
||||
this.applicationContext = applicationContext;
|
||||
this.capturerObserver = capturerObserver;
|
||||
this.surfaceHelper = surfaceTextureHelper;
|
||||
this.cameraThreadHandler = surfaceTextureHelper.getHandler();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void startCapture(int width, int height, int framerate) {
|
||||
Logging.d(TAG, "startCapture: " + width + "x" + height + "@" + framerate);
|
||||
if (applicationContext == null) {
|
||||
throw new RuntimeException("CameraCapturer must be initialized before calling startCapture.");
|
||||
}
|
||||
|
||||
synchronized (stateLock) {
|
||||
if (sessionOpening || currentSession != null) {
|
||||
Logging.w(TAG, "Session already open");
|
||||
return;
|
||||
}
|
||||
|
||||
this.width = width;
|
||||
this.height = height;
|
||||
this.framerate = framerate;
|
||||
|
||||
sessionOpening = true;
|
||||
openAttemptsRemaining = MAX_OPEN_CAMERA_ATTEMPTS;
|
||||
createSessionInternal(0);
|
||||
}
|
||||
}
|
||||
|
||||
private void createSessionInternal(int delayMs) {
|
||||
uiThreadHandler.postDelayed(openCameraTimeoutRunnable, delayMs + OPEN_CAMERA_TIMEOUT);
|
||||
cameraThreadHandler.postDelayed(new Runnable() {
|
||||
@Override
|
||||
public void run() {
|
||||
createCameraSession(createSessionCallback, cameraSessionEventsHandler, applicationContext,
|
||||
surfaceHelper, cameraName, width, height, framerate);
|
||||
}
|
||||
}, delayMs);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void stopCapture() {
|
||||
Logging.d(TAG, "Stop capture");
|
||||
|
||||
synchronized (stateLock) {
|
||||
while (sessionOpening) {
|
||||
Logging.d(TAG, "Stop capture: Waiting for session to open");
|
||||
try {
|
||||
stateLock.wait();
|
||||
} catch (InterruptedException e) {
|
||||
Logging.w(TAG, "Stop capture interrupted while waiting for the session to open.");
|
||||
Thread.currentThread().interrupt();
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
if (currentSession != null) {
|
||||
Logging.d(TAG, "Stop capture: Nulling session");
|
||||
cameraStatistics.release();
|
||||
cameraStatistics = null;
|
||||
final CameraSession oldSession = currentSession;
|
||||
cameraThreadHandler.post(new Runnable() {
|
||||
@Override
|
||||
public void run() {
|
||||
oldSession.stop();
|
||||
}
|
||||
});
|
||||
currentSession = null;
|
||||
capturerObserver.onCapturerStopped();
|
||||
} else {
|
||||
Logging.d(TAG, "Stop capture: No session open");
|
||||
}
|
||||
}
|
||||
|
||||
Logging.d(TAG, "Stop capture done");
|
||||
}
|
||||
|
||||
@Override
|
||||
public void changeCaptureFormat(int width, int height, int framerate) {
|
||||
Logging.d(TAG, "changeCaptureFormat: " + width + "x" + height + "@" + framerate);
|
||||
synchronized (stateLock) {
|
||||
stopCapture();
|
||||
startCapture(width, height, framerate);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void dispose() {
|
||||
Logging.d(TAG, "dispose");
|
||||
stopCapture();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void switchCamera(final CameraSwitchHandler switchEventsHandler) {
|
||||
Logging.d(TAG, "switchCamera");
|
||||
cameraThreadHandler.post(new Runnable() {
|
||||
@Override
|
||||
public void run() {
|
||||
switchCameraInternal(switchEventsHandler);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isScreencast() {
|
||||
return false;
|
||||
}
|
||||
|
||||
public void printStackTrace() {
|
||||
Thread cameraThread = null;
|
||||
if (cameraThreadHandler != null) {
|
||||
cameraThread = cameraThreadHandler.getLooper().getThread();
|
||||
}
|
||||
if (cameraThread != null) {
|
||||
StackTraceElement[] cameraStackTrace = cameraThread.getStackTrace();
|
||||
if (cameraStackTrace.length > 0) {
|
||||
Logging.d(TAG, "CameraCapturer stack trace:");
|
||||
for (StackTraceElement traceElem : cameraStackTrace) {
|
||||
Logging.d(TAG, traceElem.toString());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private void reportCameraSwitchError(
|
||||
String error, @Nullable CameraSwitchHandler switchEventsHandler) {
|
||||
Logging.e(TAG, error);
|
||||
if (switchEventsHandler != null) {
|
||||
switchEventsHandler.onCameraSwitchError(error);
|
||||
}
|
||||
}
|
||||
|
||||
private void switchCameraInternal(@Nullable final CameraSwitchHandler switchEventsHandler) {
|
||||
Logging.d(TAG, "switchCamera internal");
|
||||
|
||||
final String[] deviceNames = cameraEnumerator.getDeviceNames();
|
||||
|
||||
if (deviceNames.length < 2) {
|
||||
if (switchEventsHandler != null) {
|
||||
switchEventsHandler.onCameraSwitchError("No camera to switch to.");
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
synchronized (stateLock) {
|
||||
if (switchState != SwitchState.IDLE) {
|
||||
reportCameraSwitchError("Camera switch already in progress.", switchEventsHandler);
|
||||
return;
|
||||
}
|
||||
if (!sessionOpening && currentSession == null) {
|
||||
reportCameraSwitchError("switchCamera: camera is not running.", switchEventsHandler);
|
||||
return;
|
||||
}
|
||||
|
||||
this.switchEventsHandler = switchEventsHandler;
|
||||
if (sessionOpening) {
|
||||
switchState = SwitchState.PENDING;
|
||||
return;
|
||||
} else {
|
||||
switchState = SwitchState.IN_PROGRESS;
|
||||
}
|
||||
|
||||
Logging.d(TAG, "switchCamera: Stopping session");
|
||||
cameraStatistics.release();
|
||||
cameraStatistics = null;
|
||||
final CameraSession oldSession = currentSession;
|
||||
cameraThreadHandler.post(new Runnable() {
|
||||
@Override
|
||||
public void run() {
|
||||
oldSession.stop();
|
||||
}
|
||||
});
|
||||
currentSession = null;
|
||||
|
||||
int cameraNameIndex = Arrays.asList(deviceNames).indexOf(cameraName);
|
||||
cameraName = deviceNames[(cameraNameIndex + 1) % deviceNames.length];
|
||||
|
||||
sessionOpening = true;
|
||||
openAttemptsRemaining = 1;
|
||||
createSessionInternal(0);
|
||||
}
|
||||
Logging.d(TAG, "switchCamera done");
|
||||
}
|
||||
|
||||
private void checkIsOnCameraThread() {
|
||||
if (Thread.currentThread() != cameraThreadHandler.getLooper().getThread()) {
|
||||
Logging.e(TAG, "Check is on camera thread failed.");
|
||||
throw new RuntimeException("Not on camera thread.");
|
||||
}
|
||||
}
|
||||
|
||||
protected String getCameraName() {
|
||||
synchronized (stateLock) {
|
||||
return cameraName;
|
||||
}
|
||||
}
|
||||
|
||||
protected abstract void createCameraSession(
|
||||
CameraSession.CreateSessionCallback createSessionCallback, CameraSession.Events events,
|
||||
Context applicationContext, SurfaceTextureHelper surfaceTextureHelper, String cameraName,
|
||||
int width, int height, int framerate);
|
||||
}
|
|
@ -0,0 +1,72 @@
|
|||
/*
|
||||
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.webrtc;
|
||||
|
||||
import android.content.Context;
|
||||
import android.graphics.Matrix;
|
||||
import android.view.Surface;
import android.view.WindowManager;
|
||||
|
||||
interface CameraSession {
|
||||
enum FailureType { ERROR, DISCONNECTED }
|
||||
|
||||
// Callbacks are fired on the camera thread.
|
||||
interface CreateSessionCallback {
|
||||
void onDone(CameraSession session);
|
||||
void onFailure(FailureType failureType, String error);
|
||||
}
|
||||
|
||||
// Events are fired on the camera thread.
|
||||
interface Events {
|
||||
void onCameraOpening();
|
||||
void onCameraError(CameraSession session, String error);
|
||||
void onCameraDisconnected(CameraSession session);
|
||||
void onCameraClosed(CameraSession session);
|
||||
void onFrameCaptured(CameraSession session, VideoFrame frame);
|
||||
}
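// Usage sketch (hypothetical handler, for illustration only): a capturer typically supplies an
// Events implementation that forwards frames and errors, e.g.
//   CameraSession.Events events = new CameraSession.Events() {
//     @Override public void onCameraOpening() {}
//     @Override public void onCameraError(CameraSession session, String error) { /* report */ }
//     @Override public void onCameraDisconnected(CameraSession session) { /* restart or stop */ }
//     @Override public void onCameraClosed(CameraSession session) {}
//     @Override public void onFrameCaptured(CameraSession session, VideoFrame frame) { /* deliver */ }
//   };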
|
||||
|
||||
/**
|
||||
* Stops the capture. Waits until no more calls to the capture observer will be made.
|
||||
*/
|
||||
void stop();
|
||||
|
||||
static int getDeviceOrientation(Context context) {
|
||||
final WindowManager wm = (WindowManager) context.getSystemService(Context.WINDOW_SERVICE);
|
||||
switch (wm.getDefaultDisplay().getRotation()) {
|
||||
case Surface.ROTATION_90:
|
||||
return 90;
|
||||
case Surface.ROTATION_180:
|
||||
return 180;
|
||||
case Surface.ROTATION_270:
|
||||
return 270;
|
||||
case Surface.ROTATION_0:
|
||||
default:
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
|
||||
static VideoFrame.TextureBuffer createTextureBufferWithModifiedTransformMatrix(
|
||||
TextureBufferImpl buffer, boolean mirror, int rotation) {
|
||||
final Matrix transformMatrix = new Matrix();
|
||||
// Perform mirror and rotation around (0.5, 0.5) since that is the center of the texture.
|
||||
transformMatrix.preTranslate(/* dx= */ 0.5f, /* dy= */ 0.5f);
|
||||
if (mirror) {
|
||||
transformMatrix.preScale(/* sx= */ -1f, /* sy= */ 1f);
|
||||
}
|
||||
transformMatrix.preRotate(rotation);
|
||||
transformMatrix.preTranslate(/* dx= */ -0.5f, /* dy= */ -0.5f);
|
||||
|
||||
// The width and height are not affected by rotation since Camera2Session has set them to the
|
||||
// value they should be after undoing the rotation.
|
||||
return buffer.applyTransformMatrix(transformMatrix, buffer.getWidth(), buffer.getHeight());
|
||||
}
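// Worked example (hypothetical values): with mirror = true and rotation = -90, a texture
// coordinate is, in application order, translated by (-0.5, -0.5), rotated by -90 degrees,
// mirrored in x, and translated back by (+0.5, +0.5); the pre* calls above compose
// right-to-left, which is what keeps the mirror and rotation centered on (0.5, 0.5).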
|
||||
}
|
|
@ -0,0 +1,332 @@
|
|||
/*
|
||||
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.webrtc;
|
||||
|
||||
import android.graphics.Canvas;
|
||||
import android.graphics.Rect;
|
||||
import android.graphics.SurfaceTexture;
|
||||
import android.support.annotation.Nullable;
|
||||
import android.view.Surface;
|
||||
import android.view.SurfaceHolder;
|
||||
import javax.microedition.khronos.egl.EGL10;
|
||||
import javax.microedition.khronos.egl.EGLConfig;
|
||||
import javax.microedition.khronos.egl.EGLContext;
|
||||
import javax.microedition.khronos.egl.EGLDisplay;
|
||||
import javax.microedition.khronos.egl.EGLSurface;
|
||||
|
||||
/**
|
||||
* Holds EGL state and utility methods for handling an EGL 1.0 EGLContext, an EGLDisplay,
|
||||
* and an EGLSurface.
|
||||
*/
|
||||
class EglBase10Impl implements EglBase10 {
|
||||
private static final String TAG = "EglBase10Impl";
|
||||
// This constant is taken from EGL14.EGL_CONTEXT_CLIENT_VERSION.
|
||||
private static final int EGL_CONTEXT_CLIENT_VERSION = 0x3098;
|
||||
|
||||
private final EGL10 egl;
|
||||
private EGLContext eglContext;
|
||||
@Nullable private EGLConfig eglConfig;
|
||||
private EGLDisplay eglDisplay;
|
||||
private EGLSurface eglSurface = EGL10.EGL_NO_SURFACE;
|
||||
|
||||
// EGL wrapper for an actual EGLContext.
|
||||
private static class Context implements EglBase10.Context {
|
||||
private final EGLContext eglContext;
|
||||
|
||||
@Override
|
||||
public EGLContext getRawContext() {
|
||||
return eglContext;
|
||||
}
|
||||
|
||||
@Override
|
||||
public long getNativeEglContext() {
|
||||
// TODO(magjed): Implement. There is no easy way of getting the native context for EGL 1.0. We
|
||||
// need to make sure to have an EglSurface, then make the context current using that surface,
|
||||
// and then call into JNI and call the native version of eglGetCurrentContext. Then we need to
|
||||
// restore the state and return the native context.
|
||||
return 0 /* EGL_NO_CONTEXT */;
|
||||
}
|
||||
|
||||
public Context(EGLContext eglContext) {
|
||||
this.eglContext = eglContext;
|
||||
}
|
||||
}
|
||||
|
||||
// Create a new context with the specified config type, sharing data with sharedContext.
|
||||
public EglBase10Impl(EGLContext sharedContext, int[] configAttributes) {
|
||||
this.egl = (EGL10) EGLContext.getEGL();
|
||||
eglDisplay = getEglDisplay();
|
||||
eglConfig = getEglConfig(eglDisplay, configAttributes);
|
||||
final int openGlesVersion = EglBase.getOpenGlesVersionFromConfig(configAttributes);
|
||||
Logging.d(TAG, "Using OpenGL ES version " + openGlesVersion);
|
||||
eglContext = createEglContext(sharedContext, eglDisplay, eglConfig, openGlesVersion);
|
||||
}
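// Typical lifecycle sketch (illustrative only; EglBase.CONFIG_PLAIN is assumed here to be one of
// the config-attribute constants defined on EglBase): create the context, attach a surface, make
// it current before issuing GL calls, and release everything when done.
//   EglBase10Impl egl = new EglBase10Impl(/* sharedContext= */ null, EglBase.CONFIG_PLAIN);
//   egl.createDummyPbufferSurface();
//   egl.makeCurrent();
//   // ... GL rendering ...
//   egl.release();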
|
||||
|
||||
@Override
|
||||
public void createSurface(Surface surface) {
|
||||
/**
|
||||
* We have to wrap Surface in a SurfaceHolder because for some reason eglCreateWindowSurface
|
||||
* couldn't actually take a Surface object until API 17. Older versions fortunately just call
|
||||
* SurfaceHolder.getSurface(), so we'll do that. No other methods are relevant.
|
||||
*/
|
||||
class FakeSurfaceHolder implements SurfaceHolder {
|
||||
private final Surface surface;
|
||||
|
||||
FakeSurfaceHolder(Surface surface) {
|
||||
this.surface = surface;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void addCallback(Callback callback) {}
|
||||
|
||||
@Override
|
||||
public void removeCallback(Callback callback) {}
|
||||
|
||||
@Override
|
||||
public boolean isCreating() {
|
||||
return false;
|
||||
}
|
||||
|
||||
@Deprecated
|
||||
@Override
|
||||
public void setType(int i) {}
|
||||
|
||||
@Override
|
||||
public void setFixedSize(int i, int i2) {}
|
||||
|
||||
@Override
|
||||
public void setSizeFromLayout() {}
|
||||
|
||||
@Override
|
||||
public void setFormat(int i) {}
|
||||
|
||||
@Override
|
||||
public void setKeepScreenOn(boolean b) {}
|
||||
|
||||
@Nullable
|
||||
@Override
|
||||
public Canvas lockCanvas() {
|
||||
return null;
|
||||
}
|
||||
|
||||
@Nullable
|
||||
@Override
|
||||
public Canvas lockCanvas(Rect rect) {
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void unlockCanvasAndPost(Canvas canvas) {}
|
||||
|
||||
@Nullable
|
||||
@Override
|
||||
public Rect getSurfaceFrame() {
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Surface getSurface() {
|
||||
return surface;
|
||||
}
|
||||
}
|
||||
|
||||
createSurfaceInternal(new FakeSurfaceHolder(surface));
|
||||
}
|
||||
|
||||
// Create EGLSurface from the Android SurfaceTexture.
|
||||
@Override
|
||||
public void createSurface(SurfaceTexture surfaceTexture) {
|
||||
createSurfaceInternal(surfaceTexture);
|
||||
}
|
||||
|
||||
// Create EGLSurface from either a SurfaceHolder or a SurfaceTexture.
|
||||
private void createSurfaceInternal(Object nativeWindow) {
|
||||
if (!(nativeWindow instanceof SurfaceHolder) && !(nativeWindow instanceof SurfaceTexture)) {
|
||||
throw new IllegalStateException("Input must be either a SurfaceHolder or SurfaceTexture");
|
||||
}
|
||||
checkIsNotReleased();
|
||||
if (eglSurface != EGL10.EGL_NO_SURFACE) {
|
||||
throw new RuntimeException("Already has an EGLSurface");
|
||||
}
|
||||
int[] surfaceAttribs = {EGL10.EGL_NONE};
|
||||
eglSurface = egl.eglCreateWindowSurface(eglDisplay, eglConfig, nativeWindow, surfaceAttribs);
|
||||
if (eglSurface == EGL10.EGL_NO_SURFACE) {
|
||||
throw new RuntimeException(
|
||||
"Failed to create window surface: 0x" + Integer.toHexString(egl.eglGetError()));
|
||||
}
|
||||
}
|
||||
|
||||
// Create dummy 1x1 pixel buffer surface so the context can be made current.
|
||||
@Override
|
||||
public void createDummyPbufferSurface() {
|
||||
createPbufferSurface(1, 1);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void createPbufferSurface(int width, int height) {
|
||||
checkIsNotReleased();
|
||||
if (eglSurface != EGL10.EGL_NO_SURFACE) {
|
||||
throw new RuntimeException("Already has an EGLSurface");
|
||||
}
|
||||
int[] surfaceAttribs = {EGL10.EGL_WIDTH, width, EGL10.EGL_HEIGHT, height, EGL10.EGL_NONE};
|
||||
eglSurface = egl.eglCreatePbufferSurface(eglDisplay, eglConfig, surfaceAttribs);
|
||||
if (eglSurface == EGL10.EGL_NO_SURFACE) {
|
||||
throw new RuntimeException("Failed to create pixel buffer surface with size " + width + "x"
|
||||
+ height + ": 0x" + Integer.toHexString(egl.eglGetError()));
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public org.webrtc.EglBase.Context getEglBaseContext() {
|
||||
return new Context(eglContext);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean hasSurface() {
|
||||
return eglSurface != EGL10.EGL_NO_SURFACE;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int surfaceWidth() {
|
||||
final int widthArray[] = new int[1];
|
||||
egl.eglQuerySurface(eglDisplay, eglSurface, EGL10.EGL_WIDTH, widthArray);
|
||||
return widthArray[0];
|
||||
}
|
||||
|
||||
@Override
|
||||
public int surfaceHeight() {
|
||||
final int heightArray[] = new int[1];
|
||||
egl.eglQuerySurface(eglDisplay, eglSurface, EGL10.EGL_HEIGHT, heightArray);
|
||||
return heightArray[0];
|
||||
}
|
||||
|
||||
@Override
|
||||
public void releaseSurface() {
|
||||
if (eglSurface != EGL10.EGL_NO_SURFACE) {
|
||||
egl.eglDestroySurface(eglDisplay, eglSurface);
|
||||
eglSurface = EGL10.EGL_NO_SURFACE;
|
||||
}
|
||||
}
|
||||
|
||||
private void checkIsNotReleased() {
|
||||
if (eglDisplay == EGL10.EGL_NO_DISPLAY || eglContext == EGL10.EGL_NO_CONTEXT
|
||||
|| eglConfig == null) {
|
||||
throw new RuntimeException("This object has been released");
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void release() {
|
||||
checkIsNotReleased();
|
||||
releaseSurface();
|
||||
detachCurrent();
|
||||
egl.eglDestroyContext(eglDisplay, eglContext);
|
||||
egl.eglTerminate(eglDisplay);
|
||||
eglContext = EGL10.EGL_NO_CONTEXT;
|
||||
eglDisplay = EGL10.EGL_NO_DISPLAY;
|
||||
eglConfig = null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void makeCurrent() {
|
||||
checkIsNotReleased();
|
||||
if (eglSurface == EGL10.EGL_NO_SURFACE) {
|
||||
throw new RuntimeException("No EGLSurface - can't make current");
|
||||
}
|
||||
synchronized (EglBase.lock) {
|
||||
if (!egl.eglMakeCurrent(eglDisplay, eglSurface, eglSurface, eglContext)) {
|
||||
throw new RuntimeException(
|
||||
"eglMakeCurrent failed: 0x" + Integer.toHexString(egl.eglGetError()));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Detach the current EGL context, so that it can be made current on another thread.
|
||||
@Override
|
||||
public void detachCurrent() {
|
||||
synchronized (EglBase.lock) {
|
||||
if (!egl.eglMakeCurrent(
|
||||
eglDisplay, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_CONTEXT)) {
|
||||
throw new RuntimeException(
|
||||
"eglDetachCurrent failed: 0x" + Integer.toHexString(egl.eglGetError()));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void swapBuffers() {
|
||||
checkIsNotReleased();
|
||||
if (eglSurface == EGL10.EGL_NO_SURFACE) {
|
||||
throw new RuntimeException("No EGLSurface - can't swap buffers");
|
||||
}
|
||||
synchronized (EglBase.lock) {
|
||||
egl.eglSwapBuffers(eglDisplay, eglSurface);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void swapBuffers(long timeStampNs) {
|
||||
// Setting presentation time is not supported for EGL 1.0.
|
||||
swapBuffers();
|
||||
}
|
||||
|
||||
// Return an EGLDisplay, or die trying.
|
||||
private EGLDisplay getEglDisplay() {
|
||||
EGLDisplay eglDisplay = egl.eglGetDisplay(EGL10.EGL_DEFAULT_DISPLAY);
|
||||
if (eglDisplay == EGL10.EGL_NO_DISPLAY) {
|
||||
throw new RuntimeException(
|
||||
"Unable to get EGL10 display: 0x" + Integer.toHexString(egl.eglGetError()));
|
||||
}
|
||||
int[] version = new int[2];
|
||||
if (!egl.eglInitialize(eglDisplay, version)) {
|
||||
throw new RuntimeException(
|
||||
"Unable to initialize EGL10: 0x" + Integer.toHexString(egl.eglGetError()));
|
||||
}
|
||||
return eglDisplay;
|
||||
}
|
||||
|
||||
// Return an EGLConfig, or die trying.
|
||||
private EGLConfig getEglConfig(EGLDisplay eglDisplay, int[] configAttributes) {
|
||||
EGLConfig[] configs = new EGLConfig[1];
|
||||
int[] numConfigs = new int[1];
|
||||
if (!egl.eglChooseConfig(eglDisplay, configAttributes, configs, configs.length, numConfigs)) {
|
||||
throw new RuntimeException(
|
||||
"eglChooseConfig failed: 0x" + Integer.toHexString(egl.eglGetError()));
|
||||
}
|
||||
if (numConfigs[0] <= 0) {
|
||||
throw new RuntimeException("Unable to find any matching EGL config");
|
||||
}
|
||||
final EGLConfig eglConfig = configs[0];
|
||||
if (eglConfig == null) {
|
||||
throw new RuntimeException("eglChooseConfig returned null");
|
||||
}
|
||||
return eglConfig;
|
||||
}
|
||||
|
||||
// Return an EGLContext, or die trying.
|
||||
private EGLContext createEglContext(@Nullable EGLContext sharedContext, EGLDisplay eglDisplay,
|
||||
EGLConfig eglConfig, int openGlesVersion) {
|
||||
if (sharedContext != null && sharedContext == EGL10.EGL_NO_CONTEXT) {
|
||||
throw new RuntimeException("Invalid sharedContext");
|
||||
}
|
||||
int[] contextAttributes = {EGL_CONTEXT_CLIENT_VERSION, openGlesVersion, EGL10.EGL_NONE};
|
||||
EGLContext rootContext = sharedContext == null ? EGL10.EGL_NO_CONTEXT : sharedContext;
|
||||
final EGLContext eglContext;
|
||||
synchronized (EglBase.lock) {
|
||||
eglContext = egl.eglCreateContext(eglDisplay, eglConfig, rootContext, contextAttributes);
|
||||
}
|
||||
if (eglContext == EGL10.EGL_NO_CONTEXT) {
|
||||
throw new RuntimeException(
|
||||
"Failed to create EGL context: 0x" + Integer.toHexString(egl.eglGetError()));
|
||||
}
|
||||
return eglContext;
|
||||
}
|
||||
}
|
|
@ -0,0 +1,284 @@
|
|||
/*
|
||||
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.webrtc;
|
||||
|
||||
import android.annotation.TargetApi;
|
||||
import android.graphics.SurfaceTexture;
|
||||
import android.opengl.EGL14;
|
||||
import android.opengl.EGLConfig;
|
||||
import android.opengl.EGLContext;
|
||||
import android.opengl.EGLDisplay;
|
||||
import android.opengl.EGLExt;
|
||||
import android.opengl.EGLSurface;
|
||||
import android.os.Build;
|
||||
import android.support.annotation.Nullable;
|
||||
import android.view.Surface;
|
||||
import org.webrtc.EglBase;
|
||||
|
||||
/**
|
||||
* Holds EGL state and utility methods for handling an EGL14 EGLContext, an EGLDisplay,
|
||||
* and an EGLSurface.
|
||||
*/
|
||||
@SuppressWarnings("ReferenceEquality") // We want to compare to EGL14 constants.
|
||||
@TargetApi(18)
|
||||
class EglBase14Impl implements EglBase14 {
|
||||
private static final String TAG = "EglBase14Impl";
|
||||
private static final int EGLExt_SDK_VERSION = Build.VERSION_CODES.JELLY_BEAN_MR2;
|
||||
private static final int CURRENT_SDK_VERSION = Build.VERSION.SDK_INT;
|
||||
private EGLContext eglContext;
|
||||
@Nullable private EGLConfig eglConfig;
|
||||
private EGLDisplay eglDisplay;
|
||||
private EGLSurface eglSurface = EGL14.EGL_NO_SURFACE;
|
||||
|
||||
// EGL 1.4 is supported from API 17, but EGLExt, which is used for setting the presentation
// time stamp on a surface, is only supported from API 18, so we require 18.
|
||||
public static boolean isEGL14Supported() {
|
||||
Logging.d(TAG,
|
||||
"SDK version: " + CURRENT_SDK_VERSION
|
||||
+ ". isEGL14Supported: " + (CURRENT_SDK_VERSION >= EGLExt_SDK_VERSION));
|
||||
return (CURRENT_SDK_VERSION >= EGLExt_SDK_VERSION);
|
||||
}
|
||||
|
||||
public static class Context implements EglBase14.Context {
|
||||
private final EGLContext egl14Context;
|
||||
|
||||
@Override
|
||||
public EGLContext getRawContext() {
|
||||
return egl14Context;
|
||||
}
|
||||
|
||||
@Override
|
||||
@SuppressWarnings("deprecation")
|
||||
@TargetApi(Build.VERSION_CODES.LOLLIPOP)
|
||||
public long getNativeEglContext() {
|
||||
return CURRENT_SDK_VERSION >= Build.VERSION_CODES.LOLLIPOP ? egl14Context.getNativeHandle()
|
||||
: egl14Context.getHandle();
|
||||
}
|
||||
|
||||
public Context(android.opengl.EGLContext eglContext) {
|
||||
this.egl14Context = eglContext;
|
||||
}
|
||||
}
|
||||
|
||||
// Create a new context with the specified config type, sharing data with sharedContext.
|
||||
// |sharedContext| may be null.
|
||||
public EglBase14Impl(EGLContext sharedContext, int[] configAttributes) {
|
||||
eglDisplay = getEglDisplay();
|
||||
eglConfig = getEglConfig(eglDisplay, configAttributes);
|
||||
final int openGlesVersion = EglBase.getOpenGlesVersionFromConfig(configAttributes);
|
||||
Logging.d(TAG, "Using OpenGL ES version " + openGlesVersion);
|
||||
eglContext = createEglContext(sharedContext, eglDisplay, eglConfig, openGlesVersion);
|
||||
}
|
||||
|
||||
// Create EGLSurface from the Android Surface.
|
||||
@Override
|
||||
public void createSurface(Surface surface) {
|
||||
createSurfaceInternal(surface);
|
||||
}
|
||||
|
||||
// Create EGLSurface from the Android SurfaceTexture.
|
||||
@Override
|
||||
public void createSurface(SurfaceTexture surfaceTexture) {
|
||||
createSurfaceInternal(surfaceTexture);
|
||||
}
|
||||
|
||||
// Create EGLSurface from either Surface or SurfaceTexture.
|
||||
private void createSurfaceInternal(Object surface) {
|
||||
if (!(surface instanceof Surface) && !(surface instanceof SurfaceTexture)) {
|
||||
throw new IllegalStateException("Input must be either a Surface or SurfaceTexture");
|
||||
}
|
||||
checkIsNotReleased();
|
||||
if (eglSurface != EGL14.EGL_NO_SURFACE) {
|
||||
throw new RuntimeException("Already has an EGLSurface");
|
||||
}
|
||||
int[] surfaceAttribs = {EGL14.EGL_NONE};
|
||||
eglSurface = EGL14.eglCreateWindowSurface(eglDisplay, eglConfig, surface, surfaceAttribs, 0);
|
||||
if (eglSurface == EGL14.EGL_NO_SURFACE) {
|
||||
throw new RuntimeException(
|
||||
"Failed to create window surface: 0x" + Integer.toHexString(EGL14.eglGetError()));
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void createDummyPbufferSurface() {
|
||||
createPbufferSurface(1, 1);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void createPbufferSurface(int width, int height) {
|
||||
checkIsNotReleased();
|
||||
if (eglSurface != EGL14.EGL_NO_SURFACE) {
|
||||
throw new RuntimeException("Already has an EGLSurface");
|
||||
}
|
||||
int[] surfaceAttribs = {EGL14.EGL_WIDTH, width, EGL14.EGL_HEIGHT, height, EGL14.EGL_NONE};
|
||||
eglSurface = EGL14.eglCreatePbufferSurface(eglDisplay, eglConfig, surfaceAttribs, 0);
|
||||
if (eglSurface == EGL14.EGL_NO_SURFACE) {
|
||||
throw new RuntimeException("Failed to create pixel buffer surface with size " + width + "x"
|
||||
+ height + ": 0x" + Integer.toHexString(EGL14.eglGetError()));
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public Context getEglBaseContext() {
|
||||
return new Context(eglContext);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean hasSurface() {
|
||||
return eglSurface != EGL14.EGL_NO_SURFACE;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int surfaceWidth() {
|
||||
final int widthArray[] = new int[1];
|
||||
EGL14.eglQuerySurface(eglDisplay, eglSurface, EGL14.EGL_WIDTH, widthArray, 0);
|
||||
return widthArray[0];
|
||||
}
|
||||
|
||||
@Override
|
||||
public int surfaceHeight() {
|
||||
final int heightArray[] = new int[1];
|
||||
EGL14.eglQuerySurface(eglDisplay, eglSurface, EGL14.EGL_HEIGHT, heightArray, 0);
|
||||
return heightArray[0];
|
||||
}
|
||||
|
||||
@Override
|
||||
public void releaseSurface() {
|
||||
if (eglSurface != EGL14.EGL_NO_SURFACE) {
|
||||
EGL14.eglDestroySurface(eglDisplay, eglSurface);
|
||||
eglSurface = EGL14.EGL_NO_SURFACE;
|
||||
}
|
||||
}
|
||||
|
||||
private void checkIsNotReleased() {
|
||||
if (eglDisplay == EGL14.EGL_NO_DISPLAY || eglContext == EGL14.EGL_NO_CONTEXT
|
||||
|| eglConfig == null) {
|
||||
throw new RuntimeException("This object has been released");
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void release() {
|
||||
checkIsNotReleased();
|
||||
releaseSurface();
|
||||
detachCurrent();
|
||||
EGL14.eglDestroyContext(eglDisplay, eglContext);
|
||||
EGL14.eglReleaseThread();
|
||||
EGL14.eglTerminate(eglDisplay);
|
||||
eglContext = EGL14.EGL_NO_CONTEXT;
|
||||
eglDisplay = EGL14.EGL_NO_DISPLAY;
|
||||
eglConfig = null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void makeCurrent() {
|
||||
checkIsNotReleased();
|
||||
if (eglSurface == EGL14.EGL_NO_SURFACE) {
|
||||
throw new RuntimeException("No EGLSurface - can't make current");
|
||||
}
|
||||
synchronized (EglBase.lock) {
|
||||
if (!EGL14.eglMakeCurrent(eglDisplay, eglSurface, eglSurface, eglContext)) {
|
||||
throw new RuntimeException(
|
||||
"eglMakeCurrent failed: 0x" + Integer.toHexString(EGL14.eglGetError()));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Detach the current EGL context, so that it can be made current on another thread.
|
||||
@Override
|
||||
public void detachCurrent() {
|
||||
synchronized (EglBase.lock) {
|
||||
if (!EGL14.eglMakeCurrent(
|
||||
eglDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_CONTEXT)) {
|
||||
throw new RuntimeException(
|
||||
"eglDetachCurrent failed: 0x" + Integer.toHexString(EGL14.eglGetError()));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void swapBuffers() {
|
||||
checkIsNotReleased();
|
||||
if (eglSurface == EGL14.EGL_NO_SURFACE) {
|
||||
throw new RuntimeException("No EGLSurface - can't swap buffers");
|
||||
}
|
||||
synchronized (EglBase.lock) {
|
||||
EGL14.eglSwapBuffers(eglDisplay, eglSurface);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void swapBuffers(long timeStampNs) {
|
||||
checkIsNotReleased();
|
||||
if (eglSurface == EGL14.EGL_NO_SURFACE) {
|
||||
throw new RuntimeException("No EGLSurface - can't swap buffers");
|
||||
}
|
||||
synchronized (EglBase.lock) {
|
||||
// See
|
||||
// https://android.googlesource.com/platform/frameworks/native/+/tools_r22.2/opengl/specs/EGL_ANDROID_presentation_time.txt
|
||||
EGLExt.eglPresentationTimeANDROID(eglDisplay, eglSurface, timeStampNs);
|
||||
EGL14.eglSwapBuffers(eglDisplay, eglSurface);
|
||||
}
|
||||
}
|
||||
|
||||
// Return an EGLDisplay, or die trying.
|
||||
private static EGLDisplay getEglDisplay() {
|
||||
EGLDisplay eglDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
|
||||
if (eglDisplay == EGL14.EGL_NO_DISPLAY) {
|
||||
throw new RuntimeException(
|
||||
"Unable to get EGL14 display: 0x" + Integer.toHexString(EGL14.eglGetError()));
|
||||
}
|
||||
int[] version = new int[2];
|
||||
if (!EGL14.eglInitialize(eglDisplay, version, 0, version, 1)) {
|
||||
throw new RuntimeException(
|
||||
"Unable to initialize EGL14: 0x" + Integer.toHexString(EGL14.eglGetError()));
|
||||
}
|
||||
return eglDisplay;
|
||||
}
|
||||
|
||||
// Return an EGLConfig, or die trying.
|
||||
private static EGLConfig getEglConfig(EGLDisplay eglDisplay, int[] configAttributes) {
|
||||
EGLConfig[] configs = new EGLConfig[1];
|
||||
int[] numConfigs = new int[1];
|
||||
if (!EGL14.eglChooseConfig(
|
||||
eglDisplay, configAttributes, 0, configs, 0, configs.length, numConfigs, 0)) {
|
||||
throw new RuntimeException(
|
||||
"eglChooseConfig failed: 0x" + Integer.toHexString(EGL14.eglGetError()));
|
||||
}
|
||||
if (numConfigs[0] <= 0) {
|
||||
throw new RuntimeException("Unable to find any matching EGL config");
|
||||
}
|
||||
final EGLConfig eglConfig = configs[0];
|
||||
if (eglConfig == null) {
|
||||
throw new RuntimeException("eglChooseConfig returned null");
|
||||
}
|
||||
return eglConfig;
|
||||
}
|
||||
|
||||
// Return an EGLContext, or die trying.
|
||||
private static EGLContext createEglContext(@Nullable EGLContext sharedContext,
|
||||
EGLDisplay eglDisplay, EGLConfig eglConfig, int openGlesVersion) {
|
||||
if (sharedContext != null && sharedContext == EGL14.EGL_NO_CONTEXT) {
|
||||
throw new RuntimeException("Invalid sharedContext");
|
||||
}
|
||||
int[] contextAttributes = {EGL14.EGL_CONTEXT_CLIENT_VERSION, openGlesVersion, EGL14.EGL_NONE};
|
||||
EGLContext rootContext = sharedContext == null ? EGL14.EGL_NO_CONTEXT : sharedContext;
|
||||
final EGLContext eglContext;
|
||||
synchronized (EglBase.lock) {
|
||||
eglContext = EGL14.eglCreateContext(eglDisplay, eglConfig, rootContext, contextAttributes, 0);
|
||||
}
|
||||
if (eglContext == EGL14.EGL_NO_CONTEXT) {
|
||||
throw new RuntimeException(
|
||||
"Failed to create EGL context: 0x" + Integer.toHexString(EGL14.eglGetError()));
|
||||
}
|
||||
return eglContext;
|
||||
}
|
||||
}
|
|
@ -0,0 +1,281 @@
|
|||
/*
|
||||
* Copyright 2018 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.webrtc;
|
||||
|
||||
import android.opengl.GLES11Ext;
|
||||
import android.opengl.GLES20;
|
||||
import android.support.annotation.Nullable;
|
||||
import java.nio.FloatBuffer;
|
||||
import org.webrtc.GlShader;
|
||||
import org.webrtc.GlUtil;
|
||||
import org.webrtc.RendererCommon;
|
||||
|
||||
/**
|
||||
* Helper class to implement an instance of RendererCommon.GlDrawer that can accept multiple input
|
||||
* sources (OES, RGB, or YUV) using a generic fragment shader as input. The generic fragment shader
|
||||
* should sample pixel values from the function "sample", which this class provides and which
* abstracts over the input source type (OES, RGB, or YUV). The texture coordinate
|
||||
* variable name will be "tc" and the texture matrix in the vertex shader will be "tex_mat". The
|
||||
* simplest possible generic shader that just draws pixels from the frame unmodified looks like:
|
||||
* void main() {
|
||||
* gl_FragColor = sample(tc);
|
||||
* }
|
||||
* This class covers most simple shader cases and generates the necessary boilerplate.
|
||||
* Advanced shaders can always implement RendererCommon.GlDrawer directly.
|
||||
*/
|
||||
class GlGenericDrawer implements RendererCommon.GlDrawer {
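// Illustrative generic fragment shader (an example for this sketch, not shipped in this file):
// a grayscale drawer could pass the following source to the constructor; the class prepends the
// matching sampler declarations and a "sample" implementation for OES, RGB or YUV input.
//   void main() {
//     vec4 c = sample(tc);
//     float gray = dot(c.rgb, vec3(0.299, 0.587, 0.114));
//     gl_FragColor = vec4(vec3(gray), c.a);
//   }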
|
||||
/**
|
||||
* The different shader types representing different input sources. YUV here represents three
|
||||
* separate Y, U, V textures.
|
||||
*/
|
||||
public static enum ShaderType { OES, RGB, YUV }
|
||||
|
||||
/**
|
||||
* The shader callbacks are used to customize the behavior of a GlDrawer. They provide a hook to set
|
||||
* uniform variables in the shader before a frame is drawn.
|
||||
*/
|
||||
public static interface ShaderCallbacks {
|
||||
/**
|
||||
* This callback is called when a new shader has been compiled and created. It will be called
|
||||
* for the first frame as well as when the shader type is changed. This callback can be used to
|
||||
* do custom initialization of the shader that only needs to happen once.
|
||||
*/
|
||||
void onNewShader(GlShader shader);
|
||||
|
||||
/**
|
||||
* This callback is called before rendering a frame. It can be used to do custom preparation of
|
||||
* the shader that needs to happen every frame.
|
||||
*/
|
||||
void onPrepareShader(GlShader shader, float[] texMatrix, int frameWidth, int frameHeight,
|
||||
int viewportWidth, int viewportHeight);
|
||||
}
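// Minimal sketch (hypothetical caller): a drawer that needs no extra uniforms can pass no-op
// callbacks together with the trivial generic source from the class comment:
//   new GlGenericDrawer("void main() { gl_FragColor = sample(tc); }",
//       new ShaderCallbacks() {
//         @Override public void onNewShader(GlShader shader) {}
//         @Override public void onPrepareShader(GlShader shader, float[] texMatrix,
//             int frameWidth, int frameHeight, int viewportWidth, int viewportHeight) {}
//       });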
|
||||
|
||||
private static final String INPUT_VERTEX_COORDINATE_NAME = "in_pos";
|
||||
private static final String INPUT_TEXTURE_COORDINATE_NAME = "in_tc";
|
||||
private static final String TEXTURE_MATRIX_NAME = "tex_mat";
|
||||
private static final String DEFAULT_VERTEX_SHADER_STRING = "varying vec2 tc;\n"
|
||||
+ "attribute vec4 in_pos;\n"
|
||||
+ "attribute vec4 in_tc;\n"
|
||||
+ "uniform mat4 tex_mat;\n"
|
||||
+ "void main() {\n"
|
||||
+ " gl_Position = in_pos;\n"
|
||||
+ " tc = (tex_mat * in_tc).xy;\n"
|
||||
+ "}\n";
|
||||
|
||||
// Vertex coordinates in Normalized Device Coordinates, i.e. (-1, -1) is bottom-left and (1, 1)
|
||||
// is top-right.
|
||||
private static final FloatBuffer FULL_RECTANGLE_BUFFER = GlUtil.createFloatBuffer(new float[] {
|
||||
-1.0f, -1.0f, // Bottom left.
|
||||
1.0f, -1.0f, // Bottom right.
|
||||
-1.0f, 1.0f, // Top left.
|
||||
1.0f, 1.0f, // Top right.
|
||||
});
|
||||
|
||||
// Texture coordinates - (0, 0) is bottom-left and (1, 1) is top-right.
|
||||
private static final FloatBuffer FULL_RECTANGLE_TEXTURE_BUFFER =
|
||||
GlUtil.createFloatBuffer(new float[] {
|
||||
0.0f, 0.0f, // Bottom left.
|
||||
1.0f, 0.0f, // Bottom right.
|
||||
0.0f, 1.0f, // Top left.
|
||||
1.0f, 1.0f, // Top right.
|
||||
});
|
||||
|
||||
static String createFragmentShaderString(String genericFragmentSource, ShaderType shaderType) {
|
||||
final StringBuilder stringBuilder = new StringBuilder();
|
||||
if (shaderType == ShaderType.OES) {
|
||||
stringBuilder.append("#extension GL_OES_EGL_image_external : require\n");
|
||||
}
|
||||
stringBuilder.append("precision mediump float;\n");
|
||||
stringBuilder.append("varying vec2 tc;\n");
|
||||
|
||||
if (shaderType == ShaderType.YUV) {
|
||||
stringBuilder.append("uniform sampler2D y_tex;\n");
|
||||
stringBuilder.append("uniform sampler2D u_tex;\n");
|
||||
stringBuilder.append("uniform sampler2D v_tex;\n");
|
||||
|
||||
// Add separate function for sampling texture.
|
||||
// yuv_to_rgb_mat is inverse of the matrix defined in YuvConverter.
|
||||
stringBuilder.append("vec4 sample(vec2 p) {\n");
|
||||
stringBuilder.append(" float y = texture2D(y_tex, p).r * 1.16438;\n");
|
||||
stringBuilder.append(" float u = texture2D(u_tex, p).r;\n");
|
||||
stringBuilder.append(" float v = texture2D(v_tex, p).r;\n");
|
||||
stringBuilder.append(" return vec4(y + 1.59603 * v - 0.874202,\n");
|
||||
stringBuilder.append(" y - 0.391762 * u - 0.812968 * v + 0.531668,\n");
|
||||
stringBuilder.append(" y + 2.01723 * u - 1.08563, 1);\n");
|
||||
stringBuilder.append("}\n");
|
||||
stringBuilder.append(genericFragmentSource);
|
||||
} else {
|
||||
final String samplerName = shaderType == ShaderType.OES ? "samplerExternalOES" : "sampler2D";
|
||||
stringBuilder.append("uniform ").append(samplerName).append(" tex;\n");
|
||||
|
||||
// Update the sampling function in-place.
|
||||
stringBuilder.append(genericFragmentSource.replace("sample(", "texture2D(tex, "));
|
||||
}
|
||||
|
||||
return stringBuilder.toString();
|
||||
}
|
||||
|
||||
private final String genericFragmentSource;
|
||||
private final String vertexShader;
|
||||
private final ShaderCallbacks shaderCallbacks;
|
||||
@Nullable private ShaderType currentShaderType;
|
||||
@Nullable private GlShader currentShader;
|
||||
private int inPosLocation;
|
||||
private int inTcLocation;
|
||||
private int texMatrixLocation;
|
||||
|
||||
public GlGenericDrawer(String genericFragmentSource, ShaderCallbacks shaderCallbacks) {
|
||||
this(DEFAULT_VERTEX_SHADER_STRING, genericFragmentSource, shaderCallbacks);
|
||||
}
|
||||
|
||||
public GlGenericDrawer(
|
||||
String vertexShader, String genericFragmentSource, ShaderCallbacks shaderCallbacks) {
|
||||
this.vertexShader = vertexShader;
|
||||
this.genericFragmentSource = genericFragmentSource;
|
||||
this.shaderCallbacks = shaderCallbacks;
|
||||
}
|
||||
|
||||
// Visible for testing.
|
||||
GlShader createShader(ShaderType shaderType) {
|
||||
return new GlShader(
|
||||
vertexShader, createFragmentShaderString(genericFragmentSource, shaderType));
|
||||
}
|
||||
|
||||
/**
|
||||
* Draw an OES texture frame with specified texture transformation matrix. Required resources are
|
||||
* allocated at the first call to this function.
|
||||
*/
|
||||
@Override
|
||||
public void drawOes(int oesTextureId, float[] texMatrix, int frameWidth, int frameHeight,
|
||||
int viewportX, int viewportY, int viewportWidth, int viewportHeight) {
|
||||
prepareShader(
|
||||
ShaderType.OES, texMatrix, frameWidth, frameHeight, viewportWidth, viewportHeight);
|
||||
// Bind the texture.
|
||||
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
|
||||
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, oesTextureId);
|
||||
// Draw the texture.
|
||||
GLES20.glViewport(viewportX, viewportY, viewportWidth, viewportHeight);
|
||||
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
|
||||
// Unbind the texture as a precaution.
|
||||
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0);
|
||||
}
|
||||
|
||||
/**
|
||||
* Draw a RGB(A) texture frame with specified texture transformation matrix. Required resources
|
||||
* are allocated at the first call to this function.
|
||||
*/
|
||||
@Override
|
||||
public void drawRgb(int textureId, float[] texMatrix, int frameWidth, int frameHeight,
|
||||
int viewportX, int viewportY, int viewportWidth, int viewportHeight) {
|
||||
prepareShader(
|
||||
ShaderType.RGB, texMatrix, frameWidth, frameHeight, viewportWidth, viewportHeight);
|
||||
// Bind the texture.
|
||||
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
|
||||
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId);
|
||||
// Draw the texture.
|
||||
GLES20.glViewport(viewportX, viewportY, viewportWidth, viewportHeight);
|
||||
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
|
||||
// Unbind the texture as a precaution.
|
||||
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
|
||||
}
|
||||
|
||||
/**
|
||||
* Draw a YUV frame with specified texture transformation matrix. Required resources are allocated
|
||||
* at the first call to this function.
|
||||
*/
|
||||
@Override
|
||||
public void drawYuv(int[] yuvTextures, float[] texMatrix, int frameWidth, int frameHeight,
|
||||
int viewportX, int viewportY, int viewportWidth, int viewportHeight) {
|
||||
prepareShader(
|
||||
ShaderType.YUV, texMatrix, frameWidth, frameHeight, viewportWidth, viewportHeight);
|
||||
// Bind the textures.
|
||||
for (int i = 0; i < 3; ++i) {
|
||||
GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
|
||||
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, yuvTextures[i]);
|
||||
}
|
||||
// Draw the textures.
|
||||
GLES20.glViewport(viewportX, viewportY, viewportWidth, viewportHeight);
|
||||
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
|
||||
// Unbind the textures as a precaution.
|
||||
for (int i = 0; i < 3; ++i) {
|
||||
GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
|
||||
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
|
||||
}
|
||||
}
|
||||
|
||||
private void prepareShader(ShaderType shaderType, float[] texMatrix, int frameWidth,
|
||||
int frameHeight, int viewportWidth, int viewportHeight) {
|
||||
final GlShader shader;
|
||||
if (shaderType.equals(currentShaderType)) {
|
||||
// Same shader type as before, reuse exising shader.
|
||||
shader = currentShader;
|
||||
} else {
|
||||
// Allocate new shader.
|
||||
currentShaderType = shaderType;
|
||||
if (currentShader != null) {
|
||||
currentShader.release();
|
||||
}
|
||||
shader = createShader(shaderType);
|
||||
currentShader = shader;
|
||||
|
||||
shader.useProgram();
|
||||
// Set input texture units.
|
||||
if (shaderType == ShaderType.YUV) {
|
||||
GLES20.glUniform1i(shader.getUniformLocation("y_tex"), 0);
|
||||
GLES20.glUniform1i(shader.getUniformLocation("u_tex"), 1);
|
||||
GLES20.glUniform1i(shader.getUniformLocation("v_tex"), 2);
|
||||
} else {
|
||||
GLES20.glUniform1i(shader.getUniformLocation("tex"), 0);
|
||||
}
|
||||
|
||||
GlUtil.checkNoGLES2Error("Create shader");
|
||||
shaderCallbacks.onNewShader(shader);
|
||||
texMatrixLocation = shader.getUniformLocation(TEXTURE_MATRIX_NAME);
|
||||
inPosLocation = shader.getAttribLocation(INPUT_VERTEX_COORDINATE_NAME);
|
||||
inTcLocation = shader.getAttribLocation(INPUT_TEXTURE_COORDINATE_NAME);
|
||||
}
|
||||
|
||||
shader.useProgram();
|
||||
|
||||
// Upload the vertex coordinates.
|
||||
GLES20.glEnableVertexAttribArray(inPosLocation);
|
||||
GLES20.glVertexAttribPointer(inPosLocation, /* size= */ 2,
|
||||
/* type= */ GLES20.GL_FLOAT, /* normalized= */ false, /* stride= */ 0,
|
||||
FULL_RECTANGLE_BUFFER);
|
||||
|
||||
// Upload the texture coordinates.
|
||||
GLES20.glEnableVertexAttribArray(inTcLocation);
|
||||
GLES20.glVertexAttribPointer(inTcLocation, /* size= */ 2,
|
||||
/* type= */ GLES20.GL_FLOAT, /* normalized= */ false, /* stride= */ 0,
|
||||
FULL_RECTANGLE_TEXTURE_BUFFER);
|
||||
|
||||
// Upload the texture transformation matrix.
|
||||
GLES20.glUniformMatrix4fv(
|
||||
texMatrixLocation, 1 /* count= */, false /* transpose= */, texMatrix, 0 /* offset= */);
|
||||
|
||||
// Do custom per-frame shader preparation.
|
||||
shaderCallbacks.onPrepareShader(
|
||||
shader, texMatrix, frameWidth, frameHeight, viewportWidth, viewportHeight);
|
||||
GlUtil.checkNoGLES2Error("Prepare shader");
|
||||
}
|
||||
|
||||
/**
|
||||
* Release all GLES resources. This needs to be done manually, otherwise the resources are leaked.
|
||||
*/
|
||||
@Override
|
||||
public void release() {
|
||||
if (currentShader != null) {
|
||||
currentShader.release();
|
||||
currentShader = null;
|
||||
currentShaderType = null;
|
||||
}
|
||||
}
|
||||
}
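
Usage illustration (not part of the imported sources): a minimal sketch of a custom drawer built on GlGenericDrawer, assuming it lives in the org.webrtc package since the class is package-private. The fragment source declares only its own uniform and main(), reads pixels through sample(tc), and uses the per-frame ShaderCallbacks hook to set the uniform; TintDrawerFactory, createRedTintDrawer, and tint_strength are hypothetical names.

package org.webrtc;

import android.opengl.GLES20;

/** Hypothetical example class; not part of the upstream import. */
class TintDrawerFactory {
  // Generic fragment source: declare custom uniforms and main() only, and read pixels via
  // sample(tc); GlGenericDrawer generates the OES/RGB/YUV preamble around it.
  private static final String TINT_FRAGMENT_SHADER = "uniform float tint_strength;\n"
      + "void main() {\n"
      + "  vec4 color = sample(tc);\n"
      + "  gl_FragColor = vec4(mix(color.rgb, vec3(1.0, 0.0, 0.0), tint_strength), color.a);\n"
      + "}\n";

  static RendererCommon.GlDrawer createRedTintDrawer(final float tintStrength) {
    return new GlGenericDrawer(TINT_FRAGMENT_SHADER, new GlGenericDrawer.ShaderCallbacks() {
      @Override
      public void onNewShader(GlShader shader) {
        // One-time initialization hook; nothing to set up for this shader.
      }

      @Override
      public void onPrepareShader(GlShader shader, float[] texMatrix, int frameWidth,
          int frameHeight, int viewportWidth, int viewportHeight) {
        // Per-frame hook: update the custom uniform before the frame is drawn.
        GLES20.glUniform1f(shader.getUniformLocation("tint_strength"), tintStrength);
      }
    });
  }
}

The returned drawer compiles one shader per input type on first use, reuses it for subsequent frames, and can be handed to any renderer that accepts a RendererCommon.GlDrawer.
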
@@ -0,0 +1,44 @@
/*
 * Copyright 2016 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

package org.webrtc;

/**
 * Class for holding the native pointer of a histogram. Since there is no way to destroy a
 * histogram, please don't create unnecessary instances of this object. This class is thread safe.
 *
 * Usage example:
 * private static final Histogram someMetricHistogram =
 *     Histogram.createCounts("WebRTC.Video.SomeMetric", 1, 10000, 50);
 * someMetricHistogram.addSample(someVariable);
 */
class Histogram {
  private final long handle;

  private Histogram(long handle) {
    this.handle = handle;
  }

  public static Histogram createCounts(String name, int min, int max, int bucketCount) {
    return new Histogram(nativeCreateCounts(name, min, max, bucketCount));
  }

  public static Histogram createEnumeration(String name, int max) {
    return new Histogram(nativeCreateEnumeration(name, max));
  }

  public void addSample(int sample) {
    nativeAddSample(handle, sample);
  }

  private static native long nativeCreateCounts(String name, int min, int max, int bucketCount);
  private static native long nativeCreateEnumeration(String name, int max);
  private static native void nativeAddSample(long handle, int sample);
}
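
Complementing the counts example in the javadoc above, a hedged sketch of the enumeration variant; the metric name and upper bound are made up, the class must sit in the org.webrtc package (Histogram is package-private), and the WebRTC native library must already be loaded for the native calls to resolve.

package org.webrtc;

/** Hypothetical example; the metric name and bound are illustrative only. */
class ExampleMetrics {
  private static final Histogram cameraKindHistogram =
      Histogram.createEnumeration("WebRTC.Android.Camera.ExampleKind", 4);

  static void reportCameraKind(int kind) {
    // Samples are plain ints; reuse the shared instance rather than creating new histograms.
    cameraKindHistogram.addSample(kind);
  }
}
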
@@ -0,0 +1,23 @@
/*
 * Copyright 2017 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

package org.webrtc;

import java.nio.ByteBuffer;

/** Class with static JNI helper functions that are used in many places. */
public class JniCommon {
  /** Functions to increment/decrement an rtc::RefCountInterface pointer. */
  public static native void nativeAddRef(long refCountedPointer);
  public static native void nativeReleaseRef(long refCountedPointer);

  public static native ByteBuffer nativeAllocateByteBuffer(int size);
  public static native void nativeFreeByteBuffer(ByteBuffer buffer);
}
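
A small usage sketch of the byte-buffer helpers, assuming the WebRTC native library is already loaded; the class name and the 64 KiB size are illustrative. The allocation call returns a direct ByteBuffer backed by native memory, so the sketch pairs it with the matching free call.

import java.nio.ByteBuffer;
import org.webrtc.JniCommon;

public class NativeBufferExample {
  public static void main(String[] args) {
    // Allocate a natively backed direct buffer through the JNI helper.
    ByteBuffer buffer = JniCommon.nativeAllocateByteBuffer(64 * 1024);
    try {
      buffer.putInt(0, 42); // Use it like any other ByteBuffer.
    } finally {
      // Release the native memory with the matching free call.
      JniCommon.nativeFreeByteBuffer(buffer);
    }
  }
}
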
@@ -0,0 +1,69 @@
/*
 * Copyright 2017 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

package org.webrtc;

import android.support.annotation.Nullable;
import java.nio.ByteBuffer;

public class NV21Buffer implements VideoFrame.Buffer {
  private final byte[] data;
  private final int width;
  private final int height;
  private final RefCountDelegate refCountDelegate;

  public NV21Buffer(byte[] data, int width, int height, @Nullable Runnable releaseCallback) {
    this.data = data;
    this.width = width;
    this.height = height;
    this.refCountDelegate = new RefCountDelegate(releaseCallback);
  }

  @Override
  public int getWidth() {
    return width;
  }

  @Override
  public int getHeight() {
    return height;
  }

  @Override
  public VideoFrame.I420Buffer toI420() {
    // Cropping converts the frame to I420. Just crop and scale to the whole image.
    return (VideoFrame.I420Buffer) cropAndScale(0 /* cropX */, 0 /* cropY */, width /* cropWidth */,
        height /* cropHeight */, width /* scaleWidth */, height /* scaleHeight */);
  }

  @Override
  public void retain() {
    refCountDelegate.retain();
  }

  @Override
  public void release() {
    refCountDelegate.release();
  }

  @Override
  public VideoFrame.Buffer cropAndScale(
      int cropX, int cropY, int cropWidth, int cropHeight, int scaleWidth, int scaleHeight) {
    JavaI420Buffer newBuffer = JavaI420Buffer.allocate(scaleWidth, scaleHeight);
    nativeCropAndScale(cropX, cropY, cropWidth, cropHeight, scaleWidth, scaleHeight, data, width,
        height, newBuffer.getDataY(), newBuffer.getStrideY(), newBuffer.getDataU(),
        newBuffer.getStrideU(), newBuffer.getDataV(), newBuffer.getStrideV());
    return newBuffer;
  }

  private static native void nativeCropAndScale(int cropX, int cropY, int cropWidth, int cropHeight,
      int scaleWidth, int scaleHeight, byte[] src, int srcWidth, int srcHeight, ByteBuffer dstY,
      int dstStrideY, ByteBuffer dstU, int dstStrideU, ByteBuffer dstV, int dstStrideV);
}
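
A hedged sketch of how a Camera1 preview frame might be wrapped without copying (android.hardware.Camera delivers NV21 by default); the helper class and method names are hypothetical, and the release callback simply returns the byte array to the camera's buffer queue for reuse.

import org.webrtc.NV21Buffer;
import org.webrtc.VideoFrame;

@SuppressWarnings("deprecation")
public class Nv21WrapExample {
  /** Wraps a preview frame without copying; the array is recycled once the buffer is released. */
  public static VideoFrame.Buffer wrapPreviewFrame(
      final android.hardware.Camera camera, final byte[] nv21Data, int width, int height) {
    return new NV21Buffer(nv21Data, width, height, () -> camera.addCallbackBuffer(nv21Data));
  }
}
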
@@ -0,0 +1,48 @@
/*
 * Copyright 2018 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

package org.webrtc;

import android.support.annotation.Nullable;
import java.util.concurrent.atomic.AtomicInteger;

/**
 * Implementation of RefCounted that executes a Runnable once the ref count reaches zero.
 */
class RefCountDelegate implements RefCounted {
  private final AtomicInteger refCount = new AtomicInteger(1);
  private final @Nullable Runnable releaseCallback;

  /**
   * @param releaseCallback Callback that will be executed once the ref count reaches zero.
   */
  public RefCountDelegate(@Nullable Runnable releaseCallback) {
    this.releaseCallback = releaseCallback;
  }

  @Override
  public void retain() {
    int updatedCount = refCount.incrementAndGet();
    if (updatedCount < 2) {
      throw new IllegalStateException("retain() called on an object with refcount < 1");
    }
  }

  @Override
  public void release() {
    int updatedCount = refCount.decrementAndGet();
    if (updatedCount < 0) {
      throw new IllegalStateException("release() called on an object with refcount < 1");
    }
    if (updatedCount == 0 && releaseCallback != null) {
      releaseCallback.run();
    }
  }
}
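
A minimal sketch of the intended usage pattern, placed in the org.webrtc package because RefCountDelegate is package-private; NativeHandleWrapper and nativeFree are illustrative stand-ins for whatever resource a caller wants freed when the last reference goes away.

package org.webrtc;

/** Hypothetical example; nativeFree is illustrative, not a real JNI binding. */
class NativeHandleWrapper implements RefCounted {
  private final RefCountDelegate refCountDelegate;

  NativeHandleWrapper(final long nativeHandle) {
    // Starts with a ref count of 1; the callback runs exactly once, when the count drops to zero.
    this.refCountDelegate = new RefCountDelegate(() -> nativeFree(nativeHandle));
  }

  @Override
  public void retain() {
    refCountDelegate.retain();
  }

  @Override
  public void release() {
    refCountDelegate.release();
  }

  private static native void nativeFree(long handle);
}
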