Bug 987979: Patch 7 - Remove JSON/UCI requirements for Camera capture capability. r=blassey

This commit is contained in:
Randell Jesup 2014-05-29 17:05:15 -04:00
Родитель 66ce6ff1ad
Коммит 4812c3eb03
5 изменённых файлов: 176 добавлений и 86 удалений

Просмотреть файл

@@ -20,7 +20,6 @@
'../testing/gmock.gyp:*',
'../testing/gtest.gyp:*',
'../third_party/bzip2/bzip2.gyp:*',
'../third_party/icu/icu.gyp:*',
'../third_party/libxml/libxml.gyp:*',
'../third_party/sqlite/sqlite.gyp:*',
'../third_party/zlib/zlib.gyp:*',

Просмотреть файл

@@ -11,11 +11,10 @@
#include "webrtc/modules/video_capture/android/device_info_android.h"
#include <algorithm>
#include <string>
#include <sstream>
#include <vector>
#include "json/json.h"
#include "third_party/icu/source/common/unicode/unistr.h"
#include "webrtc/modules/video_capture/android/video_capture_android.h"
#include "webrtc/system_wrappers/interface/logging.h"
#include "webrtc/system_wrappers/interface/ref_count.h"
@@ -96,41 +95,67 @@ void DeviceInfoAndroid::Initialize(JNIEnv* jni) {
jni->FindClass("org/webrtc/videoengine/VideoCaptureDeviceInfoAndroid");
assert(j_info_class);
jmethodID j_initialize = jni->GetStaticMethodID(
j_info_class, "getDeviceInfo", "()Ljava/lang/String;");
jstring j_json_info = static_cast<jstring>(
jni->CallStaticObjectMethod(j_info_class, j_initialize));
j_info_class, "getDeviceInfo",
"()[Lorg/webrtc/videoengine/CaptureCapabilityAndroid;");
jarray j_camera_caps = static_cast<jarray>(
jni->CallStaticObjectMethod(j_info_class, j_initialize));
const jchar* jchars = jni->GetStringChars(j_json_info, NULL);
icu::UnicodeString ustr(jchars, jni->GetStringLength(j_json_info));
jni->ReleaseStringChars(j_json_info, jchars);
std::string json_info;
ustr.toUTF8String(json_info);
const jsize capLength = jni->GetArrayLength(j_camera_caps);
Json::Value cameras;
Json::Reader reader(Json::Features::strictMode());
bool parsed = reader.parse(json_info, cameras);
if (!parsed) {
std::stringstream stream;
stream << "Failed to parse configuration:\n"
<< reader.getFormattedErrorMessages();
assert(false);
jfieldID widthField = jni->GetFieldID(j_cap_class, "width", "[I");
jfieldID heightField = jni->GetFieldID(j_cap_class, "height", "[I");
jfieldID maxFpsField = jni->GetFieldID(j_cap_class, "maxMilliFPS", "I");
jfieldID minFpsField = jni->GetFieldID(j_cap_class, "minMilliFPS", "I");
jfieldID orientationField = jni->GetFieldID(j_cap_class, "orientation", "I");
jfieldID frontFacingField = jni->GetFieldID(j_cap_class, "frontFacing", "Z");
jfieldID nameField =
jni->GetFieldID(j_cap_class, "name", "Ljava/lang/String;");
if (widthField == NULL
|| heightField == NULL
|| maxFpsField == NULL
|| minFpsField == NULL
|| orientationField == NULL
|| frontFacingField == NULL
|| nameField == NULL) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
"%s: Failed to get field Id.", __FUNCTION__);
return;
}
for (Json::ArrayIndex i = 0; i < cameras.size(); ++i) {
const Json::Value& camera = cameras[i];
for (jsize i = 0; i < capLength; i++) {
jobject capabilityElement = jni->GetObjectArrayElement(
(jobjectArray) j_camera_caps,
i);
AndroidCameraInfo info;
info.name = camera["name"].asString();
info.min_mfps = camera["min_mfps"].asInt();
info.max_mfps = camera["max_mfps"].asInt();
info.front_facing = camera["front_facing"].asBool();
info.orientation = camera["orientation"].asInt();
Json::Value sizes = camera["sizes"];
for (Json::ArrayIndex j = 0; j < sizes.size(); ++j) {
const Json::Value& size = sizes[j];
info.resolutions.push_back(std::make_pair(
size["width"].asInt(), size["height"].asInt()));
jstring camName = static_cast<jstring>(jni->GetObjectField(capabilityElement,
nameField));
const char* camChars = jni->GetStringUTFChars(camName, nullptr);
info.name = std::string(camChars);
jni->ReleaseStringUTFChars(camName, camChars);
info.min_mfps = jni->GetIntField(capabilityElement, minFpsField);
info.max_mfps = jni->GetIntField(capabilityElement, maxFpsField);
info.orientation = jni->GetIntField(capabilityElement, orientationField);
info.front_facing = jni->GetBooleanField(capabilityElement, frontFacingField);
jintArray widthResArray =
static_cast<jintArray>(jni->GetObjectField(capabilityElement, widthField));
jintArray heightResArray =
static_cast<jintArray>(jni->GetObjectField(capabilityElement, heightField));
const jsize numRes = jni->GetArrayLength(widthResArray);
jint *widths = jni->GetIntArrayElements(widthResArray, nullptr);
jint *heights = jni->GetIntArrayElements(heightResArray, nullptr);
for (jsize j = 0; j < numRes; ++j) {
info.resolutions.push_back(std::make_pair(widths[j], heights[j]));
}
g_camera_info->push_back(info);
jni->ReleaseIntArrayElements(widthResArray, widths, JNI_ABORT);
jni->ReleaseIntArrayElements(heightResArray, heights, JNI_ABORT);
}
}

Просмотреть файл

@@ -0,0 +1,24 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc.videoengine;
import org.mozilla.gecko.mozglue.WebRTCJNITarget;
/**
 * Plain data holder describing one camera device and its supported capture
 * capabilities. Instances are built in Java by
 * VideoCaptureDeviceInfoAndroid.getDeviceInfo() and read field-by-field from
 * C++ over JNI (via GetFieldID lookups in DeviceInfoAndroid::Initialize), so
 * the field names and types declared here must stay in sync with the native
 * side.
 */
@WebRTCJNITarget
public class CaptureCapabilityAndroid {
// Unique device name, e.g. "Camera 0, Facing back, Orientation 90".
public String name;
// Parallel arrays: (width[i], height[i]) is one supported preview size.
public int width[];
public int height[];
// Frame-rate range in integral "milliframes per second" (fps * 1000),
// matching the Android SDK's milli-fps convention.
public int minMilliFPS;
public int maxMilliFPS;
// True when the camera faces the user (front camera).
public boolean frontFacing;
// Camera mounting orientation as reported by Camera.CameraInfo
// (presumably degrees — confirm against android.hardware.Camera docs).
public int orientation;
}

Просмотреть файл

@@ -24,9 +24,7 @@ import android.hardware.Camera.Size;
import android.hardware.Camera;
import android.util.Log;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import org.mozilla.gecko.mozglue.WebRTCJNITarget;
public class VideoCaptureDeviceInfoAndroid {
private final static String TAG = "WEBRTC-JC";
@@ -41,55 +39,103 @@ public class VideoCaptureDeviceInfoAndroid {
", Orientation "+ info.orientation;
}
// Returns information about all cameras on the device as a serialized JSON
// array of dictionaries encoding information about a single device. Since
// this reflects static information about the hardware present, there is no
// need to call this function more than once in a single process. It is
// Returns information about all cameras on the device.
// Since this reflects static information about the hardware present, there is
// no need to call this function more than once in a single process. It is
// marked "private" as it is only called by native code.
private static String getDeviceInfo() {
try {
JSONArray devices = new JSONArray();
for (int i = 0; i < Camera.getNumberOfCameras(); ++i) {
CameraInfo info = new CameraInfo();
Camera.getCameraInfo(i, info);
String uniqueName = deviceUniqueName(i, info);
JSONObject cameraDict = new JSONObject();
devices.put(cameraDict);
List<Size> supportedSizes;
List<int[]> supportedFpsRanges;
try {
Camera camera = Camera.open(i);
Parameters parameters = camera.getParameters();
supportedSizes = parameters.getSupportedPreviewSizes();
supportedFpsRanges = parameters.getSupportedPreviewFpsRange();
camera.release();
Log.d(TAG, uniqueName);
} catch (RuntimeException e) {
Log.e(TAG, "Failed to open " + uniqueName + ", skipping");
continue;
}
JSONArray sizes = new JSONArray();
for (Size supportedSize : supportedSizes) {
JSONObject size = new JSONObject();
size.put("width", supportedSize.width);
size.put("height", supportedSize.height);
sizes.put(size);
}
// Android SDK deals in integral "milliframes per second"
// (i.e. fps*1000, instead of floating-point frames-per-second) so we
// preserve that through the Java->C++->Java round-trip.
int[] mfps = supportedFpsRanges.get(supportedFpsRanges.size() - 1);
cameraDict.put("name", uniqueName);
cameraDict.put("front_facing", isFrontFacing(info))
.put("orientation", info.orientation)
.put("sizes", sizes)
.put("min_mfps", mfps[Parameters.PREVIEW_FPS_MIN_INDEX])
.put("max_mfps", mfps[Parameters.PREVIEW_FPS_MAX_INDEX]);
@WebRTCJNITarget
private static CaptureCapabilityAndroid[] getDeviceInfo() {
ArrayList<CaptureCapabilityAndroid> allDevices = new ArrayList<CaptureCapabilityAndroid>();
int numCameras = 1;
if (android.os.Build.VERSION.SDK_INT >= 9) {
numCameras = Camera.getNumberOfCameras();
}
String ret = devices.toString(2);
return ret;
} catch (JSONException e) {
throw new RuntimeException(e);
}
for (int i = 0; i < numCameras; ++i) {
String uniqueName = null;
CameraInfo info = null;
if (android.os.Build.VERSION.SDK_INT >= 9) {
info = new CameraInfo();
Camera.getCameraInfo(i, info);
uniqueName = deviceUniqueName(i, info);
} else {
uniqueName = "Camera 0, Facing back, Orientation 90";
}
List<Size> supportedSizes = null;
List<int[]> supportedFpsRanges = null;
try {
Camera camera = null;
if (android.os.Build.VERSION.SDK_INT >= 9) {
camera = Camera.open(i);
} else {
camera = Camera.open();
}
Parameters parameters = camera.getParameters();
supportedSizes = parameters.getSupportedPreviewSizes();
if (android.os.Build.VERSION.SDK_INT >= 9) {
supportedFpsRanges = parameters.getSupportedPreviewFpsRange();
}
// getSupportedPreviewFpsRange doesn't actually work on a bunch
// of Gingerbread devices.
if (supportedFpsRanges == null) {
supportedFpsRanges = new ArrayList<int[]>();
List<Integer> frameRates = parameters.getSupportedPreviewFrameRates();
if (frameRates != null) {
for (Integer rate: frameRates) {
int[] range = new int[2];
// minFPS = maxFPS, convert to milliFPS
range[0] = rate * 1000;
range[1] = rate * 1000;
supportedFpsRanges.add(range);
}
} else {
Log.e(TAG, "Camera doesn't know its own framerate, guessing 25fps.");
int[] range = new int[2];
// Your guess is as good as mine
range[0] = 25 * 1000;
range[1] = 25 * 1000;
supportedFpsRanges.add(range);
}
}
camera.release();
Log.d(TAG, uniqueName);
} catch (RuntimeException e) {
Log.e(TAG, "Failed to open " + uniqueName + ", skipping due to: "
+ e.getLocalizedMessage());
continue;
}
CaptureCapabilityAndroid device = new CaptureCapabilityAndroid();
int sizeLen = supportedSizes.size();
device.width = new int[sizeLen];
device.height = new int[sizeLen];
int j = 0;
for (Size size : supportedSizes) {
device.width[j] = size.width;
device.height[j] = size.height;
j++;
}
// Android SDK deals in integral "milliframes per second"
// (i.e. fps*1000, instead of floating-point frames-per-second) so we
// preserve that through the Java->C++->Java round-trip.
int[] mfps = supportedFpsRanges.get(supportedFpsRanges.size() - 1);
device.name = uniqueName;
if (android.os.Build.VERSION.SDK_INT >= 9) {
device.frontFacing = isFrontFacing(info);
device.orientation = info.orientation;
device.minMilliFPS = mfps[Parameters.PREVIEW_FPS_MIN_INDEX];
device.maxMilliFPS = mfps[Parameters.PREVIEW_FPS_MAX_INDEX];
} else {
device.frontFacing = false;
device.orientation = 90;
device.minMilliFPS = mfps[0];
device.maxMilliFPS = mfps[1];
}
allDevices.add(device);
}
return allDevices.toArray(new CaptureCapabilityAndroid[0]);
}
}

Просмотреть файл

@@ -102,10 +102,6 @@
},
}], # win
['OS=="android"', {
'dependencies': [
'<(DEPTH)/third_party/icu/icu.gyp:icuuc',
'<(DEPTH)/third_party/jsoncpp/jsoncpp.gyp:jsoncpp',
],
'sources': [
'android/device_info_android.cc',
'android/device_info_android.h',