Initial commit moving Face to new repo

This commit is contained in:
Margaret Maynard-Reid 2016-06-08 15:53:26 -07:00
Родитель 4f6d68670f
Коммит 35430b4454
151 изменённых файлов: 11274 добавлений и 2 удалений

18
.gitignore поставляемый Normal file
Просмотреть файл

@ -0,0 +1,18 @@
# Generated files
bin/
gen/
out/
build/
# OS specific files
.DS_Store
# IntelliJ / Android studio configuration files
.idea/
*.iml
# Gradle generated files
.gradle/
# Local config (e.g. sdk path)
local.properties

6
ClientLibrary/.gitignore поставляемый Normal file
Просмотреть файл

@ -0,0 +1,6 @@
.gradle
/local.properties
/.idea/workspace.xml
/.idea/libraries
.DS_Store
/build

Просмотреть файл

@ -0,0 +1,19 @@
// Top-level build file where you can add configuration options common to all sub-projects/modules.
buildscript {
    // Repository used to resolve the build plugins themselves.
    // NOTE(review): jcenter() has since been sunset; consider mavenCentral() when upgrading.
    repositories {
        jcenter()
    }
    dependencies {
        // Android Gradle plugin that provides the 'com.android.library' plugin used by modules.
        classpath 'com.android.tools.build:gradle:1.3.0'
        // NOTE: Do not place your application dependencies here; they belong
        // in the individual module build.gradle files
    }
}
// Repositories used by every module for its own dependencies.
allprojects {
    repositories {
        jcenter()
    }
}

Просмотреть файл

@ -0,0 +1,29 @@
# Project-wide Gradle settings.
# IDE (e.g. Android Studio) users:
# Gradle settings configured through the IDE *will override*
# any settings specified in this file.
# For more details on how to configure your build environment visit
# http://www.gradle.org/docs/current/userguide/build_environment.html
# Specifies the JVM arguments used for the daemon process.
# The setting is particularly useful for tweaking memory settings.
# Default value: -Xmx10248m -XX:MaxPermSize=256m
# org.gradle.jvmargs=-Xmx2048m -XX:MaxPermSize=512m -XX:+HeapDumpOnOutOfMemoryError -Dfile.encoding=UTF-8
# When configured, Gradle will run in incubating parallel mode.
# This option should only be used with decoupled projects. More details, visit
# http://www.gradle.org/docs/current/userguide/multi_project_builds.html#sec:decoupled_projects
# org.gradle.parallel=true
#
# The following are used by Microsoft to upload Maven Package.
# You can ignore these in your sample use.
#
signing.keyId=
signing.password=
signing.secretKeyRingFile=
ossrhUsername=
ossrhPassword=

Двоичные данные
ClientLibrary/gradle/wrapper/gradle-wrapper.jar поставляемый Normal file

Двоичный файл не отображается.

6
ClientLibrary/gradle/wrapper/gradle-wrapper.properties поставляемый Normal file
Просмотреть файл

@ -0,0 +1,6 @@
#Wed Apr 10 15:27:10 PDT 2013
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-2.2.1-all.zip

164
ClientLibrary/gradlew поставляемый Normal file
Просмотреть файл

@ -0,0 +1,164 @@
#!/usr/bin/env bash
##############################################################################
##
## Gradle start up script for UN*X
##
##############################################################################
# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
DEFAULT_JVM_OPTS=""
APP_NAME="Gradle"
APP_BASE_NAME=`basename "$0"`
# Use the maximum available, or set MAX_FD != -1 to use that value.
MAX_FD="maximum"
# Print a warning message (all arguments, space-joined) to stdout.
warn ( ) {
echo "$*"
}
# Print an error message surrounded by blank lines and terminate the
# script with exit status 1.
die ( ) {
echo
echo "$*"
echo
exit 1
}
# OS specific support (must be 'true' or 'false').
cygwin=false
msys=false
darwin=false
case "`uname`" in
CYGWIN* )
cygwin=true
;;
Darwin* )
darwin=true
;;
MINGW* )
msys=true
;;
esac
# For Cygwin, ensure paths are in UNIX format before anything is touched.
if $cygwin ; then
[ -n "$JAVA_HOME" ] && JAVA_HOME=`cygpath --unix "$JAVA_HOME"`
fi
# Attempt to set APP_HOME
# Resolve links: $0 may be a link
PRG="$0"
# Need this for relative symlinks.
while [ -h "$PRG" ] ; do
ls=`ls -ld "$PRG"`
link=`expr "$ls" : '.*-> \(.*\)$'`
if expr "$link" : '/.*' > /dev/null; then
PRG="$link"
else
PRG=`dirname "$PRG"`"/$link"
fi
done
# Resolve APP_HOME to the physical directory containing this (possibly
# symlinked) script, then restore the caller's working directory.
SAVED="`pwd`"
# Redirect to /dev/null rather than closing stdout with >&- : some shells
# and commands fail when fd 1 is closed (matches the later upstream
# Gradle wrapper fix).
cd "`dirname \"$PRG\"`/" >/dev/null
APP_HOME="`pwd -P`"
cd "$SAVED" >/dev/null
CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
# Determine the Java command to use to start the JVM.
if [ -n "$JAVA_HOME" ] ; then
if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
# IBM's JDK on AIX uses strange locations for the executables
JAVACMD="$JAVA_HOME/jre/sh/java"
else
JAVACMD="$JAVA_HOME/bin/java"
fi
if [ ! -x "$JAVACMD" ] ; then
die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
else
JAVACMD="java"
which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
# Increase the maximum file descriptors if we can.
if [ "$cygwin" = "false" -a "$darwin" = "false" ] ; then
MAX_FD_LIMIT=`ulimit -H -n`
if [ $? -eq 0 ] ; then
if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
MAX_FD="$MAX_FD_LIMIT"
fi
ulimit -n $MAX_FD
if [ $? -ne 0 ] ; then
warn "Could not set maximum file descriptor limit: $MAX_FD"
fi
else
warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
fi
fi
# For Darwin, add options to specify how the application appears in the dock
if $darwin; then
GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
fi
# For Cygwin, switch paths to Windows format before running java
if $cygwin ; then
APP_HOME=`cygpath --path --mixed "$APP_HOME"`
CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
# We build the pattern for arguments to be converted via cygpath
ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
SEP=""
for dir in $ROOTDIRSRAW ; do
ROOTDIRS="$ROOTDIRS$SEP$dir"
SEP="|"
done
OURCYGPATTERN="(^($ROOTDIRS))"
# Add a user-defined pattern to the cygpath arguments
if [ "$GRADLE_CYGPATTERN" != "" ] ; then
OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
fi
# Now convert the arguments - kludge to limit ourselves to /bin/sh
i=0
for arg in "$@" ; do
CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option
if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
else
eval `echo args$i`="\"$arg\""
fi
i=$((i+1))
done
case $i in
(0) set -- ;;
(1) set -- "$args0" ;;
(2) set -- "$args0" "$args1" ;;
(3) set -- "$args0" "$args1" "$args2" ;;
(4) set -- "$args0" "$args1" "$args2" "$args3" ;;
(5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
(6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
(7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
(8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
(9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
esac
fi
# Split up the JVM_OPTS And GRADLE_OPTS values into an array, following the shell quoting and substitution rules
# Split the combined JVM option strings into the JVM_OPTS array following
# shell quoting/substitution rules; invoked via eval on the next line so
# quoted options containing spaces are preserved as single elements.
function splitJvmOpts() {
JVM_OPTS=("$@")
}
eval splitJvmOpts $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS
JVM_OPTS[${#JVM_OPTS[*]}]="-Dorg.gradle.appname=$APP_BASE_NAME"
exec "$JAVACMD" "${JVM_OPTS[@]}" -classpath "$CLASSPATH" org.gradle.wrapper.GradleWrapperMain "$@"

90
ClientLibrary/gradlew.bat поставляемый Normal file
Просмотреть файл

@ -0,0 +1,90 @@
@if "%DEBUG%" == "" @echo off
@rem ##########################################################################
@rem
@rem Gradle startup script for Windows
@rem
@rem ##########################################################################
@rem Set local scope for the variables with windows NT shell
if "%OS%"=="Windows_NT" setlocal
@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
set DEFAULT_JVM_OPTS=
set DIRNAME=%~dp0
if "%DIRNAME%" == "" set DIRNAME=.
set APP_BASE_NAME=%~n0
set APP_HOME=%DIRNAME%
@rem Find java.exe
if defined JAVA_HOME goto findJavaFromJavaHome
set JAVA_EXE=java.exe
%JAVA_EXE% -version >NUL 2>&1
if "%ERRORLEVEL%" == "0" goto init
echo.
echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.
goto fail
:findJavaFromJavaHome
set JAVA_HOME=%JAVA_HOME:"=%
set JAVA_EXE=%JAVA_HOME%/bin/java.exe
if exist "%JAVA_EXE%" goto init
echo.
echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.
goto fail
:init
@rem Get command-line arguments, handling Windows variants
if not "%OS%" == "Windows_NT" goto win9xME_args
if "%@eval[2+2]" == "4" goto 4NT_args
:win9xME_args
@rem Slurp the command line arguments.
set CMD_LINE_ARGS=
set _SKIP=2
:win9xME_args_slurp
if "x%~1" == "x" goto execute
set CMD_LINE_ARGS=%*
goto execute
:4NT_args
@rem Get arguments from the 4NT Shell from JP Software
set CMD_LINE_ARGS=%$
:execute
@rem Setup the command line
set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
@rem Execute Gradle
"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%
:end
@rem End local scope for the variables with windows NT shell
if "%ERRORLEVEL%"=="0" goto mainEnd
:fail
rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
rem the _cmd.exe /c_ return code!
if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
exit /b 1
:mainEnd
if "%OS%"=="Windows_NT" endlocal
:omega

1
ClientLibrary/lib/.gitignore поставляемый Normal file
Просмотреть файл

@ -0,0 +1 @@
/build

Просмотреть файл

@ -0,0 +1,82 @@
// Build script for the Face client library module: builds the Android
// library and configures signed publication to Maven Central (OSSRH).
apply plugin: 'com.android.library'
android {
    compileSdkVersion 21
    buildToolsVersion "21.1.2"
    defaultConfig {
        minSdkVersion 16
        targetSdkVersion 21
    }
}
dependencies {
    // Gson is used for JSON (de)serialization of the REST payloads.
    compile 'com.google.code.gson:gson:2.3.1'
}
// Maven deployment + GPG signing of the archives.
// Credentials/keys come from gradle.properties (signing.*, ossrhUsername, ossrhPassword).
apply plugin: 'maven'
apply plugin: 'signing'
signing {
    sign configurations.archives
}
// Group ID is the project name
group = "com.microsoft.projectoxford"
// Artifact name is the name of the technology
archivesBaseName = "face"
// Update your version
version = "1.1.0"
// Upload artifacts to maven central repository staging servers
uploadArchives {
    repositories {
        mavenDeployer {
            // Sign the generated POM immediately before deployment.
            beforeDeployment { MavenDeployment deployment -> signing.signPom(deployment) }
            repository(url: "https://oss.sonatype.org/service/local/staging/deploy/maven2/") {
                authentication(userName: ossrhUsername, password: ossrhPassword)
            }
            snapshotRepository(url: "https://oss.sonatype.org/content/repositories/snapshots/") {
                authentication(userName: ossrhUsername, password: ossrhPassword)
            }
            pom.project {
                // The readable name of the artifact
                name 'Microsoft Project Oxford Face Client Library'
                packaging 'jar'
                // optionally artifactId can be defined here
                // Descriptions of the artifacts.
                description 'This client library allows the use of Microsoft\'s state-of-the-art cloud-based face algorithms to detect and recognize human faces in images. See https://github.com/Microsoft/ProjectOxford-ClientSDK/tree/master/Face for more information.'
                // Project URL
                url 'https://github.com/Microsoft/ProjectOxford-ClientSDK'
                // Github information
                scm {
                    connection 'scm:git:https://github.com/Microsoft/ProjectOxford-ClientSDK'
                    developerConnection 'scm:git:https://github.com/Microsoft/ProjectOxford-ClientSDK'
                    url 'scm:git:https://github.com/Microsoft/ProjectOxford-ClientSDK'
                }
                licenses {
                    license {
                        name 'MIT'
                        url 'https://github.com/Microsoft/ProjectOxford-ClientSDK/blob/master/LICENSE.md'
                    }
                }
                developers {
                    developer {
                        id 'projectoxfordSDK'
                        name 'Project Oxford Client SDK'
                        email 'projectoxfordsdk@microsoft.com'
                    }
                }
            }
        }
    }
}

Просмотреть файл

@ -0,0 +1,7 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Manifest for the Face client library module; declares only the package
     name. NOTE(review): <library> is not a standard Android manifest
     element - confirm it is intentional for this (library) module. -->
<manifest package="com.microsoft.projectoxford.face">
    <library>
    </library>
</manifest>

Просмотреть файл

@ -0,0 +1,441 @@
//
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license.
//
// Microsoft Cognitive Services (formerly Project Oxford): https://www.microsoft.com/cognitive-services
//
// Microsoft Cognitive Services (formerly Project Oxford) GitHub:
// https://github.com/Microsoft/ProjectOxford-ClientSDK
//
// Copyright (c) Microsoft Corporation
// All rights reserved.
//
// MIT License:
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED ""AS IS"", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
package com.microsoft.projectoxford.face;
import com.microsoft.projectoxford.face.contract.AddPersistedFaceResult;
import com.microsoft.projectoxford.face.contract.CreatePersonResult;
import com.microsoft.projectoxford.face.contract.Face;
import com.microsoft.projectoxford.face.contract.FaceList;
import com.microsoft.projectoxford.face.contract.FaceListMetadata;
import com.microsoft.projectoxford.face.contract.FaceRectangle;
import com.microsoft.projectoxford.face.contract.Glasses;
import com.microsoft.projectoxford.face.contract.GroupResult;
import com.microsoft.projectoxford.face.contract.IdentifyResult;
import com.microsoft.projectoxford.face.contract.Person;
import com.microsoft.projectoxford.face.contract.PersonFace;
import com.microsoft.projectoxford.face.contract.PersonGroup;
import com.microsoft.projectoxford.face.contract.SimilarFace;
import com.microsoft.projectoxford.face.contract.SimilarPersistedFace;
import com.microsoft.projectoxford.face.contract.TrainingStatus;
import com.microsoft.projectoxford.face.contract.VerifyResult;
import com.microsoft.projectoxford.face.rest.ClientException;
import java.io.IOException;
import java.io.InputStream;
import java.util.Collection;
import java.util.UUID;
/**
 * Contract for clients of the Microsoft Cognitive Services (formerly Project
 * Oxford) Face API: detection, verification, identification, similarity
 * search, grouping, and person-group / person / face-list management.
 * All methods throw {@link ClientException} on service-side errors and
 * {@link IOException} on transport failures.
 */
public interface FaceServiceClient {
    /**
     * Supported face attribute types. Each constant's {@code toString()}
     * returns the exact attribute name expected by the REST API query string.
     */
    public enum FaceAttributeType
    {
        /**
         * Analyses age
         */
        Age {
            public String toString() {
                return "age";
            }
        },
        /**
         * Analyses gender
         */
        Gender {
            public String toString() {
                return "gender";
            }
        },
        /**
         * Analyses facial hair
         */
        FacialHair {
            public String toString() {
                return "facialHair";
            }
        },
        /**
         * Analyses whether is smiling
         */
        Smile {
            public String toString() {
                return "smile";
            }
        },
        /**
         * Analyses head pose
         */
        HeadPose {
            public String toString() {
                return "headPose";
            }
        },
        /**
         * Analyses glasses type
         */
        Glasses {
            public String toString() { return "glasses"; }
        }
    }
    /**
     * Detects faces in a URL image.
     * @param url URL of the image to analyze.
     * @param returnFaceId If set to <c>true</c> [return face ID].
     * @param returnFaceLandmarks If set to <c>true</c> [return face landmarks].
     * @param returnFaceAttributes Return face attributes.
     * @return detected faces.
     * @throws ClientException
     * @throws IOException
     */
    Face[] detect(String url, boolean returnFaceId, boolean returnFaceLandmarks, FaceAttributeType[] returnFaceAttributes) throws ClientException, IOException;
    /**
     * Detects faces in an uploaded image.
     * @param imageStream The image stream.
     * @param returnFaceId If set to <c>true</c> [return face ID].
     * @param returnFaceLandmarks If set to <c>true</c> [return face landmarks].
     * @param returnFaceAttributes Return face attributes.
     * @return detected faces.
     * @throws ClientException
     * @throws IOException
     */
    Face[] detect(InputStream imageStream, boolean returnFaceId, boolean returnFaceLandmarks, FaceAttributeType[] returnFaceAttributes) throws ClientException, IOException;
    /**
     * Verifies whether the specified two faces belong to the same person.
     * @param faceId1 The face id 1.
     * @param faceId2 The face id 2.
     * @return The verification result.
     * @throws ClientException
     * @throws IOException
     */
    VerifyResult verify(UUID faceId1, UUID faceId2) throws ClientException, IOException;
    /**
     * Identifies the faces in a given person group.
     * NOTE(review): the method name "identity" is a typo for "identify", but
     * it is part of the published interface and cannot be renamed without
     * breaking implementers and callers.
     * @param personGroupId The person group id.
     * @param faceIds The face ids.
     * @param maxNumOfCandidatesReturned The maximum number of candidates returned for each face.
     * @return The identification results.
     * @throws ClientException
     * @throws IOException
     */
    IdentifyResult[] identity(String personGroupId, UUID[] faceIds, int maxNumOfCandidatesReturned) throws ClientException, IOException;
    /**
     * Trains the person group.
     * @param personGroupId The person group id.
     * @throws ClientException
     * @throws IOException
     */
    void trainPersonGroup(String personGroupId) throws ClientException, IOException;
    /**
     * Gets person group training status.
     * @param personGroupId The person group id.
     * @return The person group training status.
     * @throws ClientException
     * @throws IOException
     */
    TrainingStatus getPersonGroupTrainingStatus(String personGroupId) throws ClientException, IOException;
    /**
     * Creates the person group.
     * @param personGroupId The person group identifier.
     * @param name The name.
     * @param userData The user data.
     * @throws ClientException
     * @throws IOException
     */
    void createPersonGroup(String personGroupId, String name, String userData) throws ClientException, IOException;
    /**
     * Deletes a person group.
     * @param personGroupId The person group id.
     * @throws ClientException
     * @throws IOException
     */
    void deletePersonGroup(String personGroupId) throws ClientException, IOException;
    /**
     * Updates a person group.
     * @param personGroupId The person group id.
     * @param name The name.
     * @param userData The user data.
     * @throws ClientException
     * @throws IOException
     */
    void updatePersonGroup(String personGroupId, String name, String userData) throws ClientException, IOException;
    /**
     * Gets a person group.
     * @param personGroupId The person group id.
     * @return The person group entity.
     * @throws ClientException
     * @throws IOException
     */
    PersonGroup getPersonGroup(String personGroupId) throws ClientException, IOException;
    /**
     * Gets all person groups.
     * @return Person group entity array.
     * @throws ClientException
     * @throws IOException
     */
    PersonGroup[] getPersonGroups() throws ClientException, IOException;
    /**
     * Creates a person.
     * @param personGroupId The person group id.
     * @param name The name.
     * @param userData The user data.
     * @return The CreatePersonResult entity.
     * @throws ClientException
     * @throws IOException
     */
    CreatePersonResult createPerson(String personGroupId, String name, String userData) throws ClientException, IOException;
    /**
     * Gets a person.
     * @param personGroupId The person group id.
     * @param personId The person id.
     * @return The person entity.
     * @throws ClientException
     * @throws IOException
     */
    Person getPerson(String personGroupId, UUID personId) throws ClientException, IOException;
    /**
     * Gets all persons inside a person group.
     * @param personGroupId The person group id.
     * @return The person entity array.
     * @throws ClientException
     * @throws IOException
     */
    Person[] getPersons(String personGroupId) throws ClientException, IOException;
    /**
     * Adds a face to a person.
     * @param personGroupId The person group id.
     * @param personId The person id.
     * @param url The face image URL.
     * @param userData The user data.
     * @param targetFace The target face.
     * @return Add person face result.
     * @throws ClientException
     * @throws IOException
     */
    AddPersistedFaceResult addPersonFace(String personGroupId, UUID personId, String url, String userData, FaceRectangle targetFace) throws ClientException, IOException;
    /**
     * Adds a face to a person.
     * @param personGroupId The person group id.
     * @param personId The person id.
     * @param imageStream The face image stream.
     * @param userData The user data.
     * @param targetFace The target face.
     * @return Add person face result.
     * @throws ClientException
     * @throws IOException
     */
    AddPersistedFaceResult addPersonFace(String personGroupId, UUID personId, InputStream imageStream, String userData, FaceRectangle targetFace) throws ClientException, IOException;
    /**
     * Gets a face of a person.
     * @param personGroupId The person group id.
     * @param personId The person id.
     * @param persistedFaceId The persisted face id.
     * @return The person face entity.
     * @throws ClientException
     * @throws IOException
     */
    PersonFace getPersonFace(String personGroupId, UUID personId, UUID persistedFaceId) throws ClientException, IOException;
    /**
     * Updates a face of a person.
     * @param personGroupId The person group id.
     * @param personId The person id.
     * @param persistedFaceId The persisted face id.
     * @param userData The user data to set on the face.
     * @throws ClientException
     * @throws IOException
     */
    void updatePersonFace(String personGroupId, UUID personId, UUID persistedFaceId, String userData) throws ClientException, IOException;
    /**
     * Updates a person.
     * @param personGroupId The person group id.
     * @param personId The person id.
     * @param name The name.
     * @param userData The user data.
     * @throws ClientException
     * @throws IOException
     */
    void updatePerson(String personGroupId, UUID personId, String name, String userData) throws ClientException, IOException;
    /**
     * Deletes a person.
     * @param personGroupId The person group id.
     * @param personId The person id.
     * @throws ClientException
     * @throws IOException
     */
    void deletePerson(String personGroupId, UUID personId) throws ClientException, IOException;
    /**
     * Deletes a face of a person.
     * @param personGroupId The person group id.
     * @param personId The person id.
     * @param persistedFaceId The persisted face id.
     * @throws ClientException
     * @throws IOException
     */
    void deletePersonFace(String personGroupId, UUID personId, UUID persistedFaceId) throws ClientException, IOException;
    /**
     * Finds the similar faces among a set of candidate face ids.
     * @param faceId The face identifier.
     * @param faceIds The candidate face identifiers.
     * @param maxNumOfCandidatesReturned The max number of candidates returned.
     * @return The similar faces.
     * @throws ClientException
     * @throws IOException
     */
    SimilarFace[] findSimilar(UUID faceId, UUID[] faceIds, int maxNumOfCandidatesReturned) throws ClientException, IOException;
    /**
     * Finds the similar faces within a face list.
     * @param faceId The face identifier.
     * @param faceListId The face list identifier.
     * @param maxNumOfCandidatesReturned The max number of candidates returned.
     * @return The similar persisted faces.
     * @throws ClientException
     * @throws IOException
     */
    SimilarPersistedFace[] findSimilar(UUID faceId, String faceListId, int maxNumOfCandidatesReturned) throws ClientException, IOException;
    /**
     * Groups the faces.
     * @param faceIds The face ids.
     * @return Group result.
     * @throws ClientException
     * @throws IOException
     */
    GroupResult group(UUID[] faceIds) throws ClientException, IOException;
    /**
     * Creates the face list.
     * @param faceListId The face list identifier.
     * @param name The name.
     * @param userData The user data.
     * @throws ClientException
     * @throws IOException
     */
    void createFaceList(String faceListId, String name, String userData) throws ClientException, IOException;
    /**
     * Gets the face list.
     * @param faceListId The face list identifier.
     * @return Face list object.
     * @throws ClientException
     * @throws IOException
     */
    FaceList getFaceList(String faceListId) throws ClientException, IOException;
    /**
     * Lists the face lists.
     * @return Face list metadata objects.
     * @throws ClientException
     * @throws IOException
     */
    FaceListMetadata[] listFaceLists() throws ClientException, IOException;
    /**
     * Updates the face list.
     * @param faceListId The face list identifier.
     * @param name The name.
     * @param userData The user data.
     * @throws ClientException
     * @throws IOException
     */
    void updateFaceList(String faceListId, String name, String userData) throws ClientException, IOException;
    /**
     * Deletes the face list.
     * @param faceListId The face list identifier.
     * @throws ClientException
     * @throws IOException
     */
    void deleteFaceList(String faceListId) throws ClientException, IOException;
    /**
     * Adds the face to face list.
     * @param faceListId The face list identifier.
     * @param url The face image URL.
     * @param userData The user data.
     * @param targetFace The target face.
     * @return Add face result.
     * @throws ClientException
     * @throws IOException
     */
    AddPersistedFaceResult addFacesToFaceList(String faceListId, String url, String userData, FaceRectangle targetFace) throws ClientException, IOException;
    /**
     * Adds the face to face list.
     * NOTE(review): the method name violates Java lowerCamelCase and is
     * inconsistent with {@link #addFacesToFaceList}; it is part of the
     * published interface and cannot be renamed without breaking callers.
     * @param faceListId The face list identifier.
     * @param imageStream The face image stream.
     * @param userData The user data.
     * @param targetFace The target face.
     * @return Add face result.
     * @throws ClientException
     * @throws IOException
     */
    AddPersistedFaceResult AddFaceToFaceList(String faceListId, InputStream imageStream, String userData, FaceRectangle targetFace) throws ClientException, IOException;
    /**
     * Deletes the face from face list.
     * @param faceListId The face list identifier.
     * @param persistedFaceId The persisted face identifier.
     * @throws ClientException
     * @throws IOException
     */
    void deleteFacesFromFaceList(String faceListId, UUID persistedFaceId) throws ClientException, IOException;
}

Просмотреть файл

@ -0,0 +1,547 @@
//
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license.
//
// Microsoft Cognitive Services (formerly Project Oxford): https://www.microsoft.com/cognitive-services
//
// Microsoft Cognitive Services (formerly Project Oxford) GitHub:
// https://github.com/Microsoft/ProjectOxford-ClientSDK
//
// Copyright (c) Microsoft Corporation
// All rights reserved.
//
// MIT License:
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED ""AS IS"", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
package com.microsoft.projectoxford.face;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.google.gson.reflect.TypeToken;
import com.microsoft.projectoxford.face.common.RequestMethod;
import com.microsoft.projectoxford.face.contract.AddPersistedFaceResult;
import com.microsoft.projectoxford.face.contract.CreatePersonResult;
import com.microsoft.projectoxford.face.contract.Face;
import com.microsoft.projectoxford.face.contract.FaceList;
import com.microsoft.projectoxford.face.contract.FaceListMetadata;
import com.microsoft.projectoxford.face.contract.FaceRectangle;
import com.microsoft.projectoxford.face.contract.GroupResult;
import com.microsoft.projectoxford.face.contract.IdentifyResult;
import com.microsoft.projectoxford.face.contract.Person;
import com.microsoft.projectoxford.face.contract.PersonFace;
import com.microsoft.projectoxford.face.contract.PersonGroup;
import com.microsoft.projectoxford.face.contract.SimilarFace;
import com.microsoft.projectoxford.face.contract.SimilarPersistedFace;
import com.microsoft.projectoxford.face.contract.TrainingStatus;
import com.microsoft.projectoxford.face.contract.VerifyResult;
import com.microsoft.projectoxford.face.rest.ClientException;
import com.microsoft.projectoxford.face.rest.WebServiceRequest;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.lang.reflect.Type;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;
public class FaceServiceRestClient implements FaceServiceClient {
private final WebServiceRequest mRestCall;
private Gson mGson = new GsonBuilder().setDateFormat("M/d/yyyy h:m:s a").create();
private String mServiceHost = "https://api.projectoxford.ai/face/v1.0";
private static final String DETECT_QUERY = "detect";
private static final String VERIFY_QUERY = "verify";
private static final String TRAIN_QUERY = "train";
private static final String TRAINING_QUERY = "training";
private static final String IDENTIFY_QUERY = "identify";
private static final String PERSON_GROUPS_QUERY = "persongroups";
private static final String PERSONS_QUERY = "persons";
private static final String FACE_LISTS_QUERY = "facelists";
private static final String PERSISTED_FACES_QUERY = "persistedfaces";
private static final String GROUP_QUERY = "group";
private static final String FIND_SIMILARS_QUERY = "findsimilars";
private static final String STREAM_DATA = "application/octet-stream";
private static final String DATA = "data";
/**
 * Creates a client that talks to the default Face API endpoint
 * ({@code mServiceHost}).
 *
 * @param subscriptionKey the subscription key used to authenticate requests
 */
public FaceServiceRestClient(String subscriptionKey) {
    mRestCall = new WebServiceRequest(subscriptionKey);
}
/**
 * Creates a client that talks to a custom Face API endpoint.
 *
 * @param serviceHost     base URL of the Face API service
 * @param subscriptionKey the subscription key used to authenticate requests
 */
public FaceServiceRestClient(String serviceHost, String subscriptionKey) {
    mServiceHost = serviceHost;
    mRestCall = new WebServiceRequest(subscriptionKey);
}
@Override
public Face[] detect(String url, boolean returnFaceId, boolean returnFaceLandmarks, FaceAttributeType[] returnFaceAttributes) throws ClientException, IOException {
    // Query-string parameters for the detect endpoint.
    Map<String, Object> params = new HashMap<>();
    params.put("returnFaceId", returnFaceId);
    params.put("returnFaceLandmarks", returnFaceLandmarks);
    if (returnFaceAttributes != null && returnFaceAttributes.length > 0) {
        // Join the requested attributes into a comma-separated list.
        StringBuilder attributes = new StringBuilder();
        for (FaceAttributeType attribute : returnFaceAttributes) {
            if (attributes.length() > 0) {
                attributes.append(",");
            }
            attributes.append(attribute);
        }
        params.put("returnFaceAttributes", attributes.toString());
    }
    String uri = WebServiceRequest.getUrl(String.format("%s/%s", mServiceHost, DETECT_QUERY), params);
    // The image URL travels in the JSON request body, not the query string.
    params.clear();
    params.put("url", url);
    String json = (String) mRestCall.request(uri, RequestMethod.POST, params, null);
    Type faceListType = new TypeToken<List<Face>>() {
    }.getType();
    List<Face> detectedFaces = mGson.fromJson(json, faceListType);
    return detectedFaces.toArray(new Face[detectedFaces.size()]);
}
@Override
public Face[] detect(InputStream imageStream, boolean returnFaceId, boolean returnFaceLandmarks, FaceAttributeType[] returnFaceAttributes) throws ClientException, IOException {
    // Query-string parameters for the detect endpoint.
    Map<String, Object> params = new HashMap<>();
    params.put("returnFaceId", returnFaceId);
    params.put("returnFaceLandmarks", returnFaceLandmarks);
    if (returnFaceAttributes != null && returnFaceAttributes.length > 0) {
        // Join the requested attributes into a comma-separated list.
        StringBuilder attributes = new StringBuilder();
        for (FaceAttributeType attribute : returnFaceAttributes) {
            if (attributes.length() > 0) {
                attributes.append(",");
            }
            attributes.append(attribute);
        }
        params.put("returnFaceAttributes", attributes.toString());
    }
    String uri = WebServiceRequest.getUrl(String.format("%s/%s", mServiceHost, DETECT_QUERY), params);
    // Buffer the whole image so it can be posted as a single octet-stream payload.
    ByteArrayOutputStream buffer = new ByteArrayOutputStream();
    byte[] chunk = new byte[1024];
    int count;
    while ((count = imageStream.read(chunk)) > 0) {
        buffer.write(chunk, 0, count);
    }
    params.clear();
    params.put(DATA, buffer.toByteArray());
    String json = (String) mRestCall.request(uri, RequestMethod.POST, params, STREAM_DATA);
    Type faceListType = new TypeToken<List<Face>>() {
    }.getType();
    List<Face> detectedFaces = mGson.fromJson(json, faceListType);
    return detectedFaces.toArray(new Face[detectedFaces.size()]);
}
@Override
public VerifyResult verify(UUID faceId1, UUID faceId2) throws ClientException, IOException {
    // Asks the service whether two detected faces belong to the same person.
    Map<String, Object> body = new HashMap<>();
    body.put("faceId1", faceId1);
    body.put("faceId2", faceId2);
    String requestUri = mServiceHost + "/" + VERIFY_QUERY;
    String responseJson = (String) mRestCall.request(requestUri, RequestMethod.POST, body, null);
    return mGson.fromJson(responseJson, VerifyResult.class);
}
@Override
public IdentifyResult[] identity(String personGroupId, UUID[] faceIds, int maxNumOfCandidatesReturned) throws ClientException, IOException {
    // Identifies the persons matching the given faces within a person group.
    // NOTE(review): the name "identity" looks like a typo for "identify", but it
    // overrides an interface method and is therefore preserved.
    Map<String, Object> body = new HashMap<>();
    body.put("personGroupId", personGroupId);
    body.put("faceIds", faceIds);
    body.put("maxNumOfCandidatesReturned", maxNumOfCandidatesReturned);
    String requestUri = mServiceHost + "/" + IDENTIFY_QUERY;
    String responseJson = (String) mRestCall.request(requestUri, RequestMethod.POST, body, null);
    Type resultListType = new TypeToken<List<IdentifyResult>>() {
    }.getType();
    List<IdentifyResult> results = mGson.fromJson(responseJson, resultListType);
    return results.toArray(new IdentifyResult[0]);
}
@Override
public void trainPersonGroup(String personGroupId) throws ClientException, IOException {
    // Kicks off (asynchronous) training for the given person group; no body fields.
    String requestUri = mServiceHost + "/" + PERSON_GROUPS_QUERY + "/" + personGroupId + "/" + TRAIN_QUERY;
    mRestCall.request(requestUri, RequestMethod.POST, new HashMap<String, Object>(), null);
}
@Override
public TrainingStatus getPersonGroupTrainingStatus(String personGroupId) throws ClientException, IOException {
    // Polls the training status of a person group.
    String requestUri = mServiceHost + "/" + PERSON_GROUPS_QUERY + "/" + personGroupId + "/" + TRAINING_QUERY;
    String responseJson = (String) mRestCall.request(requestUri, RequestMethod.GET, new HashMap<String, Object>(), null);
    return mGson.fromJson(responseJson, TrainingStatus.class);
}
@Override
public void createPersonGroup(String personGroupId, String name, String userData) throws ClientException, IOException {
    // Creates a person group via PUT; userData is optional and omitted when null.
    Map<String, Object> body = new HashMap<>();
    body.put("name", name);
    if (userData != null) {
        body.put("userData", userData);
    }
    String requestUri = mServiceHost + "/" + PERSON_GROUPS_QUERY + "/" + personGroupId;
    mRestCall.request(requestUri, RequestMethod.PUT, body, null);
}
@Override
public void deletePersonGroup(String personGroupId) throws ClientException, IOException {
    // Deletes the person group and all persons inside it (per service semantics).
    String requestUri = mServiceHost + "/" + PERSON_GROUPS_QUERY + "/" + personGroupId;
    mRestCall.request(requestUri, RequestMethod.DELETE, new HashMap<String, Object>(), null);
}
/**
 * Updates a person group's name and/or user data via PATCH.
 *
 * Fix: previously "name" was always sent, even when null, which is
 * inconsistent with {@code updatePerson} and {@code updateFaceList}
 * (both skip null fields). Null fields are now omitted so a partial
 * update does not overwrite the existing name with null.
 *
 * @param personGroupId group to update
 * @param name          new display name, or null to leave unchanged
 * @param userData      new user data, or null to leave unchanged
 */
@Override
public void updatePersonGroup(String personGroupId, String name, String userData) throws ClientException, IOException {
    Map<String, Object> params = new HashMap<>();
    if (name != null) {
        params.put("name", name);
    }
    if (userData != null) {
        params.put("userData", userData);
    }
    String uri = String.format("%s/%s/%s", mServiceHost, PERSON_GROUPS_QUERY, personGroupId);
    mRestCall.request(uri, RequestMethod.PATCH, params, null);
}
@Override
public PersonGroup getPersonGroup(String personGroupId) throws ClientException, IOException {
    // Fetches a single person group by its identifier.
    String requestUri = mServiceHost + "/" + PERSON_GROUPS_QUERY + "/" + personGroupId;
    String responseJson = (String) mRestCall.request(requestUri, RequestMethod.GET, new HashMap<String, Object>(), null);
    return mGson.fromJson(responseJson, PersonGroup.class);
}
@Override
public PersonGroup[] getPersonGroups() throws ClientException, IOException {
    // Lists all person groups for the subscription.
    String requestUri = mServiceHost + "/" + PERSON_GROUPS_QUERY;
    String responseJson = (String) mRestCall.request(requestUri, RequestMethod.GET, new HashMap<String, Object>(), null);
    Type groupListType = new TypeToken<List<PersonGroup>>() {
    }.getType();
    List<PersonGroup> groups = mGson.fromJson(responseJson, groupListType);
    return groups.toArray(new PersonGroup[0]);
}
@Override
public CreatePersonResult createPerson(String personGroupId, String name, String userData) throws ClientException, IOException {
    // Creates a person inside the given group; userData is optional.
    Map<String, Object> body = new HashMap<>();
    body.put("name", name);
    if (userData != null) {
        body.put("userData", userData);
    }
    String requestUri = mServiceHost + "/" + PERSON_GROUPS_QUERY + "/" + personGroupId + "/" + PERSONS_QUERY;
    String responseJson = (String) mRestCall.request(requestUri, RequestMethod.POST, body, null);
    return mGson.fromJson(responseJson, CreatePersonResult.class);
}
@Override
public Person getPerson(String personGroupId, UUID personId) throws ClientException, IOException {
    // Fetches one person record from a person group.
    String requestUri = mServiceHost + "/" + PERSON_GROUPS_QUERY + "/" + personGroupId + "/" + PERSONS_QUERY + "/" + personId.toString();
    String responseJson = (String) mRestCall.request(requestUri, RequestMethod.GET, new HashMap<String, Object>(), null);
    return mGson.fromJson(responseJson, Person.class);
}
@Override
public Person[] getPersons(String personGroupId) throws ClientException, IOException {
    // Lists every person in the given person group.
    String requestUri = mServiceHost + "/" + PERSON_GROUPS_QUERY + "/" + personGroupId + "/" + PERSONS_QUERY;
    String responseJson = (String) mRestCall.request(requestUri, RequestMethod.GET, new HashMap<String, Object>(), null);
    Type personListType = new TypeToken<List<Person>>() {
    }.getType();
    List<Person> persons = mGson.fromJson(responseJson, personListType);
    return persons.toArray(new Person[0]);
}
@Override
public AddPersistedFaceResult addPersonFace(String personGroupId, UUID personId, String url, String userData, FaceRectangle targetFace) throws ClientException, IOException {
    // Adds a face (referenced by URL) to a person; optional userData and
    // targetFace rectangle travel in the query string, the URL in the body.
    Map<String, Object> query = new HashMap<>();
    if (userData != null && userData.length() > 0) {
        query.put("userData", userData);
    }
    if (targetFace != null) {
        // "left,top,width,height" — same output as the original width-1 format specifiers.
        query.put("targetFace", String.format("%d,%d,%d,%d", targetFace.left, targetFace.top, targetFace.width, targetFace.height));
    }
    String endpoint = mServiceHost + "/" + PERSON_GROUPS_QUERY + "/" + personGroupId + "/" + PERSONS_QUERY + "/" + personId + "/" + PERSISTED_FACES_QUERY;
    String requestUri = WebServiceRequest.getUrl(endpoint, query);
    query.clear();
    query.put("url", url);
    String responseJson = (String) mRestCall.request(requestUri, RequestMethod.POST, query, null);
    return mGson.fromJson(responseJson, AddPersistedFaceResult.class);
}
@Override
public AddPersistedFaceResult addPersonFace(String personGroupId, UUID personId, InputStream imageStream, String userData, FaceRectangle targetFace) throws ClientException, IOException {
    // Adds a face (raw image bytes) to a person; optional userData and
    // targetFace rectangle travel in the query string, the bytes in the body.
    Map<String, Object> query = new HashMap<>();
    if (userData != null && userData.length() > 0) {
        query.put("userData", userData);
    }
    if (targetFace != null) {
        // "left,top,width,height" — same output as the original width-1 format specifiers.
        query.put("targetFace", String.format("%d,%d,%d,%d", targetFace.left, targetFace.top, targetFace.width, targetFace.height));
    }
    String endpoint = mServiceHost + "/" + PERSON_GROUPS_QUERY + "/" + personGroupId + "/" + PERSONS_QUERY + "/" + personId + "/" + PERSISTED_FACES_QUERY;
    String requestUri = WebServiceRequest.getUrl(endpoint, query);
    // Buffer the entire stream into memory; the REST layer expects a byte[].
    ByteArrayOutputStream buffered = new ByteArrayOutputStream();
    byte[] chunk = new byte[1024];
    int count;
    while ((count = imageStream.read(chunk)) != -1) {
        buffered.write(chunk, 0, count);
    }
    query.clear();
    query.put(DATA, buffered.toByteArray());
    String responseJson = (String) mRestCall.request(requestUri, RequestMethod.POST, query, STREAM_DATA);
    return mGson.fromJson(responseJson, AddPersistedFaceResult.class);
}
@Override
public PersonFace getPersonFace(String personGroupId, UUID personId, UUID persistedFaceId) throws ClientException, IOException {
    // Fetches one persisted face belonging to a person.
    String requestUri = mServiceHost + "/" + PERSON_GROUPS_QUERY + "/" + personGroupId + "/" + PERSONS_QUERY + "/" + personId + "/" + PERSISTED_FACES_QUERY + "/" + persistedFaceId;
    String responseJson = (String) mRestCall.request(requestUri, RequestMethod.GET, new HashMap<String, Object>(), null);
    return mGson.fromJson(responseJson, PersonFace.class);
}
@Override
public void updatePersonFace(String personGroupId, UUID personId, UUID persistedFaceId, String userData) throws ClientException, IOException {
    // Updates the user data attached to a persisted face; null leaves it unchanged.
    Map<String, Object> body = new HashMap<>();
    if (userData != null) {
        body.put("userData", userData);
    }
    String requestUri = mServiceHost + "/" + PERSON_GROUPS_QUERY + "/" + personGroupId + "/" + PERSONS_QUERY + "/" + personId + "/" + PERSISTED_FACES_QUERY + "/" + persistedFaceId;
    mRestCall.request(requestUri, RequestMethod.PATCH, body, null);
}
@Override
public void updatePerson(String personGroupId, UUID personId, String name, String userData) throws ClientException, IOException {
    // Partially updates a person; null fields are omitted from the PATCH body.
    Map<String, Object> body = new HashMap<>();
    if (name != null) {
        body.put("name", name);
    }
    if (userData != null) {
        body.put("userData", userData);
    }
    String requestUri = mServiceHost + "/" + PERSON_GROUPS_QUERY + "/" + personGroupId + "/" + PERSONS_QUERY + "/" + personId;
    mRestCall.request(requestUri, RequestMethod.PATCH, body, null);
}
@Override
public void deletePerson(String personGroupId, UUID personId) throws ClientException, IOException {
    // Removes a person (and, per service semantics, its persisted faces) from the group.
    String requestUri = mServiceHost + "/" + PERSON_GROUPS_QUERY + "/" + personGroupId + "/" + PERSONS_QUERY + "/" + personId;
    mRestCall.request(requestUri, RequestMethod.DELETE, new HashMap<String, Object>(), null);
}
@Override
public void deletePersonFace(String personGroupId, UUID personId, UUID persistedFaceId) throws ClientException, IOException {
    // Removes a single persisted face from a person.
    String requestUri = mServiceHost + "/" + PERSON_GROUPS_QUERY + "/" + personGroupId + "/" + PERSONS_QUERY + "/" + personId + "/" + PERSISTED_FACES_QUERY + "/" + persistedFaceId;
    mRestCall.request(requestUri, RequestMethod.DELETE, new HashMap<String, Object>(), null);
}
@Override
public SimilarFace[] findSimilar(UUID faceId, UUID[] faceIds, int maxNumOfCandidatesReturned) throws ClientException, IOException {
    // Finds faces similar to faceId among an explicit list of candidate face IDs.
    Map<String, Object> body = new HashMap<>();
    body.put("faceId", faceId);
    body.put("faceIds", faceIds);
    body.put("maxNumOfCandidatesReturned", maxNumOfCandidatesReturned);
    String requestUri = mServiceHost + "/" + FIND_SIMILARS_QUERY;
    String responseJson = (String) mRestCall.request(requestUri, RequestMethod.POST, body, null);
    Type resultListType = new TypeToken<List<SimilarFace>>() {
    }.getType();
    List<SimilarFace> matches = mGson.fromJson(responseJson, resultListType);
    return matches.toArray(new SimilarFace[0]);
}
@Override
public SimilarPersistedFace[] findSimilar(UUID faceId, String faceListId, int maxNumOfCandidatesReturned) throws ClientException, IOException {
    // Finds faces similar to faceId within a stored face list.
    Map<String, Object> body = new HashMap<>();
    body.put("faceId", faceId);
    body.put("faceListId", faceListId);
    body.put("maxNumOfCandidatesReturned", maxNumOfCandidatesReturned);
    String requestUri = mServiceHost + "/" + FIND_SIMILARS_QUERY;
    String responseJson = (String) mRestCall.request(requestUri, RequestMethod.POST, body, null);
    Type resultListType = new TypeToken<List<SimilarPersistedFace>>() {
    }.getType();
    List<SimilarPersistedFace> matches = mGson.fromJson(responseJson, resultListType);
    return matches.toArray(new SimilarPersistedFace[0]);
}
@Override
public GroupResult group(UUID[] faceIds) throws ClientException, IOException {
    // Clusters the given faces into groups of similar faces.
    Map<String, Object> body = new HashMap<>();
    body.put("faceIds", faceIds);
    String requestUri = mServiceHost + "/" + GROUP_QUERY;
    String responseJson = (String) mRestCall.request(requestUri, RequestMethod.POST, body, null);
    return mGson.fromJson(responseJson, GroupResult.class);
}
/**
 * Creates a face list via PUT.
 *
 * Fix: "userData" was previously sent unconditionally, so a null value
 * was placed into the request body — inconsistent with every other
 * create/update method in this class, which omits null optional fields.
 * It is now sent only when non-null.
 *
 * @param faceListId identifier of the new face list
 * @param name       display name of the list
 * @param userData   optional user data, or null to omit
 */
@Override
public void createFaceList(String faceListId, String name, String userData) throws ClientException, IOException {
    Map<String, Object> params = new HashMap<>();
    String uri = String.format("%s/%s/%s", mServiceHost, FACE_LISTS_QUERY, faceListId);
    params.put("name", name);
    if (userData != null) {
        params.put("userData", userData);
    }
    this.mRestCall.request(uri, RequestMethod.PUT, params, null);
}
@Override
public FaceList getFaceList(String faceListId) throws ClientException, IOException {
    // Fetches a face list, including its persisted faces.
    String requestUri = mServiceHost + "/" + FACE_LISTS_QUERY + "/" + faceListId;
    String responseJson = (String) mRestCall.request(requestUri, RequestMethod.GET, new HashMap<String, Object>(), null);
    return mGson.fromJson(responseJson, FaceList.class);
}
@Override
public FaceListMetadata[] listFaceLists() throws ClientException, IOException {
    // Lists the metadata of all face lists for the subscription.
    String requestUri = mServiceHost + "/" + FACE_LISTS_QUERY;
    String responseJson = (String) mRestCall.request(requestUri, RequestMethod.GET, new HashMap<String, Object>(), null);
    Type metadataListType = new TypeToken<List<FaceListMetadata>>() {
    }.getType();
    List<FaceListMetadata> lists = mGson.fromJson(responseJson, metadataListType);
    return lists.toArray(new FaceListMetadata[0]);
}
@Override
public void updateFaceList(String faceListId, String name, String userData) throws ClientException, IOException {
    // Partially updates a face list; null fields are omitted from the PATCH body.
    Map<String, Object> body = new HashMap<>();
    if (name != null) {
        body.put("name", name);
    }
    if (userData != null) {
        body.put("userData", userData);
    }
    String requestUri = mServiceHost + "/" + FACE_LISTS_QUERY + "/" + faceListId;
    mRestCall.request(requestUri, RequestMethod.PATCH, body, null);
}
@Override
public void deleteFaceList(String faceListId) throws ClientException, IOException {
    // Deletes the face list and its persisted faces (per service semantics).
    String requestUri = mServiceHost + "/" + FACE_LISTS_QUERY + "/" + faceListId;
    mRestCall.request(requestUri, RequestMethod.DELETE, new HashMap<String, Object>(), null);
}
@Override
public AddPersistedFaceResult addFacesToFaceList(String faceListId, String url, String userData, FaceRectangle targetFace) throws ClientException, IOException {
    // Adds a face (referenced by URL) to a face list; optional userData and
    // targetFace rectangle travel in the query string, the URL in the body.
    Map<String, Object> query = new HashMap<>();
    if (userData != null && userData.length() > 0) {
        query.put("userData", userData);
    }
    if (targetFace != null) {
        // "left,top,width,height" — same output as the original width-1 format specifiers.
        query.put("targetFace", String.format("%d,%d,%d,%d", targetFace.left, targetFace.top, targetFace.width, targetFace.height));
    }
    String endpoint = mServiceHost + "/" + FACE_LISTS_QUERY + "/" + faceListId + "/" + PERSISTED_FACES_QUERY;
    String requestUri = WebServiceRequest.getUrl(endpoint, query);
    query.clear();
    query.put("url", url);
    String responseJson = (String) mRestCall.request(requestUri, RequestMethod.POST, query, null);
    return mGson.fromJson(responseJson, AddPersistedFaceResult.class);
}
@Override
public AddPersistedFaceResult AddFaceToFaceList(String faceListId, InputStream imageStream, String userData, FaceRectangle targetFace) throws ClientException, IOException {
    // Adds a face (raw image bytes) to a face list.
    // NOTE(review): the capitalized method name violates Java naming convention,
    // but it overrides an interface method and is therefore preserved.
    Map<String, Object> query = new HashMap<>();
    if (userData != null && userData.length() > 0) {
        query.put("userData", userData);
    }
    if (targetFace != null) {
        // "left,top,width,height" — same output as the original width-1 format specifiers.
        query.put("targetFace", String.format("%d,%d,%d,%d", targetFace.left, targetFace.top, targetFace.width, targetFace.height));
    }
    String endpoint = mServiceHost + "/" + FACE_LISTS_QUERY + "/" + faceListId + "/" + PERSISTED_FACES_QUERY;
    String requestUri = WebServiceRequest.getUrl(endpoint, query);
    // Buffer the entire stream into memory; the REST layer expects a byte[].
    ByteArrayOutputStream buffered = new ByteArrayOutputStream();
    byte[] chunk = new byte[1024];
    int count;
    while ((count = imageStream.read(chunk)) != -1) {
        buffered.write(chunk, 0, count);
    }
    query.clear();
    query.put(DATA, buffered.toByteArray());
    String responseJson = (String) mRestCall.request(requestUri, RequestMethod.POST, query, STREAM_DATA);
    return mGson.fromJson(responseJson, AddPersistedFaceResult.class);
}
@Override
public void deleteFacesFromFaceList(String faceListId, UUID persistedFaceId) throws ClientException, IOException {
    // Removes a single persisted face from a face list.
    String requestUri = mServiceHost + "/" + FACE_LISTS_QUERY + "/" + faceListId + "/" + PERSISTED_FACES_QUERY + "/" + persistedFaceId;
    mRestCall.request(requestUri, RequestMethod.DELETE, new HashMap<String, Object>(), null);
}
}

Просмотреть файл

@ -0,0 +1,43 @@
//
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license.
//
// Microsoft Cognitive Services (formerly Project Oxford): https://www.microsoft.com/cognitive-services
//
// Microsoft Cognitive Services (formerly Project Oxford) GitHub:
// https://github.com/Microsoft/ProjectOxford-ClientSDK
//
// Copyright (c) Microsoft Corporation
// All rights reserved.
//
// MIT License:
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
package com.microsoft.projectoxford.face.common;
import java.util.UUID;
/**
 * Error details returned by the service in an error response body.
 * Public fields are mapped from JSON by Gson; renaming them would break deserialization.
 */
public class ClientError {
// Service-defined error code string.
public String code;
// Human-readable error description.
public String message;
// Identifier of the failed request — presumably for support/correlation; TODO confirm.
public UUID requestId;
}

Просмотреть файл

@ -0,0 +1,37 @@
//
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license.
//
// Microsoft Cognitive Services (formerly Project Oxford): https://www.microsoft.com/cognitive-services
//
// Microsoft Cognitive Services (formerly Project Oxford) GitHub:
// https://github.com/Microsoft/ProjectOxford-ClientSDK
//
// Copyright (c) Microsoft Corporation
// All rights reserved.
//
// MIT License:
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
package com.microsoft.projectoxford.face.common;
/**
 * HTTP request methods the client's REST layer can issue.
 */
public enum RequestMethod {
GET, HEAD, POST, PUT, PATCH, DELETE, OPTIONS, TRACE;
}

Просмотреть файл

@ -0,0 +1,37 @@
//
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license.
//
// Microsoft Cognitive Services (formerly Project Oxford): https://www.microsoft.com/cognitive-services
//
// Microsoft Cognitive Services (formerly Project Oxford) GitHub:
// https://github.com/Microsoft/ProjectOxford-ClientSDK
//
// Copyright (c) Microsoft Corporation
// All rights reserved.
//
// MIT License:
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
package com.microsoft.projectoxford.face.common;
/**
 * Top-level wrapper of an error response body: {@code { "error": { ... } }}.
 * The public field is mapped from JSON by Gson.
 */
public class ServiceError {
// The nested error object carrying code, message and request id.
public ClientError error;
}

Просмотреть файл

@ -0,0 +1,42 @@
//
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license.
//
// Microsoft Cognitive Services (formerly Project Oxford): https://www.microsoft.com/cognitive-services
//
// Microsoft Cognitive Services (formerly Project Oxford) GitHub:
// https://github.com/Microsoft/ProjectOxford-ClientSDK
//
// Copyright (c) Microsoft Corporation
// All rights reserved.
//
// MIT License:
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
package com.microsoft.projectoxford.face.contract;
import java.util.UUID;
/**
 * Result of adding a face to a person or face list.
 * The public field is mapped from JSON by Gson.
 */
public class AddPersistedFaceResult {
/**
 * The persisted face identifier
 */
public UUID persistedFaceId;
}

Просмотреть файл

@ -0,0 +1,41 @@
//
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license.
//
// Microsoft Cognitive Services (formerly Project Oxford): https://www.microsoft.com/cognitive-services
//
// Microsoft Cognitive Services (formerly Project Oxford) GitHub:
// https://github.com/Microsoft/ProjectOxford-ClientSDK
//
// Copyright (c) Microsoft Corporation
// All rights reserved.
//
// MIT License:
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
package com.microsoft.projectoxford.face.contract;
import java.util.UUID;
/**
 * One candidate match returned by an identify call.
 * Public fields are mapped from JSON by Gson.
 */
public class Candidate {
// Identifier of the candidate person.
public UUID personId;
// Match confidence — presumably in [0, 1]; TODO confirm against service docs.
public double confidence;
}

Просмотреть файл

@ -0,0 +1,39 @@
//
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license.
//
// Microsoft Cognitive Services (formerly Project Oxford): https://www.microsoft.com/cognitive-services
//
// Microsoft Cognitive Services (formerly Project Oxford) GitHub:
// https://github.com/Microsoft/ProjectOxford-ClientSDK
//
// Copyright (c) Microsoft Corporation
// All rights reserved.
//
// MIT License:
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
package com.microsoft.projectoxford.face.contract;
import java.util.UUID;
/**
 * Result of creating a person: the identifier assigned by the service.
 * The public field is mapped from JSON by Gson.
 */
public class CreatePersonResult {
// Identifier of the newly created person.
public UUID personId;
}

Просмотреть файл

@ -0,0 +1,45 @@
//
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license.
//
// Microsoft Cognitive Services (formerly Project Oxford): https://www.microsoft.com/cognitive-services
//
// Microsoft Cognitive Services (formerly Project Oxford) GitHub:
// https://github.com/Microsoft/ProjectOxford-ClientSDK
//
// Copyright (c) Microsoft Corporation
// All rights reserved.
//
// MIT License:
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
package com.microsoft.projectoxford.face.contract;
import java.util.UUID;
/**
 * A face returned by a detect call.
 * Public fields are mapped from JSON by Gson; landmarks and attributes are
 * populated only when requested (may be null otherwise — see detect parameters).
 */
public class Face {
// Identifier assigned to this detected face.
public UUID faceId;
// Bounding rectangle of the face in the source image.
public FaceRectangle faceRectangle;
// Landmark coordinates; null unless returnFaceLandmarks was requested.
public FaceLandmarks faceLandmarks;
// Computed attributes; null unless returnFaceAttributes was requested.
public FaceAttribute faceAttributes;
}

Просмотреть файл

@ -0,0 +1,47 @@
//
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license.
//
// Microsoft Cognitive Services (formerly Project Oxford): https://www.microsoft.com/cognitive-services
//
// Microsoft Cognitive Services (formerly Project Oxford) GitHub:
// https://github.com/Microsoft/ProjectOxford-ClientSDK
//
// Copyright (c) Microsoft Corporation
// All rights reserved.
//
// MIT License:
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
package com.microsoft.projectoxford.face.contract;
/**
 * Attributes computed for a detected face.
 * Public fields are mapped from JSON by Gson.
 */
public class FaceAttribute {
// Estimated age — presumably in years; TODO confirm against service docs.
public double age;
// Estimated gender as a string.
public String gender;
// Smile intensity — presumably in [0, 1]; TODO confirm against service docs.
public double smile;
// Facial hair measurements.
public FacialHair facialHair;
// Head pose angles.
public HeadPose headPose;
// Detected glasses type.
public Glasses glasses;
}

Просмотреть файл

@ -0,0 +1,89 @@
//
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license.
//
// Microsoft Cognitive Services (formerly Project Oxford): https://www.microsoft.com/cognitive-services
//
// Microsoft Cognitive Services (formerly Project Oxford) GitHub:
// https://github.com/Microsoft/ProjectOxford-ClientSDK
//
// Copyright (c) Microsoft Corporation
// All rights reserved.
//
// MIT License:
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
package com.microsoft.projectoxford.face.contract;
/**
 * Coordinates of the 27 face landmarks returned by a detect call.
 * Public fields are mapped from JSON by Gson; each field names the
 * facial feature whose image-space coordinate it holds.
 */
public class FaceLandmarks {
public FeatureCoordinate pupilLeft;
public FeatureCoordinate pupilRight;
public FeatureCoordinate noseTip;
public FeatureCoordinate mouthLeft;
public FeatureCoordinate mouthRight;
public FeatureCoordinate eyebrowLeftOuter;
public FeatureCoordinate eyebrowLeftInner;
public FeatureCoordinate eyeLeftOuter;
public FeatureCoordinate eyeLeftTop;
public FeatureCoordinate eyeLeftBottom;
public FeatureCoordinate eyeLeftInner;
public FeatureCoordinate eyebrowRightInner;
public FeatureCoordinate eyebrowRightOuter;
public FeatureCoordinate eyeRightInner;
public FeatureCoordinate eyeRightTop;
public FeatureCoordinate eyeRightBottom;
public FeatureCoordinate eyeRightOuter;
public FeatureCoordinate noseRootLeft;
public FeatureCoordinate noseRootRight;
public FeatureCoordinate noseLeftAlarTop;
public FeatureCoordinate noseRightAlarTop;
public FeatureCoordinate noseLeftAlarOutTip;
public FeatureCoordinate noseRightAlarOutTip;
public FeatureCoordinate upperLipTop;
public FeatureCoordinate upperLipBottom;
public FeatureCoordinate underLipTop;
public FeatureCoordinate underLipBottom;
}

Просмотреть файл

@ -0,0 +1,37 @@
//
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license.
//
// Microsoft Cognitive Services (formerly Project Oxford): https://www.microsoft.com/cognitive-services
//
// Microsoft Cognitive Services (formerly Project Oxford) GitHub:
// https://github.com/Microsoft/ProjectOxford-ClientSDK
//
// Copyright (c) Microsoft Corporation
// All rights reserved.
//
// MIT License:
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
package com.microsoft.projectoxford.face.contract;
/**
 * Wire contract for a face list: the list's metadata (inherited from
 * {@link FaceListMetadata}) plus the faces it contains.
 */
public class FaceList extends FaceListMetadata {
    /** Faces belonging to this list. */
    public FaceMetadata[] faces;
}

Просмотреть файл

@ -0,0 +1,41 @@
//
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license.
//
// Microsoft Cognitive Services (formerly Project Oxford): https://www.microsoft.com/cognitive-services
//
// Microsoft Cognitive Services (formerly Project Oxford) GitHub:
// https://github.com/Microsoft/ProjectOxford-ClientSDK
//
// Copyright (c) Microsoft Corporation
// All rights reserved.
//
// MIT License:
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
package com.microsoft.projectoxford.face.contract;
/**
 * Wire contract describing a face list without its member faces.
 * Fields are public for JSON (de)serialization.
 */
public class FaceListMetadata {
    /** Identifier of the face list (group). */
    public String faceGroupId;
    /** Display name of the face list. */
    public String name;
    /** Free-form data attached by the user; may be null. */
    public String userData;
}

Просмотреть файл

@ -0,0 +1,41 @@
//
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license.
//
// Microsoft Cognitive Services (formerly Project Oxford): https://www.microsoft.com/cognitive-services
//
// Microsoft Cognitive Services (formerly Project Oxford) GitHub:
// https://github.com/Microsoft/ProjectOxford-ClientSDK
//
// Copyright (c) Microsoft Corporation
// All rights reserved.
//
// MIT License:
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
package com.microsoft.projectoxford.face.contract;
import java.util.UUID;
/**
 * Wire contract pairing a face identifier with optional user data.
 * Fields are public for JSON (de)serialization.
 */
public class FaceMetadata {
    /** Unique identifier of the face. */
    public UUID faceId;
    /** Free-form data attached by the user; may be null. */
    public String userData;
}

Просмотреть файл

@ -0,0 +1,43 @@
//
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license.
//
// Microsoft Cognitive Services (formerly Project Oxford): https://www.microsoft.com/cognitive-services
//
// Microsoft Cognitive Services (formerly Project Oxford) GitHub:
// https://github.com/Microsoft/ProjectOxford-ClientSDK
//
// Copyright (c) Microsoft Corporation
// All rights reserved.
//
// MIT License:
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
package com.microsoft.projectoxford.face.contract;
/**
 * Wire contract for the axis-aligned rectangle bounding a detected face.
 * {@code left}/{@code top} give the top-left corner; {@code width}/{@code height}
 * give its size (presumably in image pixels — not stated in this file).
 */
public class FaceRectangle {
    /** Rectangle width. */
    public int width;
    /** Rectangle height. */
    public int height;
    /** X coordinate of the left edge. */
    public int left;
    /** Y coordinate of the top edge. */
    public int top;
}

Просмотреть файл

@ -0,0 +1,41 @@
//
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license.
//
// Microsoft Cognitive Services (formerly Project Oxford): https://www.microsoft.com/cognitive-services
//
// Microsoft Cognitive Services (formerly Project Oxford) GitHub:
// https://github.com/Microsoft/ProjectOxford-ClientSDK
//
// Copyright (c) Microsoft Corporation
// All rights reserved.
//
// MIT License:
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
package com.microsoft.projectoxford.face.contract;
/**
 * Wire contract for facial-hair attributes of a detected face.
 * Each field is a numeric score returned by the service (range not
 * stated in this file; presumably 0..1 — confirm against the API docs).
 */
public class FacialHair {
    /** Moustache score. */
    public double moustache;
    /** Beard score. */
    public double beard;
    /** Sideburns score. */
    public double sideburns;
}

Просмотреть файл

@ -0,0 +1,39 @@
//
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license.
//
// Microsoft Cognitive Services (formerly Project Oxford): https://www.microsoft.com/cognitive-services
//
// Microsoft Cognitive Services (formerly Project Oxford) GitHub:
// https://github.com/Microsoft/ProjectOxford-ClientSDK
//
// Copyright (c) Microsoft Corporation
// All rights reserved.
//
// MIT License:
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
package com.microsoft.projectoxford.face.contract;
/**
 * Wire contract for a single 2-D point used by facial landmarks.
 */
public class FeatureCoordinate {
    /** Horizontal coordinate. */
    public double x;
    /** Vertical coordinate. */
    public double y;
}

Просмотреть файл

@ -0,0 +1,40 @@
//
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license.
//
// Microsoft Cognitive Services (formerly Project Oxford): https://www.microsoft.com/cognitive-services
//
// Microsoft Cognitive Services (formerly Project Oxford) GitHub:
// https://github.com/Microsoft/ProjectOxford-ClientSDK/
//
// Copyright (c) Microsoft Corporation
// All rights reserved.
//
// MIT License:
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
package com.microsoft.projectoxford.face.contract;
/**
 * Types of eyewear the service can report for a detected face.
 */
public enum Glasses {
    /** No glasses detected. */
    NoGlasses,
    /** Sunglasses detected. */
    Sunglasses,
    /** Reading (clear) glasses detected. */
    ReadingGlasses,
    /** Swimming goggles detected. */
    SwimmingGoggles
}

Просмотреть файл

@ -0,0 +1,42 @@
//
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license.
//
// Microsoft Cognitive Services (formerly Project Oxford): https://www.microsoft.com/cognitive-services
//
// Microsoft Cognitive Services (formerly Project Oxford) GitHub:
// https://github.com/Microsoft/ProjectOxford-ClientSDK
//
// Copyright (c) Microsoft Corporation
// All rights reserved.
//
// MIT License:
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
package com.microsoft.projectoxford.face.contract;
import java.util.List;
import java.util.UUID;
/**
 * Wire contract for the result of a face grouping operation.
 * Fields are public for JSON (de)serialization.
 */
public class GroupResult {
    /** Groups of face IDs; each array is one group of similar faces. */
    public List<UUID[]> groups;
    /** Face IDs that did not fit into any group. */
    public List<UUID> messyGroup;
}

Просмотреть файл

@ -0,0 +1,41 @@
//
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license.
//
// Microsoft Cognitive Services (formerly Project Oxford): https://www.microsoft.com/cognitive-services
//
// Microsoft Cognitive Services (formerly Project Oxford) GitHub:
// https://github.com/Microsoft/ProjectOxford-ClientSDK
//
// Copyright (c) Microsoft Corporation
// All rights reserved.
//
// MIT License:
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
package com.microsoft.projectoxford.face.contract;
/**
 * Wire contract for 3-D head orientation of a detected face
 * (angle units are not stated in this file; presumably degrees —
 * confirm against the API docs).
 */
public class HeadPose {
    /** Rotation around the front-to-back axis. */
    public double roll;
    /** Rotation around the vertical axis. */
    public double yaw;
    /** Rotation around the side-to-side axis. */
    public double pitch;
}

Просмотреть файл

@ -0,0 +1,43 @@
//
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license.
//
// Microsoft Cognitive Services (formerly Project Oxford): https://www.microsoft.com/cognitive-services
//
// Microsoft Cognitive Services (formerly Project Oxford) GitHub:
// https://github.com/Microsoft/ProjectOxford-ClientSDK
//
// Copyright (c) Microsoft Corporation
// All rights reserved.
//
// MIT License:
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
package com.microsoft.projectoxford.face.contract;
import java.util.ArrayList;
import java.util.List;
import java.util.UUID;
/**
 * Wire contract for the identification result of a single query face.
 * Fields are public for JSON (de)serialization.
 */
public class IdentifyResult {
    /** The query face this result refers to. */
    public UUID faceId;
    /** Candidate persons for this face; initialized empty so it is never null. */
    public List<Candidate> candidates = new ArrayList<>();
}

Просмотреть файл

@ -0,0 +1,45 @@
//
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license.
//
// Microsoft Cognitive Services (formerly Project Oxford): https://www.microsoft.com/cognitive-services
//
// Microsoft Cognitive Services (formerly Project Oxford) GitHub:
// https://github.com/Microsoft/ProjectOxford-ClientSDK
//
// Copyright (c) Microsoft Corporation
// All rights reserved.
//
// MIT License:
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
package com.microsoft.projectoxford.face.contract;
import java.util.UUID;
/**
 * Wire contract for a person registered in a person group.
 * Fields are public for JSON (de)serialization.
 */
public class Person {
    /** Unique identifier of the person. */
    public UUID personId;
    /** Identifiers of the faces registered to this person. */
    public UUID[] faceIds;
    /** Display name of the person. */
    public String name;
    /** Free-form data attached by the user; may be null. */
    public String userData;
}

Просмотреть файл

@ -0,0 +1,41 @@
//
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license.
//
// Microsoft Cognitive Services (formerly Project Oxford): https://www.microsoft.com/cognitive-services
//
// Microsoft Cognitive Services (formerly Project Oxford) GitHub:
// https://github.com/Microsoft/ProjectOxford-ClientSDK
//
// Copyright (c) Microsoft Corporation
// All rights reserved.
//
// MIT License:
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
package com.microsoft.projectoxford.face.contract;
import java.util.UUID;
/**
 * Wire contract for a face persisted under a person.
 * Fields are public for JSON (de)serialization.
 */
public class PersonFace {
    /** Identifier of the persisted face. */
    public UUID persistedFaceId;
    /** Free-form data attached by the user; may be null. */
    public String userData;
}

Просмотреть файл

@ -0,0 +1,43 @@
//
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license.
//
// Microsoft Cognitive Services (formerly Project Oxford): https://www.microsoft.com/cognitive-services
//
// Microsoft Cognitive Services (formerly Project Oxford) GitHub:
// https://github.com/Microsoft/ProjectOxford-ClientSDK
//
// Copyright (c) Microsoft Corporation
// All rights reserved.
//
// MIT License:
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
package com.microsoft.projectoxford.face.contract;
/**
 * Wire contract for a person group and its training state.
 * Fields are public for JSON (de)serialization.
 */
public class PersonGroup {
    /** Identifier of the person group. */
    public String personGroupId;
    /** Display name of the group. */
    public String name;
    /** Free-form data attached by the user; may be null. */
    public String userData;
    /** Current training status of the group. */
    public TrainingStatus trainingStatus;
}

Просмотреть файл

@ -0,0 +1,41 @@
//
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license.
//
// Microsoft Cognitive Services (formerly Project Oxford): https://www.microsoft.com/cognitive-services
//
// Microsoft Cognitive Services (formerly Project Oxford) GitHub:
// https://github.com/Microsoft/ProjectOxford-ClientSDK
//
// Copyright (c) Microsoft Corporation
// All rights reserved.
//
// MIT License:
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
package com.microsoft.projectoxford.face.contract;
import java.util.UUID;
/**
 * Wire contract for one match returned by a find-similar query,
 * keyed by a (non-persisted) face identifier.
 */
public class SimilarFace {
    /** Identifier of the similar face. */
    public UUID faceId;
    /** Confidence of the match. */
    public double confidence;
}

Просмотреть файл

@ -0,0 +1,47 @@
//
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license.
//
// Microsoft Cognitive Services (formerly Project Oxford): https://www.microsoft.com/cognitive-services
//
// Microsoft Cognitive Services (formerly Project Oxford) GitHub:
// https://github.com/Microsoft/ProjectOxford-ClientSDK
//
// Copyright (c) Microsoft Corporation
// All rights reserved.
//
// MIT License:
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
package com.microsoft.projectoxford.face.contract;
import java.util.UUID;
/**
 * Wire contract for one match returned by a find-similar query,
 * keyed by a persisted face identifier.
 */
public class SimilarPersistedFace {
    /**
     * The persisted face identifier.
     */
    public UUID persistedFaceId;
    /**
     * The confidence value.
     */
    public double confidence;
}

Просмотреть файл

@ -0,0 +1,80 @@
//
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license.
//
// Microsoft Cognitive Services (formerly Project Oxford): https://www.microsoft.com/cognitive-services
//
// Microsoft Cognitive Services (formerly Project Oxford) GitHub:
// https://github.com/Microsoft/ProjectOxford-ClientSDK
//
// Copyright (c) Microsoft Corporation
// All rights reserved.
//
// MIT License:
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
package com.microsoft.projectoxford.face.contract;
import com.google.gson.annotations.SerializedName;
import java.util.Date;
/**
 * Wire contract describing the training state of a person group.
 * The {@code @SerializedName} values bind the enum constants to the
 * lowercase strings used on the wire by gson.
 */
public class TrainingStatus {
    /** Possible training states. */
    public enum Status {
        /**
         * Training is succeeded.
         */
        @SerializedName("succeeded")
        Succeeded,
        /**
         * Training is failed.
         */
        @SerializedName("failed")
        Failed,
        /**
         * Training is in progress.
         */
        @SerializedName("running")
        Running
    }
    /**
     * Training status.
     */
    public Status status;
    /**
     * Creation date time.
     */
    public Date createdDateTime;
    /**
     * Last action date time.
     */
    public Date lastActionDateTime;
    /**
     * Message. Only populated when status is {@link Status#Failed}.
     */
    public String message;
}

Просмотреть файл

@ -0,0 +1,39 @@
//
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license.
//
// Microsoft Cognitive Services (formerly Project Oxford): https://www.microsoft.com/cognitive-services
//
// Microsoft Cognitive Services (formerly Project Oxford) GitHub:
// https://github.com/Microsoft/ProjectOxford-ClientSDK
//
// Copyright (c) Microsoft Corporation
// All rights reserved.
//
// MIT License:
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
package com.microsoft.projectoxford.face.contract;
/**
 * Wire contract for the result of a face verification call.
 * Fields are public for JSON (de)serialization.
 */
public class VerifyResult {
    /** Whether the two faces were judged to belong to the same person. */
    public boolean isIdentical;
    /** Confidence of the verdict. */
    public double confidence;
}

Просмотреть файл

@ -0,0 +1,58 @@
//
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license.
//
// Microsoft Cognitive Services (formerly Project Oxford): https://www.microsoft.com/cognitive-services
//
// Microsoft Cognitive Services (formerly Project Oxford) GitHub:
// https://github.com/Microsoft/ProjectOxford-ClientSDK
//
// Copyright (c) Microsoft Corporation
// All rights reserved.
//
// MIT License:
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
package com.microsoft.projectoxford.face.rest;
import com.microsoft.projectoxford.face.common.ClientError;
/**
 * Exception thrown by the Face REST client. Carries a {@link ClientError}
 * so callers can inspect {@code error.code} / {@code error.message} in
 * addition to the standard {@link #getMessage()}.
 */
public class ClientException extends Exception {

    /** Structured error details; never null. */
    public ClientError error = new ClientError();

    /**
     * Wraps an error payload reported by the service.
     *
     * @param clientError the service-reported error
     */
    public ClientException(ClientError clientError) {
        super(clientError.message);
        // Copy fields rather than aliasing the argument, so later mutation
        // of the caller's ClientError cannot affect this exception.
        error.code = clientError.code;
        error.message = clientError.message;
    }

    /**
     * Creates an exception from a message and an HTTP status code.
     *
     * @param message    human-readable error message
     * @param statusCode HTTP status code, stored as {@code error.code}
     */
    public ClientException(String message, int statusCode) {
        super(message);
        error.code = String.valueOf(statusCode); // avoids needless Integer boxing
        error.message = message;
    }

    /**
     * Creates an exception from a message only.
     *
     * @param message human-readable error message
     */
    public ClientException(String message) {
        super(message);
        // Fix: previously error.message was left null here, so callers that
        // read error.message (as the other constructors support) saw nothing.
        error.message = message;
    }
}

Просмотреть файл

@ -0,0 +1,53 @@
//
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license.
//
// Microsoft Cognitive Services (formerly Project Oxford): https://www.microsoft.com/cognitive-services
//
// Microsoft Cognitive Services (formerly Project Oxford) GitHub:
// https://github.com/Microsoft/ProjectOxford-ClientSDK
//
// Copyright (c) Microsoft Corporation
// All rights reserved.
//
// MIT License:
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED ""AS IS"", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
package com.microsoft.projectoxford.face.rest;
import org.apache.http.client.methods.HttpEntityEnclosingRequestBase;
import java.net.URI;
public class HttpDeleteWithBody extends HttpEntityEnclosingRequestBase {
private static final String METHOD_NAME = "DELETE";
public String getMethod() { return METHOD_NAME; }
public HttpDeleteWithBody(final String uri) {
this(URI.create(uri));
}
public HttpDeleteWithBody(final URI uri) {
super();
setURI(uri);
}
public HttpDeleteWithBody() { super(); }
}

Просмотреть файл

@ -0,0 +1,48 @@
//
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license.
//
// Microsoft Cognitive Services (formerly Project Oxford): https://www.microsoft.com/cognitive-services
//
// Microsoft Cognitive Services (formerly Project Oxford) GitHub:
// https://github.com/Microsoft/ProjectOxford-ClientSDK
//
// Copyright (c) Microsoft Corporation
// All rights reserved.
//
// MIT License:
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED ""AS IS"", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
package com.microsoft.projectoxford.face.rest;
import org.apache.http.client.methods.HttpPost;
/**
 * HTTP PATCH request implemented on top of {@link HttpPost}, since the
 * bundled Apache client has no dedicated PATCH method class.
 */
public class HttpPatch extends HttpPost {

    private static final String METHOD_PATCH = "PATCH";

    /**
     * Creates a PATCH request for the given URL.
     *
     * @param url target of the PATCH request
     */
    public HttpPatch(final String url) {
        super(url);
    }

    /** Reports the HTTP method name as {@code "PATCH"}. */
    @Override
    public String getMethod() {
        return METHOD_PATCH;
    }
}

Просмотреть файл

@ -0,0 +1,46 @@
//
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license.
//
// Microsoft Cognitive Services (formerly Project Oxford): https://www.microsoft.com/cognitive-services
//
// Microsoft Cognitive Services (formerly Project Oxford) GitHub:
// https://github.com/Microsoft/ProjectOxford-ClientSDK
//
// Copyright (c) Microsoft Corporation
// All rights reserved.
//
// MIT License:
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED ""AS IS"", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
package com.microsoft.projectoxford.face.rest;
import com.google.gson.Gson;
/**
 * Exception for service-level failures in the Face client library.
 */
public class ServiceException extends Exception {
/**
 * Creates an exception with the given message.
 *
 * @param message description of the failure
 */
public ServiceException(String message) {
super(message);
}
/**
 * Creates an exception whose message is the string form of the given object.
 *
 * NOTE(review): the parameter type is {@link Gson} — the serializer itself,
 * whose toString() is not a meaningful error description. Presumably a
 * deserialized error object was intended; verify against callers before
 * relying on this constructor.
 *
 * @param errorObject object whose string form becomes the exception message
 */
public ServiceException(Gson errorObject) {
super(errorObject.toString());
}
}

Просмотреть файл

@ -0,0 +1,278 @@
//
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license.
//
// Microsoft Cognitive Services (formerly Project Oxford): https://www.microsoft.com/cognitive-services
//
// Microsoft Cognitive Services (formerly Project Oxford) GitHub:
// https://github.com/Microsoft/ProjectOxford-ClientSDK
//
// Copyright (c) Microsoft Corporation
// All rights reserved.
//
// MIT License:
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED ""AS IS"", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
package com.microsoft.projectoxford.face.rest;
import com.google.gson.Gson;
import com.microsoft.projectoxford.face.common.RequestMethod;
import com.microsoft.projectoxford.face.common.ServiceError;
import org.apache.http.HttpResponse;
import org.apache.http.HttpStatus;
import org.apache.http.client.HttpClient;
import org.apache.http.client.methods.HttpDelete;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpPut;
import org.apache.http.entity.ByteArrayEntity;
import org.apache.http.entity.StringEntity;
import org.apache.http.impl.client.DefaultHttpClient;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import java.util.Map;
public class WebServiceRequest {
private static final String HEADER_KEY = "ocp-apim-subscription-key";
private static final String CONTENT_TYPE = "Content-Type";
private static final String APPLICATION_JSON = "application/json";
private static final String OCTET_STREAM = "octet-stream";
private static final String DATA = "data";
private HttpClient mClient = new DefaultHttpClient();
private String mSubscriptionKey;
private Gson mGson = new Gson();
public WebServiceRequest(String key) {
this.mSubscriptionKey = key;
}
public Object request(String url, RequestMethod method, Map<String, Object> data, String contentType) throws ClientException, IOException {
switch (method) {
case GET:
return get(url);
case HEAD:
break;
case POST:
return post(url, data, contentType);
case PATCH:
return patch(url, data, contentType);
case DELETE:
return delete(url, data);
case PUT:
return put(url, data);
case OPTIONS:
break;
case TRACE:
break;
}
return null;
}
private Object get(String url) throws ClientException, IOException {
HttpGet request = new HttpGet(url);
request.setHeader(HEADER_KEY, mSubscriptionKey);
HttpResponse response = this.mClient.execute(request);
int statusCode = response.getStatusLine().getStatusCode();
if (statusCode == HttpStatus.SC_OK) {
return readInput(response.getEntity().getContent());
} else {
String json = readInput(response.getEntity().getContent());
if (json != null) {
ServiceError error = mGson.fromJson(json, ServiceError.class);
if (error != null) {
throw new ClientException(error.error);
}
}
throw new ClientException("Error executing GET request!", statusCode);
}
}
private Object patch(String url, Map<String, Object> data, String contentType) throws ClientException, IOException {
HttpPatch request = new HttpPatch(url);
request.setHeader(HEADER_KEY, mSubscriptionKey);
String json = mGson.toJson(data).toString();
StringEntity entity = new StringEntity(json);
request.setEntity(entity);
request.setHeader(CONTENT_TYPE, APPLICATION_JSON);
HttpResponse response = mClient.execute(request);
int statusCode = response.getStatusLine().getStatusCode();
if (statusCode == HttpStatus.SC_OK) {
return readInput(response.getEntity().getContent());
} else {
json = readInput(response.getEntity().getContent());
if (json != null) {
ServiceError error = mGson.fromJson(json, ServiceError.class);
if (error != null) {
throw new ClientException(error.error);
}
}
throw new ClientException("Error executing Patch request!", statusCode);
}
}
private Object post(String url, Map<String, Object> data, String contentType) throws ClientException, IOException {
HttpPost request = new HttpPost(url);
boolean isStream = false;
if (contentType != null && !(contentType.length() == 0)) {
request.setHeader(CONTENT_TYPE, contentType);
if (contentType.toLowerCase().contains(OCTET_STREAM)) {
isStream = true;
}
} else {
request.setHeader(CONTENT_TYPE, APPLICATION_JSON);
}
request.setHeader(HEADER_KEY, this.mSubscriptionKey);
if (!isStream) {
String json = mGson.toJson(data).toString();
StringEntity entity = new StringEntity(json);
request.setEntity(entity);
} else {
request.setEntity(new ByteArrayEntity((byte[]) data.get(DATA)));
}
HttpResponse response = mClient.execute(request);
int statusCode = response.getStatusLine().getStatusCode();
if (statusCode == HttpStatus.SC_OK || statusCode == HttpStatus.SC_ACCEPTED) {
return readInput(response.getEntity().getContent());
} else {
String json = readInput(response.getEntity().getContent());
if (json != null) {
ServiceError error = mGson.fromJson(json, ServiceError.class);
if (error != null) {
throw new ClientException(error.error);
}
}
throw new ClientException("Error executing POST request!", statusCode);
}
}
private Object put(String url, Map<String, Object> data) throws ClientException, IOException {
HttpPut request = new HttpPut(url);
request.setHeader(HEADER_KEY, mSubscriptionKey);
String json = mGson.toJson(data).toString();
StringEntity entity = new StringEntity(json);
request.setEntity(entity);
request.setHeader(CONTENT_TYPE, APPLICATION_JSON);
HttpResponse response = mClient.execute(request);
int statusCode = response.getStatusLine().getStatusCode();
if (statusCode == HttpStatus.SC_OK) {
return readInput(response.getEntity().getContent());
} else {
json = readInput(response.getEntity().getContent());
if (json != null) {
ServiceError error = mGson.fromJson(json, ServiceError.class);
if (error != null) {
throw new ClientException(error.error);
}
}
throw new ClientException("Error executing PUT request!", statusCode);
}
}
private Object delete(String url, Map<String, Object> data) throws ClientException, IOException {
HttpResponse response;
if (data == null || data.isEmpty()) {
HttpDelete request = new HttpDelete(url);
request.setHeader(HEADER_KEY, mSubscriptionKey);
response = mClient.execute(request);
} else {
HttpDeleteWithBody request = new HttpDeleteWithBody(url);
String json = mGson.toJson(data).toString();
StringEntity entity = new StringEntity(json);
request.setEntity(entity);
request.setHeader(CONTENT_TYPE, APPLICATION_JSON);
request.setHeader(HEADER_KEY, mSubscriptionKey);
response = mClient.execute(request);
}
int statusCode = response.getStatusLine().getStatusCode();
if (statusCode != HttpStatus.SC_OK) {
String json = readInput(response.getEntity().getContent());
if (json != null) {
ServiceError error = mGson.fromJson(json, ServiceError.class);
if (error != null) {
throw new ClientException(error.error);
}
}
throw new ClientException("Error executing DELETE request!", statusCode);
}
return readInput(response.getEntity().getContent());
}
public static String getUrl(String path, Map<String, Object> params) {
StringBuffer url = new StringBuffer(path);
boolean start = true;
for (Map.Entry<String, Object> param : params.entrySet()) {
if (start) {
url.append("?");
start = false;
} else {
url.append("&");
}
try {
url.append(param.getKey());
url.append("=");
url.append(URLEncoder.encode(param.getValue().toString(), "UTF-8"));
} catch (UnsupportedEncodingException e) {
e.printStackTrace();
}
}
return url.toString();
}
private String readInput(InputStream is) throws IOException {
BufferedReader br = new BufferedReader(new InputStreamReader(is));
StringBuffer json = new StringBuffer();
String line;
while ((line = br.readLine()) != null) {
json.append(line);
}
return json.toString();
}
}

Просмотреть файл

@ -0,0 +1 @@
include ':lib'

Двоичные данные
Data/PersonGroup/Family1-Dad/Family1-Dad1.jpg Normal file

Двоичный файл не отображается.

После

Ширина:  |  Высота:  |  Размер: 44 KiB

Двоичные данные
Data/PersonGroup/Family1-Dad/Family1-Dad2.jpg Normal file

Двоичный файл не отображается.

После

Ширина:  |  Высота:  |  Размер: 46 KiB

Двоичные данные
Data/PersonGroup/Family1-Dad/Family1-Dad3.jpg Normal file

Двоичный файл не отображается.

После

Ширина:  |  Высота:  |  Размер: 34 KiB

Двоичные данные
Data/PersonGroup/Family1-Daughter/Family1-Daughter1.jpg Normal file

Двоичный файл не отображается.

После

Ширина:  |  Высота:  |  Размер: 26 KiB

Двоичные данные
Data/PersonGroup/Family1-Daughter/Family1-Daughter2.jpg Normal file

Двоичный файл не отображается.

После

Ширина:  |  Высота:  |  Размер: 27 KiB

Двоичные данные
Data/PersonGroup/Family1-Daughter/Family1-Daughter3.jpg Normal file

Двоичный файл не отображается.

После

Ширина:  |  Высота:  |  Размер: 18 KiB

Двоичные данные
Data/PersonGroup/Family1-Mom/Family1-Mom1.jpg Normal file

Двоичный файл не отображается.

После

Ширина:  |  Высота:  |  Размер: 33 KiB

Двоичные данные
Data/PersonGroup/Family1-Mom/Family1-Mom2.jpg Normal file

Двоичный файл не отображается.

После

Ширина:  |  Высота:  |  Размер: 26 KiB

Двоичные данные
Data/PersonGroup/Family1-Mom/Family1-Mom3.jpg Normal file

Двоичный файл не отображается.

После

Ширина:  |  Высота:  |  Размер: 30 KiB

Двоичные данные
Data/PersonGroup/Family1-Son/Family1-Son1.jpg Normal file

Двоичный файл не отображается.

После

Ширина:  |  Высота:  |  Размер: 37 KiB

Двоичные данные
Data/PersonGroup/Family1-Son/Family1-Son2.jpg Normal file

Двоичный файл не отображается.

После

Ширина:  |  Высота:  |  Размер: 35 KiB

Двоичные данные
Data/PersonGroup/Family1-Son/Family1-Son3.jpg Normal file

Двоичный файл не отображается.

После

Ширина:  |  Высота:  |  Размер: 37 KiB

Двоичные данные
Data/PersonGroup/Family2-Lady/Family2-Lady1.jpg Normal file

Двоичный файл не отображается.

После

Ширина:  |  Высота:  |  Размер: 61 KiB

Двоичные данные
Data/PersonGroup/Family2-Lady/Family2-Lady2.jpg Normal file

Двоичный файл не отображается.

После

Ширина:  |  Высота:  |  Размер: 40 KiB

Двоичные данные
Data/PersonGroup/Family2-Lady/Family2-Lady3.jpg Normal file

Двоичный файл не отображается.

После

Ширина:  |  Высота:  |  Размер: 40 KiB

Двоичные данные
Data/PersonGroup/Family2-Man/Family2-Man1.jpg Normal file

Двоичный файл не отображается.

После

Ширина:  |  Высота:  |  Размер: 39 KiB

Двоичные данные
Data/PersonGroup/Family2-Man/Family2-Man2.jpg Normal file

Двоичный файл не отображается.

После

Ширина:  |  Высота:  |  Размер: 52 KiB

Двоичные данные
Data/PersonGroup/Family2-Man/Family2-Man3.jpg Normal file

Двоичный файл не отображается.

После

Ширина:  |  Высота:  |  Размер: 55 KiB

Двоичные данные
Data/PersonGroup/Family3-Lady/Family3-Lady1.jpg Normal file

Двоичный файл не отображается.

После

Ширина:  |  Высота:  |  Размер: 54 KiB

Двоичные данные
Data/PersonGroup/Family3-Lady/Family3-Lady2.jpg Normal file

Двоичный файл не отображается.

После

Ширина:  |  Высота:  |  Размер: 63 KiB

Двоичные данные
Data/PersonGroup/Family3-Lady/Family3-Lady4.jpg Normal file

Двоичный файл не отображается.

После

Ширина:  |  Высота:  |  Размер: 36 KiB

Двоичные данные
Data/PersonGroup/Family3-Man/Family3-Man1.jpg Normal file

Двоичный файл не отображается.

После

Ширина:  |  Высота:  |  Размер: 53 KiB

Двоичные данные
Data/PersonGroup/Family3-Man/Family3-Man2.jpg Normal file

Двоичный файл не отображается.

После

Ширина:  |  Высота:  |  Размер: 43 KiB

Двоичные данные
Data/PersonGroup/Family3-Man/Family3-Man3.jpg Normal file

Двоичный файл не отображается.

После

Ширина:  |  Высота:  |  Размер: 34 KiB

Двоичные данные
Data/detection1.jpg Normal file

Двоичный файл не отображается.

После

Ширина:  |  Высота:  |  Размер: 94 KiB

Двоичные данные
Data/detection2.jpg Normal file

Двоичный файл не отображается.

После

Ширина:  |  Высота:  |  Размер: 92 KiB

Двоичные данные
Data/detection3.jpg Normal file

Двоичный файл не отображается.

После

Ширина:  |  Высота:  |  Размер: 152 KiB

Двоичные данные
Data/detection4.jpg Normal file

Двоичный файл не отображается.

После

Ширина:  |  Высота:  |  Размер: 94 KiB

Двоичные данные
Data/detection5.jpg Normal file

Двоичный файл не отображается.

После

Ширина:  |  Высота:  |  Размер: 82 KiB

Двоичные данные
Data/detection6.jpg Normal file

Двоичный файл не отображается.

После

Ширина:  |  Высота:  |  Размер: 117 KiB

Двоичные данные
Data/identification1.jpg Normal file

Двоичный файл не отображается.

После

Ширина:  |  Высота:  |  Размер: 92 KiB

Двоичные данные
Data/identification2.jpg Normal file

Двоичный файл не отображается.

После

Ширина:  |  Высота:  |  Размер: 80 KiB

Двоичные данные
Data/identification3.jpg Normal file

Двоичный файл не отображается.

После

Ширина:  |  Высота:  |  Размер: 96 KiB

Просмотреть файл

@ -1,2 +1,97 @@
# CognitiveServices-ClientSDK-Android-Face
Cognitive Services Face client library for Android.
The client library
==================
The Face API client library is a thin Java client wrapper for Microsoft Cognitive Services (formerly Project Oxford)
Face REST APIs.
The easiest way to consume the client library is to add com.microsoft.projectoxford.face package from Maven Central Repository.
To find the latest version of client library, go to http://search.maven.org, and search for "com.microsoft.projectoxford".
To add the client library dependency from build.gradle file, add the following line in dependencies.
```
dependencies {
//
// Use the following line to include client library from Maven Central Repository
// Change the version number from the search.maven.org result
//
compile 'com.microsoft.projectoxford:face:1.1.0'
// Your other Dependencies...
}
```
To add the client library dependency from Android Studio:
1. From Menu, Choose File \> Project Structure
2. Click on your app module
3. Click on Dependencies tab
4. Click "+" sign to add new dependency
5. Pick "Library dependency" from the drop down list
6. Type "com.microsoft.projectoxford" and hit the search icon from "Choose Library Dependency" dialog
7. Pick the Project Oxford client library that you intend to use.
8. Click "OK" to add the new dependency
The sample
==========
This sample is an Android application to demonstrate the use of Microsoft Cognitive Services (formerly Project Oxford)
Face API.
It demonstrates face detection, face verification, face grouping, finding
similar faces, and face identification functionalities.
Requirements
------------
Android OS must be Android 4.1 or higher (API Level 16 or higher)
Build the sample
----------------
1. First, you must obtain a Face API subscription key by following instructions in [Microsoft Cognitive Services subscription](<https://www.microsoft.com/cognitive-services/en-us/sign-up>).
2. Start Android Studio and open project from Face \> Android \> Sample folder.
3. In Android Studio -\> "Project" panel -\> "Android" view, open file
"app/res/values/strings.xml", and find the line
"Please\_add\_the\_subscription\_key\_here;". Replace the
"Please\_add\_the\_subscription\_key\_here" value with your subscription key
string from the first step. If you cannot find the file "strings.xml", it is
in the folder "Sample\app\src\main\res\values\strings.xml".
4. In Android Studio, select menu "Build \> Make Project" to build the sample.
Run the sample
--------------
In Android Studio, select menu "Run", and "Run app" to launch this sample app.
Once the app is launched, click the buttons to try the samples for the different
scenarios, and follow the instructions on screen.
Microsoft will receive the images you upload and may use them to improve Face
API and related services. By submitting an image, you confirm you have consent
from everyone in it.
<img src="SampleScreenshots/SampleRunning1.png" width="30%"/>
<img src="SampleScreenshots/SampleRunning2.png" width="30%"/>
<img src="SampleScreenshots/SampleRunning3.png" width="30%"/>
Contributing
============
We welcome contributions and are always looking for new SDKs, input, and
suggestions. Feel free to file issues on the repo and we'll address them as we can. You can also learn more about how you can help on the [Contribution
Rules & Guidelines](</CONTRIBUTING.md>).
For questions, feedback, or suggestions about Microsoft Cognitive Services, feel free to reach out to us directly.
- [Cognitive Services UserVoice Forum](<https://cognitive.uservoice.com>)
License
=======
All Microsoft Cognitive Services SDKs and samples are licensed with the MIT License. For more details, see
[LICENSE](</LICENSE.md>).
Sample images are licensed separately, please refer to [LICENSE-IMAGE](</LICENSE-IMAGE.md>).

6
Sample/.gitignore поставляемый Normal file
Просмотреть файл

@ -0,0 +1,6 @@
.gradle
/local.properties
/.idea/workspace.xml
/.idea/libraries
.DS_Store
/build

1
Sample/app/.gitignore поставляемый Normal file
Просмотреть файл

@ -0,0 +1 @@
/build

31
Sample/app/build.gradle Normal file
Просмотреть файл

@ -0,0 +1,31 @@
apply plugin: 'com.android.application'
android {
compileSdkVersion 21
buildToolsVersion "21.1.2"
defaultConfig {
applicationId "com.microsoft.projectoxford.faceapisample"
minSdkVersion 16
targetSdkVersion 21
versionCode 1
versionName "1.0"
}
buildTypes {
release {
minifyEnabled false
proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'
}
}
}
dependencies {
compile fileTree(dir: 'libs', include: ['*.jar'])
//
// Use the following line to include client library for Face API from Maven Central Repository
//
compile 'com.microsoft.projectoxford:face:1.1.0'
compile 'com.android.support:appcompat-v7:21.0.3'
compile 'com.google.code.gson:gson:2.3.1'
}

17
Sample/app/proguard-rules.pro поставляемый Normal file
Просмотреть файл

@ -0,0 +1,17 @@
# Add project specific ProGuard rules here.
# By default, the flags in this file are appended to flags specified
# in C:\Users\wenbiluo\AppData\Local\Android\sdk/tools/proguard/proguard-android.txt
# You can edit the include path and order by changing the proguardFiles
# directive in build.gradle.
#
# For more details, see
# http://developer.android.com/guide/developing/tools/proguard.html
# Add any project specific keep options here:
# If your project uses WebView with JS, uncomment the following
# and specify the fully qualified class name to the JavaScript interface
# class:
#-keepclassmembers class fqcn.of.javascript.interface.for.webview {
# public *;
#}

Просмотреть файл

@ -0,0 +1,167 @@
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="com.microsoft.projectoxford.face.samples" >
<uses-permission android:name="android.permission.INTERNET" />
<uses-permission android:name="android.permission.CAMERA" />
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
<application
android:name=".helper.SampleApp"
android:allowBackup="true"
android:icon="@mipmap/ic_launcher"
android:label="@string/app_name"
android:theme="@style/AppTheme" >
<activity
android:name=".MainActivity"
android:label="@string/app_name"
android:launchMode="singleTop"
android:screenOrientation="portrait" >
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
</intent-filter>
</activity>
<activity
android:name=".DetectionActivity"
android:label="@string/detection"
android:launchMode="singleTop"
android:parentActivityName=".MainActivity"
android:screenOrientation="portrait" >
<meta-data
android:name="android.support.PARENT_ACTIVITY"
android:value=".MainActivity" />
</activity>
<activity
android:name=".VerificationActivity"
android:label="@string/verification"
android:launchMode="singleTop"
android:parentActivityName=".MainActivity"
android:screenOrientation="portrait" >
<meta-data
android:name="android.support.PARENT_ACTIVITY"
android:value=".MainActivity" />
</activity>
<activity
android:name=".IdentificationActivity"
android:label="@string/identification"
android:launchMode="singleTop"
android:parentActivityName=".MainActivity"
android:screenOrientation="portrait" >
<meta-data
android:name="android.support.PARENT_ACTIVITY"
android:value=".MainActivity" />
</activity>
<activity
android:name=".GroupingActivity"
android:label="@string/grouping"
android:launchMode="singleTop"
android:parentActivityName=".MainActivity"
android:screenOrientation="portrait" >
<meta-data
android:name="android.support.PARENT_ACTIVITY"
android:value=".MainActivity" />
</activity>
<activity
android:name=".FindSimilarFaceActivity"
android:label="@string/find_similar_faces"
android:launchMode="singleTop"
android:parentActivityName=".MainActivity"
android:screenOrientation="portrait" >
<meta-data
android:name="android.support.PARENT_ACTIVITY"
android:value=".MainActivity" />
</activity>
<activity
android:name=".helper.SelectImageActivity"
android:label="@string/select_an_image"
android:screenOrientation="portrait" />
<activity
android:name=".persongroupmanagement.PersonGroupListActivity"
android:label="@string/person_group_list"
android:launchMode="singleTop"
android:parentActivityName=".IdentificationActivity"
android:screenOrientation="portrait" >
<meta-data
android:name="android.support.PARENT_ACTIVITY"
android:value=".IdentificationActivity" />
</activity>
<activity
android:name=".persongroupmanagement.PersonGroupActivity"
android:label="@string/person_group"
android:launchMode="singleTop"
android:windowSoftInputMode="adjustNothing"
android:parentActivityName=".persongroupmanagement.PersonGroupListActivity"
android:screenOrientation="portrait" >
<meta-data
android:name="android.support.PARENT_ACTIVITY"
android:value=".persongroupmanagement.PersonGroupListActivity" />
</activity>
<activity
android:name=".persongroupmanagement.PersonActivity"
android:label="@string/person"
android:launchMode="singleTop"
android:windowSoftInputMode="adjustNothing"
android:parentActivityName=".persongroupmanagement.PersonGroupActivity"
android:screenOrientation="portrait" >
<meta-data
android:name="android.support.PARENT_ACTIVITY"
android:value=".persongroupmanagement.PersonGroupActivity" />
</activity>
<activity
android:name=".persongroupmanagement.AddFaceToPersonActivity"
android:label="@string/add_face_to_person"
android:parentActivityName=".persongroupmanagement.PersonActivity"
android:screenOrientation="portrait" >
<meta-data
android:name="android.support.PARENT_ACTIVITY"
android:value=".persongroupmanagement.PersonActivity" />
</activity>
<activity
android:name=".log.DetectionLogActivity"
android:label="@string/detection_log"
android:parentActivityName=".DetectionActivity"
android:screenOrientation="portrait" >
<meta-data
android:name="android.support.PARENT_ACTIVITY"
android:value=".DetectionActivity" />
</activity>
<activity
android:name=".log.VerificationLogActivity"
android:label="@string/verification_log"
android:parentActivityName=".VerificationActivity"
android:screenOrientation="portrait" >
<meta-data
android:name="android.support.PARENT_ACTIVITY"
android:value=".VerificationActivity" />
</activity>
<activity
android:name=".log.FindSimilarFaceLogActivity"
android:label="@string/find_similar_face_log"
android:parentActivityName=".FindSimilarFaceActivity"
android:screenOrientation="portrait" >
<meta-data
android:name="android.support.PARENT_ACTIVITY"
android:value=".FindSimilarFaceActivity" />
</activity>
<activity
android:name=".log.GroupingLogActivity"
android:label="@string/grouping_log"
android:parentActivityName=".GroupingActivity"
android:screenOrientation="portrait" >
<meta-data
android:name="android.support.PARENT_ACTIVITY"
android:value=".GroupingActivity" />
</activity>
<activity
android:name=".log.IdentificationLogActivity"
android:label="@string/identification_log"
android:parentActivityName=".IdentificationActivity"
android:screenOrientation="portrait" >
<meta-data
android:name="android.support.PARENT_ACTIVITY"
android:value=".IdentificationActivity" />
</activity>
</application>
</manifest>

Просмотреть файл

@ -0,0 +1,380 @@
//
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license.
//
// Microsoft Cognitive Services (formerly Project Oxford): https://www.microsoft.com/cognitive-services
//
// Microsoft Cognitive Services (formerly Project Oxford) GitHub:
// https://github.com/Microsoft/ProjectOxford-ClientSDK
//
// Copyright (c) Microsoft Corporation
// All rights reserved.
//
// MIT License:
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED ""AS IS"", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
package com.microsoft.projectoxford.face.samples;
import android.app.ProgressDialog;
import android.content.Context;
import android.content.Intent;
import android.graphics.Bitmap;
import android.net.Uri;
import android.os.AsyncTask;
import android.os.Bundle;
import android.support.annotation.NonNull;
import android.support.v7.app.ActionBarActivity;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.BaseAdapter;
import android.widget.Button;
import android.widget.ImageView;
import android.widget.ListView;
import android.widget.TextView;
import com.microsoft.projectoxford.face.FaceServiceClient;
import com.microsoft.projectoxford.face.contract.Face;
import com.microsoft.projectoxford.face.samples.helper.ImageHelper;
import com.microsoft.projectoxford.face.samples.helper.LogHelper;
import com.microsoft.projectoxford.face.samples.helper.SampleApp;
import com.microsoft.projectoxford.face.samples.helper.SelectImageActivity;
import com.microsoft.projectoxford.face.samples.log.DetectionLogActivity;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.text.DecimalFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
// Demo activity: let the user select an image, send it to the Face API for
// detection, then draw face rectangles on the image and list per-face
// attributes (age, gender, head pose, glasses, smile).
public class DetectionActivity extends ActionBarActivity {
    // Background task of face detection.
    private class DetectionTask extends AsyncTask<InputStream, String, Face[]> {
        // Set to false when the service call throws, so onPostExecute can
        // distinguish "request failed" from "no faces found".
        private boolean mSucceed = true;

        @Override
        protected Face[] doInBackground(InputStream... params) {
            // Get an instance of face service client to detect faces in image.
            FaceServiceClient faceServiceClient = SampleApp.getFaceServiceClient();
            try {
                publishProgress("Detecting...");

                // Start detection.
                return faceServiceClient.detect(
                        params[0], /* Input stream of image to detect */
                        true,      /* Whether to return face ID */
                        true,      /* Whether to return face landmarks */
                        /* Which face attributes to analyze, currently we support:
                           age, gender, headPose, smile, facialHair */
                        new FaceServiceClient.FaceAttributeType[] {
                                FaceServiceClient.FaceAttributeType.Age,
                                FaceServiceClient.FaceAttributeType.Gender,
                                FaceServiceClient.FaceAttributeType.Glasses,
                                FaceServiceClient.FaceAttributeType.Smile,
                                FaceServiceClient.FaceAttributeType.HeadPose
                        });
            } catch (Exception e) {
                mSucceed = false;
                publishProgress(e.getMessage());
                addLog(e.getMessage());
                return null;
            }
        }

        @Override
        protected void onPreExecute() {
            mProgressDialog.show();
            addLog("Request: Detecting in image " + mImageUri);
        }

        @Override
        protected void onProgressUpdate(String... progress) {
            // Mirror the progress message in both the dialog and the info panel.
            mProgressDialog.setMessage(progress[0]);
            setInfo(progress[0]);
        }

        @Override
        protected void onPostExecute(Face[] result) {
            if (mSucceed) {
                addLog("Response: Success. Detected " + (result == null ? 0 : result.length)
                        + " face(s) in " + mImageUri);
            }

            // Show the result on screen when detection is done.
            setUiAfterDetection(result, mSucceed);
        }
    }

    // Request code used when starting SelectImageActivity.
    private static final int REQUEST_SELECT_IMAGE = 0;

    // The URI of the image selected to detect.
    private Uri mImageUri;

    // The image selected to detect.
    private Bitmap mBitmap;

    // Progress dialog popped up when communicating with server.
    ProgressDialog mProgressDialog;

    // When the activity is created, set all the member variables to initial state.
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_detection);

        mProgressDialog = new ProgressDialog(this);
        mProgressDialog.setTitle(getString(R.string.progress_dialog_title));

        // Disable button "detect" as the image to detect is not selected.
        setDetectButtonEnabledStatus(false);

        LogHelper.clearDetectionLog();
    }

    // Save the activity state when it's going to stop.
    @Override
    protected void onSaveInstanceState(Bundle outState) {
        super.onSaveInstanceState(outState);
        outState.putParcelable("ImageUri", mImageUri);
    }

    // Recover the saved state when the activity is recreated.
    @Override
    protected void onRestoreInstanceState(@NonNull Bundle savedInstanceState) {
        super.onRestoreInstanceState(savedInstanceState);
        mImageUri = savedInstanceState.getParcelable("ImageUri");
        if (mImageUri != null) {
            mBitmap = ImageHelper.loadSizeLimitedBitmapFromUri(
                    mImageUri, getContentResolver());
        }
    }

    // Called when image selection is done.
    @Override
    protected void onActivityResult(int requestCode, int resultCode, Intent data) {
        switch (requestCode) {
            case REQUEST_SELECT_IMAGE:
                // FIX: guard against a null intent / null URI, which some image
                // sources may return even with RESULT_OK.
                if (resultCode == RESULT_OK && data != null && data.getData() != null) {
                    // If image is selected successfully, set the image URI and bitmap.
                    mImageUri = data.getData();
                    mBitmap = ImageHelper.loadSizeLimitedBitmapFromUri(
                            mImageUri, getContentResolver());
                    if (mBitmap != null) {
                        // Show the image on screen.
                        ImageView imageView = (ImageView) findViewById(R.id.image);
                        imageView.setImageBitmap(mBitmap);

                        // Add detection log.
                        addLog("Image: " + mImageUri + " resized to " + mBitmap.getWidth()
                                + "x" + mBitmap.getHeight());
                    }

                    // Clear the detection result.
                    FaceListAdapter faceListAdapter = new FaceListAdapter(null);
                    ListView listView = (ListView) findViewById(R.id.list_detected_faces);
                    listView.setAdapter(faceListAdapter);

                    // Clear the information panel.
                    setInfo("");

                    // Enable button "detect" as the image is selected and not detected.
                    setDetectButtonEnabledStatus(true);
                }
                break;
            default:
                break;
        }
    }

    // Called when the "Select Image" button is clicked.
    public void selectImage(View view) {
        Intent intent = new Intent(this, SelectImageActivity.class);
        startActivityForResult(intent, REQUEST_SELECT_IMAGE);
    }

    // Called when the "Detect" button is clicked.
    public void detect(View view) {
        // Put the image into an input stream for detection.
        ByteArrayOutputStream output = new ByteArrayOutputStream();
        mBitmap.compress(Bitmap.CompressFormat.JPEG, 100, output);
        ByteArrayInputStream inputStream = new ByteArrayInputStream(output.toByteArray());

        // Start a background task to detect faces in the image.
        new DetectionTask().execute(inputStream);

        // Prevent button click during detecting.
        setAllButtonsEnabledStatus(false);
    }

    // View the log of service calls.
    public void viewLog(View view) {
        Intent intent = new Intent(this, DetectionLogActivity.class);
        startActivity(intent);
    }

    // Show the result on screen when detection is done.
    private void setUiAfterDetection(Face[] result, boolean succeed) {
        // Detection is done, hide the progress dialog.
        mProgressDialog.dismiss();

        // Enable all the buttons.
        setAllButtonsEnabledStatus(true);

        if (succeed) {
            // Disable button "detect" as the image has already been detected.
            setDetectButtonEnabledStatus(false);

            // The information about the detection result.
            String detectionResult;
            if (result != null) {
                detectionResult = result.length + " face"
                        + (result.length != 1 ? "s" : "") + " detected";

                // Show the detected faces on original image.
                ImageView imageView = (ImageView) findViewById(R.id.image);
                imageView.setImageBitmap(ImageHelper.drawFaceRectanglesOnBitmap(
                        mBitmap, result, true));

                // Set the adapter of the ListView which contains the details of the detected faces.
                FaceListAdapter faceListAdapter = new FaceListAdapter(result);

                // Show the detailed list of detected faces.
                ListView listView = (ListView) findViewById(R.id.list_detected_faces);
                listView.setAdapter(faceListAdapter);
            } else {
                detectionResult = "0 faces detected";
            }
            setInfo(detectionResult);

            // Release the image; a new one must be selected before detecting again.
            mImageUri = null;
            mBitmap = null;
        }
        // FIX: on failure, keep the image and leave "detect" enabled so the
        // user can retry without re-selecting (previously the image was
        // discarded and the button disabled even when the request failed).
    }

    // Set whether the "detect" button is enabled.
    private void setDetectButtonEnabledStatus(boolean isEnabled) {
        Button detectButton = (Button) findViewById(R.id.detect);
        detectButton.setEnabled(isEnabled);
    }

    // Set whether all the buttons are enabled.
    private void setAllButtonsEnabledStatus(boolean isEnabled) {
        Button selectImageButton = (Button) findViewById(R.id.select_image);
        selectImageButton.setEnabled(isEnabled);

        Button detectButton = (Button) findViewById(R.id.detect);
        detectButton.setEnabled(isEnabled);

        Button viewLogButton = (Button) findViewById(R.id.view_log);
        viewLogButton.setEnabled(isEnabled);
    }

    // Set the information panel on screen.
    private void setInfo(String info) {
        TextView textView = (TextView) findViewById(R.id.info);
        textView.setText(info);
    }

    // Add a log item.
    private void addLog(String log) {
        LogHelper.addDetectionLog(log);
    }

    // The adapter of the ListView which contains the details of the detected faces.
    private class FaceListAdapter extends BaseAdapter {
        // The detected faces.
        List<Face> faces;

        // The thumbnails of detected faces.
        List<Bitmap> faceThumbnails;

        // Initialize with detection result (may be null for an empty list).
        FaceListAdapter(Face[] detectionResult) {
            faces = new ArrayList<>();
            faceThumbnails = new ArrayList<>();

            if (detectionResult != null) {
                faces = Arrays.asList(detectionResult);
                for (Face face : faces) {
                    try {
                        // Crop face thumbnail with five main landmarks drawn from original image.
                        faceThumbnails.add(ImageHelper.generateFaceThumbnail(
                                mBitmap, face.faceRectangle));
                    } catch (IOException e) {
                        // Show the exception when generating face thumbnail fails.
                        setInfo(e.getMessage());
                    }
                }
            }
        }

        @Override
        public boolean isEnabled(int position) {
            // Rows are display-only; disable item clicks.
            return false;
        }

        @Override
        public int getCount() {
            return faces.size();
        }

        @Override
        public Object getItem(int position) {
            return faces.get(position);
        }

        @Override
        public long getItemId(int position) {
            return position;
        }

        @Override
        public View getView(final int position, View convertView, ViewGroup parent) {
            if (convertView == null) {
                LayoutInflater layoutInflater =
                        (LayoutInflater) getSystemService(Context.LAYOUT_INFLATER_SERVICE);
                convertView = layoutInflater.inflate(R.layout.item_face_with_description, parent, false);
            }
            convertView.setId(position);

            // Show the face thumbnail.
            ((ImageView) convertView.findViewById(R.id.face_thumbnail)).setImageBitmap(
                    faceThumbnails.get(position));

            // Show the face details.
            DecimalFormat formatter = new DecimalFormat("#0.0");
            String faceDescription = "Age: " + formatter.format(faces.get(position).faceAttributes.age) + "\n"
                    + "Gender: " + faces.get(position).faceAttributes.gender + "\n"
                    + "Head pose(in degree): roll(" + formatter.format(faces.get(position).faceAttributes.headPose.roll) + "), "
                    + "yaw(" + formatter.format(faces.get(position).faceAttributes.headPose.yaw) + ")\n"
                    + "Glasses: " + faces.get(position).faceAttributes.glasses + "\n"
                    + "Smile: " + formatter.format(faces.get(position).faceAttributes.smile);
            ((TextView) convertView.findViewById(R.id.text_detected_face)).setText(faceDescription);

            return convertView;
        }
    }
}

Просмотреть файл

@ -0,0 +1,569 @@
//
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license.
//
// Microsoft Cognitive Services (formerly Project Oxford): https://www.microsoft.com/cognitive-services
//
// Microsoft Cognitive Services (formerly Project Oxford) GitHub:
// https://github.com/Microsoft/ProjectOxford-ClientSDK
//
// Copyright (c) Microsoft Corporation
// All rights reserved.
//
// MIT License:
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
package com.microsoft.projectoxford.face.samples;
import android.app.ProgressDialog;
import android.content.Context;
import android.content.Intent;
import android.graphics.Bitmap;
import android.os.AsyncTask;
import android.os.Bundle;
import android.support.v7.app.ActionBarActivity;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.BaseAdapter;
import android.widget.Button;
import android.widget.GridView;
import android.widget.ImageView;
import android.widget.ListView;
import android.widget.TextView;
import com.microsoft.projectoxford.face.FaceServiceClient;
import com.microsoft.projectoxford.face.contract.Face;
import com.microsoft.projectoxford.face.contract.SimilarFace;
import com.microsoft.projectoxford.face.samples.helper.ImageHelper;
import com.microsoft.projectoxford.face.samples.helper.LogHelper;
import com.microsoft.projectoxford.face.samples.helper.SampleApp;
import com.microsoft.projectoxford.face.samples.helper.SelectImageActivity;
import com.microsoft.projectoxford.face.samples.log.FindSimilarFaceLogActivity;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;
// Demo activity: build a face collection from one or more images, pick a
// target face detected in another image, and ask the Face API which faces in
// the collection look similar to the target.
public class FindSimilarFaceActivity extends ActionBarActivity {
    // Background task for finding similar faces.
    private class FindSimilarFaceTask extends AsyncTask<UUID, String, SimilarFace[]> {
        // Set to false when the service call throws.
        private boolean mSucceed = true;

        @Override
        protected SimilarFace[] doInBackground(UUID... params) {
            // Get an instance of face service client.
            FaceServiceClient faceServiceClient = SampleApp.getFaceServiceClient();
            addLog("Request: Find faces similar to " + params[0].toString() +
                    " in " + (params.length - 1) + " face(s)");
            try {
                publishProgress("Finding Similar Faces...");

                // params[0] is the target face; the remaining IDs form the
                // collection to search in.
                UUID[] faceIds = Arrays.copyOfRange(params, 1, params.length);

                // Start find similar faces.
                return faceServiceClient.findSimilar(
                        params[0],       /* The target face ID */
                        faceIds,         /* The face IDs to search among */
                        faceIds.length); /* Maximum number of candidates returned */
            } catch (Exception e) {
                mSucceed = false;
                publishProgress(e.getMessage());
                addLog(e.getMessage());
                return null;
            }
        }

        @Override
        protected void onPreExecute() {
            mProgressDialog.show();
        }

        @Override
        protected void onProgressUpdate(String... values) {
            // Show the status of background find similar face task on screen.
            setUiDuringBackgroundTask(values[0]);
        }

        @Override
        protected void onPostExecute(SimilarFace[] result) {
            if (mSucceed) {
                String resultString = "Found "
                        + (result == null ? "0" : result.length)
                        + " similar face" + ((result != null && result.length != 1) ? "s" : "");
                addLog("Response: Success. " + resultString);
                setInfo(resultString);
            }

            // Show the result on screen when the query is done.
            setUiAfterFindSimilarFaces(result);
        }
    }

    // Background task for face detection.
    class DetectionTask extends AsyncTask<InputStream, String, Face[]> {
        // Set to false when the service call throws.
        private boolean mSucceed = true;

        // Which flow triggered the detection: REQUEST_ADD_FACE (grow the
        // collection) or REQUEST_SELECT_IMAGE (pick the target face).
        int mRequestCode;

        DetectionTask(int requestCode) {
            mRequestCode = requestCode;
        }

        @Override
        protected Face[] doInBackground(InputStream... params) {
            FaceServiceClient faceServiceClient = SampleApp.getFaceServiceClient();
            try {
                publishProgress("Detecting...");

                // Start detection. Only face IDs are needed here, so landmarks
                // and attributes are not requested.
                return faceServiceClient.detect(
                        params[0], /* Input stream of image to detect */
                        true,      /* Whether to return face ID */
                        false,     /* Whether to return face landmarks */
                        null);     /* Face attributes to analyze (none) */
            } catch (Exception e) {
                mSucceed = false;
                publishProgress(e.getMessage());
                addLog(e.getMessage());
                return null;
            }
        }

        @Override
        protected void onPreExecute() {
            mProgressDialog.show();
        }

        @Override
        protected void onProgressUpdate(String... values) {
            setUiDuringBackgroundTask(values[0]);
        }

        @Override
        protected void onPostExecute(Face[] result) {
            if (mSucceed) {
                // FIX: guard against a null result, matching the logging in the
                // other sample activities.
                addLog("Response: Success. Detected "
                        + (result == null ? 0 : result.length) + " Face(s) in image");
            }
            if (mRequestCode == REQUEST_ADD_FACE) {
                setUiAfterDetectionForAddFace(result);
            } else if (mRequestCode == REQUEST_SELECT_IMAGE) {
                setUiAfterDetectionForSelectImage(result);
            }
        }
    }

    // Show the result of the find-similar-faces query.
    void setUiAfterFindSimilarFaces(SimilarFace[] result) {
        mProgressDialog.dismiss();
        setAllButtonsEnabledStatus(true);

        GridView similarFaces = (GridView) findViewById(R.id.similar_faces);
        mSimilarFaceListAdapter = new SimilarFaceListAdapter(result);
        similarFaces.setAdapter(mSimilarFaceListAdapter);
    }

    // Mirror background-task progress in the dialog and the info panel.
    void setUiDuringBackgroundTask(String progress) {
        mProgressDialog.setMessage(progress);
        setInfo(progress);
    }

    // Detection finished for an image added to the face collection.
    void setUiAfterDetectionForAddFace(Face[] result) {
        setAllButtonsEnabledStatus(true);

        // Show the detailed list of original faces.
        mFaceListAdapter.addFaces(result, mBitmap);
        GridView listView = (GridView) findViewById(R.id.all_faces);
        listView.setAdapter(mFaceListAdapter);

        TextView textView = (TextView) findViewById(R.id.text_all_faces);
        textView.setText(String.format(
                "Face database: %d face%s in total",
                mFaceListAdapter.faces.size(),
                mFaceListAdapter.faces.size() != 1 ? "s" : ""));

        refreshFindSimilarFaceButtonEnabledStatus();
        mBitmap = null;

        // Set the status bar.
        setDetectionStatus();
    }

    // Detection finished for the image from which the target face is picked.
    void setUiAfterDetectionForSelectImage(Face[] result) {
        setAllButtonsEnabledStatus(true);

        // Show the detailed list of detected faces.
        mTargetFaceListAdapter = new FaceListAdapter();
        mTargetFaceListAdapter.addFaces(result, mTargetBitmap);

        // Show the list of detected face thumbnails.
        ListView listView = (ListView) findViewById(R.id.list_faces);
        listView.setAdapter(mTargetFaceListAdapter);

        // Set the default face ID to the ID of first face, if one or more faces are detected.
        if (mTargetFaceListAdapter.faces.size() != 0) {
            mFaceId = mTargetFaceListAdapter.faces.get(0).faceId;

            // Show the thumbnail of the default face.
            ImageView imageView = (ImageView) findViewById(R.id.image);
            imageView.setImageBitmap(mTargetFaceListAdapter.faceThumbnails.get(0));
        }

        refreshFindSimilarFaceButtonEnabledStatus();
        mTargetBitmap = null;

        // Set the status bar.
        setDetectionStatus();
    }

    // Show "done" only when both pending detections have completed.
    private void setDetectionStatus() {
        if (mBitmap == null && mTargetBitmap == null) {
            mProgressDialog.dismiss();
            setInfo("Detection is done");
        } else {
            mProgressDialog.setMessage("Detecting...");
            setInfo("Detecting...");
        }
    }

    // The faces in this image are added to the face collection in which to find similar faces.
    Bitmap mBitmap;

    // The image from which the target face is picked.
    Bitmap mTargetBitmap;

    // Adapter for the face collection.
    FaceListAdapter mFaceListAdapter;

    // Adapter for the candidate target faces.
    FaceListAdapter mTargetFaceListAdapter;

    // Adapter for the similar-face results.
    SimilarFaceListAdapter mSimilarFaceListAdapter;

    // Request code: add faces to the collection.
    protected static final int REQUEST_ADD_FACE = 0;

    // Request code: select the image containing the target face.
    protected static final int REQUEST_SELECT_IMAGE = 1;

    // The ID of the target face to find similar face.
    private UUID mFaceId;

    // Progress dialog popped up when communicating with server.
    ProgressDialog mProgressDialog;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_find_similar_face);

        mFaceListAdapter = new FaceListAdapter();

        mProgressDialog = new ProgressDialog(this);
        mProgressDialog.setTitle(getString(R.string.progress_dialog_title));

        setFindSimilarFaceButtonEnabledStatus(false);

        initializeFaceList();

        LogHelper.clearFindSimilarFaceLog();
    }

    // Both request codes run the same pipeline: load a size-limited bitmap,
    // remember it in the flow-specific field, then kick off detection.
    @Override
    protected void onActivityResult(int requestCode, int resultCode, Intent data) {
        // Guard against a null intent / null URI from the image source.
        if (resultCode != RESULT_OK || data == null || data.getData() == null) {
            return;
        }

        Bitmap bitmap = ImageHelper.loadSizeLimitedBitmapFromUri(
                data.getData(), getContentResolver());
        if (bitmap == null) {
            return;
        }

        if (requestCode == REQUEST_ADD_FACE) {
            mBitmap = bitmap;
        } else if (requestCode == REQUEST_SELECT_IMAGE) {
            mTargetBitmap = bitmap;
        } else {
            return;
        }

        View originalFaces = findViewById(R.id.all_faces);
        originalFaces.setVisibility(View.VISIBLE);

        // Put the image into an input stream for detection.
        ByteArrayOutputStream output = new ByteArrayOutputStream();
        bitmap.compress(Bitmap.CompressFormat.JPEG, 100, output);
        ByteArrayInputStream inputStream = new ByteArrayInputStream(output.toByteArray());

        setAllButtonsEnabledStatus(false);
        addLog("Request: Detecting in image " + data.getData());

        // Start a background task to detect faces in the image.
        new DetectionTask(requestCode).execute(inputStream);
    }

    // Set whether all the buttons are enabled.
    private void setAllButtonsEnabledStatus(boolean isEnabled) {
        Button addFaceButton = (Button) findViewById(R.id.add_faces);
        addFaceButton.setEnabled(isEnabled);

        Button selectImageButton = (Button) findViewById(R.id.select_image);
        selectImageButton.setEnabled(isEnabled);

        Button detectButton = (Button) findViewById(R.id.find_similar_faces);
        detectButton.setEnabled(isEnabled);

        Button logButton = (Button) findViewById(R.id.view_log);
        logButton.setEnabled(isEnabled);
    }

    // Set whether the "find similar faces" button is enabled.
    private void setFindSimilarFaceButtonEnabledStatus(boolean isEnabled) {
        Button button = (Button) findViewById(R.id.find_similar_faces);
        button.setEnabled(isEnabled);
    }

    // Enable "find similar faces" only when both a target face and a
    // non-empty collection exist.
    private void refreshFindSimilarFaceButtonEnabledStatus() {
        if (mFaceListAdapter.faces.size() != 0 && mFaceId != null) {
            setFindSimilarFaceButtonEnabledStatus(true);
        } else {
            setFindSimilarFaceButtonEnabledStatus(false);
        }
    }

    // Initialize the ListView which contains the thumbnails of the detected faces.
    private void initializeFaceList() {
        ListView listView = (ListView) findViewById(R.id.list_faces);

        // When a detected face in the list is clicked, it becomes the target face.
        listView.setOnItemClickListener(new AdapterView.OnItemClickListener() {
            @Override
            public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
                FaceListAdapter faceListAdapter = mTargetFaceListAdapter;
                if (!faceListAdapter.faces.get(position).faceId.equals(mFaceId)) {
                    mFaceId = faceListAdapter.faces.get(position).faceId;

                    ImageView imageView = (ImageView) findViewById(R.id.image);
                    imageView.setImageBitmap(faceListAdapter.faceThumbnails.get(position));

                    // Clear the result of finding similar faces.
                    GridView similarFaces = (GridView) findViewById(R.id.similar_faces);
                    mSimilarFaceListAdapter = new SimilarFaceListAdapter(null);
                    similarFaces.setAdapter(mSimilarFaceListAdapter);

                    setInfo("");
                }

                // Refresh the list so the selected face is highlighted.
                ListView innerListView = (ListView) findViewById(R.id.list_faces);
                innerListView.setAdapter(faceListAdapter);
            }
        });
    }

    // Called when the "Add Faces" button is clicked.
    public void addFaces(View view) {
        Intent intent = new Intent(this, SelectImageActivity.class);
        startActivityForResult(intent, REQUEST_ADD_FACE);
    }

    // Called when the "Find Similar Faces" button is clicked.
    public void findSimilarFaces(View view) {
        if (mFaceId == null || mFaceListAdapter.faces.size() == 0) {
            setInfo("Parameters are not ready");
            // FIX: previously fell through and started the task anyway.
            return;
        }

        List<UUID> faceIds = new ArrayList<>();
        faceIds.add(mFaceId);
        for (Face face : mFaceListAdapter.faces) {
            faceIds.add(face.faceId);
        }

        setAllButtonsEnabledStatus(false);
        new FindSimilarFaceTask().execute(faceIds.toArray(new UUID[faceIds.size()]));
    }

    // View the log of service calls.
    public void viewLog(View view) {
        Intent intent = new Intent(this, FindSimilarFaceLogActivity.class);
        startActivity(intent);
    }

    // Called when the "Select Image" button is clicked.
    public void selectImage(View view) {
        Intent intent = new Intent(this, SelectImageActivity.class);
        startActivityForResult(intent, REQUEST_SELECT_IMAGE);
    }

    // Set the information panel on screen.
    private void setInfo(String info) {
        TextView textView = (TextView) findViewById(R.id.info);
        textView.setText(info);
    }

    // Add a log item.
    private void addLog(String log) {
        LogHelper.addFindSimilarFaceLog(log);
    }

    // The adapter of the views which contain the thumbnails of the detected faces.
    private class FaceListAdapter extends BaseAdapter {
        // The detected faces.
        List<Face> faces;

        // The thumbnails of detected faces, parallel to `faces`.
        List<Bitmap> faceThumbnails;

        // Lookup from face ID to thumbnail, used by SimilarFaceListAdapter.
        Map<UUID, Bitmap> faceIdThumbnailMap;

        FaceListAdapter() {
            faces = new ArrayList<>();
            faceThumbnails = new ArrayList<>();
            faceIdThumbnailMap = new HashMap<>();
        }

        // Append a detection result, cropping a thumbnail for each face.
        public void addFaces(Face[] detectionResult, Bitmap bitmap) {
            if (detectionResult != null) {
                List<Face> detectedFaces = Arrays.asList(detectionResult);
                for (Face face : detectedFaces) {
                    faces.add(face);
                    try {
                        Bitmap faceThumbnail = ImageHelper.generateFaceThumbnail(
                                bitmap, face.faceRectangle);
                        faceThumbnails.add(faceThumbnail);
                        faceIdThumbnailMap.put(face.faceId, faceThumbnail);
                    } catch (IOException e) {
                        // Show the exception when generating face thumbnail fails.
                        setInfo(e.getMessage());
                    }
                }
            }
        }

        @Override
        public int getCount() {
            return faces.size();
        }

        @Override
        public Object getItem(int position) {
            return faces.get(position);
        }

        @Override
        public long getItemId(int position) {
            return position;
        }

        @Override
        public View getView(final int position, View convertView, ViewGroup parent) {
            if (convertView == null) {
                LayoutInflater layoutInflater
                        = (LayoutInflater) getSystemService(Context.LAYOUT_INFLATER_SERVICE);
                convertView = layoutInflater.inflate(R.layout.item_face, parent, false);
            }
            convertView.setId(position);

            Bitmap thumbnailToShow = faceThumbnails.get(position);
            if (faces.get(position).faceId.equals(mFaceId)) {
                // Highlight the currently selected target face.
                thumbnailToShow = ImageHelper.highlightSelectedFaceThumbnail(thumbnailToShow);
            }

            // Show the face thumbnail.
            ((ImageView) convertView.findViewById(R.id.image_face)).setImageBitmap(thumbnailToShow);

            return convertView;
        }
    }

    // The adapter of the GridView which contains the similar-face results.
    private class SimilarFaceListAdapter extends BaseAdapter {
        // The faces found similar to the target.
        List<SimilarFace> similarFaces;

        // Initialize with the find-similar result (may be null for an empty list).
        SimilarFaceListAdapter(SimilarFace[] findSimilarFaceResult) {
            if (findSimilarFaceResult != null) {
                similarFaces = Arrays.asList(findSimilarFaceResult);
            } else {
                similarFaces = new ArrayList<>();
            }
        }

        @Override
        public boolean isEnabled(int position) {
            // Rows are display-only; disable item clicks.
            return false;
        }

        @Override
        public int getCount() {
            return similarFaces.size();
        }

        @Override
        public Object getItem(int position) {
            return similarFaces.get(position);
        }

        @Override
        public long getItemId(int position) {
            return position;
        }

        @Override
        public View getView(final int position, View convertView, ViewGroup parent) {
            if (convertView == null) {
                LayoutInflater layoutInflater =
                        (LayoutInflater) getSystemService(Context.LAYOUT_INFLATER_SERVICE);
                convertView = layoutInflater.inflate(R.layout.item_face, parent, false);
            }
            convertView.setId(position);

            // Look up the thumbnail of this face from the collection adapter.
            ((ImageView) convertView.findViewById(R.id.image_face)).setImageBitmap(
                    mFaceListAdapter.faceIdThumbnailMap.get(similarFaces.get(position).faceId));

            return convertView;
        }
    }
}

Просмотреть файл

@ -0,0 +1,470 @@
//
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license.
//
// Microsoft Cognitive Services (formerly Project Oxford): https://www.microsoft.com/cognitive-services
//
// Microsoft Cognitive Services (formerly Project Oxford) GitHub:
// https://github.com/Microsoft/ProjectOxford-ClientSDK
//
// Copyright (c) Microsoft Corporation
// All rights reserved.
//
// MIT License:
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
package com.microsoft.projectoxford.face.samples;
import android.app.ProgressDialog;
import android.content.Context;
import android.content.Intent;
import android.graphics.Bitmap;
import android.os.AsyncTask;
import android.os.Bundle;
import android.support.v7.app.ActionBarActivity;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.BaseAdapter;
import android.widget.Button;
import android.widget.GridView;
import android.widget.ImageView;
import android.widget.ListView;
import android.widget.TextView;
import com.microsoft.projectoxford.face.FaceServiceClient;
import com.microsoft.projectoxford.face.contract.Face;
import com.microsoft.projectoxford.face.contract.GroupResult;
import com.microsoft.projectoxford.face.samples.helper.EmbeddedGridView;
import com.microsoft.projectoxford.face.samples.helper.ImageHelper;
import com.microsoft.projectoxford.face.samples.helper.LogHelper;
import com.microsoft.projectoxford.face.samples.helper.SampleApp;
import com.microsoft.projectoxford.face.samples.helper.SelectImageActivity;
import com.microsoft.projectoxford.face.samples.log.GroupingLogActivity;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;
public class GroupingActivity extends ActionBarActivity {
// Background task for face grouping.
class GroupingTask extends AsyncTask<UUID, String, GroupResult> {
@Override
protected GroupResult doInBackground(UUID... params) {
FaceServiceClient faceServiceClient = SampleApp.getFaceServiceClient();
addLog("Request: Grouping " + params.length + " face(s)");
try{
publishProgress("Grouping...");
// Start grouping, params are face IDs.
return faceServiceClient.group(params);
} catch (Exception e) {
addLog(e.getMessage());
publishProgress(e.getMessage());
return null;
}
}
@Override
protected void onPreExecute() {
mProgressDialog.show();
}
@Override
protected void onProgressUpdate(String... values) {
setUiDuringBackgroundTask(values[0]);
}
@Override
protected void onPostExecute(GroupResult result) {
if (result != null) {
addLog("Response: Success. Grouped into " + result.groups.size() + " face group(s).");
}
setUiAfterGrouping(result);
}
}
// Background task for face detection
class DetectionTask extends AsyncTask<InputStream, String, Face[]> {
private boolean mSucceed = true;
@Override
protected Face[] doInBackground(InputStream... params) {
FaceServiceClient faceServiceClient = SampleApp.getFaceServiceClient();
try{
publishProgress("Detecting...");
// Start detection.
return faceServiceClient.detect(
params[0], /* Input stream of image to detect */
true, /* Whether to return face ID */
false, /* Whether to return face landmarks */
/* Which face attributes to analyze, currently we support:
age,gender,headPose,smile,facialHair */
null);
} catch (Exception e) {
mSucceed = false;
publishProgress(e.getMessage());
addLog(e.getMessage());
return null;
}
}
@Override
protected void onPreExecute() {
mProgressDialog.show();
}
@Override
protected void onProgressUpdate(String... values) {
setUiDuringBackgroundTask(values[0]);
}
@Override
protected void onPostExecute(Face[] result) {
if (mSucceed) {
addLog("Response: Success. Detected " + result.length + " Face(s) in image");
}
setUiAfterDetection(result);
}
}
void setUiDuringBackgroundTask(String progress) {
mProgressDialog.setMessage(progress);
setInfo(progress);
}
// Update the UI once face detection finishes; result == null means failure.
void setUiAfterDetection(Face[] result) {
    mProgressDialog.dismiss();
    setAllButtonsEnabledStatus(true);
    if (result != null) {
        setInfo("Detection is done");
        // Append the newly detected faces to the face-collection grid.
        mFaceListAdapter.addFaces(result);
        ((GridView) findViewById(R.id.all_faces)).setAdapter(mFaceListAdapter);
        int count = mFaceListAdapter.faces.size();
        ((TextView) findViewById(R.id.text_all_faces)).setText(String.format(
                "%d face%s in total", count, count != 1 ? "s" : ""));
    }
    // The service groups between 2 and 100 faces; gate the button on that.
    int total = mFaceListAdapter.faces.size();
    setGroupButtonEnabledStatus(total >= 2 && total <= 100);
}
// Update the UI once grouping finishes; result == null means failure.
void setUiAfterGrouping(GroupResult result) {
    mProgressDialog.dismiss();
    setAllButtonsEnabledStatus(true);
    if (result != null) {
        setInfo("Grouping is done");
        // Disable until more faces are added; grouping the same set again
        // would return the same result.
        setGroupButtonEnabledStatus(false);
        // Show the result of face grouping.
        ListView groupedFaces = (ListView) findViewById(R.id.grouped_faces);
        FaceGroupsAdapter faceGroupsAdapter = new FaceGroupsAdapter(result);
        groupedFaces.setAdapter(faceGroupsAdapter);
    }
}
// The faces in this image are added to face collection for grouping.
Bitmap mBitmap;
// The face collection view adapter.
FaceListAdapter mFaceListAdapter;
// Request code used when launching SelectImageActivity for a result.
protected static final int REQUEST_SELECT_IMAGE = 0;
// Progress dialog popped up when communicating with server.
ProgressDialog mProgressDialog;
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_grouping);
    mProgressDialog = new ProgressDialog(this);
    mProgressDialog.setTitle(getString(R.string.progress_dialog_title));
    mFaceListAdapter = new FaceListAdapter();
    // Grouping needs at least two faces; start disabled.
    setGroupButtonEnabledStatus(false);
    // Start each session with an empty grouping log.
    LogHelper.clearGroupingLog();
}
// Called when SelectImageActivity returns with a picked image.
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
    if (requestCode == REQUEST_SELECT_IMAGE) {
        if(resultCode == RESULT_OK) {
            // Decode a size-limited bitmap from the returned image URI.
            mBitmap = ImageHelper.loadSizeLimitedBitmapFromUri(data.getData(), getContentResolver());
            if (mBitmap != null) {
                View originalFaces = findViewById(R.id.all_faces);
                originalFaces.setVisibility(View.VISIBLE);
                // Reset the grouping result list (adapter built from null
                // renders empty).
                ListView groupedFaces = (ListView) findViewById(R.id.grouped_faces);
                FaceGroupsAdapter faceGroupsAdapter = new FaceGroupsAdapter(null);
                groupedFaces.setAdapter(faceGroupsAdapter);
                // Put the image into an input stream for detection.
                ByteArrayOutputStream output = new ByteArrayOutputStream();
                mBitmap.compress(Bitmap.CompressFormat.JPEG, 100, output);
                ByteArrayInputStream inputStream = new ByteArrayInputStream(output.toByteArray());
                setAllButtonsEnabledStatus(false);
                addLog("Request: Detecting in image " + data.getData());
                // Start a background task to detect faces in the image.
                new DetectionTask().execute(inputStream);
            }
        }
    }
}
// Called when the "Add Faces" button is clicked: open the image picker.
public void addFaces(View view) {
    startActivityForResult(
            new Intent(this, SelectImageActivity.class), REQUEST_SELECT_IMAGE);
}
// Called when the "Group" button is clicked: group all collected faces.
public void group(View view) {
    // Collect the IDs of every face added so far.
    List<UUID> faceIds = new ArrayList<>();
    for (Face face: mFaceListAdapter.faces) {
        faceIds.add(face.faceId);
    }
    if (faceIds.size() > 0) {
        // Kick off the grouping request in the background.
        new GroupingTask().execute(faceIds.toArray(new UUID[faceIds.size()]));
        setAllButtonsEnabledStatus(false);
    } else {
        // Nothing to group; report via the shared info-panel helper for
        // consistency with the rest of this activity.
        setInfo(getString(R.string.no_face_to_group));
    }
}
// Called when the "View Log" button is clicked.
public void viewLog(View view) {
    startActivity(new Intent(this, GroupingLogActivity.class));
}
// Set whether all three action buttons are enabled.
private void setAllButtonsEnabledStatus(boolean isEnabled) {
    // Locals renamed to match the widgets they bind (the old
    // "selectImageButton"/"ViewLogButton" names were misleading and
    // violated lowerCamelCase).
    Button addFacesButton = (Button) findViewById(R.id.add_faces);
    addFacesButton.setEnabled(isEnabled);
    Button groupButton = (Button) findViewById(R.id.group);
    groupButton.setEnabled(isEnabled);
    Button viewLogButton = (Button) findViewById(R.id.view_log);
    viewLogButton.setEnabled(isEnabled);
}
// Enable or disable the group button only.
private void setGroupButtonEnabledStatus(boolean isEnabled) {
    Button button = (Button) findViewById(R.id.group);
    button.setEnabled(isEnabled);
}
// Set the information panel on screen.
private void setInfo(String info) {
    ((TextView) findViewById(R.id.info)).setText(info);
}
// Add a log item to the grouping log (viewable via the "View Log" button).
private void addLog(String log) {
    LogHelper.addGroupingLog(log);
}
// The adapter of the GridView which contains the thumbnails of the detected faces.
private class FaceListAdapter extends BaseAdapter {
    // The detected faces.
    List<Face> faces;
    // The thumbnails of detected faces, parallel to 'faces'.
    List<Bitmap> faceThumbnails;
    // Maps a face ID to its thumbnail so FacesAdapter can render groups.
    Map<UUID, Bitmap> faceIdThumbnailMap;
    FaceListAdapter() {
        faces = new ArrayList<>();
        faceThumbnails = new ArrayList<>();
        faceIdThumbnailMap = new HashMap<>();
    }
    // Append the faces of a new detection result to the collection.
    public void addFaces(Face[] detectionResult) {
        if (detectionResult != null) {
            List<Face> detectedFaces = Arrays.asList(detectionResult);
            for (Face face: detectedFaces) {
                faces.add(face);
                try {
                    Bitmap faceThumbnail = ImageHelper.generateFaceThumbnail(
                            mBitmap, face.faceRectangle);
                    faceThumbnails.add(faceThumbnail);
                    faceIdThumbnailMap.put(face.faceId, faceThumbnail);
                } catch (IOException e) {
                    // Show the exception when generating face thumbnail fails.
                    // Uses the shared setInfo helper instead of duplicating
                    // the findViewById/setText pair inline.
                    setInfo(e.getMessage());
                }
            }
        }
    }
    @Override
    public int getCount() {
        return faces.size();
    }
    @Override
    public Object getItem(int position) {
        return faces.get(position);
    }
    @Override
    public long getItemId(int position) {
        return position;
    }
    @Override
    public View getView(final int position, View convertView, ViewGroup parent) {
        if (convertView == null) {
            LayoutInflater layoutInflater = (LayoutInflater)getSystemService(Context.LAYOUT_INFLATER_SERVICE);
            convertView = layoutInflater.inflate(R.layout.item_face, parent, false);
        }
        convertView.setId(position);
        // Show the face thumbnail.
        ((ImageView)convertView.findViewById(R.id.image_face)).setImageBitmap(faceThumbnails.get(position));
        return convertView;
    }
}
// The adapter of the ListView which shows one row per face group.
private class FaceGroupsAdapter extends BaseAdapter {
    // The face groups; the service's "messy group" is appended last.
    List<List<UUID> > faceGroups;
    FaceGroupsAdapter(GroupResult result) {
        faceGroups = new ArrayList<>();
        if (result != null) {
            for (UUID[] group: result.groups) {
                faceGroups.add(Arrays.asList(group));
            }
            faceGroups.add(result.messyGroup);
        }
    }
    @Override
    public int getCount() {
        return faceGroups.size();
    }
    @Override
    public Object getItem(int position) {
        return faceGroups.get(position);
    }
    @Override
    public long getItemId(int position) {
        return position;
    }
    @Override
    public View getView(final int position, View convertView, ViewGroup parent) {
        if (convertView == null) {
            LayoutInflater layoutInflater = (LayoutInflater)getSystemService(Context.LAYOUT_INFLATER_SERVICE);
            convertView = layoutInflater.inflate(R.layout.item_face_group, parent, false);
        }
        convertView.setId(position);
        // The last row is always the messy group (see constructor).
        String faceGroupName = "Group " + position + ": " + faceGroups.get(position).size() + " face(s)";
        if (position == faceGroups.size() - 1) {
            faceGroupName = "Messy Group: " + faceGroups.get(position).size() + " face(s)";
        }
        ((TextView) convertView.findViewById(R.id.face_group_name)).setText(faceGroupName);
        // Each row embeds a grid of the group's face thumbnails.
        FacesAdapter facesAdapter = new FacesAdapter(faceGroups.get(position));
        EmbeddedGridView gridView = (EmbeddedGridView) convertView.findViewById(R.id.faces);
        gridView.setAdapter(facesAdapter);
        return convertView;
    }
}
// The adapter of the embedded GridView showing the thumbnails of one face group.
private class FacesAdapter extends BaseAdapter {
    // The face IDs belonging to this group.
    List<UUID> faces;
    FacesAdapter(List<UUID> result) {
        faces = new ArrayList<>();
        faces.addAll(result);
    }
    @Override
    public int getCount() {
        return faces.size();
    }
    @Override
    public Object getItem(int position) {
        return faces.get(position);
    }
    @Override
    public long getItemId(int position) {
        return position;
    }
    @Override
    public View getView(final int position, View convertView, ViewGroup parent) {
        if (convertView == null) {
            LayoutInflater layoutInflater = (LayoutInflater)getSystemService(Context.LAYOUT_INFLATER_SERVICE);
            convertView = layoutInflater.inflate(R.layout.item_face, parent, false);
        }
        convertView.setId(position);
        // Look up the thumbnail by face ID in the collection adapter's map.
        ((ImageView)convertView.findViewById(R.id.image_face)).setImageBitmap(
                mFaceListAdapter.faceIdThumbnailMap.get(faces.get(position)));
        return convertView;
    }
}
}

Просмотреть файл

@ -0,0 +1,607 @@
//
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license.
//
// Microsoft Cognitive Services (formerly Project Oxford): https://www.microsoft.com/cognitive-services
//
// Microsoft Cognitive Services (formerly Project Oxford) GitHub:
// https://github.com/Microsoft/ProjectOxford-ClientSDK
//
// Copyright (c) Microsoft Corporation
// All rights reserved.
//
// MIT License:
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
package com.microsoft.projectoxford.face.samples;
import android.app.ProgressDialog;
import android.content.Context;
import android.content.Intent;
import android.graphics.Bitmap;
import android.graphics.Color;
import android.net.Uri;
import android.os.AsyncTask;
import android.os.Bundle;
import android.support.v7.app.ActionBarActivity;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.BaseAdapter;
import android.widget.Button;
import android.widget.ImageView;
import android.widget.ListView;
import android.widget.TextView;
import com.microsoft.projectoxford.face.FaceServiceClient;
import com.microsoft.projectoxford.face.contract.Face;
import com.microsoft.projectoxford.face.contract.IdentifyResult;
import com.microsoft.projectoxford.face.contract.TrainingStatus;
import com.microsoft.projectoxford.face.samples.helper.ImageHelper;
import com.microsoft.projectoxford.face.samples.helper.LogHelper;
import com.microsoft.projectoxford.face.samples.helper.SampleApp;
import com.microsoft.projectoxford.face.samples.helper.SelectImageActivity;
import com.microsoft.projectoxford.face.samples.helper.StorageHelper;
import com.microsoft.projectoxford.face.samples.log.IdentificationLogActivity;
import com.microsoft.projectoxford.face.samples.persongroupmanagement.PersonGroupListActivity;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.text.DecimalFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Set;
import java.util.UUID;
public class IdentificationActivity extends ActionBarActivity {
// Background task of face identification.
private class IdentificationTask extends AsyncTask<UUID, String, IdentifyResult[]> {
    // Whether the identification request completed without an error.
    private boolean mSucceed = true;
    String mPersonGroupId;
    IdentificationTask(String personGroupId) {
        this.mPersonGroupId = personGroupId;
    }
    @Override
    protected IdentifyResult[] doInBackground(UUID... params) {
        // Build the request log with a StringBuilder instead of string
        // concatenation inside a loop.
        StringBuilder logString = new StringBuilder("Request: Identifying faces ");
        for (UUID faceId: params) {
            logString.append(faceId.toString()).append(", ");
        }
        logString.append(" in group ").append(mPersonGroupId);
        addLog(logString.toString());
        // Get an instance of face service client to detect faces in image.
        FaceServiceClient faceServiceClient = SampleApp.getFaceServiceClient();
        try{
            publishProgress("Getting person group status...");
            // Identification requires the person group to be trained first.
            TrainingStatus trainingStatus = faceServiceClient.getPersonGroupTrainingStatus(
                    this.mPersonGroupId); /* personGroupId */
            if (trainingStatus.status != TrainingStatus.Status.Succeeded) {
                publishProgress("Person group training status is " + trainingStatus.status);
                mSucceed = false;
                return null;
            }
            publishProgress("Identifying...");
            // Start identification. NOTE(review): "identity" is the actual
            // method name exposed by this SDK version, not a local typo.
            return faceServiceClient.identity(
                    this.mPersonGroupId, /* personGroupId */
                    params, /* faceIds */
                    1); /* maxNumOfCandidatesReturned */
        } catch (Exception e) {
            mSucceed = false;
            publishProgress(e.getMessage());
            addLog(e.getMessage());
            return null;
        }
    }
    @Override
    protected void onPreExecute() {
        setUiBeforeBackgroundTask();
    }
    @Override
    protected void onProgressUpdate(String... values) {
        // Show the status of the background identification task on screen.
        setUiDuringBackgroundTask(values[0]);
    }
    @Override
    protected void onPostExecute(IdentifyResult[] result) {
        // Show the result on screen when identification is done.
        setUiAfterIdentification(result, mSucceed);
    }
}
// The person group currently selected for identification (null if none).
String mPersonGroupId;
// Whether a detection result is available for the current image.
boolean detected;
// Adapter listing detected faces with their identification results.
FaceListAdapter mFaceListAdapter;
// Adapter listing the locally stored person groups.
PersonGroupListAdapter mPersonGroupListAdapter;
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_identification);
    detected = false;
    progressDialog = new ProgressDialog(this);
    progressDialog.setTitle(getString(R.string.progress_dialog_title));
    // Start each session with an empty identification log.
    LogHelper.clearIdentificationLog();
}
// Rebuild the person-group list on every resume so changes made in the
// person-group management screen are reflected here.
@Override
protected void onResume() {
    super.onResume();
    ListView listView = (ListView) findViewById(R.id.list_person_groups_identify);
    mPersonGroupListAdapter = new PersonGroupListAdapter();
    listView.setAdapter(mPersonGroupListAdapter);
    listView.setOnItemClickListener(new AdapterView.OnItemClickListener() {
        @Override
        public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
            setPersonGroupSelected(position);
        }
    });
    // Select the first group if any exist; -1 shows the "no group" warning.
    if (mPersonGroupListAdapter.personGroupIdList.size() != 0) {
        setPersonGroupSelected(0);
    } else {
        setPersonGroupSelected(-1);
    }
}
// Select a person group by list position. position > 0 moves the clicked
// group to the head of the list and recurses with 0; position < 0 means no
// group is available; position == 0 applies the head of the list.
void setPersonGroupSelected(int position) {
    TextView textView = (TextView) findViewById(R.id.text_person_group_selected);
    if (position > 0) {
        // Swap the clicked group with the head, refresh the list, then
        // re-run with position 0 to finish the selection.
        String personGroupIdSelected = mPersonGroupListAdapter.personGroupIdList.get(position);
        mPersonGroupListAdapter.personGroupIdList.set(
                position, mPersonGroupListAdapter.personGroupIdList.get(0));
        mPersonGroupListAdapter.personGroupIdList.set(0, personGroupIdSelected);
        ListView listView = (ListView) findViewById(R.id.list_person_groups_identify);
        listView.setAdapter(mPersonGroupListAdapter);
        setPersonGroupSelected(0);
    } else if (position < 0) {
        // Nothing to select: disable identification and warn in red.
        setIdentifyButtonEnabledStatus(false);
        textView.setTextColor(Color.RED);
        textView.setText(R.string.no_person_group_selected_for_identification_warning);
    } else {
        // The head of the list is the selected group.
        mPersonGroupId = mPersonGroupListAdapter.personGroupIdList.get(0);
        String personGroupName = StorageHelper.getPersonGroupName(
                mPersonGroupId, IdentificationActivity.this);
        refreshIdentifyButtonEnabledStatus();
        textView.setTextColor(Color.BLACK);
        textView.setText(String.format("Person group to use: %s", personGroupName));
    }
}
// Pop up the progress dialog before a background task starts.
private void setUiBeforeBackgroundTask() {
    progressDialog.show();
}
// Show the status of the running background task on screen.
private void setUiDuringBackgroundTask(String progress) {
    progressDialog.setMessage(progress);
    setInfo(progress);
}
// Show the result on screen when identification is done.
private void setUiAfterIdentification(IdentifyResult[] result, boolean succeed) {
    progressDialog.dismiss();
    setAllButtonsEnabledStatus(true);
    setIdentifyButtonEnabledStatus(false);
    if (succeed) {
        // Set the information about the identification result.
        setInfo("Identification is done");
        if (result != null) {
            mFaceListAdapter.setIdentificationResult(result);
            // Build the response log with a StringBuilder instead of
            // string concatenation inside a loop.
            StringBuilder logString = new StringBuilder("Response: Success. ");
            for (IdentifyResult identifyResult: result) {
                logString.append("Face ").append(identifyResult.faceId.toString())
                        .append(" is identified as ")
                        .append(identifyResult.candidates.size() > 0
                                ? identifyResult.candidates.get(0).personId.toString()
                                : "Unknown Person")
                        .append(". ");
            }
            addLog(logString.toString());
            // Show the detailed list of identified faces.
            ListView listView = (ListView) findViewById(R.id.list_identified_faces);
            listView.setAdapter(mFaceListAdapter);
        }
    }
}
// Background task of face detection.
private class DetectionTask extends AsyncTask<InputStream, String, Face[]> {
    @Override
    protected Face[] doInBackground(InputStream... params) {
        // Get an instance of face service client to detect faces in image.
        FaceServiceClient faceServiceClient = SampleApp.getFaceServiceClient();
        try{
            publishProgress("Detecting...");
            // Start detection.
            return faceServiceClient.detect(
                    params[0], /* Input stream of image to detect */
                    true, /* Whether to return face ID */
                    false, /* Whether to return face landmarks */
                    /* Which face attributes to analyze, currently we support:
                       age,gender,headPose,smile,facialHair */
                    null);
        } catch (Exception e) {
            publishProgress(e.getMessage());
            return null;
        }
    }
    @Override
    protected void onPreExecute() {
        setUiBeforeBackgroundTask();
    }
    @Override
    protected void onProgressUpdate(String... values) {
        // Show the status of background detection task on screen.
        setUiDuringBackgroundTask(values[0]);
    }
    @Override
    protected void onPostExecute(Face[] result) {
        progressDialog.dismiss();
        setAllButtonsEnabledStatus(true);
        if (result != null) {
            // Set the adapter of the ListView which contains the details of detected faces.
            mFaceListAdapter = new FaceListAdapter(result);
            ListView listView = (ListView) findViewById(R.id.list_identified_faces);
            listView.setAdapter(mFaceListAdapter);
            if (result.length == 0) {
                detected = false;
                setInfo("No faces detected!");
            } else {
                detected = true;
                setInfo("Click on the \"Identify\" button to identify the faces in image.");
            }
        } else {
            // Request failed (exception path in doInBackground).
            detected = false;
        }
        refreshIdentifyButtonEnabledStatus();
    }
}
// Request code used when launching SelectImageActivity for a result.
private static final int REQUEST_SELECT_IMAGE = 0;
// The image selected to detect.
private Bitmap mBitmap;
// Progress dialog popped up when communicating with server.
ProgressDialog progressDialog;
// Called when image selection is done.
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
    switch (requestCode)
    {
        case REQUEST_SELECT_IMAGE:
            if(resultCode == RESULT_OK) {
                detected = false;
                // If image is selected successfully, set the image URI and bitmap.
                Uri imageUri = data.getData();
                mBitmap = ImageHelper.loadSizeLimitedBitmapFromUri(
                        imageUri, getContentResolver());
                if (mBitmap != null) {
                    // Show the image on screen.
                    ImageView imageView = (ImageView) findViewById(R.id.image);
                    imageView.setImageBitmap(mBitmap);
                }
                // Clear the identification result (adapter built from null
                // renders empty).
                FaceListAdapter faceListAdapter = new FaceListAdapter(null);
                ListView listView = (ListView) findViewById(R.id.list_identified_faces);
                listView.setAdapter(faceListAdapter);
                // Clear the information panel.
                setInfo("");
                // Start detecting in image.
                detect(mBitmap);
            }
            break;
        default:
            break;
    }
}
// Kick off face detection for the given bitmap.
private void detect(Bitmap bitmap) {
    // Re-encode the bitmap as JPEG and wrap it in an input stream, since
    // the detection API consumes a stream.
    ByteArrayOutputStream encoded = new ByteArrayOutputStream();
    bitmap.compress(Bitmap.CompressFormat.JPEG, 100, encoded);
    setAllButtonsEnabledStatus(false);
    // Run detection off the UI thread.
    new DetectionTask().execute(new ByteArrayInputStream(encoded.toByteArray()));
}
// Called when the "Select Image" button is clicked: open the image picker.
public void selectImage(View view) {
    startActivityForResult(
            new Intent(this, SelectImageActivity.class), REQUEST_SELECT_IMAGE);
}
// Called when the "Identify" button is clicked.
public void identify(View view) {
    // Start identification only if faces were detected AND a person group
    // is selected.
    if (detected && mPersonGroupId != null) {
        // Start a background task to identify faces in the image.
        List<UUID> faceIds = new ArrayList<>();
        for (Face face: mFaceListAdapter.faces) {
            faceIds.add(face.faceId);
        }
        setAllButtonsEnabledStatus(false);
        new IdentificationTask(mPersonGroupId).execute(
                faceIds.toArray(new UUID[faceIds.size()]));
    } else {
        // No detection result yet, or no person group selected.
        setInfo("Please select an image and create a person group first.");
    }
}
// Called when the "Manage Person Groups" button is clicked.
public void managePersonGroups(View view) {
    Intent intent = new Intent(this, PersonGroupListActivity.class);
    startActivity(intent);
    refreshIdentifyButtonEnabledStatus();
}
// Called when the "View Log" button is clicked.
public void viewLog(View view) {
    startActivity(new Intent(this, IdentificationLogActivity.class));
}
// Add a log item to the identification log.
private void addLog(String log) {
    LogHelper.addIdentificationLog(log);
}
// Set whether all four action buttons are enabled.
private void setAllButtonsEnabledStatus(boolean isEnabled) {
    // Locals renamed to match the widgets they actually bind; the old
    // names (selectImageButton for manage_person_groups, groupButton for
    // select_image) contradicted the resource IDs.
    Button managePersonGroupsButton = (Button) findViewById(R.id.manage_person_groups);
    managePersonGroupsButton.setEnabled(isEnabled);
    Button selectImageButton = (Button) findViewById(R.id.select_image);
    selectImageButton.setEnabled(isEnabled);
    Button identifyButton = (Button) findViewById(R.id.identify);
    identifyButton.setEnabled(isEnabled);
    Button viewLogButton = (Button) findViewById(R.id.view_log);
    viewLogButton.setEnabled(isEnabled);
}
// Enable or disable the identify button only.
private void setIdentifyButtonEnabledStatus(boolean isEnabled) {
    Button button = (Button) findViewById(R.id.identify);
    button.setEnabled(isEnabled);
}
// Enable the identify button only when faces are detected and a person
// group is selected.
private void refreshIdentifyButtonEnabledStatus() {
    setIdentifyButtonEnabledStatus(detected && mPersonGroupId != null);
}
// Set the information panel on screen.
private void setInfo(String info) {
    ((TextView) findViewById(R.id.info)).setText(info);
}
// The adapter of the ListView which contains the details of the detected faces.
private class FaceListAdapter extends BaseAdapter {
    // The detected faces.
    List<Face> faces;
    // Identification results, parallel to 'faces' once set.
    List<IdentifyResult> mIdentifyResults;
    // The thumbnails of detected faces.
    List<Bitmap> faceThumbnails;
    // Initialize with detection result (null yields an empty adapter).
    FaceListAdapter(Face[] detectionResult) {
        faces = new ArrayList<>();
        faceThumbnails = new ArrayList<>();
        mIdentifyResults = new ArrayList<>();
        if (detectionResult != null) {
            faces = Arrays.asList(detectionResult);
            for (Face face: faces) {
                try {
                    // Crop face thumbnail with five main landmarks drawn from original image.
                    faceThumbnails.add(ImageHelper.generateFaceThumbnail(
                            mBitmap, face.faceRectangle));
                } catch (IOException e) {
                    // Show the exception when generating face thumbnail fails.
                    setInfo(e.getMessage());
                }
            }
        }
    }
    // Attach identification results; rows render names once sizes match.
    public void setIdentificationResult(IdentifyResult[] identifyResults) {
        mIdentifyResults = Arrays.asList(identifyResults);
    }
    @Override
    public boolean isEnabled(int position) {
        // Rows are display-only, never clickable.
        return false;
    }
    @Override
    public int getCount() {
        return faces.size();
    }
    @Override
    public Object getItem(int position) {
        return faces.get(position);
    }
    @Override
    public long getItemId(int position) {
        return position;
    }
    @Override
    public View getView(final int position, View convertView, ViewGroup parent) {
        if (convertView == null) {
            LayoutInflater layoutInflater =
                    (LayoutInflater)getSystemService(Context.LAYOUT_INFLATER_SERVICE);
            convertView = layoutInflater.inflate(
                    R.layout.item_face_with_description, parent, false);
        }
        convertView.setId(position);
        // Show the face thumbnail.
        ((ImageView)convertView.findViewById(R.id.face_thumbnail)).setImageBitmap(
                faceThumbnails.get(position));
        // Only render identity text once results are attached for all faces.
        if (mIdentifyResults.size() == faces.size()) {
            // Show the face details.
            DecimalFormat formatter = new DecimalFormat("#0.00");
            if (mIdentifyResults.get(position).candidates.size() > 0) {
                String personId =
                        mIdentifyResults.get(position).candidates.get(0).personId.toString();
                String personName = StorageHelper.getPersonName(
                        personId, mPersonGroupId, IdentificationActivity.this);
                String identity = "Person: " + personName + "\n"
                        + "Confidence: " + formatter.format(
                                mIdentifyResults.get(position).candidates.get(0).confidence);
                ((TextView) convertView.findViewById(R.id.text_detected_face)).setText(
                        identity);
            } else {
                ((TextView) convertView.findViewById(R.id.text_detected_face)).setText(
                        R.string.face_cannot_be_identified);
            }
        }
        return convertView;
    }
}
// The adapter of the ListView which contains the person groups.
private class PersonGroupListAdapter extends BaseAdapter {
    // Person group IDs, with the currently selected group (if any) first.
    List<String> personGroupIdList;
    // Load all stored person groups, moving the selected one to the front.
    PersonGroupListAdapter() {
        personGroupIdList = new ArrayList<>();
        Set<String> personGroupIds
                = StorageHelper.getAllPersonGroupIds(IdentificationActivity.this);
        for (String personGroupId: personGroupIds) {
            personGroupIdList.add(personGroupId);
            if (mPersonGroupId != null && personGroupId.equals(mPersonGroupId)) {
                // BUGFIX: swap within THIS adapter's list. The original code
                // read from and mutated mPersonGroupListAdapter (the previous
                // adapter instance), corrupting the new list's contents.
                personGroupIdList.set(
                        personGroupIdList.size() - 1,
                        personGroupIdList.get(0));
                personGroupIdList.set(0, personGroupId);
            }
        }
    }
    @Override
    public int getCount() {
        return personGroupIdList.size();
    }
    @Override
    public Object getItem(int position) {
        return personGroupIdList.get(position);
    }
    @Override
    public long getItemId(int position) {
        return position;
    }
    @Override
    public View getView(final int position, View convertView, ViewGroup parent) {
        if (convertView == null) {
            LayoutInflater layoutInflater =
                    (LayoutInflater)getSystemService(Context.LAYOUT_INFLATER_SERVICE);
            convertView = layoutInflater.inflate(R.layout.item_person_group, parent, false);
        }
        convertView.setId(position);
        // Row text: group name plus its person count.
        String personGroupName = StorageHelper.getPersonGroupName(
                personGroupIdList.get(position), IdentificationActivity.this);
        int personNumberInGroup = StorageHelper.getAllPersonIds(
                personGroupIdList.get(position), IdentificationActivity.this).size();
        ((TextView)convertView.findViewById(R.id.text_person_group)).setText(
                String.format(
                        "%s (Person count: %d)",
                        personGroupName,
                        personNumberInGroup));
        // Highlight the selected (first) group in blue.
        if (position == 0) {
            ((TextView)convertView.findViewById(R.id.text_person_group)).setTextColor(
                    Color.parseColor("#3399FF"));
        }
        return convertView;
    }
}
}

Просмотреть файл

@ -0,0 +1,80 @@
//
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license.
//
// Microsoft Cognitive Services (formerly Project Oxford): https://www.microsoft.com/cognitive-services
//
// Microsoft Cognitive Services (formerly Project Oxford) GitHub:
// https://github.com/Microsoft/ProjectOxford-ClientSDK
//
// Copyright (c) Microsoft Corporation
// All rights reserved.
//
// MIT License:
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
package com.microsoft.projectoxford.face.samples;
import android.app.AlertDialog;
import android.content.Intent;
import android.os.Bundle;
import android.support.v7.app.ActionBarActivity;
import android.view.View;
// Entry screen: warns about a missing subscription key and routes the user
// to the individual demo activities.
public class MainActivity extends ActionBarActivity {
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        // Warn when the subscription-key placeholder has not been replaced
        // with a real key.
        if (getString(R.string.subscription_key).startsWith("Please")) {
            new AlertDialog.Builder(this)
                    .setTitle(getString(R.string.add_subscription_key_tip_title))
                    .setMessage(getString(R.string.add_subscription_key_tip))
                    .setCancelable(false)
                    .show();
        }
    }
    // Launch one of the demo activities.
    private void launch(Class<?> activityClass) {
        startActivity(new Intent(this, activityClass));
    }
    public void detection(View view) {
        launch(DetectionActivity.class);
    }
    public void verification(View view) {
        launch(VerificationActivity.class);
    }
    public void grouping(View view) {
        launch(GroupingActivity.class);
    }
    public void findSimilarFace(View view) {
        launch(FindSimilarFaceActivity.class);
    }
    public void identification(View view) {
        launch(IdentificationActivity.class);
    }
}

Просмотреть файл

@ -0,0 +1,537 @@
//
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license.
//
// Microsoft Cognitive Services (formerly Project Oxford): https://www.microsoft.com/cognitive-services
//
// Microsoft Cognitive Services (formerly Project Oxford) GitHub:
// https://github.com/Microsoft/ProjectOxford-ClientSDK
//
// Copyright (c) Microsoft Corporation
// All rights reserved.
//
// MIT License:
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
package com.microsoft.projectoxford.face.samples;
import android.app.ProgressDialog;
import android.content.Context;
import android.content.Intent;
import android.graphics.Bitmap;
import android.os.AsyncTask;
import android.os.Bundle;
import android.support.v7.app.ActionBarActivity;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.BaseAdapter;
import android.widget.Button;
import android.widget.ImageView;
import android.widget.ListView;
import android.widget.TextView;
import com.microsoft.projectoxford.face.FaceServiceClient;
import com.microsoft.projectoxford.face.contract.Face;
import com.microsoft.projectoxford.face.contract.VerifyResult;
import com.microsoft.projectoxford.face.samples.helper.ImageHelper;
import com.microsoft.projectoxford.face.samples.helper.LogHelper;
import com.microsoft.projectoxford.face.samples.helper.SampleApp;
import com.microsoft.projectoxford.face.samples.helper.SelectImageActivity;
import com.microsoft.projectoxford.face.samples.log.VerificationLogActivity;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.text.DecimalFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.UUID;
public class VerificationActivity extends ActionBarActivity {
// Background task for face verification.
private class VerificationTask extends AsyncTask<Void, String, VerifyResult> {
    // The IDs of two face to verify.
    private UUID mFaceId0;
    private UUID mFaceId1;
    VerificationTask (UUID faceId0, UUID faceId1) {
        mFaceId0 = faceId0;
        mFaceId1 = faceId1;
    }
    @Override
    protected VerifyResult doInBackground(Void... params) {
        // Get an instance of face service client to detect faces in image.
        FaceServiceClient faceServiceClient = SampleApp.getFaceServiceClient();
        try{
            publishProgress("Verifying...");
            // Start verification.
            return faceServiceClient.verify(
                    mFaceId0, /* The first face ID to verify */
                    mFaceId1); /* The second face ID to verify */
        } catch (Exception e) {
            // Surface the error in the UI and the log; null signals failure.
            publishProgress(e.getMessage());
            addLog(e.getMessage());
            return null;
        }
    }
    @Override
    protected void onPreExecute() {
        progressDialog.show();
        addLog("Request: Verifying face " + mFaceId0 + " and face " + mFaceId1);
    }
    @Override
    protected void onProgressUpdate(String... progress) {
        // Mirror progress text in the dialog and the info panel.
        progressDialog.setMessage(progress[0]);
        setInfo(progress[0]);
    }
    @Override
    protected void onPostExecute(VerifyResult result) {
        // result is null when the request threw; only log on success.
        if (result != null) {
            addLog("Response: Success. Face " + mFaceId0 + " and face "
                    + mFaceId1 + (result.isIdentical ? " " : " don't ")
                    + "belong to the same person");
        }
        // Show the result on screen when verification is done.
        setUiAfterVerification(result);
    }
}
// Background task of face detection.
private class DetectionTask extends AsyncTask<InputStream, String, Face[]> {
// Index indicates detecting in which of the two images.
private int mIndex;
private boolean mSucceed = true;
DetectionTask(int index) {
mIndex = index;
}
@Override
protected Face[] doInBackground(InputStream... params) {
// Get an instance of face service client to detect faces in image.
FaceServiceClient faceServiceClient = SampleApp.getFaceServiceClient();
try{
publishProgress("Detecting...");
// Start detection.
return faceServiceClient.detect(
params[0], /* Input stream of image to detect */
true, /* Whether to return face ID */
false, /* Whether to return face landmarks */
/* Which face attributes to analyze, currently we support:
age,gender,headPose,smile,facialHair */
null);
} catch (Exception e) {
mSucceed = false;
publishProgress(e.getMessage());
addLog(e.getMessage());
return null;
}
}
@Override
protected void onPreExecute() {
progressDialog.show();
addLog("Request: Detecting in image" + mIndex);
}
@Override
protected void onProgressUpdate(String... progress) {
progressDialog.setMessage(progress[0]);
setInfo(progress[0]);
}
@Override
protected void onPostExecute(Face[] result) {
// Show the result on screen when detection is done.
setUiAfterDetection(result, mIndex, mSucceed);
}
}
// Flag to indicate which task is to be performed.
private static final int REQUEST_SELECT_IMAGE_0 = 0;
private static final int REQUEST_SELECT_IMAGE_1 = 1;
// The IDs of the two faces to be verified.
private UUID mFaceId0;
private UUID mFaceId1;
// The two images from where we get the two faces to verify.
private Bitmap mBitmap0;
private Bitmap mBitmap1;
// The adapter of the ListView which contains the detected faces from the two images.
protected FaceListAdapter mFaceListAdapter0;
protected FaceListAdapter mFaceListAdapter1;
// Progress dialog popped up when communicating with server.
ProgressDialog progressDialog;
// When the activity is created, set all the member variables to initial state.
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_verification);
// Initialize the two ListViews which contain the thumbnails of the detected faces.
initializeFaceList(0);
initializeFaceList(1);
progressDialog = new ProgressDialog(this);
progressDialog.setTitle(getString(R.string.progress_dialog_title));
clearDetectedFaces(0);
clearDetectedFaces(1);
// Disable button "verify" as the two face IDs to verify are not ready.
setVerifyButtonEnabledStatus(false);
LogHelper.clearVerificationLog();
}
// Called when image selection is done. Begin detecting if the image is selected successfully.
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
// Index indicates which of the two images is selected.
int index;
if (requestCode == REQUEST_SELECT_IMAGE_0) {
index = 0;
} else if (requestCode == REQUEST_SELECT_IMAGE_1) {
index = 1;
} else {
return;
}
if(resultCode == RESULT_OK) {
// If image is selected successfully, set the image URI and bitmap.
Bitmap bitmap = ImageHelper.loadSizeLimitedBitmapFromUri(
data.getData(), getContentResolver());
if (bitmap != null) {
// Image is select but not detected, disable verification button.
setVerifyButtonEnabledStatus(false);
clearDetectedFaces(index);
// Set the image to detect.
if (index == 0) {
mBitmap0 = bitmap;
mFaceId0 = null;
} else {
mBitmap1 = bitmap;
mFaceId1 = null;
}
// Add verification log.
addLog("Image" + index + ": " + data.getData() + " resized to " + bitmap.getWidth()
+ "x" + bitmap.getHeight());
// Start detecting in image.
detect(bitmap, index);
}
}
}
// Clear the detected faces indicated by index.
private void clearDetectedFaces(int index) {
ListView faceList = (ListView) findViewById(
index == 0 ? R.id.list_faces_0: R.id.list_faces_1);
faceList.setVisibility(View.GONE);
ImageView imageView =
(ImageView) findViewById(index == 0 ? R.id.image_0: R.id.image_1);
imageView.setImageResource(android.R.color.transparent);
}
// Called when the "Select Image0" button is clicked.
public void selectImage0(View view) {
selectImage(0);
}
// Called when the "Select Image1" button is clicked.
public void selectImage1(View view) {
selectImage(1);
}
// Called when the "Verify" button is clicked.
public void verify(View view) {
setAllButtonEnabledStatus(false);
new VerificationTask(mFaceId0, mFaceId1).execute();
}
// View the log of service calls.
public void viewLog(View view) {
Intent intent = new Intent(this, VerificationLogActivity.class);
startActivity(intent);
}
// Select the image indicated by index.
private void selectImage(int index) {
Intent intent = new Intent(this, SelectImageActivity.class);
startActivityForResult(intent, index == 0 ? REQUEST_SELECT_IMAGE_0: REQUEST_SELECT_IMAGE_1);
}
// Set the select image button is enabled or not.
private void setSelectImageButtonEnabledStatus(boolean isEnabled, int index) {
Button button;
if (index == 0) {
button = (Button) findViewById(R.id.select_image_0);
} else {
button = (Button) findViewById(R.id.select_image_1);
}
button.setEnabled(isEnabled);
Button viewLog = (Button) findViewById(R.id.view_log);
viewLog.setEnabled(isEnabled);
}
// Set the verify button is enabled or not.
private void setVerifyButtonEnabledStatus(boolean isEnabled) {
Button button = (Button) findViewById(R.id.verify);
button.setEnabled(isEnabled);
}
// Set all the buttons are enabled or not.
private void setAllButtonEnabledStatus(boolean isEnabled) {
Button selectImage0 = (Button) findViewById(R.id.select_image_0);
selectImage0.setEnabled(isEnabled);
Button selectImage1 = (Button) findViewById(R.id.select_image_1);
selectImage1.setEnabled(isEnabled);
Button verify = (Button) findViewById(R.id.verify);
verify.setEnabled(isEnabled);
Button viewLog = (Button) findViewById(R.id.view_log);
viewLog.setEnabled(isEnabled);
}
// Initialize the ListView which contains the thumbnails of the detected faces.
private void initializeFaceList(final int index) {
ListView listView =
(ListView) findViewById(index == 0 ? R.id.list_faces_0: R.id.list_faces_1);
// When a detected face in the GridView is clicked, the face is selected to verify.
listView.setOnItemClickListener(new AdapterView.OnItemClickListener() {
@Override
public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
FaceListAdapter faceListAdapter =
index == 0 ? mFaceListAdapter0: mFaceListAdapter1;
if (!faceListAdapter.faces.get(position).faceId.equals(
index == 0 ? mFaceId0: mFaceId1)) {
if (index == 0) {
mFaceId0 = faceListAdapter.faces.get(position).faceId;
} else {
mFaceId1 = faceListAdapter.faces.get(position).faceId;
}
ImageView imageView =
(ImageView) findViewById(index == 0 ? R.id.image_0: R.id.image_1);
imageView.setImageBitmap(faceListAdapter.faceThumbnails.get(position));
setInfo("");
}
// Show the list of detected face thumbnails.
ListView listView = (ListView) findViewById(
index == 0 ? R.id.list_faces_0: R.id.list_faces_1);
listView.setAdapter(faceListAdapter);
}
});
}
// Show the result on screen when verification is done.
private void setUiAfterVerification(VerifyResult result) {
// Verification is done, hide the progress dialog.
progressDialog.dismiss();
// Enable all the buttons.
setAllButtonEnabledStatus(true);
// Show verification result.
if (result != null) {
DecimalFormat formatter = new DecimalFormat("#0.00");
String verificationResult = (result.isIdentical ? "The same person": "Different persons")
+ ". The confidence is " + formatter.format(result.confidence);
setInfo(verificationResult);
}
}
// Show the result on screen when detection in image that indicated by index is done.
private void setUiAfterDetection(Face[] result, int index, boolean succeed) {
setSelectImageButtonEnabledStatus(true, index);
if (succeed) {
addLog("Response: Success. Detected "
+ result.length + " face(s) in image" + index);
setInfo(result.length + " face" + (result.length != 1 ? "s": "") + " detected");
// Show the detailed list of detected faces.
FaceListAdapter faceListAdapter = new FaceListAdapter(result, index);
// Set the default face ID to the ID of first face, if one or more faces are detected.
if (faceListAdapter.faces.size() != 0) {
if (index == 0) {
mFaceId0 = faceListAdapter.faces.get(0).faceId;
} else {
mFaceId1 = faceListAdapter.faces.get(0).faceId;
}
// Show the thumbnail of the default face.
ImageView imageView = (ImageView) findViewById(index == 0 ? R.id.image_0: R.id.image_1);
imageView.setImageBitmap(faceListAdapter.faceThumbnails.get(0));
}
// Show the list of detected face thumbnails.
ListView listView = (ListView) findViewById(
index == 0 ? R.id.list_faces_0: R.id.list_faces_1);
listView.setAdapter(faceListAdapter);
listView.setVisibility(View.VISIBLE);
// Set the face list adapters and bitmaps.
if (index == 0) {
mFaceListAdapter0 = faceListAdapter;
mBitmap0 = null;
} else {
mFaceListAdapter1 = faceListAdapter;
mBitmap1 = null;
}
}
if (result != null && result.length == 0) {
setInfo("No face detected!");
}
if ((index == 0 && mBitmap1 == null) || (index == 1 && mBitmap0 == null)) {
progressDialog.dismiss();
}
if (mFaceId0 != null && mFaceId1 != null) {
setVerifyButtonEnabledStatus(true);
}
}
// Start detecting in image specified by index.
private void detect(Bitmap bitmap, int index) {
// Put the image into an input stream for detection.
ByteArrayOutputStream output = new ByteArrayOutputStream();
bitmap.compress(Bitmap.CompressFormat.JPEG, 100, output);
ByteArrayInputStream inputStream = new ByteArrayInputStream(output.toByteArray());
// Start a background task to detect faces in the image.
new DetectionTask(index).execute(inputStream);
setSelectImageButtonEnabledStatus(false, index);
// Set the status to show that detection starts.
setInfo("Detecting...");
}
// Set the information panel on screen.
private void setInfo(String info) {
TextView textView = (TextView) findViewById(R.id.info);
textView.setText(info);
}
// Add a log item.
private void addLog(String log) {
LogHelper.addVerificationLog(log);
}
// The adapter of the GridView which contains the thumbnails of the detected faces.
private class FaceListAdapter extends BaseAdapter {
// The detected faces.
List<Face> faces;
int mIndex;
// The thumbnails of detected faces.
List<Bitmap> faceThumbnails;
// Initialize with detection result and index indicating on which image the result is got.
FaceListAdapter(Face[] detectionResult, int index) {
faces = new ArrayList<>();
faceThumbnails = new ArrayList<>();
mIndex = index;
if (detectionResult != null) {
faces = Arrays.asList(detectionResult);
for (Face face: faces) {
try {
// Crop face thumbnail without landmarks drawn.
faceThumbnails.add(ImageHelper.generateFaceThumbnail(
index == 0 ? mBitmap0: mBitmap1, face.faceRectangle));
} catch (IOException e) {
// Show the exception when generating face thumbnail fails.
setInfo(e.getMessage());
}
}
}
}
@Override
public int getCount() {
return faces.size();
}
@Override
public Object getItem(int position) {
return faces.get(position);
}
@Override
public long getItemId(int position) {
return position;
}
@Override
public View getView(final int position, View convertView, ViewGroup parent) {
if (convertView == null) {
LayoutInflater layoutInflater =
(LayoutInflater)getSystemService(Context.LAYOUT_INFLATER_SERVICE);
convertView = layoutInflater.inflate(R.layout.item_face, parent, false);
}
convertView.setId(position);
Bitmap thumbnailToShow = faceThumbnails.get(position);
if (mIndex == 0 && faces.get(position).faceId.equals(mFaceId0)) {
thumbnailToShow = ImageHelper.highlightSelectedFaceThumbnail(thumbnailToShow);
} else if (mIndex == 1 && faces.get(position).faceId.equals(mFaceId1)){
thumbnailToShow = ImageHelper.highlightSelectedFaceThumbnail(thumbnailToShow);
}
// Show the face thumbnail.
((ImageView)convertView.findViewById(R.id.image_face)).setImageBitmap(thumbnailToShow);
return convertView;
}
}
}

Просмотреть файл

@ -0,0 +1,52 @@
//
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license.
//
// Microsoft Cognitive Services (formerly Project Oxford): https://www.microsoft.com/cognitive-services
//
// Microsoft Cognitive Services (formerly Project Oxford) GitHub:
// https://github.com/Microsoft/ProjectOxford-ClientSDK
//
// Copyright (c) Microsoft Corporation
// All rights reserved.
//
// MIT License:
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
package com.microsoft.projectoxford.face.samples.helper;
import android.content.Context;
import android.util.AttributeSet;
import android.widget.GridView;
/**
 * A GridView meant to be embedded inside a scrolling parent (e.g. a ScrollView).
 * A plain GridView collapses to roughly one row in that situation; this subclass
 * measures itself tall enough to show all of its rows at once.
 */
public class EmbeddedGridView extends GridView {
    // Standard two-argument constructor required for inflation from XML layouts.
    public EmbeddedGridView(Context context, AttributeSet attrs) {
        super(context, attrs);
    }
    @Override
    protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
        // Replace the given height spec with an effectively unbounded AT_MOST
        // spec (MEASURED_SIZE_MASK is the largest encodable size), so the
        // GridView lays out every row instead of fitting the parent's height.
        int newHeightMeasureSpec =
                MeasureSpec.makeMeasureSpec(MEASURED_SIZE_MASK, MeasureSpec.AT_MOST);
        super.onMeasure(widthMeasureSpec, newHeightMeasureSpec);
        // Pin the layout height to the fully expanded measured height so the
        // parent scroll container scrolls through all rows.
        getLayoutParams().height = getMeasuredHeight();
    }
}

Просмотреть файл

@ -0,0 +1,323 @@
//
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license.
//
// Microsoft Cognitive Services (formerly Project Oxford): https://www.microsoft.com/cognitive-services
//
// Microsoft Cognitive Services (formerly Project Oxford) GitHub:
// https://github.com/Microsoft/ProjectOxford-ClientSDK
//
// Copyright (c) Microsoft Corporation
// All rights reserved.
//
// MIT License:
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
package com.microsoft.projectoxford.face.samples.helper;
import android.content.ContentResolver;
import android.database.Cursor;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Matrix;
import android.graphics.Paint;
import android.graphics.Rect;
import android.media.ExifInterface;
import android.net.Uri;
import android.provider.MediaStore;
import com.microsoft.projectoxford.face.contract.Face;
import com.microsoft.projectoxford.face.contract.FaceRectangle;
import java.io.IOException;
import java.io.InputStream;
/**
* Defined several functions to load, draw, save, resize, and rotate images.
*/
/**
 * Defines several functions to load, draw, save, resize, and rotate images.
 */
public class ImageHelper {
    // The maximum side length of the image to detect, to keep the size of image less than 4MB.
    // Resize the image if its side length is larger than the maximum.
    private static final int IMAGE_MAX_SIDE_LENGTH = 1280;

    // Ratio to scale a detected face rectangle; the scaled-up rectangle looks more natural.
    private static final double FACE_RECT_SCALE_RATIO = 1.3;

    // Decode image from imageUri, down-sampling so that neither side exceeds
    // IMAGE_MAX_SIDE_LENGTH, and rotating it upright according to its metadata.
    // If the image's max side length is already within the limit, the original
    // size is kept. Returns null when the image cannot be loaded.
    public static Bitmap loadSizeLimitedBitmapFromUri(
            Uri imageUri,
            ContentResolver contentResolver) {
        try {
            // For saving memory, first decode only the image metadata to get the side lengths.
            BitmapFactory.Options options = new BitmapFactory.Options();
            options.inJustDecodeBounds = true;
            Rect outPadding = new Rect();
            // try-with-resources guarantees the stream is closed even on the
            // error paths (the previous version leaked it when decoding threw).
            try (InputStream imageInputStream = contentResolver.openInputStream(imageUri)) {
                BitmapFactory.decodeStream(imageInputStream, outPadding, options);
            }
            // Calculate shrink rate when loading the image into memory.
            int maxSideLength =
                    options.outWidth > options.outHeight ? options.outWidth : options.outHeight;
            options.inSampleSize = calculateSampleSize(maxSideLength, IMAGE_MAX_SIDE_LENGTH);
            options.inJustDecodeBounds = false;
            // Load the bitmap (already down-sampled by inSampleSize).
            Bitmap bitmap;
            try (InputStream imageInputStream = contentResolver.openInputStream(imageUri)) {
                bitmap = BitmapFactory.decodeStream(imageInputStream, outPadding, options);
            }
            // inSampleSize only works in powers of two; do a final exact scale if needed.
            maxSideLength = bitmap.getWidth() > bitmap.getHeight()
                    ? bitmap.getWidth() : bitmap.getHeight();
            double ratio = IMAGE_MAX_SIDE_LENGTH / (double) maxSideLength;
            if (ratio < 1) {
                bitmap = Bitmap.createScaledBitmap(
                        bitmap,
                        (int) (bitmap.getWidth() * ratio),
                        (int) (bitmap.getHeight() * ratio),
                        false);
            }
            return rotateBitmap(bitmap, getImageRotationAngle(imageUri, contentResolver));
        } catch (Exception e) {
            // Best effort: callers treat null as "image could not be loaded".
            return null;
        }
    }

    // Draw detected face rectangles on a copy of the original image and return it.
    // If drawLandmarks is true, also draw the five main landmarks of each face.
    public static Bitmap drawFaceRectanglesOnBitmap(
            Bitmap originalBitmap, Face[] faces, boolean drawLandmarks) {
        Bitmap bitmap = originalBitmap.copy(Bitmap.Config.ARGB_8888, true);
        Canvas canvas = new Canvas(bitmap);
        Paint paint = new Paint();
        paint.setAntiAlias(true);
        paint.setStyle(Paint.Style.STROKE);
        paint.setColor(Color.GREEN);
        // Scale the stroke width with the image so rectangles stay visible.
        int strokeWidth = Math.max(originalBitmap.getWidth(), originalBitmap.getHeight()) / 100;
        if (strokeWidth == 0) {
            strokeWidth = 1;
        }
        paint.setStrokeWidth(strokeWidth);
        if (faces != null) {
            for (Face face : faces) {
                FaceRectangle faceRectangle =
                        calculateFaceRectangle(bitmap, face.faceRectangle, FACE_RECT_SCALE_RATIO);
                canvas.drawRect(
                        faceRectangle.left,
                        faceRectangle.top,
                        faceRectangle.left + faceRectangle.width,
                        faceRectangle.top + faceRectangle.height,
                        paint);
                if (drawLandmarks) {
                    // Landmark dot size proportional to the face size.
                    int radius = face.faceRectangle.width / 30;
                    if (radius == 0) {
                        radius = 1;
                    }
                    paint.setStyle(Paint.Style.FILL);
                    paint.setStrokeWidth(radius);
                    canvas.drawCircle(
                            (float) face.faceLandmarks.pupilLeft.x,
                            (float) face.faceLandmarks.pupilLeft.y,
                            radius,
                            paint);
                    canvas.drawCircle(
                            (float) face.faceLandmarks.pupilRight.x,
                            (float) face.faceLandmarks.pupilRight.y,
                            radius,
                            paint);
                    canvas.drawCircle(
                            (float) face.faceLandmarks.noseTip.x,
                            (float) face.faceLandmarks.noseTip.y,
                            radius,
                            paint);
                    canvas.drawCircle(
                            (float) face.faceLandmarks.mouthLeft.x,
                            (float) face.faceLandmarks.mouthLeft.y,
                            radius,
                            paint);
                    canvas.drawCircle(
                            (float) face.faceLandmarks.mouthRight.x,
                            (float) face.faceLandmarks.mouthRight.y,
                            radius,
                            paint);
                    // Restore the paint for the next face's rectangle.
                    paint.setStyle(Paint.Style.STROKE);
                    paint.setStrokeWidth(strokeWidth);
                }
            }
        }
        return bitmap;
    }

    // Return a copy of the thumbnail with a thick blue border, used to mark the
    // currently selected face in a face list.
    public static Bitmap highlightSelectedFaceThumbnail(Bitmap originalBitmap) {
        Bitmap bitmap = originalBitmap.copy(Bitmap.Config.ARGB_8888, true);
        Canvas canvas = new Canvas(bitmap);
        Paint paint = new Paint();
        paint.setAntiAlias(true);
        paint.setStyle(Paint.Style.STROKE);
        paint.setColor(Color.parseColor("#3399FF"));
        int strokeWidth = Math.max(originalBitmap.getWidth(), originalBitmap.getHeight()) / 10;
        if (strokeWidth == 0) {
            strokeWidth = 1;
        }
        paint.setStrokeWidth(strokeWidth);
        canvas.drawRect(
                0,
                0,
                bitmap.getWidth(),
                bitmap.getHeight(),
                paint);
        return bitmap;
    }

    // Crop the face thumbnail out from the original image.
    // For a better view for humans, face rectangles are enlarged by FACE_RECT_SCALE_RATIO.
    public static Bitmap generateFaceThumbnail(
            Bitmap originalBitmap,
            FaceRectangle faceRectangle) throws IOException {
        FaceRectangle faceRect =
                calculateFaceRectangle(originalBitmap, faceRectangle, FACE_RECT_SCALE_RATIO);
        return Bitmap.createBitmap(
                originalBitmap, faceRect.left, faceRect.top, faceRect.width, faceRect.height);
    }

    // Return the shrink factor used when loading the image into memory.
    // BitmapFactory's inSampleSize is only honored in powers of 2, so halve
    // until the (halved) side length is at most twice the expected maximum.
    private static int calculateSampleSize(int maxSideLength, int expectedMaxImageSideLength) {
        int inSampleSize = 1;
        while (maxSideLength > 2 * expectedMaxImageSideLength) {
            maxSideLength /= 2;
            inSampleSize *= 2;
        }
        return inSampleSize;
    }

    // Get the rotation angle of the image: prefer the media store's ORIENTATION
    // column; fall back to the file's EXIF orientation when no cursor is available.
    private static int getImageRotationAngle(
            Uri imageUri, ContentResolver contentResolver) throws IOException {
        int angle = 0;
        Cursor cursor = contentResolver.query(imageUri,
                new String[] { MediaStore.Images.ImageColumns.ORIENTATION }, null, null, null);
        if (cursor != null) {
            if (cursor.getCount() == 1) {
                cursor.moveToFirst();
                angle = cursor.getInt(0);
            }
            cursor.close();
        } else {
            ExifInterface exif = new ExifInterface(imageUri.getPath());
            int orientation = exif.getAttributeInt(
                    ExifInterface.TAG_ORIENTATION, ExifInterface.ORIENTATION_NORMAL);
            switch (orientation) {
                case ExifInterface.ORIENTATION_ROTATE_270:
                    angle = 270;
                    break;
                case ExifInterface.ORIENTATION_ROTATE_180:
                    angle = 180;
                    break;
                case ExifInterface.ORIENTATION_ROTATE_90:
                    angle = 90;
                    break;
                default:
                    break;
            }
        }
        return angle;
    }

    // Rotate the original bitmap according to the given orientation angle.
    // If the angle is 0, the original bitmap is returned unchanged.
    private static Bitmap rotateBitmap(Bitmap bitmap, int angle) {
        if (angle != 0) {
            Matrix matrix = new Matrix();
            matrix.postRotate(angle);
            return Bitmap.createBitmap(
                    bitmap, 0, 0, bitmap.getWidth(), bitmap.getHeight(), matrix, true);
        } else {
            return bitmap;
        }
    }

    // Resize a face rectangle for a better view for humans.
    // To enlarge the rectangle, faceRectEnlargeRatio should be larger than 1 (1.3 recommended).
    // The result is a square clamped to the bitmap bounds and shifted slightly
    // upward so more of the forehead is visible.
    private static FaceRectangle calculateFaceRectangle(
            Bitmap bitmap, FaceRectangle faceRectangle, double faceRectEnlargeRatio) {
        // Get the resized side length of the face rectangle, capped by the bitmap.
        double sideLength = faceRectangle.width * faceRectEnlargeRatio;
        sideLength = Math.min(sideLength, bitmap.getWidth());
        sideLength = Math.min(sideLength, bitmap.getHeight());
        // Move the left edge further left so the enlargement is centered.
        double left = faceRectangle.left
                - faceRectangle.width * (faceRectEnlargeRatio - 1.0) * 0.5;
        left = Math.max(left, 0.0);
        left = Math.min(left, bitmap.getWidth() - sideLength);
        // Move the top edge further up so the enlargement is centered.
        double top = faceRectangle.top
                - faceRectangle.height * (faceRectEnlargeRatio - 1.0) * 0.5;
        top = Math.max(top, 0.0);
        top = Math.min(top, bitmap.getHeight() - sideLength);
        // Shift the top edge up a bit more, for a better view for humans.
        double shiftTop = faceRectEnlargeRatio - 1.0;
        shiftTop = Math.max(shiftTop, 0.0);
        shiftTop = Math.min(shiftTop, 1.0);
        top -= 0.15 * shiftTop * faceRectangle.height;
        top = Math.max(top, 0.0);
        // Set the result.
        FaceRectangle result = new FaceRectangle();
        result.left = (int) left;
        result.top = (int) top;
        result.width = (int) sideLength;
        result.height = (int) sideLength;
        return result;
    }
}

Просмотреть файл

@ -0,0 +1,142 @@
//
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license.
//
// Microsoft Cognitive Services (formerly Project Oxford): https://www.microsoft.com/cognitive-services
//
// Microsoft Cognitive Services (formerly Project Oxford) GitHub:
// https://github.com/Microsoft/ProjectOxford-ClientSDK
//
// Copyright (c) Microsoft Corporation
// All rights reserved.
//
// MIT License:
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
package com.microsoft.projectoxford.face.samples.helper;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.List;
import java.util.Locale;
import java.util.concurrent.CopyOnWriteArrayList;
/**
* Defined several functions to log service calls.
*/
/**
 * Defines several functions to log service calls, one log per scenario
 * (detection, verification, grouping, find-similar-face, identification).
 *
 * The backing lists are thread-safe: log items are appended from AsyncTask
 * background threads (inside doInBackground) while the UI thread reads and
 * clears them, so a plain ArrayList would be unsafe here.
 */
public class LogHelper {
    // Detection log items.
    private static final List<String> mDetectionLog = new CopyOnWriteArrayList<>();
    // Verification log items.
    private static final List<String> mVerificationLog = new CopyOnWriteArrayList<>();
    // Grouping log items.
    private static final List<String> mGroupingLog = new CopyOnWriteArrayList<>();
    // Find Similar face log items.
    private static final List<String> mFindSimilarFaceLog = new CopyOnWriteArrayList<>();
    // Identification log items.
    private static final List<String> mIdentificationLog = new CopyOnWriteArrayList<>();

    // Get all the detection log items.
    public static List<String> getDetectionLog() {
        return mDetectionLog;
    }

    // Add a new detection log item, prefixed with the current time.
    public static void addDetectionLog(String log) {
        mDetectionLog.add(LogHelper.getLogHeader() + log);
    }

    // Clear all detection log items.
    public static void clearDetectionLog() {
        mDetectionLog.clear();
    }

    // Get all the verification log items.
    public static List<String> getVerificationLog() {
        return mVerificationLog;
    }

    // Add a new verification log item, prefixed with the current time.
    public static void addVerificationLog(String log) {
        mVerificationLog.add(LogHelper.getLogHeader() + log);
    }

    // Clear all verification log items.
    public static void clearVerificationLog() {
        mVerificationLog.clear();
    }

    // Get all the grouping log items.
    public static List<String> getGroupingLog() {
        return mGroupingLog;
    }

    // Add a new grouping log item, prefixed with the current time.
    public static void addGroupingLog(String log) {
        mGroupingLog.add(LogHelper.getLogHeader() + log);
    }

    // Clear all grouping log items.
    public static void clearGroupingLog() {
        mGroupingLog.clear();
    }

    // Get all the find similar face log items.
    public static List<String> getFindSimilarFaceLog() {
        return mFindSimilarFaceLog;
    }

    // Add a new find similar face log item, prefixed with the current time.
    public static void addFindSimilarFaceLog(String log) {
        mFindSimilarFaceLog.add(LogHelper.getLogHeader() + log);
    }

    // Clear all find similar face log items.
    public static void clearFindSimilarFaceLog() {
        mFindSimilarFaceLog.clear();
    }

    // Get all the identification log items.
    public static List<String> getIdentificationLog() {
        return mIdentificationLog;
    }

    // Add a new identification log item, prefixed with the current time.
    public static void addIdentificationLog(String log) {
        mIdentificationLog.add(LogHelper.getLogHeader() + log);
    }

    // Clear all identification log items.
    public static void clearIdentificationLog() {
        mIdentificationLog.clear();
    }

    // Build the "[HH:mm:ss] " timestamp prefix for a log item. A fresh
    // SimpleDateFormat is created per call because it is not thread-safe.
    private static String getLogHeader() {
        DateFormat dateFormat = new SimpleDateFormat("HH:mm:ss", Locale.US);
        return "[" + dateFormat.format(Calendar.getInstance().getTime()) + "] ";
    }
}

Просмотреть файл

@ -0,0 +1,53 @@
//
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license.
//
// Microsoft Cognitive Services (formerly Project Oxford): https://www.microsoft.com/cognitive-services
//
// Microsoft Cognitive Services (formerly Project Oxford) GitHub:
// https://github.com/Microsoft/ProjectOxford-ClientSDK
//
// Copyright (c) Microsoft Corporation
// All rights reserved.
//
// MIT License:
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
package com.microsoft.projectoxford.face.samples.helper;
import android.app.Application;
import com.microsoft.projectoxford.face.FaceServiceClient;
import com.microsoft.projectoxford.face.FaceServiceRestClient;
import com.microsoft.projectoxford.face.samples.R;
/**
 * Application subclass that owns the single Face API client shared by every
 * activity in the sample.
 */
public class SampleApp extends Application {
    // Process-wide Face API client, created once when the application starts.
    private static FaceServiceClient sFaceServiceClient;

    @Override
    public void onCreate() {
        super.onCreate();
        // Build the REST client with the subscription key stored in resources.
        sFaceServiceClient = new FaceServiceRestClient(getString(R.string.subscription_key));
    }

    // Returns the shared Face API client; non-null once onCreate() has run.
    public static FaceServiceClient getFaceServiceClient() {
        return sFaceServiceClient;
    }
}

Просмотреть файл

@ -0,0 +1,136 @@
//
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license.
//
// Microsoft Cognitive Services (formerly Project Oxford): https://www.microsoft.com/cognitive-services
//
// Microsoft Cognitive Services (formerly Project Oxford) GitHub:
// https://github.com/Microsoft/ProjectOxford-ClientSDK
//
// Copyright (c) Microsoft Corporation
// All rights reserved.
//
// MIT License:
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
package com.microsoft.projectoxford.face.samples.helper;
import android.content.Intent;
import android.net.Uri;
import android.os.Bundle;
import android.os.Environment;
import android.provider.MediaStore;
import android.support.annotation.NonNull;
import android.support.v7.app.ActionBarActivity;
import android.view.View;
import android.widget.TextView;
import com.microsoft.projectoxford.face.samples.R;
import java.io.File;
import java.io.IOException;
// The activity for the user to select a image and to detect faces in the image.
// The activity for the user to select an image and to detect faces in the image.
public class SelectImageActivity extends ActionBarActivity {
    // Request code used when launching the camera.
    private static final int REQUEST_TAKE_PHOTO = 0;
    // Request code used when launching the album picker.
    private static final int REQUEST_SELECT_IMAGE_IN_ALBUM = 1;

    // The URI of the photo taken with the camera.
    private Uri mUriPhotoTaken;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_select_image);
    }

    // Persist the pending camera-photo URI so it survives activity recreation.
    @Override
    protected void onSaveInstanceState(Bundle outState) {
        super.onSaveInstanceState(outState);
        outState.putParcelable("ImageUri", mUriPhotoTaken);
    }

    // Recover the pending camera-photo URI when the activity is recreated.
    @Override
    protected void onRestoreInstanceState(@NonNull Bundle savedInstanceState) {
        super.onRestoreInstanceState(savedInstanceState);
        mUriPhotoTaken = savedInstanceState.getParcelable("ImageUri");
    }

    // Forward the chosen image URI (from camera or album) back to the caller.
    @Override
    protected void onActivityResult(int requestCode, int resultCode, Intent data) {
        boolean imageRequest = requestCode == REQUEST_TAKE_PHOTO
                || requestCode == REQUEST_SELECT_IMAGE_IN_ALBUM;
        if (imageRequest && resultCode == RESULT_OK) {
            // The album picker returns the URI in the result intent; the camera
            // wrote the photo to the file we pre-allocated in mUriPhotoTaken.
            Uri imageUri = (data != null && data.getData() != null)
                    ? data.getData()
                    : mUriPhotoTaken;
            Intent result = new Intent();
            result.setData(imageUri);
            setResult(RESULT_OK, result);
            finish();
        }
    }

    // Handler for the "Take a Photo with Camera" button.
    public void takePhoto(View view) {
        Intent intent = new Intent(MediaStore.ACTION_IMAGE_CAPTURE);
        if (intent.resolveActivity(getPackageManager()) == null) {
            // No camera app available; nothing to do.
            return;
        }
        // Save the photo taken to a temporary file under the app's external pictures dir.
        File storageDir = getExternalFilesDir(Environment.DIRECTORY_PICTURES);
        try {
            File photoFile = File.createTempFile("IMG_", ".jpg", storageDir);
            mUriPhotoTaken = Uri.fromFile(photoFile);
            intent.putExtra(MediaStore.EXTRA_OUTPUT, mUriPhotoTaken);
            startActivityForResult(intent, REQUEST_TAKE_PHOTO);
        } catch (IOException e) {
            setInfo(e.getMessage());
        }
    }

    // Handler for the "Select a Photo in Album" button.
    public void selectImageInAlbum(View view) {
        Intent intent = new Intent(Intent.ACTION_GET_CONTENT);
        intent.setType("image/*");
        if (intent.resolveActivity(getPackageManager()) != null) {
            startActivityForResult(intent, REQUEST_SELECT_IMAGE_IN_ALBUM);
        }
    }

    // Show a message in the on-screen information panel.
    private void setInfo(String info) {
        ((TextView) findViewById(R.id.info)).setText(info);
    }
}

Просмотреть файл

@ -0,0 +1,205 @@
//
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license.
//
// Microsoft Cognitive Services (formerly Project Oxford): https://www.microsoft.com/cognitive-services
//
// Microsoft Cognitive Services (formerly Project Oxford) GitHub:
// https://github.com/Microsoft/ProjectOxford-ClientSDK
//
// Copyright (c) Microsoft Corporation
// All rights reserved.
//
// MIT License:
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
package com.microsoft.projectoxford.face.samples.helper;
import android.content.Context;
import android.content.SharedPreferences;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
/**
 * Defined several functions to manage local storage.
 *
 * <p>Person groups, persons and faces are tracked in {@link SharedPreferences} files:
 * <ul>
 *   <li>"PersonGroupIdSet"              — set of all known person group ids</li>
 *   <li>"PersonGroupIdNameMap"          — person group id → display name</li>
 *   <li>&lt;groupId&gt; + "PersonIdSet" — set of person ids in that group</li>
 *   <li>&lt;groupId&gt; + "PersonIdNameMap" — person id → display name</li>
 *   <li>&lt;personId&gt; + "FaceIdSet"  — set of face ids of that person</li>
 *   <li>"FaceIdUriMap"                  — face id → image URI (global)</li>
 * </ul>
 */
public class StorageHelper {
    // Returns the ids of all person groups known locally (empty set if none).
    public static Set<String> getAllPersonGroupIds(Context context) {
        SharedPreferences personGroupIdSet =
                context.getSharedPreferences("PersonGroupIdSet", Context.MODE_PRIVATE);
        return personGroupIdSet.getStringSet("PersonGroupIdSet", new HashSet<String>());
    }

    // Returns the display name of a person group, or "" if unknown.
    public static String getPersonGroupName(String personGroupId, Context context) {
        SharedPreferences personGroupIdNameMap =
                context.getSharedPreferences("PersonGroupIdNameMap", Context.MODE_PRIVATE);
        return personGroupIdNameMap.getString(personGroupId, "");
    }

    // Records a person group's name and registers its id in the group-id set.
    public static void setPersonGroupName(String personGroupIdToAdd, String personGroupName, Context context) {
        SharedPreferences personGroupIdNameMap =
                context.getSharedPreferences("PersonGroupIdNameMap", Context.MODE_PRIVATE);
        SharedPreferences.Editor personGroupIdNameMapEditor = personGroupIdNameMap.edit();
        personGroupIdNameMapEditor.putString(personGroupIdToAdd, personGroupName);
        personGroupIdNameMapEditor.commit();

        // Copy before mutating: the set returned by getStringSet must not be modified.
        Set<String> newPersonGroupIds = new HashSet<>(getAllPersonGroupIds(context));
        newPersonGroupIds.add(personGroupIdToAdd);
        SharedPreferences personGroupIdSet =
                context.getSharedPreferences("PersonGroupIdSet", Context.MODE_PRIVATE);
        SharedPreferences.Editor personGroupIdSetEditor = personGroupIdSet.edit();
        personGroupIdSetEditor.putStringSet("PersonGroupIdSet", newPersonGroupIds);
        personGroupIdSetEditor.commit();
    }

    // Removes the given person groups from both the name map and the group-id set.
    public static void deletePersonGroups(List<String> personGroupIdsToDelete, Context context) {
        SharedPreferences personGroupIdNameMap =
                context.getSharedPreferences("PersonGroupIdNameMap", Context.MODE_PRIVATE);
        SharedPreferences.Editor personGroupIdNameMapEditor = personGroupIdNameMap.edit();
        for (String personGroupId : personGroupIdsToDelete) {
            personGroupIdNameMapEditor.remove(personGroupId);
        }
        personGroupIdNameMapEditor.commit();

        Set<String> newPersonGroupIds = new HashSet<>(getAllPersonGroupIds(context));
        newPersonGroupIds.removeAll(personGroupIdsToDelete);
        SharedPreferences personGroupIdSet =
                context.getSharedPreferences("PersonGroupIdSet", Context.MODE_PRIVATE);
        SharedPreferences.Editor personGroupIdSetEditor = personGroupIdSet.edit();
        personGroupIdSetEditor.putStringSet("PersonGroupIdSet", newPersonGroupIds);
        personGroupIdSetEditor.commit();
    }

    // Returns the ids of all persons in a group (empty set if none).
    public static Set<String> getAllPersonIds(String personGroupId, Context context) {
        SharedPreferences personIdSet =
                context.getSharedPreferences(personGroupId + "PersonIdSet", Context.MODE_PRIVATE);
        return personIdSet.getStringSet("PersonIdSet", new HashSet<String>());
    }

    // Returns the display name of a person within a group, or "" if unknown.
    public static String getPersonName(String personId, String personGroupId, Context context) {
        SharedPreferences personIdNameMap =
                context.getSharedPreferences(personGroupId + "PersonIdNameMap", Context.MODE_PRIVATE);
        return personIdNameMap.getString(personId, "");
    }

    // Records a person's name and registers the id in the group's person-id set.
    public static void setPersonName(String personIdToAdd, String personName, String personGroupId, Context context) {
        SharedPreferences personIdNameMap =
                context.getSharedPreferences(personGroupId + "PersonIdNameMap", Context.MODE_PRIVATE);
        SharedPreferences.Editor personIdNameMapEditor = personIdNameMap.edit();
        personIdNameMapEditor.putString(personIdToAdd, personName);
        personIdNameMapEditor.commit();

        Set<String> newPersonIds = new HashSet<>(getAllPersonIds(personGroupId, context));
        newPersonIds.add(personIdToAdd);
        SharedPreferences personIdSet =
                context.getSharedPreferences(personGroupId + "PersonIdSet", Context.MODE_PRIVATE);
        SharedPreferences.Editor personIdSetEditor = personIdSet.edit();
        personIdSetEditor.putStringSet("PersonIdSet", newPersonIds);
        personIdSetEditor.commit();
    }

    // Removes the given persons from both the group's name map and person-id set.
    public static void deletePersons(List<String> personIdsToDelete, String personGroupId, Context context) {
        SharedPreferences personIdNameMap =
                context.getSharedPreferences(personGroupId + "PersonIdNameMap", Context.MODE_PRIVATE);
        SharedPreferences.Editor personIdNameMapEditor = personIdNameMap.edit();
        for (String personId : personIdsToDelete) {
            personIdNameMapEditor.remove(personId);
        }
        personIdNameMapEditor.commit();

        Set<String> newPersonIds = new HashSet<>(getAllPersonIds(personGroupId, context));
        newPersonIds.removeAll(personIdsToDelete);
        SharedPreferences personIdSet =
                context.getSharedPreferences(personGroupId + "PersonIdSet", Context.MODE_PRIVATE);
        SharedPreferences.Editor personIdSetEditor = personIdSet.edit();
        personIdSetEditor.putStringSet("PersonIdSet", newPersonIds);
        personIdSetEditor.commit();
    }

    // Returns the ids of all faces of a person (empty set if none).
    public static Set<String> getAllFaceIds(String personId, Context context) {
        SharedPreferences faceIdSet =
                context.getSharedPreferences(personId + "FaceIdSet", Context.MODE_PRIVATE);
        return faceIdSet.getStringSet("FaceIdSet", new HashSet<String>());
    }

    // Returns the image URI recorded for a face, or "" if unknown.
    public static String getFaceUri(String faceId, Context context) {
        SharedPreferences faceIdUriMap =
                context.getSharedPreferences("FaceIdUriMap", Context.MODE_PRIVATE);
        return faceIdUriMap.getString(faceId, "");
    }

    // Records a face's image URI and registers the face id in the person's face-id set.
    public static void setFaceUri(String faceIdToAdd, String faceUri, String personId, Context context) {
        SharedPreferences faceIdUriMap =
                context.getSharedPreferences("FaceIdUriMap", Context.MODE_PRIVATE);
        SharedPreferences.Editor faceIdUriMapEditor = faceIdUriMap.edit();
        faceIdUriMapEditor.putString(faceIdToAdd, faceUri);
        faceIdUriMapEditor.commit();

        Set<String> newFaceIds = new HashSet<>(getAllFaceIds(personId, context));
        newFaceIds.add(faceIdToAdd);
        SharedPreferences faceIdSet =
                context.getSharedPreferences(personId + "FaceIdSet", Context.MODE_PRIVATE);
        SharedPreferences.Editor faceIdSetEditor = faceIdSet.edit();
        faceIdSetEditor.putStringSet("FaceIdSet", newFaceIds);
        faceIdSetEditor.commit();
    }

    // Removes the given faces from the person's face-id set and from the URI map.
    public static void deleteFaces(List<String> faceIdsToDelete, String personId, Context context) {
        // Bug fix: also drop the face -> URI mappings. The original code removed the
        // face ids from "FaceIdSet" but left stale entries in "FaceIdUriMap",
        // unlike deletePersons/deletePersonGroups which clean their name maps.
        SharedPreferences faceIdUriMap =
                context.getSharedPreferences("FaceIdUriMap", Context.MODE_PRIVATE);
        SharedPreferences.Editor faceIdUriMapEditor = faceIdUriMap.edit();
        for (String faceId : faceIdsToDelete) {
            faceIdUriMapEditor.remove(faceId);
        }
        faceIdUriMapEditor.commit();

        Set<String> newFaceIds = new HashSet<>(getAllFaceIds(personId, context));
        newFaceIds.removeAll(faceIdsToDelete);
        SharedPreferences faceIdSet =
                context.getSharedPreferences(personId + "FaceIdSet", Context.MODE_PRIVATE);
        SharedPreferences.Editor faceIdSetEditor = faceIdSet.edit();
        faceIdSetEditor.putStringSet("FaceIdSet", newFaceIds);
        faceIdSetEditor.commit();
    }
}

Просмотреть файл

@ -0,0 +1,105 @@
//
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license.
//
// Microsoft Cognitive Services (formerly Project Oxford): https://www.microsoft.com/cognitive-services
//
// Microsoft Cognitive Services (formerly Project Oxford) GitHub:
// https://github.com/Microsoft/ProjectOxford-ClientSDK
//
// Copyright (c) Microsoft Corporation
// All rights reserved.
//
// MIT License:
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
package com.microsoft.projectoxford.face.samples.log;
import android.content.Context;
import android.os.Bundle;
import android.support.v7.app.ActionBarActivity;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.BaseAdapter;
import android.widget.ListView;
import android.widget.TextView;
import com.microsoft.projectoxford.face.samples.R;
import com.microsoft.projectoxford.face.samples.helper.LogHelper;
import java.util.List;
// Displays the detection log in a read-only list.
public class DetectionLogActivity extends ActionBarActivity {
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_detection_log);
        ListView logListView = (ListView) findViewById(R.id.log);
        logListView.setAdapter(new LogAdapter());
    }

    // Adapter backing the log ListView with a snapshot of the detection log.
    private class LogAdapter extends BaseAdapter {
        // Snapshot of the detection log entries, oldest first.
        List<String> log = LogHelper.getDetectionLog();

        // Log rows are informational only and not clickable.
        @Override
        public boolean isEnabled(int position) {
            return false;
        }

        @Override
        public int getCount() {
            return log.size();
        }

        @Override
        public Object getItem(int position) {
            return log.get(position);
        }

        @Override
        public long getItemId(int position) {
            return position;
        }

        @Override
        public View getView(final int position, View convertView, ViewGroup parent) {
            View row = convertView;
            if (row == null) {
                LayoutInflater inflater =
                        (LayoutInflater) getSystemService(Context.LAYOUT_INFLATER_SERVICE);
                row = inflater.inflate(R.layout.item_log, parent, false);
            }
            row.setId(position);
            ((TextView) row.findViewById(R.id.log)).setText(log.get(position));
            return row;
        }
    }
}

Просмотреть файл

@ -0,0 +1,105 @@
//
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license.
//
// Microsoft Cognitive Services (formerly Project Oxford): https://www.microsoft.com/cognitive-services
//
// Microsoft Cognitive Services (formerly Project Oxford) GitHub:
// https://github.com/Microsoft/ProjectOxford-ClientSDK
//
// Copyright (c) Microsoft Corporation
// All rights reserved.
//
// MIT License:
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
package com.microsoft.projectoxford.face.samples.log;
import android.content.Context;
import android.os.Bundle;
import android.support.v7.app.ActionBarActivity;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.BaseAdapter;
import android.widget.ListView;
import android.widget.TextView;
import com.microsoft.projectoxford.face.samples.R;
import com.microsoft.projectoxford.face.samples.helper.LogHelper;
import java.util.List;
// Displays the find-similar-face log in a read-only list.
public class FindSimilarFaceLogActivity extends ActionBarActivity {
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_find_similar_face_log);
        ListView logListView = (ListView) findViewById(R.id.log);
        logListView.setAdapter(new LogAdapter());
    }

    // Adapter backing the log ListView with a snapshot of the find-similar-face log.
    private class LogAdapter extends BaseAdapter {
        // Snapshot of the find-similar-face log entries, oldest first.
        List<String> log = LogHelper.getFindSimilarFaceLog();

        // Log rows are informational only and not clickable.
        @Override
        public boolean isEnabled(int position) {
            return false;
        }

        @Override
        public int getCount() {
            return log.size();
        }

        @Override
        public Object getItem(int position) {
            return log.get(position);
        }

        @Override
        public long getItemId(int position) {
            return position;
        }

        @Override
        public View getView(final int position, View convertView, ViewGroup parent) {
            View row = convertView;
            if (row == null) {
                LayoutInflater inflater =
                        (LayoutInflater) getSystemService(Context.LAYOUT_INFLATER_SERVICE);
                row = inflater.inflate(R.layout.item_log, parent, false);
            }
            row.setId(position);
            ((TextView) row.findViewById(R.id.log)).setText(log.get(position));
            return row;
        }
    }
}

Просмотреть файл

@ -0,0 +1,104 @@
//
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license.
//
// Microsoft Cognitive Services (formerly Project Oxford): https://www.microsoft.com/cognitive-services
//
// Microsoft Cognitive Services (formerly Project Oxford) GitHub:
// https://github.com/Microsoft/ProjectOxford-ClientSDK
//
// Copyright (c) Microsoft Corporation
// All rights reserved.
//
// MIT License:
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
package com.microsoft.projectoxford.face.samples.log;
import android.content.Context;
import android.os.Bundle;
import android.support.v7.app.ActionBarActivity;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.BaseAdapter;
import android.widget.ListView;
import android.widget.TextView;
import com.microsoft.projectoxford.face.samples.R;
import com.microsoft.projectoxford.face.samples.helper.LogHelper;
import java.util.List;
// Displays the grouping log in a read-only list.
public class GroupingLogActivity extends ActionBarActivity {
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_grouping_log);
        LogAdapter logAdapter = new LogAdapter();
        ListView listView = (ListView) findViewById(R.id.log);
        listView.setAdapter(logAdapter);
    }

    // The adapter of the ListView which contains the grouping log.
    private class LogAdapter extends BaseAdapter {
        // Snapshot of the grouping log entries, oldest first.
        List<String> log;

        LogAdapter() {
            log = LogHelper.getGroupingLog();
        }

        // Log rows are informational only and not clickable.
        @Override
        public boolean isEnabled(int position) {
            return false;
        }

        @Override
        public int getCount() {
            return log.size();
        }

        @Override
        public Object getItem(int position) {
            return log.get(position);
        }

        // Fix: @Override was missing here, unlike the sibling log adapters
        // (DetectionLogActivity etc.); it correctly overrides Adapter.getItemId.
        @Override
        public long getItemId(int position) {
            return position;
        }

        @Override
        public View getView(final int position, View convertView, ViewGroup parent) {
            if (convertView == null) {
                LayoutInflater layoutInflater =
                        (LayoutInflater) getSystemService(Context.LAYOUT_INFLATER_SERVICE);
                convertView = layoutInflater.inflate(R.layout.item_log, parent, false);
            }
            convertView.setId(position);
            ((TextView) convertView.findViewById(R.id.log)).setText(log.get(position));
            return convertView;
        }
    }
}

Просмотреть файл

@ -0,0 +1,105 @@
//
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license.
//
// Microsoft Cognitive Services (formerly Project Oxford): https://www.microsoft.com/cognitive-services
//
// Microsoft Cognitive Services (formerly Project Oxford) GitHub:
// https://github.com/Microsoft/ProjectOxford-ClientSDK
//
// Copyright (c) Microsoft Corporation
// All rights reserved.
//
// MIT License:
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
package com.microsoft.projectoxford.face.samples.log;
import android.content.Context;
import android.os.Bundle;
import android.support.v7.app.ActionBarActivity;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.BaseAdapter;
import android.widget.ListView;
import android.widget.TextView;
import com.microsoft.projectoxford.face.samples.R;
import com.microsoft.projectoxford.face.samples.helper.LogHelper;
import java.util.List;
// Displays the identification log in a read-only list.
public class IdentificationLogActivity extends ActionBarActivity {
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_identification_log);
        ListView logListView = (ListView) findViewById(R.id.log);
        logListView.setAdapter(new LogAdapter());
    }

    // Adapter backing the log ListView with a snapshot of the identification log.
    private class LogAdapter extends BaseAdapter {
        // Snapshot of the identification log entries, oldest first.
        List<String> log = LogHelper.getIdentificationLog();

        // Log rows are informational only and not clickable.
        @Override
        public boolean isEnabled(int position) {
            return false;
        }

        @Override
        public int getCount() {
            return log.size();
        }

        @Override
        public Object getItem(int position) {
            return log.get(position);
        }

        @Override
        public long getItemId(int position) {
            return position;
        }

        @Override
        public View getView(final int position, View convertView, ViewGroup parent) {
            View row = convertView;
            if (row == null) {
                LayoutInflater inflater =
                        (LayoutInflater) getSystemService(Context.LAYOUT_INFLATER_SERVICE);
                row = inflater.inflate(R.layout.item_log, parent, false);
            }
            row.setId(position);
            ((TextView) row.findViewById(R.id.log)).setText(log.get(position));
            return row;
        }
    }
}

Некоторые файлы не были показаны из-за слишком большого количества измененных файлов Показать больше