merge from master
После Ширина: | Высота: | Размер: 4.0 KiB |
После Ширина: | Высота: | Размер: 3.2 KiB |
После Ширина: | Высота: | Размер: 8.3 KiB |
После Ширина: | Высота: | Размер: 3.4 KiB |
После Ширина: | Высота: | Размер: 2.5 KiB |
После Ширина: | Высота: | Размер: 71 KiB |
После Ширина: | Высота: | Размер: 77 KiB |
После Ширина: | Высота: | Размер: 19 KiB |
После Ширина: | Высота: | Размер: 55 KiB |
После Ширина: | Высота: | Размер: 36 KiB |
После Ширина: | Высота: | Размер: 66 KiB |
После Ширина: | Высота: | Размер: 14 KiB |
После Ширина: | Высота: | Размер: 10 KiB |
После Ширина: | Высота: | Размер: 45 KiB |
После Ширина: | Высота: | Размер: 74 KiB |
После Ширина: | Высота: | Размер: 35 KiB |
После Ширина: | Высота: | Размер: 43 KiB |
После Ширина: | Высота: | Размер: 61 KiB |
После Ширина: | Высота: | Размер: 48 KiB |
После Ширина: | Высота: | Размер: 832 KiB |
После Ширина: | Высота: | Размер: 638 KiB |
После Ширина: | Высота: | Размер: 890 KiB |
После Ширина: | Высота: | Размер: 20 KiB |
После Ширина: | Высота: | Размер: 51 KiB |
После Ширина: | Высота: | Размер: 48 KiB |
После Ширина: | Высота: | Размер: 42 KiB |
После Ширина: | Высота: | Размер: 44 KiB |
После Ширина: | Высота: | Размер: 344 KiB |
После Ширина: | Высота: | Размер: 18 KiB |
После Ширина: | Высота: | Размер: 13 KiB |
После Ширина: | Высота: | Размер: 54 KiB |
После Ширина: | Высота: | Размер: 41 KiB |
После Ширина: | Высота: | Размер: 50 KiB |
После Ширина: | Высота: | Размер: 52 KiB |
После Ширина: | Высота: | Размер: 17 KiB |
После Ширина: | Высота: | Размер: 48 KiB |
После Ширина: | Высота: | Размер: 31 KiB |
После Ширина: | Высота: | Размер: 34 KiB |
После Ширина: | Высота: | Размер: 14 KiB |
После Ширина: | Высота: | Размер: 40 KiB |
После Ширина: | Высота: | Размер: 32 KiB |
После Ширина: | Высота: | Размер: 34 KiB |
После Ширина: | Высота: | Размер: 9.0 KiB |
После Ширина: | Высота: | Размер: 29 KiB |
После Ширина: | Высота: | Размер: 34 KiB |
После Ширина: | Высота: | Размер: 32 KiB |
После Ширина: | Высота: | Размер: 25 KiB |
После Ширина: | Высота: | Размер: 54 KiB |
После Ширина: | Высота: | Размер: 27 KiB |
После Ширина: | Высота: | Размер: 26 KiB |
После Ширина: | Высота: | Размер: 178 KiB |
После Ширина: | Высота: | Размер: 36 KiB |
После Ширина: | Высота: | Размер: 18 KiB |
После Ширина: | Высота: | Размер: 373 KiB |
После Ширина: | Высота: | Размер: 27 KiB |
После Ширина: | Высота: | Размер: 5.5 KiB |
После Ширина: | Высота: | Размер: 5.7 KiB |
После Ширина: | Высота: | Размер: 4.1 KiB |
После Ширина: | Высота: | Размер: 2.4 KiB |
После Ширина: | Высота: | Размер: 7.5 KiB |
После Ширина: | Высота: | Размер: 7.6 MiB |
После Ширина: | Высота: | Размер: 6.3 MiB |
|
@ -0,0 +1,225 @@
|
|||
# Developing Intelligent Apps with Azure
|
||||
|
||||
## Key Takeaway
|
||||
|
||||
- With the flexible Azure platform and a wide portfolio of AI productivity tools, developers can build the next generation of smart applications where their data lives, in the intelligent cloud, on-premises, and on the intelligent edge.
|
||||
|
||||
- Teams can achieve more with the comprehensive set of flexible and trusted AI services - from pre-built APIs, such as Cognitive Services and Conversational AI with Bot tools, to building custom models with Azure Machine Learning for any scenario.
|
||||
|
||||
# Before you begin
|
||||
|
||||
1. **Visual Studio 2017 or 2019 Preview 1** with Xamarin workloads installed.
|
||||
|
||||
1. **Android Emulator** with Hyper-V Windows Feature installed as per this [document](https://docs.microsoft.com/en-us/xamarin/android/get-started/installation/android-emulator/hardware-acceleration?tabs=vswin&pivots=windows).
|
||||
|
||||
1. **Android SDK 28** installed (Android Pie 9).
|
||||
|
||||
1. Active Azure subscription.
|
||||
|
||||
1. Working on local machine is preferred in order to use camera through emulator. However, if you are working on a VM, the emulator will not be able to access your camera. Follow [these steps](https://stackoverflow.com/questions/50698027/how-can-i-use-my-webcam-inside-an-azure-windows-server-virtual-machine) to enable local camera access for your VM.
|
||||
|
||||
1. Install [Azure CLI](https://azurecliprod.blob.core.windows.net/msi/azure-cli-2.0.45.msi) version 2.0.45.
|
||||
|
||||
## Walkthrough: Configure AIVisualProvision App
|
||||
|
||||
AI Visual Provision mobile app uses Azure Cognitive Services (Computer Vision and Custom Vision) to deploy whiteboard drawings to an Azure architecture. It uses Cognitive Services to detect the Azure services among Azure Service logos and handwriting using the phone camera. The captured image is analyzed and the identified services are deployed to Azure taking away all the pain and complexity from the process. You can see how to create Azure Cognitive Services and configure them in this mobile app.
|
||||
|
||||
1. Open your Visual Studio 2019 Preview in Administrator mode and click on **Clone or checkout code**.
|
||||
|
||||
![](Images/LandingPage_VS2019_1.png)
|
||||
|
||||
1. Copy and paste the URL https://github.com/Microsoft/AIVisualProvision into the **Code repository location** textbox and click the **Clone** button.
|
||||
|
||||
![](Images/CloningRepo_2.png)
|
||||
|
||||
1. While the cloning is still in progress, the code is already available for use, and Visual Studio brings up the folder view. Before loading the solution in Visual Studio 2019 Preview, navigate to ***AIVisualProvision/Source/VisualProvision.iOS/*** and **delete** the folder ***Assets.xcassets*** as per the workaround mentioned [here](https://developercommunity.visualstudio.com/content/problem/398522/vs-2019-preview-xamarin-load-fails.html) (which is supposed to be fixed in a coming update). Double click the solution ***VisualProvision.sln*** to load it in Solution Explorer.
|
||||
|
||||
![](Images/FolderView_3.png)
|
||||
|
||||
![](Images/DeleteAssets.xcassets_4.png)
|
||||
|
||||
1. Now you can see that 4 projects are successfully loaded under the solution.
|
||||
|
||||
![](Images/LoadSolution_5.png)
|
||||
|
||||
1. Navigate to the **AppSettings.cs** file under the **VisualProvision** project. You need to provide the **Client ID** and **tenantId** if you are running the app in the Emulator in Debug mode, so that you don't have to enter them every time you debug. Also, the **CustomVisionPredictionUrl**, **CustomVisionPredictionKey**, **ComputerVisionEndpoint** and **ComputerVisionKey** have to be entered.
|
||||
|
||||
![](Images/AppSettingsFile_6.png)
|
||||
|
||||
1. In order to get the **Client ID** and **tenantId**, type **az login** in the command prompt and press Enter. Authorize your login in the browser.
|
||||
|
||||
    Type **az ad sp create-for-rbac -n "MySampleApp" -p P2SSWORD** in the command prompt to get the Service Principal Client ID and the Service Principal Client Secret.
|
||||
|
||||
Copy and note down the **appId** which is the **Client ID**
|
||||
|
||||
Copy and note down the **tenantId**
|
||||
|
||||
**P2SSWORD** is the Client Secret or **Password**, which will be required to run the app.
|
||||
|
||||
![](Images/SPNDetails_7.png)
|
||||
|
||||
1. Switch to Visual Studio 2019 and paste the **Client ID** and **TenantId** in the **AppSettings.cs** file.
|
||||
|
||||
![](Images/PasteSPNDetails_8.png)
|
||||
|
||||
1. You need a Computer Vision service in order to use the handwriting recognition features of the app. To create your own Computer Vision instance navigate to this [page](https://azure.microsoft.com/en-us/try/cognitive-services). Click **Get API Key** button under Computer Vision.
|
||||
|
||||
![](Images/ComputerVision1_9.png)
|
||||
|
||||
You can refer to this [link](https://docs.microsoft.com/en-us/azure/cognitive-services/computer-vision/vision-api-how-to-topics/howtosubscribe) for more details on Computer Vision service.
|
||||
|
||||
1. Click **Sign In** and login to your Azure account.
|
||||
|
||||
![](Images/ComputerVision2_10.png)
|
||||
|
||||
1. In the Azure portal, you will be prompted to create a new **Computer Vision** service. Provide **Name**, **Subscription**, **Location**, **Pricing tier** and **Resource group**. Click **Create**.
|
||||
|
||||
![](Images/CreateComputerVision_11.png)
|
||||
|
||||
1. Once you have provisioned the service, note down the service endpoint **URL** and endpoint **Key** as per the Images below:
|
||||
|
||||
![](Images/ComputerVision3_12.png)
|
||||
|
||||
![](Images/ComputerVision4_13.png)
|
||||
|
||||
1. Switch to Visual Studio 2019 and paste the copied **URL** under **ComputerVisionEndpoint** and **Key** under **ComputerVisionKey** in the **AppSettings.cs** file.
|
||||
|
||||
![](Images/PasteComputerVision_14.png)
|
||||
|
||||
1. If you wish to use only the handwriting recognition service in your app, skip to the next section to Build and Run the app. But, if you wish to add in-app logo recognition to your app, continue with next step.
|
||||
|
||||
1. The in-app logo recognition is accomplished by using Azure Custom Vision. In order to use the service in the app, you need to create a new Custom Vision project and train it with the Images provided in the repo under the **documents/training_dataset** folder. Go to [Azure Custom Vision](https://azure.microsoft.com/en-us/services/cognitive-services/custom-vision-service/) page and click **Get started**, and **Sign In** using your Azure credentials.
|
||||
|
||||
![](Images/CustomVision1_15.png)
|
||||
|
||||
![](Images/CustomVision2_16.png)
|
||||
|
||||
1. Agree to terms by clicking **Yes** and **I agree** respectively.
|
||||
|
||||
![](Images/CustomVision3_17.png)
|
||||
|
||||
![](Images/CustomVision4_18.png)
|
||||
|
||||
1. You will be taken to the landing page of your Azure Custom Vision account. Click **New Project** and provide a **Name**. Leave the rest of the options as they are and click **Create project**.
|
||||
|
||||
![](Images/CustomVision5_19.png)
|
||||
|
||||
1. Click on **Add Images**. Upload an image of **Azure App Service** logo and tag it as **APP_SERVICE**. If you don't have the magnets or Images handy, refer to included 2 PDFs ([sheet1](https://github.com/Microsoft/AIVisualProvision/blob/master/Documents/AzureMagnets1.pdf), [sheet2](https://github.com/Microsoft/AIVisualProvision/blob/master/Documents/AzureMagnets2.pdf)) with all the magnet logos. Make sure you cut only the App Service image from the pdf. Select **Done** once the Images have been uploaded.
|
||||
|
||||
![](Images/CustomVision6_20.png)
|
||||
|
||||
![](Images/CustomVision7_21.png)
|
||||
|
||||
![](Images/CustomVision8_22.png)
|
||||
|
||||
1. In order to **Train**, you need to have at least 2 tags and 5 images per tag. Repeat the previous step to add at least 5 images each for the **APP_SERVICE** and **SQL_DATABASE** tags. To train the classifier, select the **Train** button.
|
||||
|
||||
![](Images/CustomVision9_23.png)
|
||||
|
||||
You should use the following set of tags, as they are the expected tags in the application. Tags are located at **VisualProvision\Services\Recognition\RecognitionService.cs** file.
|
||||
|
||||
![](Images/CustomVision10_24.png)
|
||||
|
||||
1. The classifier uses all of the current Images to create a model that identifies the visual qualities of each tag.
|
||||
|
||||
![](Images/CustomVision11_25.png)
|
||||
|
||||
1. Optionally, to verify the accuracy click on **Quick Test** and provide any similar **Image URL** of either App Service or SQL Database.
|
||||
|
||||
![](Images/CustomVision12_26.png)
|
||||
|
||||
1. Under **Performance** tab, click on **Prediction URL** and note down the service endpoint **URL** and endpoint **Key** as per the Images below:
|
||||
|
||||
![](Images/CustomVision13_27.png)
|
||||
|
||||
1. Switch to Visual Studio 2019 and paste the copied **URL** under **CustomVisionPredictionUrl** appended by **/image**, and **Key** under **CustomVisionPredictionKey** in the **AppSettings.cs** file. Click **Save**.
|
||||
|
||||
![](Images/CustomVision14_28.png)
|
||||
|
||||
|
||||
## Walkthrough: Build & Run AIVisualProvision App
|
||||
|
||||
1. Right click on the solution and select **Rebuild Solution**.
|
||||
|
||||
![](Images/BuildSolution.png)
|
||||
|
||||
1. On a whiteboard, design a simple web app architecture consisting of below resources:
|
||||
- App service to host the app
|
||||
- SQL database for the data
|
||||
- Key Vault to store certificates and sensitive data
|
||||
|
||||
1. Go to **Tools > Android > Android Device Manager**. Right click on your Android emulator and select **Edit**.
|
||||
|
||||
![](Images/Run_AllowCamera1.png)
|
||||
|
||||
1. Under Property **hw.camera.back** and **hw.camera.front**, select Value **webcam0** so that computer/laptop camera can be accessed by the emulator. Click **Save** and close the window.
|
||||
|
||||
![](Images/Run_AllowCamera2.png)
|
||||
|
||||
1. In the **Android Device Manager**, click **Start**. Android Emulator will show up.
|
||||
|
||||
![](Images/Run_ClickStartEmulator.png)
|
||||
|
||||
![](Images/Run_StartEmulator.png)
|
||||
|
||||
1. Click **Run** to deploy your app to **Android Emulator** (launches in Debug mode). Wait for a while until AI Visual Provision app shows up on the emulator.
|
||||
|
||||
![](Images/Run_Emulator.png)
|
||||
|
||||
1. Optionally, if you are having trouble with emulator you can navigate to ***AIVisualProvision\Source\VisualProvision.Android\bin\Debug*** folder and install **com.microsoft.aiprovision-Signed.apk** on your Android mobile device and continue with below steps.
|
||||
|
||||
![](Images/CopySignedAPK.png)
|
||||
|
||||
1. Click **Allow** in order to access camera and photos.
|
||||
|
||||
![](Images/Run_AllowCameraAccess1.png)
|
||||
|
||||
![](Images/Run_AllowCameraAccess2.png)
|
||||
|
||||
Note: If you get the error: "Guest isn't online after 7 seconds, retrying..", open the **Xamarin Android SDK Manager** in Visual Studio by going to Tools > Android > SDK Manager. Update the Android SDK Tools and Android Emulator to the latest Version.
|
||||
|
||||
1. Enter the password as **P2SSWORD** which was noted down earlier. Client ID and Tenant ID will be auto-populated. Click **Login**.
|
||||
|
||||
![](Images/Run_EnterPassword.png)
|
||||
|
||||
![](Images/Run_Login.png)
|
||||
|
||||
1. Select your **Azure Subscription** and click **Continue**.
|
||||
|
||||
![](Images/Run_SelectSubscription.png)
|
||||
|
||||
![](Images/Run_SelectSubscription2.png)
|
||||
|
||||
1. Take a picture of the whiteboard that has your architecture diagram, where **App Service** and **SQL Database** are represented by Azure Service logos and **Key Vault** is written in handwriting. For example, refer to the architecture below.
|
||||
|
||||
![](Images/Whiteboard.png)
|
||||
|
||||
1. You can see that Azure resources have been identified by the app. Click **Next**.
|
||||
|
||||
![](Images/Run_IdentifyResources.png)
|
||||
|
||||
1. Select **Region** and **Resource group**. Click **Deploy**.
|
||||
|
||||
![](Images/Run_Deploy.png)
|
||||
|
||||
1. You can see the progress of your deployment right from your emulator or mobile device.
|
||||
|
||||
![](Images/Run_DeployInProgress.png)
|
||||
|
||||
![](Images/Run_DeployInProgress2.png)
|
||||
|
||||
![](Images/Run_SuccessfulDeployment.png)
|
||||
|
||||
1. Switch to your **Azure portal** and navigate to the newly created resource group to see the Azure resources created through your emulator or mobile device.
|
||||
|
||||
![](Images/Run_PostDeploymentRG.png)
|
||||
|
||||
1. For some reason if you are unable to complete this lab, you can download the mobile apps from the App Center through the following links to try it out:
|
||||
|
||||
- [AI Visual Provision iOS App](https://install.appcenter.ms/orgs/appcenterdemos/apps/aivisualprovisionios/distribution_groups/public)
|
||||
|
||||
- [AI Visual Provision Android App](https://install.appcenter.ms/orgs/appcenterdemos/apps/aivisualprovisionandroid/distribution_groups/public)
|
||||
|
||||
## Summary
|
||||
|
||||
Today you have more power at your fingertips than entire generations that came before you. Powerful mobile applications can be built using the power of Azure, AI and .NET.
|
49
README.md
|
@ -4,6 +4,13 @@
|
|||
|
||||
As part of Connect(); 2018 we released the AI Vision Provision. This sample app leverages the power of Azure Cognitive Services (Computer Vision and Custom Vision) to analyze Azure Service logos and handwriting in order to identify potential services and then deploy them to Azure all from the comfort of your phone.
|
||||
|
||||
[![Scott Hanselman's Keynote](http://img.youtube.com/vi/5_iE7azx7Vo/0.jpg)](https://youtu.be/5_iE7azx7Vo?t=2154)
|
||||
|
||||
You can also download the mobile apps following these links:
|
||||
|
||||
* [![Build status](https://build.appcenter.ms/v0.1/apps/8ee242da-48ab-47f7-bb0f-b71e150189d4/branches/master/badge)](https://appcenter.ms) [AI Visual Provision iOS App](https://aka.ms/aivisualprovisionios)
|
||||
* [![Build status](https://build.appcenter.ms/v0.1/apps/9ef36c04-4657-41f5-bd29-9988da4bf84b/branches/master/badge)](https://appcenter.ms) [AI Visual Provision Android App](https://aka.ms/aivisualprovisionandroid)
|
||||
|
||||
The following services are currently supported:
|
||||
* Azure Functions
|
||||
* Azure Key Vault (handwriting only)
|
||||
|
@ -11,22 +18,9 @@ The following services are currently supported:
|
|||
* Azure Storage Account
|
||||
* Azure Web Apps
|
||||
|
||||
## Xamarin.Forms App (AI Visual Provision)
|
||||
## Demo Script and Blog Posts
|
||||
|
||||
Xamarin.Forms enables you to build native UIs for iOS, Android, macOS, Linux, and Windows from a single, shared codebase. You can dive into app development with Xamarin.Forms by following our free self-guided learning from Xamarin University. This project exercises the following patterns and features:
|
||||
|
||||
* Xamarin.Forms
|
||||
* XAML UI
|
||||
* Converters
|
||||
* Custom Controls
|
||||
* Custom Renderers
|
||||
* Data Binding
|
||||
* Effects
|
||||
* IoC
|
||||
* MVVM
|
||||
* Plugins for Xamarin
|
||||
* Styles
|
||||
* .NET Standard
|
||||
Check out the demo script **[Developing Intelligent Apps with Azure](Documents/DemoScript)** and a blog post **[Tutorial: Recognize Azure service logos in camera pictures](https://docs.microsoft.com/en-us/azure/cognitive-services/Custom-Vision-Service/logo-detector-mobile)** about this sample mobile application.
|
||||
|
||||
## Screens
|
||||
|
||||
|
@ -93,9 +87,11 @@ Finally, open the `MagnetsMobileClient\VisualProvision\AppSettings.cs` file and
|
|||
|
||||
### Azure Custom Vision - training the model
|
||||
|
||||
The in-app logo recognition is accomplished using [Azure Custom Vision](https://azure.microsoft.com/en-us/services/cognitive-services/custom-vision-service/). In order to use the service in the app, you need to create a new Custom Vision project and train it with the images provided in the repo under the **documents/training_dataset** folder.
|
||||
If you have our Azure magnet sheets then this is the best and fastest way to train your Custom Vision project. But this is a one-off requirement and you won't need
|
||||
to do it again unless you want to add a new logo. If you don't have the magnets then you can use any other image that contains the actual Azure service logos to train your model.
|
||||
The in-app logo recognition is accomplished by using [Azure Custom Vision](https://azure.microsoft.com/en-us/services/cognitive-services/custom-vision-service/). In order to use the service in the app, you need to create a new Custom Vision project and train it with the images provided in the repo under the **documents/training_dataset** folder. If you've never used the Custom Vision service before, then you can follow this tutorial [here](https://docs.microsoft.com/en-us/azure/cognitive-services/custom-vision-service/getting-started-build-a-classifier) and there's a great [Channel9 video](https://channel9.msdn.com/Shows/AI-Show/Azure-Custom-Vision-How-to-Train-and-Identify-Unique-Designs-or-Image-Content) that shows you how to do it end-to-end.
|
||||
|
||||
If you have the Azure magnet sheets then this is the best and fastest way to train your Custom Vision project. If you don't have the magnets handy, we've included 2 PDFs ([sheet1](https://github.com/Microsoft/AIVisualProvision/blob/master/Documents/AzureMagnets1.pdf), [sheet2](https://github.com/Microsoft/AIVisualProvision/blob/master/Documents/AzureMagnets2.pdf)) with all our magnet logos in the **documents** section of this repo.
|
||||
|
||||
Note that the Custom Vision project creation and model training are a one-off requirement and you won't need to do this work again unless you want to add a new logo. New images can be added to improve the model's accuracy, and this is the only case where retraining will be required.
|
||||
|
||||
> IMPORTANT! To further increase the prediction accuracy you may want to train the model using the same or as similar as possible conditions as your expected working environment.
|
||||
|
||||
|
@ -125,6 +121,23 @@ Once you've trained your model, you need to retrieve the API keys. The image bel
|
|||
|
||||
Use the settings highlighted in the image to populate the `CustomVisionPredictionUrl` and `CustomVisionPredictionKey` variables in the `MagnetsMobileClient\VisualProvision\AppSettings.cs` file.
|
||||
|
||||
## Xamarin.Forms App (AI Visual Provision)
|
||||
|
||||
Xamarin.Forms enables you to build native UIs for iOS, Android, macOS, Linux, and Windows from a single, shared codebase. You can dive into app development with Xamarin.Forms by following our free self-guided learning from Xamarin University. This project exercises the following patterns and features:
|
||||
|
||||
* Xamarin.Forms
|
||||
* XAML UI
|
||||
* Converters
|
||||
* Custom Controls
|
||||
* Custom Renderers
|
||||
* Data Binding
|
||||
* Effects
|
||||
* IoC
|
||||
* MVVM
|
||||
* Plugins for Xamarin
|
||||
* Styles
|
||||
* .NET Standard
|
||||
|
||||
### App Center
|
||||
|
||||
You can optionally use App Center to build, test and distribute the sample app to your phone.
|
||||
|
|
|
@ -9,10 +9,9 @@
|
|||
# Environment variables :
|
||||
#
|
||||
# - APPCENTER_TOKEN. You need an AppCenter API token. Instructions on how to get it in https://docs.microsoft.com/en-us/appcenter/api-docs/
|
||||
# - XAMARIN_UITEST_VERSION. Version of the Xamarin.UITest NuGet package the project is using. Defaults to 2.2.7
|
||||
# - APPCENTER_PROJECT_NAME. URL of App Center project. For example: AppCenterDemos/AIVisualProvisionAndroid
|
||||
# - DEVICES. ID or IDs of devices or device sets previously created in AppCenter. Defaults to "Pixel 2" (7c5a701f)
|
||||
# - CUSTOM_LOCALE. Locale. Defaults to "en_US"
|
||||
# - CUSTOM_TEST_SERIES. Name of test series. Defaults to "connect18"
|
||||
# - ENABLE_UITESTS. Set to true if you want to run UI Tests
|
||||
#
|
||||
# NOTE: UI_TEST_TOOLS_DIR is where "test-cloud.exe" is. By default in AppCenter is /Users/vsts/.nuget/packages/xamarin.uitest/<xamarin uitest version>/tools
|
||||
|
||||
|
@ -21,41 +20,18 @@ if [ -z "$ENABLE_UITESTS" ]; then
|
|||
exit 0
|
||||
fi
|
||||
|
||||
UITEST_PROJECT_PATH="$APPCENTER_SOURCE_DIRECTORY/MagnetsMobileClient/VisualProvision.UITest"
|
||||
UITEST_PROJECT_PATH="$APPCENTER_SOURCE_DIRECTORY/Source/VisualProvision.UITest"
|
||||
UITEST_CSPROJ_NAME="VisualProvision.UITest.csproj"
|
||||
APPCENTER_PROJECT_NAME="ImageDeploy/Android"
|
||||
APK_PATH="$APPCENTER_OUTPUT_DIRECTORY/com.microsoft.aiprovision.apk"
|
||||
|
||||
DEFAULT_DEVICES="7c5a701f"
|
||||
DEFAULT_XAMARIN_UITEST_VERSION="2.2.7"
|
||||
DEFAULT_UI_TEST_TOOLS_DIR__PART_1="/Users/vsts/.nuget/packages/xamarin.uitest/"
|
||||
DEFAULT_UI_TEST_TOOLS_DIR__PART_2="/tools"
|
||||
DEFAULT_LOCALE="en_US"
|
||||
DEFAULT_TEST_SERIES="connect18"
|
||||
UI_TEST_TOOLS_DIR="/Users/vsts/.nuget/packages/xamarin.uitest/2.2.7/tools"
|
||||
|
||||
if [ -z "$APPCENTER_TOKEN" ]; then
|
||||
echo "ERROR! AppCenter API token is not set. Exiting..."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ -z "$XAMARIN_UITEST_VERSION" ]; then
|
||||
echo "WARNING! XAMARIN_UITEST_VERSION environment variable not set. Setting it to its default. Check the version of Xamarin.UITest you are using in your project"
|
||||
UI_TEST_TOOLS_DIR="$DEFAULT_UI_TEST_TOOLS_DIR__PART_1$DEFAULT_XAMARIN_UITEST_VERSION$DEFAULT_UI_TEST_TOOLS_DIR__PART_2"
|
||||
else
|
||||
echo "Xamarin UITest version is set to $XAMARIN_UITEST_VERSION"
|
||||
UI_TEST_TOOLS_DIR="$DEFAULT_UI_TEST_TOOLS_DIR__PART_1$XAMARIN_UITEST_VERSION$DEFAULT_UI_TEST_TOOLS_DIR__PART_2"
|
||||
fi
|
||||
|
||||
if [ -z "$CUSTOM_LOCALE" ]; then
|
||||
echo "CUSTOM_LOCALE environment variable not set. Setting it to its default $DEFAULT_LOCALE"
|
||||
CUSTOM_LOCALE="$DEFAULT_LOCALE"
|
||||
fi
|
||||
|
||||
if [ -z "$CUSTOM_TEST_SERIES" ]; then
|
||||
echo "CUSTOM_TEST_SERIES environment variable not set. Setting it to its default $DEFAULT_TEST_SERIES"
|
||||
CUSTOM_TEST_SERIES="$DEFAULT_TEST_SERIES"
|
||||
fi
|
||||
|
||||
if [ -z "$DEVICES" ]; then
|
||||
echo "WARNING! Devices variable not set. You need to previously create a device set, and specify it here, eg: <project_name>/samsunggalaxys"
|
||||
echo "Defaulting to Google Pixel 2 (7c5a701f)"
|
||||
|
@ -72,4 +48,4 @@ echo "### Compiling UITest project"
|
|||
msbuild $UITEST_PROJECT_PATH/$UITEST_CSPROJ_NAME /t:build /p:Configuration=Release
|
||||
|
||||
echo "### Launching AppCenter test run"
|
||||
appcenter test run uitest --app $APPCENTER_PROJECT_NAME --devices $DEVICES --app-path $APK_PATH --test-series $CUSTOM_TEST_SERIES --locale $CUSTOM_LOCALE --build-dir $UITEST_PROJECT_PATH/bin/Release --uitest-tools-dir $UI_TEST_TOOLS_DIR --token $APPCENTER_TOKEN
|
||||
appcenter test run uitest --app $APPCENTER_PROJECT_NAME --devices $DEVICES --app-path $APK_PATH --test-series "master" --locale "en_US" --build-dir $UITEST_PROJECT_PATH/bin/Release --uitest-tools-dir $UI_TEST_TOOLS_DIR --token $APPCENTER_TOKEN --async
|
||||
|
|
|
@ -12,15 +12,17 @@ namespace VisualProvision.UITest
|
|||
{
|
||||
}
|
||||
|
||||
// In order to run this test, please change "TestSettings" constants, indicating a valid configuration
|
||||
/*
|
||||
[Test]
|
||||
public async Task SuccessSignInTestAsync()
|
||||
{
|
||||
await new LoginPage()
|
||||
.EnterCredentials(TestSettings.ValidClientId, TestSettings.ValidTenantId, TestSettings.ValidPwd)
|
||||
.SignIn();
|
||||
|
||||
new SubscriptionPage();
|
||||
}
|
||||
*/
|
||||
|
||||
[Test]
|
||||
public async Task FailedSignInTestAsync()
|
||||
|
|
|
@ -9,53 +9,29 @@
|
|||
# Environment variables :
|
||||
#
|
||||
# - APPCENTER_TOKEN. You need an AppCenter API token. Instructions on how to get it in https://docs.microsoft.com/en-us/appcenter/api-docs/
|
||||
# - XAMARIN_UITEST_VERSION. Version of the Xamarin.UITest NuGet package the project is using. Defaults to 2.2.7
|
||||
# - APPCENTER_PROJECT_NAME. URL of App Center project. For example: AppCenterDemos/AIVisualProvisioniOS
|
||||
# - DEVICES. ID or IDs of devices or device sets previously created in AppCenter. Defaults to "iPhone 8, iOS 12.1" (de95e76a)
|
||||
# - CUSTOM_LOCALE. Locale. Defaults to "en_US"
|
||||
# - CUSTOM_TEST_SERIES. Name of test series. Defaults to "connect18"
|
||||
# - ENABLE_UITESTS. Set to true if you want to run UI Tests
|
||||
#
|
||||
# NOTE: UI_TEST_TOOLS_DIR is where "test-cloud.exe" is. By default in AppCenter is /Users/vsts/.nuget/packages/xamarin.uitest/<xamarin uitest version>/tools
|
||||
# NOTE: UI_TEST_TOOLS_DIR is where "test-cloud.exe" is. By default in AppCenter is /Users/vsts/.nuget/packages/xamarin.uitest/2.2.7/tools
|
||||
|
||||
if [ -z "$ENABLE_UITESTS" ]; then
|
||||
echo "ERROR! You need to define in AppCenter the ENABLE_UITESTS environment variable. UI Tests will not run. Exiting..."
|
||||
exit 0
|
||||
fi
|
||||
|
||||
UITEST_PROJECT_PATH="$APPCENTER_SOURCE_DIRECTORY/MagnetsMobileClient/VisualProvision.UITest"
|
||||
UITEST_PROJECT_PATH="$APPCENTER_SOURCE_DIRECTORY/Source/VisualProvision.UITest"
|
||||
UITEST_CSPROJ_NAME="VisualProvision.UITest.csproj"
|
||||
APPCENTER_PROJECT_NAME="ImageDeploy/iOS"
|
||||
IPA_PATH="$APPCENTER_OUTPUT_DIRECTORY/VisualProvision.iOS.ipa"
|
||||
|
||||
DEFAULT_DEVICES="de95e76a"
|
||||
DEFAULT_XAMARIN_UITEST_VERSION="2.2.7"
|
||||
DEFAULT_UI_TEST_TOOLS_DIR__PART_1="/Users/vsts/.nuget/packages/xamarin.uitest/"
|
||||
DEFAULT_UI_TEST_TOOLS_DIR__PART_2="/tools"
|
||||
DEFAULT_LOCALE="en_US"
|
||||
DEFAULT_TEST_SERIES="connect18"
|
||||
UI_TEST_TOOLS_DIR="/Users/vsts/.nuget/packages/xamarin.uitest/2.2.7/tools"
|
||||
|
||||
if [ -z "$APPCENTER_TOKEN" ]; then
|
||||
echo "ERROR! AppCenter API token is not set. Exiting..."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ -z "$XAMARIN_UITEST_VERSION" ]; then
|
||||
echo "WARNING! XAMARIN_UITEST_VERSION environment variable not set. Setting it to its default. Check the version of Xamarin.UITest you are using in your project"
|
||||
UI_TEST_TOOLS_DIR="$DEFAULT_UI_TEST_TOOLS_DIR__PART_1$DEFAULT_XAMARIN_UITEST_VERSION$DEFAULT_UI_TEST_TOOLS_DIR__PART_2"
|
||||
else
|
||||
echo "Xamarin UITest version is set to $XAMARIN_UITEST_VERSION"
|
||||
UI_TEST_TOOLS_DIR="$DEFAULT_UI_TEST_TOOLS_DIR__PART_1$XAMARIN_UITEST_VERSION$DEFAULT_UI_TEST_TOOLS_DIR__PART_2"
|
||||
fi
|
||||
|
||||
if [ -z "$CUSTOM_LOCALE" ]; then
|
||||
echo "CUSTOM_LOCALE environment variable not set. Setting it to its default $DEFAULT_LOCALE"
|
||||
CUSTOM_LOCALE="$DEFAULT_LOCALE"
|
||||
fi
|
||||
|
||||
if [ -z "$CUSTOM_TEST_SERIES" ]; then
|
||||
echo "CUSTOM_TEST_SERIES environment variable not set. Setting it to its default $DEFAULT_TEST_SERIES"
|
||||
CUSTOM_TEST_SERIES="$DEFAULT_TEST_SERIES"
|
||||
fi
|
||||
|
||||
if [ -z "$DEVICES" ]; then
|
||||
echo "WARNING! Devices variable not set. You need to previously create a device set, and specify it here, eg: <project_name>/iPhonesWithNotch"
|
||||
echo "Defaulting to iPhone 8, iOS 12.1 (de95e76a)"
|
||||
|
@ -72,4 +48,4 @@ echo "### Compiling UITest project"
|
|||
msbuild $UITEST_PROJECT_PATH/$UITEST_CSPROJ_NAME /t:build /p:Configuration=Release
|
||||
|
||||
echo "### Launching AppCenter test run"
|
||||
appcenter test run uitest --app $APPCENTER_PROJECT_NAME --devices $DEVICES --app-path $IPA_PATH --test-series $CUSTOM_TEST_SERIES --locale $CUSTOM_LOCALE --build-dir $UITEST_PROJECT_PATH/bin/Release --uitest-tools-dir $UI_TEST_TOOLS_DIR --token $APPCENTER_TOKEN
|
||||
appcenter test run uitest --app $APPCENTER_PROJECT_NAME --devices $DEVICES --app-path $IPA_PATH --test-series "master" --locale "en_US" --build-dir $UITEST_PROJECT_PATH/bin/Release --uitest-tools-dir $UI_TEST_TOOLS_DIR --token $APPCENTER_TOKEN --async
|
||||
|
|
|
@ -17,23 +17,20 @@
|
|||
public const string TenantId = "INSERT YOUR TENANTID HERE";
|
||||
// </snippet_serviceprincipal>
|
||||
|
||||
// App Center
|
||||
public const string AppCenterAndroid = "INSERT YOUR APP CENTER IDENTIFIER FOR ANDROID HERE";
|
||||
public const string AppCenterIos = "INSERT YOUR APP CENTER IDENTIFIER FOR IOS APP HERE";
|
||||
// App Center (Feel free to change the following IDs with your App Center IDs).
|
||||
public const string AppCenterAndroid = "c8fbe0d5-f676-40b9-927d-19f70365f7de";
|
||||
public const string AppCenterIos = "a43c421b-70ac-4742-905a-24dc760696de";
|
||||
|
||||
// <snippet_cusvis_keys>
|
||||
// Custom Vision
|
||||
// URL example: https://southcentralus.api.cognitive.microsoft.com/customvision/v2.0/Prediction/{GUID}/image
|
||||
public const string CustomVisionPredictionUrl = "INSERT YOUR COMPUTER VISION API URL HERE FOR MAGNETS RECOGNITION";
|
||||
public const string CustomVisionPredictionKey = "INSERT YOUR COMPUTER VISION PREDICTION KEY HERE FOR MAGNETS RECOGNITION";
|
||||
// </snippet_cusvis_keys>
|
||||
|
||||
// <snippet_comvis_keys>
|
||||
// Computer Vision
|
||||
// Endpoint example: https://westus.api.cognitive.microsoft.com/
|
||||
public const string ComputerVisionEndpoint = "INSERT COMPUTER VISION ENDPOINT HERE FOR HANDWRITING";
|
||||
public const string ComputerVisionKey = "INSERT YOUR COMPUTER VISION KEY HERE FOR HANDWRITING";
|
||||
// </snippet_comvis_keys>
|
||||
|
||||
}
|
||||
}
|
|
@ -43,9 +43,8 @@ namespace VisualProvision.ViewModels
|
|||
* during development
|
||||
*/
|
||||
|
||||
ClientId.Value = AppSettings.ClientId;
|
||||
TenantId.Value = AppSettings.TenantId;
|
||||
|
||||
ClientId.Value = AppSettings.ClientId;
|
||||
TenantId.Value = AppSettings.TenantId;
|
||||
|
||||
ClientId.IsValid = TenantId.IsValid = Password.IsValid = true;
|
||||
#else
|
||||
|
|