This commit is contained in:
Commit
2724643ba3
|
@ -0,0 +1,4 @@
|
|||
*.dcm filter=lfs diff=lfs merge=lfs -text
|
||||
*.nii.gz filter=lfs diff=lfs merge=lfs -text
|
||||
*.png filter=lfs diff=lfs merge=lfs -text
|
||||
*.zip filter=lfs diff=lfs merge=lfs -text
|
|
@ -0,0 +1,64 @@
|
|||
name: BuildAndTest.CI
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [ main ]
|
||||
pull_request:
|
||||
branches: [ main ]
|
||||
|
||||
env:
|
||||
solution: './Source/Microsoft.Gateway/Microsoft.Gateway.sln'
|
||||
buildPlatform: x64
|
||||
buildConfiguration: Release
|
||||
|
||||
jobs:
|
||||
build-test:
|
||||
runs-on: windows-2019
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
with:
|
||||
lfs: true
|
||||
|
||||
- name: Setup Nuget
|
||||
uses: nuget/setup-nuget@v1
|
||||
with:
|
||||
nuget-version: 'latest'
|
||||
|
||||
- name: Setup MSBuild
|
||||
uses: microsoft/setup-msbuild@v1.0.2
|
||||
|
||||
- name: Nuget Restore
|
||||
run: nuget restore ${{ env.solution }}
|
||||
|
||||
- name: Download Dependencies
|
||||
shell: Powershell
|
||||
run: |
|
||||
cd .\Source\Microsoft.Gateway
|
||||
.\download_dcmtk.ps1
|
||||
|
||||
|
||||
- name: Initialize CodeQL
|
||||
uses: github/codeql-action/init@v1
|
||||
with:
|
||||
languages: 'csharp'
|
||||
|
||||
- name: Build Solution
|
||||
run: msbuild.exe /nologo /t:build /p:UseSharedCompilation=false /p:Configuration=${{ env.buildConfiguration }} /p:Platform=${{ env.buildPlatform }} ${{ env.solution }}
|
||||
|
||||
- name: Perform CodeQL Analysis
|
||||
uses: github/codeql-action/analyze@v1
|
||||
|
||||
- name: Setup VSTest Path
|
||||
uses: darenm/Setup-VSTest@v1
|
||||
|
||||
- name: Test Solution
|
||||
shell: Powershell
|
||||
run: |
|
||||
$erroractionpreference = "stop"
|
||||
|
||||
$TestFiles = $(Get-ChildItem $PATH -include *tests*.dll -exclude "*TestAdapter*","*TestPlatform*","*MSTest*" -recurse | where {$_.FullName -notlike "*obj*"}).fullname
|
||||
$TestArgs = $TestFiles + "/Platform:x64"
|
||||
|
||||
vstest.console.exe $TestArgs
|
||||
exit $lastexitcode
|
|
@ -0,0 +1,365 @@
|
|||
## Ignore Visual Studio temporary files, build results, and
|
||||
## files generated by popular Visual Studio add-ons.
|
||||
##
|
||||
## Get latest from https://github.com/github/gitignore/blob/master/VisualStudio.gitignore
|
||||
|
||||
# User-specific files
|
||||
*.suo
|
||||
*.user
|
||||
*.userosscache
|
||||
*.sln.docstates
|
||||
|
||||
# User-specific files (MonoDevelop/Xamarin Studio)
|
||||
*.userprefs
|
||||
|
||||
# Build results
|
||||
[Dd]ebug/
|
||||
[Dd]ebugPublic/
|
||||
[Rr]elease/
|
||||
[Rr]eleases/
|
||||
x64/
|
||||
x86/
|
||||
bld/
|
||||
[Bb]in/
|
||||
[Oo]bj/
|
||||
[Ll]og/
|
||||
|
||||
# Visual Studio 2015 cache/options directory
|
||||
.vs/
|
||||
# Uncomment if you have tasks that create the project's static files in wwwroot
|
||||
#wwwroot/
|
||||
|
||||
# MSTest test Results
|
||||
[Tt]est[Rr]esult*/
|
||||
[Bb]uild[Ll]og.*
|
||||
|
||||
# NUNIT
|
||||
*.VisualState.xml
|
||||
TestResult.xml
|
||||
|
||||
# Build Results of an ATL Project
|
||||
[Dd]ebugPS/
|
||||
[Rr]eleasePS/
|
||||
dlldata.c
|
||||
|
||||
# .NET Core
|
||||
project.lock.json
|
||||
project.fragment.lock.json
|
||||
artifacts/
|
||||
**/Properties/launchSettings.json
|
||||
|
||||
*_i.c
|
||||
*_p.c
|
||||
*_i.h
|
||||
*.ilk
|
||||
*.meta
|
||||
*.obj
|
||||
*.pch
|
||||
*.pdb
|
||||
*.pgc
|
||||
*.pgd
|
||||
*.rsp
|
||||
*.sbr
|
||||
*.tlb
|
||||
*.tli
|
||||
*.tlh
|
||||
*.tmp
|
||||
*.tmp_proj
|
||||
*.log
|
||||
*.vspscc
|
||||
*.vssscc
|
||||
.builds
|
||||
*.pidb
|
||||
*.svclog
|
||||
*.scc
|
||||
|
||||
# Chutzpah Test files
|
||||
_Chutzpah*
|
||||
|
||||
# Visual C++ cache files
|
||||
ipch/
|
||||
*.aps
|
||||
*.ncb
|
||||
*.opendb
|
||||
*.opensdf
|
||||
*.sdf
|
||||
*.cachefile
|
||||
*.VC.db
|
||||
*.VC.VC.opendb
|
||||
|
||||
# Visual Studio profiler
|
||||
*.psess
|
||||
*.vsp
|
||||
*.vspx
|
||||
*.sap
|
||||
|
||||
# TFS 2012 Local Workspace
|
||||
$tf/
|
||||
|
||||
# Guidance Automation Toolkit
|
||||
*.gpState
|
||||
|
||||
# ReSharper is a .NET coding add-in
|
||||
_ReSharper*/
|
||||
*.[Rr]e[Ss]harper
|
||||
*.DotSettings.user
|
||||
|
||||
# JustCode is a .NET coding add-in
|
||||
.JustCode
|
||||
|
||||
# TeamCity is a build add-in
|
||||
_TeamCity*
|
||||
|
||||
# DotCover is a Code Coverage Tool
|
||||
*.dotCover
|
||||
|
||||
# Visual Studio code coverage results
|
||||
*.coverage
|
||||
*.coveragexml
|
||||
|
||||
# NCrunch
|
||||
_NCrunch_*
|
||||
.*crunch*.local.xml
|
||||
nCrunchTemp_*
|
||||
|
||||
# MightyMoose
|
||||
*.mm.*
|
||||
AutoTest.Net/
|
||||
|
||||
# Web workbench (sass)
|
||||
.sass-cache/
|
||||
|
||||
# Installshield output folder
|
||||
[Ee]xpress/
|
||||
|
||||
# DocProject is a documentation generator add-in
|
||||
DocProject/buildhelp/
|
||||
DocProject/Help/*.HxT
|
||||
DocProject/Help/*.HxC
|
||||
DocProject/Help/*.hhc
|
||||
DocProject/Help/*.hhk
|
||||
DocProject/Help/*.hhp
|
||||
DocProject/Help/Html2
|
||||
DocProject/Help/html
|
||||
|
||||
# Click-Once directory
|
||||
publish/
|
||||
|
||||
# Publish Web Output
|
||||
*.[Pp]ublish.xml
|
||||
*.azurePubxml
|
||||
# TODO: Comment the next line if you want to checkin your web deploy settings
|
||||
# but database connection strings (with potential passwords) will be unencrypted
|
||||
*.pubxml
|
||||
*.publishproj
|
||||
|
||||
# Microsoft Azure Web App publish settings. Comment the next line if you want to
|
||||
# checkin your Azure Web App publish settings, but sensitive information contained
|
||||
# in these scripts will be unencrypted
|
||||
PublishScripts/
|
||||
|
||||
# NuGet Packages
|
||||
*.nupkg
|
||||
# The packages folder can be ignored because of Package Restore
|
||||
**/packages/*
|
||||
# except build/, which is used as an MSBuild target.
|
||||
!**/packages/build/
|
||||
# Uncomment if necessary however generally it will be regenerated when needed
|
||||
#!**/packages/repositories.config
|
||||
# NuGet v3's project.json files produces more ignorable files
|
||||
*.nuget.props
|
||||
*.nuget.targets
|
||||
|
||||
# Microsoft Azure Build Output
|
||||
csx/
|
||||
*.build.csdef
|
||||
|
||||
# Microsoft Azure Emulator
|
||||
ecf/
|
||||
rcf/
|
||||
|
||||
# Windows Store app package directories and files
|
||||
AppPackages/
|
||||
BundleArtifacts/
|
||||
Package.StoreAssociation.xml
|
||||
_pkginfo.txt
|
||||
|
||||
# Visual Studio cache files
|
||||
# files ending in .cache can be ignored
|
||||
*.[Cc]ache
|
||||
# but keep track of directories ending in .cache
|
||||
!*.[Cc]ache/
|
||||
|
||||
# Others
|
||||
ClientBin/
|
||||
~$*
|
||||
*~
|
||||
*.dbmdl
|
||||
*.dbproj.schemaview
|
||||
*.jfm
|
||||
*.pfx
|
||||
*.publishsettings
|
||||
orleans.codegen.cs
|
||||
|
||||
# Since there are multiple workflows, uncomment next line to ignore bower_components
|
||||
# (https://github.com/github/gitignore/pull/1529#issuecomment-104372622)
|
||||
#bower_components/
|
||||
|
||||
# RIA/Silverlight projects
|
||||
Generated_Code/
|
||||
|
||||
# Backup & report files from converting an old project file
|
||||
# to a newer Visual Studio version. Backup files are not needed,
|
||||
# because we have git ;-)
|
||||
_UpgradeReport_Files/
|
||||
Backup*/
|
||||
UpgradeLog*.XML
|
||||
UpgradeLog*.htm
|
||||
|
||||
# SQL Server files
|
||||
*.mdf
|
||||
*.ldf
|
||||
*.ndf
|
||||
|
||||
# Business Intelligence projects
|
||||
*.rdl.data
|
||||
*.bim.layout
|
||||
*.bim_*.settings
|
||||
|
||||
# Microsoft Fakes
|
||||
FakesAssemblies/
|
||||
*.fakes
|
||||
|
||||
# GhostDoc plugin setting file
|
||||
*.GhostDoc.xml
|
||||
|
||||
# Node.js Tools for Visual Studio
|
||||
.ntvs_analysis.dat
|
||||
node_modules/
|
||||
|
||||
# Typescript v1 declaration files
|
||||
typings/
|
||||
|
||||
# Visual Studio 6 build log
|
||||
*.plg
|
||||
|
||||
# Visual Studio 6 workspace options file
|
||||
*.opt
|
||||
|
||||
# Visual Studio 6 auto-generated workspace file (contains which files were open etc.)
|
||||
*.vbw
|
||||
|
||||
# Visual Studio LightSwitch build output
|
||||
**/*.HTMLClient/GeneratedArtifacts
|
||||
**/*.DesktopClient/GeneratedArtifacts
|
||||
**/*.DesktopClient/ModelManifest.xml
|
||||
**/*.Server/GeneratedArtifacts
|
||||
**/*.Server/ModelManifest.xml
|
||||
_Pvt_Extensions
|
||||
|
||||
# Paket dependency manager
|
||||
.paket/paket.exe
|
||||
paket-files/
|
||||
|
||||
# FAKE - F# Make
|
||||
.fake/
|
||||
|
||||
# JetBrains Rider
|
||||
.idea/
|
||||
*.sln.iml
|
||||
|
||||
# CodeRush
|
||||
.cr/
|
||||
|
||||
# Python Tools for Visual Studio (PTVS)
|
||||
__pycache__/
|
||||
*.pyc
|
||||
|
||||
# Cake - Uncomment if you are using it
|
||||
# tools/**
|
||||
# !tools/packages.config
|
||||
|
||||
# Telerik's JustMock configuration file
|
||||
*.jmconfig
|
||||
|
||||
# Paket dependency manager
|
||||
.paket/paket.exe
|
||||
paket-files/
|
||||
|
||||
# FAKE - F# Make
|
||||
.fake/
|
||||
|
||||
# CodeRush personal settings
|
||||
.cr/personal
|
||||
|
||||
# Python Tools for Visual Studio (PTVS)
|
||||
__pycache__/
|
||||
*.pyc
|
||||
|
||||
# Cake - Uncomment if you are using it
|
||||
# tools/**
|
||||
# !tools/packages.config
|
||||
|
||||
# Tabs Studio
|
||||
*.tss
|
||||
|
||||
# Telerik's JustMock configuration file
|
||||
*.jmconfig
|
||||
|
||||
# BizTalk build output
|
||||
*.btp.cs
|
||||
*.btm.cs
|
||||
*.odx.cs
|
||||
*.xsd.cs
|
||||
|
||||
# OpenCover UI analysis results
|
||||
OpenCover/
|
||||
|
||||
# Azure Stream Analytics local run output
|
||||
ASALocalRun/
|
||||
|
||||
# MSBuild Binary and Structured Log
|
||||
*.binlog
|
||||
|
||||
# NVidia Nsight GPU debugger configuration file
|
||||
*.nvuser
|
||||
|
||||
# MFractors (Xamarin productivity tool) working folder
|
||||
.mfractor/
|
||||
|
||||
# Local History for Visual Studio
|
||||
.localhistory/
|
||||
|
||||
# BeatPulse healthcheck temp database
|
||||
healthchecksdb
|
||||
|
||||
# Backup folder for Package Reference Convert tool in Visual Studio 2017
|
||||
MigrationBackup/
|
||||
|
||||
# Ionide (cross platform F# VS Code tools) working folder
|
||||
.ionide/
|
||||
|
||||
# Others
|
||||
*.dir/
|
||||
*.sdf
|
||||
*.opensdf
|
||||
*.cache
|
||||
*.opendb
|
||||
*.ipch
|
||||
*.exe
|
||||
packages
|
||||
*.db
|
||||
.vs
|
||||
*.messages
|
||||
*.filters
|
||||
*.psess
|
||||
__pycache__
|
||||
*~
|
||||
ecf/
|
||||
/.vscode
|
||||
|
||||
/Source/Microsoft.Gateway/Microsoft.InnerEye.Listener.Wix/Service1.Generated.wxs
|
||||
/Source/Microsoft.Gateway/Microsoft.InnerEye.Listener.Wix/Service2.Generated.wxs
|
||||
/Source/Microsoft.Gateway/dcmtk-3.6.5-win64-dynamic
|
||||
*.zip
|
||||
/Source/Microsoft.Gateway/dicom3tools
|
|
@ -0,0 +1,62 @@
|
|||
# .NET Desktop
|
||||
# Build and run tests for .NET Desktop or Windows classic desktop solutions.
|
||||
# Add steps that publish symbols, save build artifacts, and more:
|
||||
# https://docs.microsoft.com/azure/devops/pipelines/apps/windows/dot-net
|
||||
|
||||
trigger:
|
||||
- master
|
||||
|
||||
pool:
|
||||
vmImage: 'windows-latest'
|
||||
|
||||
variables:
|
||||
solution: '**/Source/Microsoft.Gateway/Microsoft.Gateway.sln'
|
||||
buildPlatform: 'x64'
|
||||
buildConfiguration: 'Release'
|
||||
|
||||
steps:
|
||||
- checkout: self
|
||||
lfs: true
|
||||
- task: NuGetToolInstaller@1
|
||||
|
||||
|
||||
- task: NuGetCommand@2
|
||||
inputs:
|
||||
command: 'restore'
|
||||
restoreSolution: '$(solution)'
|
||||
feedsToUse: 'config'
|
||||
nugetConfigPath: 'Source/Microsoft.Gateway/NuGet.config'
|
||||
|
||||
- task: PowerShell@2
|
||||
inputs:
|
||||
name: 'Download DCMTK'
|
||||
filePath: '.\Source\Microsoft.Gateway\download_dcmtk.ps1'
|
||||
workingDirectory: '.\Source\Microsoft.Gateway\'
|
||||
failOnStderr: true
|
||||
|
||||
- task: VSBuild@1
|
||||
inputs:
|
||||
solution: '$(solution)'
|
||||
platform: '$(buildPlatform)'
|
||||
configuration: '$(buildConfiguration)'
|
||||
|
||||
- task: VSTest@2
|
||||
inputs:
|
||||
testSelector: 'testAssemblies'
|
||||
testAssemblyVer2: |
|
||||
**\*test*.dll
|
||||
!**\*TestAdapter.dll
|
||||
!**\obj\**
|
||||
searchFolder: '$(System.DefaultWorkingDirectory)/Source'
|
||||
platform: '$(buildPlatform)'
|
||||
configuration: '$(buildConfiguration)'
|
||||
|
||||
- task: CredScan@3
|
||||
|
||||
- task: ComponentGovernanceComponentDetection@0
|
||||
inputs:
|
||||
scanType: 'Register'
|
||||
verbosity: 'Verbose'
|
||||
alertWarningLevel: 'High'
|
||||
failOnAlert: true
|
||||
|
|
@ -0,0 +1,9 @@
|
|||
# Microsoft Open Source Code of Conduct
|
||||
|
||||
This project has adopted the [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/).
|
||||
|
||||
Resources:
|
||||
|
||||
- [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/)
|
||||
- [Microsoft Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/)
|
||||
- Contact [opencode@microsoft.com](mailto:opencode@microsoft.com) with questions or concerns
|
|
@ -0,0 +1,3 @@
|
|||
# Deploying resources on Azure Stack Hub
|
||||
|
||||
@Edwin to complete
|
|
@ -0,0 +1,38 @@
|
|||
# How to run end to end demo on local environment?
|
||||
|
||||
Here are some quick steps to run end to end demo on your local environment.
|
||||
|
||||
SSH into the GPU VM. The first command to run is `docker images`, to get the image ID of the modified head-and-neck container; then run it interactively as follows:
|
||||
|
||||
1. Start the GPU VM which has Inferencing container. Get the public IP and copy it.
|
||||
2. SSH into this VM using: `ssh <userName>@<IP address>`
|
||||
3. If prompted enter "yes"
|
||||
4. Now it will ask for password. Enter the password:
|
||||
5. After successful login it will open the VM shell. In the shell run below command.
|
||||
6. docker run -it --entrypoint=/bin/bash -p 8086:5000 -e AZURE_STORAGE_ACCOUNT_NAME=name -e AZURE_STORAGE_KEY=<accountKey> -e AZURE_STORAGE_ENDPOINT=<endpoint> --gpus all <image>
|
||||
7. conda activate nnenv
|
||||
8. python web-api.py
|
||||
9. Clone https://msdsip@dev.azure.com/msdsip/AshInnerEye/_git/Gateway
|
||||
10. Clone https://msdsip@dev.azure.com/msdsip/AshInnerEye/_git/AshInnerEye
|
||||
11. Set platform to x64 and build the project
|
||||
12. Generate self signed certificate using below command in PowerShell window. Make sure you run it as Administrator.
|
||||
`New-SelfSignedCertificate -CertStoreLocation Cert:\LocalMachine\My -DnsName "mysite.local" -FriendlyName "InnerEyeDryRun" -NotAfter (Get-Date).AddYears(10)`
|
||||
13. Copy the thumbprint and replace "KeyVaultAuthCertThumbprint" key value of Inferencing API and Worker Project in config file.
|
||||
a. Microsoft.InnerEye.Azure.Segmentation.API.Console
|
||||
b. Microsoft.InnerEye.Azure.Segmentation.Worker.Console
|
||||
14. Replace the other keys in same file.
|
||||
15. Build both projects.
|
||||
16. Now run both projects, the Inferencing API and the Engine executables, from the bin directory:
|
||||
a. Microsoft.InnerEye.Azure.Segmentation.Worker.Console.exe
|
||||
b. Microsoft.InnerEye.Azure.Segmentation.API.Console.exe
|
||||
17. Next, run the gateway receiver and processor:
|
||||
a. Microsoft.InnerEye.Listener.Processor.exe
|
||||
b. Microsoft.InnerEye.Listener.Receiver.exe
|
||||
18. Now navigate to the head-and-neck images folder; ideally this is inside the repo you cloned:
|
||||
a. *:\AshInnerEye\InnerEyeCloud\Source\Images\HeadAndNeck\image
|
||||
19. Open above path in PowerShell window.
|
||||
20. Run these command on PowerShell - `storescu <YOUR_IP> 104 -v --scan-directories -aec RGPelvisCT -aet Scanner .`
|
||||
21. Open a suitable path in PowerShell where you want to store result.
|
||||
22. Run on powershell `storescp 1105 -v -aet PACS -od . --sort-on-study-uid st`
|
||||
23. Wait for results.
|
||||
|
|
@ -0,0 +1,10 @@
|
|||
# How to run end to end demo on Azure Stack hub?
|
||||
|
||||
Here are the steps:
|
||||
|
||||
1. TBD - share image over DICOM
|
||||
2. Then, open the image (test H&N volume is located in *<TBD>*) and ensure you can see it in the app
|
||||
3. Select '...' button that is shown when hovering over image in the explorer panel, select "Export", choose destination, check "Send Series", make sure that "Send Structure Set" is unchecked, and click "Export".
|
||||
4. Observe the console output from solution components to ensure that the process is underway. Once process is complete you should see a yellow "new image received" icon in the left sidebar of the app.
|
||||
5. Go into the Incoming tab on the app and press "Open" next to the latest segmentation received.
|
||||
|
|
@ -0,0 +1,56 @@
|
|||
# Inferencing APIs
|
||||
|
||||
## Gateway Dicom – Inferencing API
|
||||
|
||||
The Inferencing API is one of the main components of the InnerEye architecture. Currently we have a set of API calls, which are grouped into several functional groups, and it is part of the InnerEye Cloud (classic cloud service) application.
|
||||
(As part of architecture, Inferencing API is highlighted as below)
|
||||
|
||||
![ash_architecture.png](https://dev.azure.com/msdsip/8520c5e0-ef36-49bc-983d-12972ea056e0/_apis/git/repositories/cecb2ded-12e0-46f2-a2fe-7bf99a94811f/Items?path=%2F.attachments%2Fash_architecture-461fa2d7-8655-4ce9-b5b9-e6572b51030f.png&download=false&resolveLfs=true&%24format=octetStream&api-version=5.0-preview.1&sanitize=true&versionDescriptor.version=wikiMaster)
|
||||
|
||||
Below is the distribution of the API calls according to their functional groups. Of these, we are working on point 4 (Inferencing API), and we also test point 5 for health check.
|
||||
|
||||
**1. DICOM Configuration**
|
||||
|
||||
These APIs configure DICOM endpoints that the Gateway can work with as well as routing rules for data that comes from these endpoints.
|
||||
These APIs configure DICOM endpoints that the Gateway can work with as well as routing rules for data that comes from these endpoints.
|
||||
**OSS implementation:** the configuration will be done via JSON files. These APIs are scrapped.
|
||||
|
||||
*/api/v1/config/gateway/update* - Gets the expected version the Gateway should be updated to.
|
||||
*/api/v1/config/gateway/receive* - Gets the current gateway "receive" configuration.
|
||||
*/api/v1/config/gateway/processor* - Gets the current gateway "processor" configuration.
|
||||
*/api/v1/config/gateway/destination/{callingAET}* - Gets the destination DicomEndPoint to send results to given the AET of the caller
|
||||
*/api/v1/config/gateway/destination/{callingAET}/{calledAET}* - Gets the destination DicomEndPoint to send results to given the AET of the caller and the calling AET (the way our gateway is being called)
|
||||
*/api/v1/config/aetconfig/{calledAET}/{callingAET}* - Download a collection of DICOM constraints based on called AET and calling AET
|
||||
|
||||
**2. Data Upload**
|
||||
|
||||
This API endpoint provides a way to upload data for persisting the images for subsequent machine learning.
|
||||
**OSS implementation:** These API need to be updated to conform with DICOMWeb implementation.
|
||||
*/api/v1/storage* - Upload DICOM series to long term storage. In V1 this API call needs to be replaced with a call to a DICOM Web STOW-RS
|
||||
|
||||
**3. Feedback**
|
||||
|
||||
These APIs facilitate a workflow where a corrected segmentation is sent back for further analysis. This is not used in V1; the APIs below should be removed.
|
||||
OSS implementation: These API need to be removed
|
||||
*/api/v1/feedback* - Upload a collection of DICOM files (segmentation masks).
|
||||
*/ping* - check if API is still up. Keep for V1
|
||||
*/api/ping* - check if API is still up, with authentication. Remove for V1
|
||||
|
||||
**4. Inferencing**
|
||||
|
||||
These APIs have to do with inferencing:
|
||||
• Get the list of registered models
|
||||
• Send image for inferencing
|
||||
• Get progress
|
||||
• Retrieve result
|
||||
|
||||
**OSS implementation:** Most of these APIs remain and are essential to V1 operation
|
||||
**/api/v1/models** - Returns a list of all models from Azure model blob container. This call is not needed for V1 implementation. This part was under discussion and based on meetings and discussion, for demos we are going to used two static model configurations.
|
||||
**/api/v1/models/{modelId}/segmentation/{segmentationId}** - Checks the segmentation status for a given segmentation of a given model.
|
||||
**/api/v1/models/{modelId}/segmentation/{segmentationId}/result** - Gets the result for a completed segmentation.
|
||||
**/api/v1/models/{modelId}/segmentation** - Starts a segmentation. The content of the request should be a compressed zip file with a list of DICOM files with a folder per ChannelId E.g. ct\1.dcm, flair\1.dcm
|
||||
|
||||
**5. Health check**
|
||||
|
||||
*/ping* - check if API is still up. Keep for V1
|
||||
*/api/ping* - check if API is still up, with authentication. Remove for V1
|
|
@ -0,0 +1,3 @@
|
|||
# Inferencing Container
|
||||
|
||||
@Mark, @Edwin - Help out
|
|
@ -0,0 +1,7 @@
|
|||
# Inferencing Engine
|
||||
|
||||
**What does Inferencing do and the flow related to the architecture?**
|
||||
|
||||
The Inferencing Engine works to transform Nifti images into Dicom RT files. The Inferencing Engine reads the Dicom image from the queue, where it is present in byte form. The image is transformed into a Nifti image and pushed to the Inferencing container, which sends back the Nifti segmentation-mask image. The Segmentation processor takes the Nifti images and transforms them into a Dicom RT file. These Dicom RT images are pushed to blob storage, and the progress of this task is saved in Table storage.
|
||||
|
||||
@Mark - To help out
|
|
@ -0,0 +1,3 @@
|
|||
# More about InnerEye project
|
||||
|
||||
@Michela
|
|
@ -0,0 +1,328 @@
|
|||
# Getting started
|
||||
Here is a page with intros to DICOM and subject domain: https://dev.azure.com/msdsip/AshInnerEye/_wiki/wikis/AshInnerEye.wiki/24/Start-Here
|
||||
|
||||
# Environment
|
||||
|
||||
**Stack Hub environment portal**: https://portal.ppe2.stackpoc.com/#@avanadestackpoc.onmicrosoft.com
|
||||
Resource group used for demo: **rgcnabgroup**
|
||||
|
||||
# Chapter 1: Resource deployment
|
||||
When it comes to resource deployment in an Azure environment, we mostly think about using ARM templates, which Azure Stack Hub conveniently also supports, so we can deploy our resources using an ARM template. In this setup we use [CNAB](https://cnab.io/) to bundle our infrastructure and deploy it on Azure Stack Hub.
|
||||
|
||||
# Prerequisites
|
||||
|
||||
- Azure Stack Hub subscription
|
||||
|
||||
- Docker (Here is a link if you need to install [Docker Installation Instructions](https://docs.docker.com/get-docker/) )
|
||||
|
||||
- Porter (Here is a link if you need to install: Porter Installation Instructions [[Porter Installation Instructions]](https://porter.sh/install/))
|
||||
|
||||
> **NOTE:** be sure to add porter to your PATH
|
||||
|
||||
- Service Principal that has been granted contributor access to your Azure Stack Hub subscription
|
||||
|
||||
- You will need the following information for the service principal
|
||||
- Client ID
|
||||
- Client secret
|
||||
    - Object ID (note: this is different from the object ID shown on the app registration page, and can be found in the enterprise application area of your Azure Active Directory)
|
||||
- Tenant ID
|
||||
|
||||
- Your user account needs to have owner access to the subscription. (This is needed to assign access to the service principal for deployment)
|
||||
|
||||
# Step 1: Prepare for Installation
|
||||
|
||||
### Create CNAB Parameter File
|
||||
|
||||
Locate the file named `azure-stack-profile.template.txt` and open it for editing. You will need to provide some values so the CNAB package can register your Azure Stack environment and deploy into it. Save the file as `azure-stack-profile.txt` after you have assigned the required values.
|
||||
|
||||
```
|
||||
azure_stack_tenant_arm="Your Azure Stack Tenant Endpoint"
|
||||
azure_stack_storage_suffix="Your Azure Stack Storage Suffix"
|
||||
azure_stack_keyvault_suffix="Your Azure Stack KeyVault Suffix"
|
||||
azure_stack_location="Your Azure Stack's location identifier here."
|
||||
azure_stack_resource_group="Your desired Azure Stack resource group name to create"
|
||||
slicer_ip="IP address for your clinical endpoint for receiving DICOM RT"
|
||||
```
|
||||
|
||||
### Generate Credentials
|
||||
|
||||
Open a new shell window and make sure you are in the root directory of this repo. Run the command below to generate credentials required for deployment. Follow the prompts to assign values for the credentials needed. Select "specific value" from the interactive menu for each of the required credential fields. A description of each credential is provided below.
|
||||
|
||||
```
|
||||
porter generate credentials
|
||||
```
|
||||
|
||||
| Item | Description |
|
||||
| :-------------------------- | :----------------------------------------------------------- |
|
||||
| AZURE_STACK_SP_CLIENT_ID | The client id for the service principal that is registered with your Azure Stack Hub Subscription |
|
||||
| AZURE_STACK_SP_PASSWORD | The secret associated with the service principal that is registered with your Azure Stack Hub Subscription |
|
||||
| AZURE_STACK_SP_TENANT_DNS | The DNS for the Azure Active Directory that is tied to your Azure Stack Hub (e.g. [mycompany.onmicrosoft.com](http://mycompany.onmicrosoft.com/) ) |
|
||||
| AZURE_STACK_SUBSCRIPTION_ID | The subscription id for the subscription on your Azure Stack Hub that you want to deploy into |
|
||||
| VM_PASSWORD | The password you would like to use for the login to the VM that is deployed as part of this CNAB package |
|
||||
|
||||
# Step 2: Build CNAB
|
||||
|
||||
Run the command below to build the Porter CNAB package.
|
||||
|
||||
```
|
||||
porter build
|
||||
```
|
||||
|
||||
# Step 3: Install CNAB
|
||||
|
||||
### Install CNAB Package
|
||||
|
||||
Run the below command to install the CNAB package. This will create a new resource group on your Azure Stack subscription and will deploy the solution into it.
|
||||
|
||||
```
|
||||
porter install InnerEye --cred InnerEye --param-file "azure-stack-profile.txt"
|
||||
```
|
||||
|
||||
### (Optional) Uninstall CNAB Package
|
||||
|
||||
If you wish to remove the solution from your Azure Stack Hub, run the below command. Please note that this will delete the entire resource group that the solution was deployed into. If you have created any other custom resources in this resource group, they will also be deleted.
|
||||
|
||||
```
|
||||
porter uninstall InnerEye --cred InnerEye --param-file "azure-stack-profile.txt"
|
||||
```
|
||||
|
||||
# Step 4: Start Inferencing Container(s)
|
||||
|
||||
- Get the IP of the Inferencing Container VM from the Azure Stack Hub Portal
|
||||
- Connect to the VM via ssh
|
||||
- Navigate to the app directory
|
||||
- Make any necessary modifications to the model_inference_config.json file
|
||||
- Start the containers by running the below commands
|
||||
|
||||
```
|
||||
python setup-inferencing.py model_inference_config.json
|
||||
```
|
||||
|
||||
# Step 5: Start the Gateway
|
||||
|
||||
- Get the IP of the Inferencing Container VM from the Azure Stack Hub Portal
|
||||
- Connect to the VM via Remote Desktop Protocol (RDP)
|
||||
- Open the gateway.msi file on the desktop
|
||||
|
||||
## Summary of Deployment Components
|
||||
|
||||
- KeyVault and grants read access to Service Principal
|
||||
- Storage Account
|
||||
- GPU Linux VM to host inferencing containers
|
||||
- App service plan to host Inferencing API and Inferencing Engine
|
||||
- Inferencing API app service
|
||||
- Inferencing Engine app service
|
||||
- Gateway VM
|
||||
|
||||
# Chapter 2: Building and deploying the code
|
||||
We can always use Azure DevOps CI/CD pipelines to build and deploy the code onto the infrastructure created. In this chapter we will talk about how to build and deploy the code using a local environment.
|
||||
|
||||
# Prerequisites
|
||||
|
||||
- Cloned git repositories.
|
||||
- Visual Studio 2017
|
||||
- Azure Stack Hub Storage Account Details
|
||||
- Gateway VM Details
|
||||
- Inferencing container Details
|
||||
|
||||
### Clone the repos
|
||||
|
||||
To clone the code to your local environment, please follow the steps below:
|
||||
|
||||
1. First you need to clone the InnerEye Cloud Solution.
|
||||
|
||||
```
|
||||
git clone https://msdsip@dev.azure.com/msdsip/AshInnerEye/_git/AshInnerEye
|
||||
```
|
||||
|
||||
2. After cloning the InnerEye Cloud Solution, second repository to clone is Gateway.
|
||||
|
||||
```
|
||||
git clone https://msdsip@dev.azure.com/msdsip/AshInnerEye/_git/Gateway
|
||||
```
|
||||
|
||||
- > **NOTE:** Make sure you clone both the repos at root folder of any directory, this is to avoid max-length path issue.
|
||||
|
||||
### Building the solutions
|
||||
|
||||
#### Building InnerEye Cloud Solution
|
||||
|
||||
1. Open Visual Studio 2017
|
||||
|
||||
2. Open InnerEye Cloud.sln to open InnerEye Cloud Solution from the repo cloned.
|
||||
|
||||
3. Once opened, set the solution configuration to x64.
|
||||
|
||||
4. Open Web.config for Microsoft.InnerEye.Azure.Segmentation.API.AppService
|
||||
|
||||
5. Update the following app settings
|
||||
|
||||
```
|
||||
<appSettings>
|
||||
<add key="webpages:Version" value="3.0.0.0" />
|
||||
<add key="webpages:Enabled" value="false" />
|
||||
<add key="ClientValidationEnabled" value="true" />
|
||||
<add key="UnobtrusiveJavaScriptEnabled" value="true" />
|
||||
<add key="AccountName" value="" />
|
||||
<add key="StorageConnectionString" value="" />
|
||||
</appSettings>
|
||||
```
|
||||
|
||||
Here:
|
||||
|
||||
1. AccountName is the storage account name of the Azure Stack Hub storage account.
|
||||
2. StorageConnectionString is Connection string of Azure Stack Hub storage account.
|
||||
|
||||
6. Once this is updated, open the Web.config for Microsoft.InnerEye.Azure.Segmentation.Worker.AppService
|
||||
|
||||
```
|
||||
<appSettings>
|
||||
<add key="webpages:Version" value="3.0.0.0" />
|
||||
<add key="webpages:Enabled" value="false" />
|
||||
<add key="ClientValidationEnabled" value="true" />
|
||||
<add key="UnobtrusiveJavaScriptEnabled" value="true" />
|
||||
<add key="InferencingContainerEndpoint" value="" />
|
||||
<add key="InferencingContainerEndpointPort" value="" />
|
||||
<add key="AccountName" value="" />
|
||||
<add key="StorageConnectionString" value="" />
|
||||
</appSettings>
|
||||
```
|
||||
|
||||
Here:
|
||||
|
||||
1. AccountName is the storage account name of the Azure Stack Hub storage account.
|
||||
2. StorageConnectionString is Connection string of Azure Stack Hub storage account.
|
||||
3. InferencingContainerEndpoint is IP address of Inferencing container VM
|
||||
4. InferencingContainerEndpointPort is port number where Inferencing container is hosted.
|
||||
|
||||
7. When both Web.config files are ready build the solution from the build menu of Visual Studio.
|
||||
|
||||
#### Building Gateway Solution
|
||||
|
||||
1. Open the new instance of Visual Studio 2017
|
||||
2. Open Microsoft.Gateway.sln from the Gateway repo cloned.
|
||||
3. Modify the InnerEyeSegementationClient.cs to add inferencing API endpoint.
|
||||
4. Set the project configuration to x64
|
||||
5. Build the solution from the build menu of Visual Studio.
|
||||
|
||||
### Deploying the solutions
|
||||
|
||||
#### Deploying InnerEye Cloud Solution
|
||||
|
||||
1. Either you can download Publish Profile of deployed Inferencing API or you can also create new one using Visual Studio Publish option.
|
||||
2. To download publish profile go to Azure Stack Hub portal and open Inferencing API resource.
|
||||
3. Click on Get Publish profile button from overview page.
|
||||
4. Once downloaded switch to Visual Studio window which has InnerEye Cloud solution opened.
|
||||
5. Right click on Microsoft.InnerEye.Azure.Segmentation.API.AppService and select Publish.
|
||||
6. Import the downloaded publish profile.
|
||||
7. Set the release configuration to x64 and click publish.
|
||||
8. This will deploy the Microsoft.InnerEye.Azure.Segmentation.API.AppService to hub.
|
||||
9. Now switch to browser window and open Inferencing Engine App Service.
|
||||
10. Once open go to overview page and download the publish profile by clicking on Get Publish Profile button.
|
||||
11. Once downloaded right click on Microsoft.InnerEye.Azure.Segmentation.Worker.AppService and click publish.
|
||||
12. Import the downloaded publish profile.
|
||||
13. Set the release configuration to x64 and click Publish.
|
||||
14. This will publish Microsoft.InnerEye.Azure.Segmentation.Worker.AppService
|
||||
|
||||
#### Deploying Gateway Solution
|
||||
|
||||
1. Gateway needs to run as Windows Service but you can also run executables of Gateway solution independently.
|
||||
2. To run gateway from installer copy the build contents from Gateway solution to the Virtual Machine dedicated for Gateway.
|
||||
3. Search and open Microsoft.InnerEye.Gateway.msi
|
||||
4. This will install Gateway as Windows Service in virtual machine.
|
||||
5. If you do not want to run the Gateway services as a Windows Service, then:
|
||||
6. From the build contents go to Microsoft.InnerEye.Listener.Processor and look for executable.
|
||||
7. Open the executable file.
|
||||
8. Go to Microsoft.InnerEye.Listener.Receiver and look for executable.
|
||||
9. Open the executable.
|
||||
|
||||
# Chapter 3: Testing and Configuring Deployed Environment
|
||||
|
||||
Below are the instructions to debug and monitor resources after the solution has been deployed.
|
||||
|
||||
## VM: Inferencing container (Linux)
|
||||
|
||||
Once the machine is running, ensure that the _headandneckmodel_ container is running by running `docker ps` and ensuring you have the container "_headandneckmodel_" up. If everything is up you should be able to navigate to http://38.102.181.60:8081/ and see output there.
|
||||
|
||||
If the container is not running or needs to be restarted, run `docker kill <container ID>` and then do the following:
|
||||
|
||||
1. Run `docker run -it --entrypoint=/bin/bash -p 8081:5000 -e AZURE_STORAGE_ACCOUNT_NAME=<ACCOUNT_NAME> -e AZURE_STORAGE_KEY=<ACCOUNT_KEY> -e AZURE_STORAGE_ENDPOINT=ppe2.stackpoc.com --gpus all headandneckmodel:latest`
|
||||
2. Run `conda activate nnenv`
|
||||
3. Run `python web-api.py` - this should launch the web server that is wrapping the inferencing container
|
||||
|
||||
If the container is already running:
|
||||
* Use `docker logs <container ID>` to retrieve logs from the container shell
|
||||
* Use `docker attach <container ID>` to connect to the interactive shell of the inferencing container
|
||||
|
||||
## Inferencing Engine AppService
|
||||
Runs model invoker. Makes calls to the model server. Converts from DICOM to NIFTI.
|
||||
|
||||
In sample environment runs on: app-inferapi
|
||||
|
||||
## Inferencing API AppService
|
||||
Runs API, kicks off model inferencing via communication with the inferencing engine.
|
||||
|
||||
In sample environment runs on: infereng
|
||||
|
||||
## Gateway VM (Windows)
|
||||
|
||||
Gateway should be launched like so:
|
||||
1. **GatewayProcessor** (C:\Users\azureuser\source\repos\Gateway\Source\Microsoft.Gateway\Microsoft.InnerEye.Listener.Processor\bin\x64\Debug\Microsoft.InnerEye.Listener.Processor.exe)
|
||||
2. **GatewayReceiver** (C:\Users\azureuser\source\repos\Gateway\Source\Microsoft.Gateway\Microsoft.InnerEye.Listener.Receiver\bin\x64\Debug\Microsoft.InnerEye.Listener.Receiver.exe)
|
||||
|
||||
Before launching make sure that the API App Service is running as the Gateway checks its health first and wouldn't start if it can't find the app service.
|
||||
|
||||
Watch startup logs to make sure there are no errors. Note that the inferencing container should have its server running before the segmentation processor and API are launched.
|
||||
|
||||
### Gateway Configuration
|
||||
|
||||
Gateway configuration files are located in: C:\Users\azureuser\source\repos\Gateway\Source\Microsoft.Gateway\SampleConfigurations
|
||||
**TODO** Server URL needs to be configured. For now hardcoded in InnerEyeSegmentationClient.cs, EnvironmentBaseAddress
|
||||
|
||||
## Storage account
|
||||
**Account name**: devstoreaccount1
|
||||
|
||||
### Containers
|
||||
Used to store segmentations coming in (converted to NIFTI) and out (converted to NIFTI). One container is created per segmentation request
|
||||
|
||||
#### Tables
|
||||
|
||||
| Name | Purpose |
|
||||
| ----------------- | ------------------------------------------------------------ |
|
||||
| gatewayversions | not in use |
|
||||
| imagesindexes | not in use |
|
||||
| models | not in use (will be used after model configuration is read from the AML package) |
|
||||
| progresstable | contains progress per segmentation, updated by cloud code. PartitionKey - model ID; RowKey - segmentation ID |
|
||||
| rtfeedbackindexes | not in use |
|
||||
| rtindexes | ?? |
|
||||
| TimingsTable | Used to compute progress by storing average time of multiple model runs |
|
||||
|
||||
## InnerEye app
|
||||
|
||||
If using InnerEye app, below are some tips on configuration.
|
||||
|
||||
App launched from shortcut on the desktop (Microsoft InnerEye Dev). App configuration is stored in _%APPDATA%\Microsoft\InnerEye\MicrosoftInnerEyeWPFDevexeSettings.json_.
|
||||
|
||||
Relevant configuration parameters are:
|
||||
* DcmReceiveFolder - where the received rtstruct or images are stored
|
||||
* UserDefinedPushLocations - configured send destinations (also can be configured through the app)
|
||||
* DcmAeTitle - AET of the DICOM server that app is running
|
||||
* DcmPort - port for the app's DICOM server
|
||||
|
||||
You can check that the app can talk to the gateway by going to app setting (gear icon in the bottom left) and pressing "Echo" next to the endpoint corresponding to the gateway. If successful, the gateway should show success message in the notification bar and the gateway receiver console log should show informational log messages as well.
|
||||
|
||||
Test H&N volume is located in _C:\Users\azureuser\Test Images\headandneck_
|
||||
|
||||
# Chapter 4. Running the Demo
|
||||
|
||||
In order to run the demo, ensure that all services are running by launching them in the following order:
|
||||
|
||||
1. Inferencing container and API
|
||||
2. Gateway
|
||||
|
||||
Then do the following if using the InnerEye app:
|
||||
1. Launch the InnerEye app. You can use the echo to test if there is connectivity with the gateway.
|
||||
2. Then, open the image (test H&N volume is located in _C:\Users\azureuser\Test Images\headandneck_) and ensure you can see it in the app
|
||||
3. Select '...' button that is shown when hovering over image in the explorer panel, select "Export", choose destination, check "Send Series", make sure that "Send Structure Set" is unchecked, and click "Export".
|
||||
4. Observe the console output from solution components to ensure that the process is underway. Once process is complete you should see a yellow "new image received" icon in the left sidebar of the app.
|
||||
5. Go into the Incoming tab on the app and press "Open" next to the latest segmentation received.
|
|
@ -0,0 +1,3 @@
|
|||
# Video and Blogs
|
||||
|
||||
@Michela
|
|
@ -0,0 +1,39 @@
|
|||
# Editing Sequence Diagram
|
||||
|
||||
The [sequence diagram](sequence.wsd) is stored in the [PlantUML](https://plantuml.com/) text format.
|
||||
|
||||
## PlantUML
|
||||
|
||||
To build a png image file from the wsd file, follow the PlantUML [Getting Started](https://plantuml.com/starting):
|
||||
|
||||
1. Install [Java](https://www.java.com/en/download/).
|
||||
|
||||
1. Download [plantuml.jar](http://sourceforge.net/projects/plantuml/files/plantuml.jar/download)
|
||||
|
||||
### Preview
|
||||
|
||||
To show a live preview of the image whilst editing the file:
|
||||
|
||||
1. Run the command:
|
||||
|
||||
```cmd
|
||||
java -jar plantuml.jar
|
||||
```
|
||||
|
||||
2. Change the file extensions text box to show "wsd"
|
||||
|
||||
3. Select "sequence.png [sequence.wsd]" in the file list.
|
||||
|
||||
4. A preview of the image will be shown in a new window and will update automatically as the text file is edited.
|
||||
|
||||
### Export
|
||||
|
||||
To create an image from the file run the command:
|
||||
|
||||
```cmd
|
||||
java -jar plantuml.jar sequence.wsd
|
||||
```
|
||||
|
||||
## Visual Studio Code
|
||||
|
||||
There are PlantUML extensions available for [Visual Studio Code](https://code.visualstudio.com/), for example [PlantUML](https://marketplace.visualstudio.com/items?itemName=jebbs.plantuml), which offers previews of the final image and image export functions.
|
|
@ -0,0 +1,4 @@
|
|||
# Guidelines for how to report a bug
|
||||
|
||||
@Mark, @Ivan
|
||||
|
|
@ -0,0 +1,3 @@
|
|||
# Pull Requests guidelines
|
||||
|
||||
@Mark, @Ivan
|
|
@ -0,0 +1,3 @@
|
|||
# Tag to be used on Stack Overflow and other channels
|
||||
|
||||
@Mark, @Ivan
|
|
@ -0,0 +1,3 @@
|
|||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:af212da614c9a9b1cc10fa2d634c49bbdf73500c8c6611296024d051d09f5b7e
|
||||
size 336704
|
|
@ -0,0 +1,3 @@
|
|||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:a92a3797b3e8589be3dcdef1efa234b06d3f2fa6e0f10eb3ac2f2be2cb8561c6
|
||||
size 53677
|
|
@ -0,0 +1,59 @@
|
|||
@startuml sequence
|
||||
skinparam backgroundColor #EEEBDC
|
||||
skinparam handwritten false
|
||||
participant "DICOM Client" as dc
|
||||
participant "Receiver Service" as rt
|
||||
database "RootDicomFolder" as db
|
||||
queue "Upload Queue" as uq
|
||||
participant "Upload Service" as ut
|
||||
participant "Inference Web Service" as is
|
||||
queue "Download Queue" as dq
|
||||
participant "Download Service" as dt
|
||||
queue "Push Queue" as pq
|
||||
participant "Push Service" as pt
|
||||
participant "Destination" as dd
|
||||
queue "Delete Queue" as xq
|
||||
participant "Delete Service" as xt
|
||||
activate dc
|
||||
dc -> rt: Assoc Req
|
||||
activate rt
|
||||
loop
|
||||
dc -> rt: C-Store
|
||||
rt -> db: Save DICOM files
|
||||
end
|
||||
dc -> rt: Assoc Release
|
||||
deactivate dc
|
||||
rt -> uq: DICOM files rec'd
|
||||
uq -> ut
|
||||
deactivate rt
|
||||
activate ut
|
||||
db -> ut: Load DICOM files
|
||||
ut -> is: POST anonymised files
|
||||
activate is
|
||||
ut -> dq: Uploaded
|
||||
dq -> dt
|
||||
activate dt
|
||||
ut -> xq: Delete DICOM files
|
||||
xq -> xt
|
||||
deactivate ut
|
||||
activate xt
|
||||
xt -> db: Delete DICOM files
|
||||
deactivate xt
|
||||
is -> dt: Download DICOM-RT
|
||||
deactivate is
|
||||
dt -> db: Save DICOM-RT
|
||||
dt -> pq: Downloaded
|
||||
pq -> pt
|
||||
deactivate dt
|
||||
activate pt
|
||||
db -> pt: Load DICOM-RT
|
||||
activate dd
|
||||
pt -> dd: Send DICOM-RT
|
||||
deactivate dd
|
||||
pt -> xq: Delete DICOM-RT file
|
||||
xq -> xt
|
||||
deactivate pt
|
||||
activate xt
|
||||
xt -> db: Delete DICOM-RT file
|
||||
deactivate xt
|
||||
@enduml
|
|
@ -0,0 +1,23 @@
|
|||
<!-- List of Include Files and folders and exception lists for this repo for GeoPolitical scanning. Contact Joerow for detail. -->
|
||||
<!-- Consult Global Readiness Notebook @ aka.ms/NExTGeoPol for further details -->
|
||||
<!-- This file is consumed by scripts in the 'health-localization' repo under the LocBuild\GeoPolitical folder(s) -->
|
||||
<!DOCTYPE varsdefined [
|
||||
<!ENTITY GitReposFolder "C:\GITs\Repos">
|
||||
<!ENTITY GitRepoName "Gateway">
|
||||
]>
|
||||
|
||||
<GeoPol_Folders>
|
||||
<!-- List of Folders to include for GeoPolitical scanning -->
|
||||
<GitRepoName>&GitRepoName;</GitRepoName>
|
||||
<Component Include="List here folders to Include in a GeoPol Scan">
|
||||
<!-- . means the entire repo -->
|
||||
<!-- Use back slash \ to indicate folder path e.g. C:\Temp\Git\ -->
|
||||
<IncludeFolder>.</IncludeFolder>
|
||||
</Component>
|
||||
<Component Exclude="List exceptions here to not be scanned, that have been included above">
|
||||
<!-- Make sure to consult http://aka.ms/NExtStart if excluding 3rd party or OSS components -->
|
||||
<!-- Use back slash \ to indicate folder path e.g. src\external\ -->
|
||||
<ExcludeFolder>.gitignore</ExcludeFolder>
|
||||
<ExcludeFolder>GeoPol.xml</ExcludeFolder>
|
||||
</Component>
|
||||
</GeoPol_Folders>
|
|
@ -0,0 +1,3 @@
|
|||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:50fff99a192f9edcdabd8ff038eb9b435f61bc4ffcccb8f2a386e7e1e5984b9c
|
||||
size 526478
|
|
@ -0,0 +1,3 @@
|
|||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:ce46b92e238dbac3a216ee46b9ef33bb0627394d55f121a17fa551a1bd3cc469
|
||||
size 526478
|
|
@ -0,0 +1,3 @@
|
|||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:34281b6bdea4caf78c2697f01f1905c959bd7ab7369ed4857d711cc51d6ae999
|
||||
size 526474
|
|
@ -0,0 +1,3 @@
|
|||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:805e97d1fc2146fd1b2afe2673a3c9435d77a2465d0b20968067b4bd5638d29d
|
||||
size 526474
|
|
@ -0,0 +1,3 @@
|
|||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:8670e931652af0a874946b926fd26d184a7223f57a52a9462149932991b67dcf
|
||||
size 526474
|
|
@ -0,0 +1,3 @@
|
|||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:685a803e57d4c184706ade300a8168ec247aec008410a2ab853f2df2dbe69c79
|
||||
size 526474
|
|
@ -0,0 +1,3 @@
|
|||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:0241f30d2e1bacacf20212d34f3845ea75b783e6012eec5f266ed449260a35e4
|
||||
size 526474
|
|
@ -0,0 +1,3 @@
|
|||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:bcf90239ba20f1983f8dd41eeb3c4f189b5e119326115835e26401e68394dad6
|
||||
size 526474
|
|
@ -0,0 +1,3 @@
|
|||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:c016e1925104e799cbea3d2878ecb9a7da2bfb2e11356a10d87fc3ca3ca74448
|
||||
size 526474
|
|
@ -0,0 +1,3 @@
|
|||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:d3bbd8d7827c32df6d993fbeec0e98b3809c12b1de9b875817cc98142962f88f
|
||||
size 526474
|
|
@ -0,0 +1,3 @@
|
|||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:6b85fc06be70ef858c8d53e3eb85f715f19cfcec25102a19cee77d97f900e276
|
||||
size 526474
|
|
@ -0,0 +1,3 @@
|
|||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:24eec6223db0a2e65e8da9cb885a1e02e0f51141dad5d3519724826de5e8182c
|
||||
size 526478
|
|
@ -0,0 +1,3 @@
|
|||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:d6d8a912b6b1721c249144f294ec1b6ead149e843d29d820054894f51118f055
|
||||
size 526474
|
|
@ -0,0 +1,3 @@
|
|||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:10d1123758ef1e55f1a16d6f6e0af188ebc55afc17d6e44479bb23bb2ce78fbf
|
||||
size 526478
|
|
@ -0,0 +1,3 @@
|
|||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:f2e460817f9edb30557fb270b807c682602f25817474f28e0e28c14ac54234d9
|
||||
size 526478
|
|
@ -0,0 +1,3 @@
|
|||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:bb0397440c0563af5993b67cde489fc4786eeb0c971efb4f5b1115f742a5a5bd
|
||||
size 526478
|
|
@ -0,0 +1,3 @@
|
|||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:71486a9fd6f0bdff81c95b3b9b2569c226bd58449a68f0c136a6b9c701a93076
|
||||
size 526478
|
|
@ -0,0 +1,3 @@
|
|||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:19dee5cc07449c6475dd6e9ea41fbbde1907baad932f350cee3cfc2932fe6f4c
|
||||
size 526478
|
|
@ -0,0 +1,3 @@
|
|||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:b0495371764b38fc9b556e8fa6d41ea2153f8d33b375933d74b42349b0b1a1f5
|
||||
size 526478
|
|
@ -0,0 +1,3 @@
|
|||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:d60a33d88c971c0eef7edccc57e6889b1a25308b0f62a98de5d8d66d25016fc2
|
||||
size 526478
|
|
@ -0,0 +1,3 @@
|
|||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:64d39c799ebc9e9fc29a0a2f6dcf6a06fc650de3cc1ff8e2dfb7198e07598a3b
|
||||
size 526044
|
|
@ -0,0 +1,3 @@
|
|||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:567fde5610490605118bbddf206f2dce8fe4eaabe9f0767c59be45e2e8b6e435
|
||||
size 150724
|
|
@ -0,0 +1,3 @@
|
|||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:3484c46aad0bbd9e42b1499bc69b30c10138aef75c68630e7c73b8b5895ed115
|
||||
size 150724
|
|
@ -0,0 +1,3 @@
|
|||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:b180028584029ceeda6c4fcd1113f0dacd05e8c1f3d8f55e6c142dcf2ff29047
|
||||
size 150732
|
|
@ -0,0 +1,3 @@
|
|||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:64979b699d117b74fab0c36650d42361c71e1a706a913555e4b1dc825239722c
|
||||
size 150724
|
|
@ -0,0 +1,3 @@
|
|||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:1fc2cfc9f2586765ca38b9f693d0ff93acfd30f90e2f89f55d14da2cfe49d668
|
||||
size 150724
|
|
@ -0,0 +1,3 @@
|
|||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:45ec592478ce37f75076de21b96b3d534d6aff39e40c8613cd78770e6538f965
|
||||
size 150726
|
|
@ -0,0 +1,3 @@
|
|||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:29fe517282a6e78c389761331e27c23c1334004cec4eed9faab13251e656f2f5
|
||||
size 150724
|
|
@ -0,0 +1,3 @@
|
|||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:d4a6fb0d1d34256cdeaca408de5d58163bf4323191ce01335eb8782bbaa1861d
|
||||
size 150730
|
|
@ -0,0 +1,3 @@
|
|||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:036c969bed6550442b8eb92993b3041c2888584db0ba440686433d646ae892fa
|
||||
size 150726
|
|
@ -0,0 +1,3 @@
|
|||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:f4b8a4a72c61aa8f79235e1cfa48f079c69d249c648a5b679d030e110987bbfa
|
||||
size 150724
|
|
@ -0,0 +1,3 @@
|
|||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:ce4cc10c58ccb3500ce744341ef4291659119950483ae52115aedba4f1a9316c
|
||||
size 150726
|
|
@ -0,0 +1,3 @@
|
|||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:3a3772cd4a47c4719466d4c28899f29a3ed9c5a9b48b48809b49af277e097c8a
|
||||
size 150726
|
|
@ -0,0 +1,3 @@
|
|||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:f40c5a4b051322eb9b43a24b26cd620e1fa6d54d8ede7da78643583b2a13cd3b
|
||||
size 150724
|
|
@ -0,0 +1,3 @@
|
|||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:3c52c09ba22103b9f4c89116e990ff93b7fd45c58fa0a80ac3181f95fdaf0f43
|
||||
size 150724
|
|
@ -0,0 +1,3 @@
|
|||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:131b552658886547667cb9693e5163f32852dd90be9a16387051903eba654c84
|
||||
size 150724
|
|
@ -0,0 +1,3 @@
|
|||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:5513a440245c4ac180c61b8b13d4b67be8cf275acdb5c80620c9840a2de55b15
|
||||
size 150724
|
|
@ -0,0 +1,3 @@
|
|||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:6f3d79a22dab1f7022f5aec471de9939193b44adbebac7304477c4977e3ef2f5
|
||||
size 150724
|
|
@ -0,0 +1,3 @@
|
|||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:109e22ee9fda4b86a77493a89b496b3577eb2ad60ba98229de53576fce71597a
|
||||
size 150724
|
|
@ -0,0 +1,3 @@
|
|||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:f4d73819684de029e03b534074169754f6e9ea70031f75e56dcba70959b0f2a9
|
||||
size 150724
|
|
@ -0,0 +1,3 @@
|
|||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:2f9f2ea65a9beb02d982190aaf03a82581dcaacb99eaaedc2c5511bea6b4a0cb
|
||||
size 150724
|
|
@ -0,0 +1,3 @@
|
|||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:4c28b9926924a0a3ce3278e199bb796ceed5b4cea34cd6043cea5f467f328499
|
||||
size 150732
|
|
@ -0,0 +1,3 @@
|
|||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:14c862cfe8d62504fe2c52fda80a355c544df8a8b9272d9b41abd83478c9c4e7
|
||||
size 150724
|
|
@ -0,0 +1,3 @@
|
|||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:b6bdfe9d7b2e498ca4d515a8842b253da95a41de31287d2b21592e8d3eb4e25e
|
||||
size 150724
|
|
@ -0,0 +1,3 @@
|
|||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:e7aad5900ce129243b1e2e204c880023061da2a571cc990e0acf99a17f59a8d6
|
||||
size 150724
|
|
@ -0,0 +1,3 @@
|
|||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:9b92f215847c1d9afbb9758325539f799c3c30165c4d73d9f05030bf93c5be18
|
||||
size 150724
|
|
@ -0,0 +1,3 @@
|
|||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:bff8bb3367c76a493c60d7d1165db7a3a85e10be1312e082ea656e09f9b003bb
|
||||
size 150724
|
|
@ -0,0 +1,3 @@
|
|||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:53b38404d3e048fa76d0bfb98dfb7e8fc150f231d9de37d32aa431e9e0f7901f
|
||||
size 150732
|
|
@ -0,0 +1,3 @@
|
|||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:e2dfc19d8d7be630e924a5e84fdb83818976240610c0de4ea899d76342612481
|
||||
size 150724
|
|
@ -0,0 +1,3 @@
|
|||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:c88ba0663232d13b3c2327a6b4211c74c83ac8ef84e75132547fb62d6d76f3f9
|
||||
size 150720
|
|
@ -0,0 +1,3 @@
|
|||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:986964ae0e2e4549165716ae0b61c0027ec1c387961fc6a7fbad242a2824388e
|
||||
size 150732
|
|
@ -0,0 +1,3 @@
|
|||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:73d16ad51f35c4b723f5e9c06447a7f90146ae3494624b117b299cfe2ea41cbf
|
||||
size 150724
|
|
@ -0,0 +1,3 @@
|
|||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:55e98faac37af15cd1cb1343023ad9555b93fb2c946dcdb64cfce864d139e5df
|
||||
size 150730
|
|
@ -0,0 +1,3 @@
|
|||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:3b22259d183cfacdef0d1e2571456b078c07cd1a49fe8ac5ed024c80aee9f0b4
|
||||
size 150726
|
|
@ -0,0 +1,3 @@
|
|||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:e35564dc39c13bf4903edde3207446723f253b24a889af71b5a78e39efef1a10
|
||||
size 150724
|
|
@ -0,0 +1,3 @@
|
|||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:6e98977f36d2b51939160d282a6ecb350e8c8646fde405dfb0f2cd60e848e85e
|
||||
size 150724
|
|
@ -0,0 +1,3 @@
|
|||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:611e4337963323d1c5f0efd80c43e63c40b52b078293a5c3521e5f501b2d9d18
|
||||
size 150724
|
|
@ -0,0 +1,3 @@
|
|||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:f6921f1f016e02c9fd029a4055ca00b5cd1eb9c2466279d0021f34173b7e75eb
|
||||
size 150726
|
|
@ -0,0 +1,3 @@
|
|||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:c5a4c9b44ced9d2844938e386627a52dc2a52229b5bf947fc3a5b53947692adb
|
||||
size 150726
|
|
@ -0,0 +1,3 @@
|
|||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:130ea08b7bde08b4ec3511e612af413ac823aabef999b756e82621fdbffc15ee
|
||||
size 150726
|
|
@ -0,0 +1,3 @@
|
|||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:e6cbf53909047517c1afda77aeee69734d134d22cb5c91a95fba9d6357d080c8
|
||||
size 150724
|
|
@ -0,0 +1,3 @@
|
|||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:a3e15178a4fb21f71dafdf1791eec5255ae201481a92e447e63b0442d8433f95
|
||||
size 150724
|
|
@ -0,0 +1,3 @@
|
|||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:e2f18ef59a3377f791866b5b63e1e60631a5db9c7777ed611ec08e6ea857a77f
|
||||
size 150726
|
|
@ -0,0 +1,3 @@
|
|||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:00d73ab54cceb6a2c8e518c61fdf64e1814bfcbc1db70f4bf17052e080947a52
|
||||
size 150730
|
|
@ -0,0 +1,3 @@
|
|||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:3d37b52c31a7e53bcb563f4d4cefc37d6e7f6ddd2e731ca8b821a41cfb451112
|
||||
size 150730
|
|
@ -0,0 +1,3 @@
|
|||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:e59139d5e805782cd664e8684986d8e875138616753b96f3115dbd8d8bc52d04
|
||||
size 150724
|
|
@ -0,0 +1,3 @@
|
|||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:915328cdbabb0d4d1b709620bd9cf4e3b9a6d91a0374097423e14a5845ca4f32
|
||||
size 150724
|
|
@ -0,0 +1,3 @@
|
|||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:94f7e9a9558e2778acab95feb059f5e84d9a04a975a1d5a771ca1512b98496a5
|
||||
size 150726
|
|
@ -0,0 +1,3 @@
|
|||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:6449ccf9319c70c6e93b59a2578a4fea18942ff15e7574d18b073b49a80f64ca
|
||||
size 150732
|
|
@ -0,0 +1,3 @@
|
|||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:ac1efeb4d3ec9ac4c82b16dcc5fc8c9e9c1b6c6ede6ad5281f16083c70d8f8f3
|
||||
size 150724
|
|
@ -0,0 +1,3 @@
|
|||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:2dc7d1d03db10130cb053559cc5e7017514bdb6f57ad46283bb7733a89075a8b
|
||||
size 150724
|
|
@ -0,0 +1,3 @@
|
|||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:3b3b3f777e8e3b4fe33ae6ca066eb0967cabd110bc3bfb924f879180634f8ee0
|
||||
size 150732
|
|
@ -0,0 +1,3 @@
|
|||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:85f24ac9e0f55c397b4d2708ff7b686a287dc46c2ec506b0a7ca7e8899391f3f
|
||||
size 150726
|
|
@ -0,0 +1,3 @@
|
|||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:9edfd30f552172560e6f11f68cb120aa1a1b80d1022342c986caa1abaa088d99
|
||||
size 150724
|
|
@ -0,0 +1,3 @@
|
|||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:39e26c9b62fee9c588b65cfcb13028a602501869586e34c845952215fba31e36
|
||||
size 150724
|
|
@ -0,0 +1,3 @@
|
|||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:b06e81e69cd27af0adb281563f5f8485d4f8ad5b8262d23a622e8ea0012b1326
|
||||
size 150732
|
|
@ -0,0 +1,3 @@
|
|||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:9eb5f6cdb17b699188e5aac5e310f680c46557e74d7fe52e2dd850775a6080eb
|
||||
size 150724
|
|
@ -0,0 +1,3 @@
|
|||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:1c60581abe1f45bd1b92c021aecf4a1de9c65efbdcd9189bea4021ee8e8168e6
|
||||
size 150724
|
Некоторые файлы не были показаны из-за слишком большого количества измененных файлов Показать больше
Загрузка…
Ссылка в новой задаче