Fix: Test Coverage on Azure Pipeline CI (#348)

* Added build.sh

* Fixed tests

* Added pipeline yml

* Fixed pipeline

* Removed

* Passing in args to script

* Added container image

* Fixed args

* Fixed args

* Fixed args

* Removed last script

* Added test management

* Added job dependency

* Moved package installation to build.sh

* fixed

* Added job dependency

* Added RProfile site

* Fixed REnvironment

* Added Rprofile

* Added ~/

* Fixing tests

* fixed method name

* Renaming samples & tests

* renamed more tests

* Fixed lint

* Removed other tests

* Fixed directory call for testthat

* Fixed typo and removed sample

* Fixed pipeline pointer
This commit is contained in:
Brian Hoang 2019-02-13 21:36:40 -08:00 коммит произвёл GitHub
Родитель 372e382048
Коммит 975858072e
Не найден ключ, соответствующий данной подписи
Идентификатор ключа GPG: 4AEE18F83AFDEB23
31 изменённых файлов: 259 добавлений и 148 удалений

41
.vsts/pipeline.yml Normal file
Просмотреть файл

@ -0,0 +1,41 @@
# Azure Pipelines CI definition: builds the package, writes Batch/Storage
# credentials into ~/.Rprofile so tests can read them via Sys.getenv(),
# then runs the unit test suite.
name: $(Build.SourceBranch)$(Rev:.r)
trigger:
- master
resources:
containers:
- container: linux
image: ubuntu:16.04
jobs:
- job: Build
displayName: Build Job
condition: succeeded()
pool:
vmImage: 'ubuntu-16.04'
steps:
# Install R, system libraries, and the R development toolchain.
- task: ShellScript@2
displayName: Build
inputs:
scriptPath: 'tests/test_scripts/build.sh'
# Persist the pipeline's secret variables into ~/.Rprofile so every
# subsequent R session picks them up, then install/build the package and
# generate the credential file consumed by getSettings() in the tests.
- script: |
touch ~/.Rprofile
echo "Sys.setenv(BATCH_ACCOUNT_NAME ='"$(BATCH_ACCOUNT_NAME)"');" >> ~/.Rprofile
echo "Sys.setenv(BATCH_ACCOUNT_KEY ='"$(BATCH_ACCOUNT_KEY)"');" >> ~/.Rprofile
echo "Sys.setenv(BATCH_ACCOUNT_URL ='"$(BATCH_ACCOUNT_URL)"');" >> ~/.Rprofile
echo "Sys.setenv(STORAGE_ACCOUNT_NAME ='"$(STORAGE_ACCOUNT_NAME)"');" >> ~/.Rprofile
echo "Sys.setenv(STORAGE_ACCOUNT_KEY ='"$(STORAGE_ACCOUNT_KEY)"');" >> ~/.Rprofile
sudo R \
-e "getwd()" \
-e "devtools::install()" \
-e "devtools::build()" \
-e "doAzureParallel::generateCredentialsConfig('test_credentials.json', batchAccountName = Sys.getenv('BATCH_ACCOUNT_NAME'), batchAccountKey = Sys.getenv('BATCH_ACCOUNT_KEY'), batchAccountUrl = Sys.getenv('BATCH_ACCOUNT_URL'), storageAccountName = Sys.getenv('STORAGE_ACCOUNT_NAME'), storageAccountKey = Sys.getenv('STORAGE_ACCOUNT_KEY'))"
condition: succeeded()
displayName: Create R Profile Environment Setting
# Execute the unit test suite; the script exits non-zero on any failure.
- task: ShellScript@2
displayName: Run Unit Tests
inputs:
scriptPath: 'tests/testthat/unit_tests/unit_tests.sh'

Просмотреть файл

Просмотреть файл

@ -17,10 +17,10 @@ generateCredentialsConfig("credentials.json")
setCredentials("credentials.json")
# generate cluster config json file
generateClusterConfig("cluster-caret.json")
generateClusterConfig("caret_cluster.json")
# Creating an Azure parallel backend
cluster <- makeCluster(cluster = "cluster-caret.json")
cluster <- makeCluster("caret_cluster.json")
# Register your Azure parallel backend to the foreach implementation
registerDoAzureParallel(cluster)
@ -33,7 +33,7 @@ registerDoAzureParallel(cluster)
# https://topepo.github.io/caret/index.html
library(caret)
# Set your chunk size of your tasks to 8
# Set the chunk size of your tasks to 8
# so that caret knows to group tasks into larger chunks
setChunkSize(8)
@ -41,10 +41,10 @@ setChunkSize(8)
install.packages("DAAG")
library(DAAG)
# 'spam7' is a data set that consists of 4601 email items,
# of which 1813 items were identified as spam. This sample
# has 7 features, one of which is titled 'yesno'. In this
# example, we will be classifying our data into 'yesno' to
# 'spam7' is a data set that consists of 4601 email items,
# of which 1813 items were identified as spam. This sample
# has 7 features, one of which is titled 'yesno'. In this
# example, we will be classifying our data into 'yesno' to
# identify which rows are spam, and which are not.
# split the data into training and testing
@ -53,7 +53,7 @@ inTraining <- createDataPartition(spam7$yesno, p = .75, list = FALSE)
training <- spam7[ inTraining,]
testing <- spam7[-inTraining,]
# Define the settings for the cv. Because we have already
# Define the settings for the cv. Because we have already
# registered our parallel backend, Caret will know to use it
fitControl <- trainControl(## 10-fold cross validation
method = "repeatedcv",
@ -68,9 +68,9 @@ fitControl <- trainControl(## 10-fold cross validation
rf_fit <- train(## classification column
yesno ~ .,
yesno ~ .,
## dataframe to train on
data = training,
data = training,
## model to use - other models are also available (see caret documentation)
method = "rf",
## the metric to use for evaluation

Просмотреть файл

@ -1,5 +1,5 @@
{
"name": "package_management",
"name": "bioconductor_pool",
"vmSize": "Standard_A2_v2",
"maxTasksPerNode": 1,
"poolSize": {

Просмотреть файл

@ -0,0 +1,13 @@
#!/bin/bash
# CI build bootstrap: registers the CRAN apt repository, installs R plus the
# system libraries needed to compile package dependencies from source, then
# installs the R development toolchain used by later pipeline steps.
#
# Fail fast: abort on any command failure, unset variable, or pipe failure
# so a broken apt/gpg step fails the CI job instead of being silently ignored.
set -euo pipefail

# Add the CRAN Ubuntu repository. Note: only tee needs sudo; running echo
# under sudo does nothing because the pipe is set up by the calling shell.
echo "deb http://cran.rstudio.com/bin/linux/ubuntu trusty/" | sudo tee -a /etc/apt/sources.list

# Import and trust the CRAN repository signing key (E084DAB9).
gpg --keyserver keyserver.ubuntu.com --recv-key E084DAB9
gpg -a --export E084DAB9 | sudo apt-key add -

sudo apt-get update
# r-base/r-base-dev provide R; the -dev libraries are required to build
# R packages such as curl, openssl, xml2, rgdal, and gsl from source.
sudo apt-get install -y r-base r-base-dev libcurl4-openssl-dev
sudo apt-get install -y libssl-dev libxml2-dev libgdal-dev libproj-dev libgsl-dev

# Install the R development toolchain (devtools et al.) system-wide.
sudo R \
  -e "getwd();" \
  -e "install.packages(c('devtools', 'remotes', 'testthat', 'roxygen2'));"

Просмотреть файл

@ -1,23 +0,0 @@
#!/bin/bash
sudo echo "deb http://cran.rstudio.com/bin/linux/ubuntu trusty/" | sudo tee -a /etc/apt/sources.list
gpg --keyserver keyserver.ubuntu.com --recv-key E084DAB9
gpg -a --export E084DAB9 | sudo apt-key add -
sudo apt-get update
sudo apt-get install -y r-base r-base-dev libcurl4-openssl-dev
sudo apt-get install -y libssl-dev libxml2-dev libgdal-dev libproj-dev libgsl-dev
sudo R \
-e "Sys.setenv(BATCH_ACCOUNT_NAME = '$BATCH_ACCOUNT_NAME')" \
-e "Sys.setenv(BATCH_ACCOUNT_KEY = '$BATCH_ACCOUNT_KEY')" \
-e "Sys.setenv(BATCH_ACCOUNT_URL = '$BATCH_ACCOUNT_URL')" \
-e "Sys.setenv(STORAGE_ACCOUNT_NAME = '$STORAGE_ACCOUNT_NAME')" \
-e "Sys.setenv(STORAGE_ACCOUNT_KEY = '$STORAGE_ACCOUNT_KEY')" \
-e "getwd();" \
-e "install.packages(c('devtools', 'remotes', 'testthat', 'roxygen2'));" \
-e "devtools::install();" \
-e "devtools::build();" \
-e "res <- devtools::test(reporter='summary');" \
-e "df <- as.data.frame(res);" \
-e "if(sum(df[['failed']]) > 0 || any(df[['error']])) { q(status=1) }"

Просмотреть файл

@ -0,0 +1,81 @@
context("Cluster Management Test")
# Creates a pool without waiting for it to come online, fetches it back by
# pool id, and verifies it appears in the unfiltered cluster list.
test_that("Create Cluster Test", {
testthat::skip_on_travis()
source("utility.R")
settings <- getSettings()
cluster <-
doAzureParallel::makeCluster(settings$clusterConfig, wait = FALSE)
cluster <- getCluster(cluster$poolId)
clusterList <- getClusterList()
# NOTE(review): `filter` is constructed here but never passed to
# getClusterList() in this test -- confirm whether a filtered call
# (as in "Get Cluster Test") was intended.
filter <- list()
filter$state <- c("active", "deleting")
testthat::expect_true('test-pool' %in% clusterList$Id)
})
# Verifies that a created pool can be fetched by id, that it appears in the
# unfiltered cluster list, and that filtering by state only returns clusters
# in the requested states ("active" or "deleting").
test_that("Get Cluster Test", {
  testthat::skip_on_travis()
  source("utility.R")
  settings <- getSettings()
  cluster <-
    doAzureParallel::makeCluster(settings$clusterConfig, wait = FALSE)
  cluster <- getCluster(cluster$poolId)
  clusterList <- getClusterList()
  filter <- list()
  filter$state <- c("active", "deleting")
  testthat::expect_true('test-pool' %in% clusterList$Id)
  clusterList <- getClusterList(filter)
  # Vectorized check instead of `for (i in 1:length(...))`: the 1:length()
  # form iterates over c(1, 0) when the filtered list is empty and would
  # index with NA; all(x %in% ...) is TRUE for an empty vector.
  testthat::expect_true(all(clusterList$State %in% filter$state))
})
# NOTE(review): this test body is an exact duplicate of "Get Cluster Test"
# above -- it never resizes the pool or exercises any autoscale formula, so
# it does not actually test autoscaling. Confirm the intended autoscale
# assertions and add them, or remove the duplicate.
test_that("Autoscale Cluster Test", {
testthat::skip_on_travis()
source("utility.R")
settings <- getSettings()
cluster <-
doAzureParallel::makeCluster(settings$clusterConfig, wait = FALSE)
cluster <- getCluster(cluster$poolId)
clusterList <- getClusterList()
filter <- list()
filter$state <- c("active", "deleting")
testthat::expect_true('test-pool' %in% clusterList$Id)
clusterList <- getClusterList(filter)
for (i in 1:length(clusterList$State)) {
testthat::expect_true(clusterList$State[i] == 'active' ||
clusterList$State[i] == 'deleting')
}
})
# Creates a pool, requests its deletion, and verifies the pool is still
# visible in the cluster list (deletion is asynchronous, so it should show
# up while in the 'deleting' state).
#
# Fixes the original body, which referenced `clusterList` before assigning
# it and passed an undefined `filter` to getClusterList() after the
# assertion -- the test could never run as written.
test_that("Delete Cluster Test", {
  testthat::skip_on_travis()
  source("utility.R")
  settings <- getSettings()
  cluster <-
    doAzureParallel::makeCluster(settings$clusterConfig, wait = FALSE)
  doAzureParallel::stopCluster(cluster)
  # Fetch the list AFTER requesting deletion, BEFORE asserting on it.
  clusterList <- getClusterList()
  testthat::expect_true('test-pool' %in% clusterList$Id)
})

Просмотреть файл

@ -4,9 +4,6 @@ test_that("Remove error handling with combine test", {
source("utility.R")
settings <- getSettings()
# set your credentials
doAzureParallel::setCredentials(settings$credentials)
cluster <- doAzureParallel::makeCluster(settings$clusterConfig)
doAzureParallel::registerDoAzureParallel(cluster)
@ -31,9 +28,6 @@ test_that("Remove error handling test", {
source("utility.R")
settings <- getSettings()
# set your credentials
doAzureParallel::setCredentials(settings$credentials)
settings$clusterConfig$poolId <- "error-handling-test"
cluster <- doAzureParallel::makeCluster(settings$clusterConfig)
doAzureParallel::registerDoAzureParallel(cluster)
@ -58,9 +52,6 @@ test_that("Pass error handling test", {
source("utility.R")
settings <- getSettings()
# set your credentials
doAzureParallel::setCredentials(settings$credentials)
settings$clusterConfig$poolId <- "error-handling-test"
cluster <- doAzureParallel::makeCluster(settings$clusterConfig)
doAzureParallel::registerDoAzureParallel(cluster)
@ -82,14 +73,10 @@ test_that("Pass error handling test", {
})
test_that("Stop error handling test", {
testthat::skip("Manual Test")
testthat::skip_on_travis()
source("utility.R")
settings <- getSettings()
# set your credentials
doAzureParallel::setCredentials(settings$credentials)
settings$clusterConfig$poolId <- "error-handling-test"
cluster <- doAzureParallel::makeCluster(settings$clusterConfig)
doAzureParallel::registerDoAzureParallel(cluster)
@ -99,11 +86,7 @@ test_that("Stop error handling test", {
testthat::expect_error(
res <-
foreach::foreach(i = 1:4, .errorhandling = "stop") %dopar% {
if (i == 2) {
randomObject
}
i
randomObject
}
)
})

Просмотреть файл

@ -1,12 +1,31 @@
context("foreach options test")
context("Integration Test")
# Run this test for users to make sure the core features
# of doAzureParallel are still working
test_that("simple foreach 1 to 4", {
testthat::skip_on_travis()
source("utility.R")
settings <- getSettings()
doAzureParallel::registerDoAzureParallel(cluster)
'%dopar%' <- foreach::'%dopar%'
res <-
foreach::foreach(i = 1:4) %dopar% {
i
}
res <- unname(res)
testthat::expect_equal(length(res), 4)
testthat::expect_equal(res, list(1, 2, 3, 4))
})
context("Foreach Options Integration Test")
test_that("chunksize", {
testthat::skip_on_travis()
source("utility.R")
settings <- getSettings()
# set your credentials
doAzureParallel::setCredentials(settings$credentials)
cluster <- doAzureParallel::makeCluster(settings$clusterConfig)
doAzureParallel::registerDoAzureParallel(cluster)

Просмотреть файл

@ -5,12 +5,42 @@ test_that("merge job result locally test", {
testthat::skip_on_travis()
testthat::skip("Skipping merge job locally")
source("utility.R")
settings <- gettingSettings()
settings <- getSettings()
# set your credentials
doAzureParallel::setCredentials(settings$credentials)
cluster <- doAzureParallel::makeCluster(settings$clusterConfig)
doAzureParallel::registerDoAzureParallel(cluster)
setChunkSize(2)
'%dopar%' <- foreach::'%dopar%'
jobId <-
foreach::foreach(
i = 1:11,
.errorhandling = "pass",
.options.azure = list(
enableCloudCombine = FALSE,
wait = FALSE
)
) %dopar% {
i
}
res <- getJobResult(jobId)
testthat::expect_equal(length(res),
10)
for (i in 1:10) {
testthat::expect_equal(res[[i]],
i)
}
})
test_that("merge job result locally test", {
testthat::skip_on_travis()
testthat::skip("Skipping merge job locally")
source("utility.R")
settings <- getSettings()
cluster <- doAzureParallel::makeCluster(settings$clusterConfig)
doAzureParallel::registerDoAzureParallel(cluster)

Просмотреть файл

@ -1,27 +0,0 @@
context("Cluster Management Test")
test_that("Get Cluster List / Get Cluster test", {
testthat::skip_on_travis()
source("utility.R")
settings <- getSettings()
# set your credentials
doAzureParallel::setCredentials(settings$credentials)
cluster <-
doAzureParallel::makeCluster(settings$clusterConfig, wait = FALSE)
cluster <- getCluster(cluster$poolId)
clusterList <- getClusterList()
filter <- list()
filter$state <- c("active", "deleting")
testthat::expect_true('test-pool' %in% clusterList$Id)
clusterList <- getClusterList(filter)
for (i in 1:length(clusterList$State)) {
testthat::expect_true(clusterList$State[i] == 'active' ||
clusterList$State[i] == 'deleting')
}
})

Просмотреть файл

@ -1,26 +0,0 @@
context("Integration Test")
# Run this test for users to make sure the core features
# of doAzureParallel are still working
test_that("simple foreach 1 to 4", {
testthat::skip_on_travis()
source("utility.R")
settings <- getSettings()
# set your credentials
doAzureParallel::setCredentials(settings$credentials)
cluster <- doAzureParallel::makeCluster(settings$clusterConfig)
doAzureParallel::registerDoAzureParallel(cluster)
'%dopar%' <- foreach::'%dopar%'
res <-
foreach::foreach(i = 1:4) %dopar% {
i
}
res <- unname(res)
testthat::expect_equal(length(res), 4)
testthat::expect_equal(res, list(1, 2, 3, 4))
})

Просмотреть файл

@ -1,6 +1,6 @@
context("validating cluster config")
test_that("validating a cluster config file with null pool property", {
test_that("generateClusterConfig_NullPoolValue_Success", {
clusterConfig <- "badcluster.json"
generateClusterConfig(clusterConfig)
@ -11,7 +11,7 @@ test_that("validating a cluster config file with null pool property", {
on.exit(file.remove(clusterConfig))
})
test_that("validating a cluster config file with bad autoscale formula property", {
test_that("generateClusterConfig_BadAutoscaleFormula_Failed", {
clusterConfig <- "badcluster.json"
generateClusterConfig(clusterConfig)
@ -27,7 +27,7 @@ test_that("validating a cluster config file with bad autoscale formula property"
})
test_that("validating a cluster config file with incorrect data types", {
test_that("generateClusterConfig_InvalidDataTypes_Failed", {
clusterConfig <- "badcluster.json"
generateClusterConfig(clusterConfig)
@ -43,7 +43,7 @@ test_that("validating a cluster config file with incorrect data types", {
on.exit(file.remove(clusterConfig))
})
test_that("validating a cluster config file with null values", {
test_that("generateClusterConfig_NullValues_Failed", {
clusterConfig <- "nullcluster.json"
generateClusterConfig(clusterConfig)

Просмотреть файл

@ -1,12 +1,12 @@
context("linux wrap commands")
test_that("single command on command line", {
test_that("linuxWrapCommands_SingleCommand_Success", {
commandLine <- linuxWrapCommands("ls")
expect_equal(commandLine, "/bin/bash -c \"set -e; set -o pipefail; ls; wait\"")
})
test_that("multiple commands on command line", {
test_that("linuxWrapCommands_MultipleCommand_Success", {
commands <- c("ls", "echo \"hello\"", "cp origfile newfile")
commandLine <- linuxWrapCommands(commands)

Просмотреть файл

@ -1,6 +1,6 @@
context("creating output files")
test_that("verify output file properties", {
test_that("createOutputFile_FileProperties_Success", {
fakeUrl <-
"https://accountname.blob.core.windows.net/outputs?se=2017-07-31&sr=c&st=2017-07-12"
@ -12,7 +12,7 @@ test_that("verify output file properties", {
})
test_that("create output file with null path", {
test_that("createOutputFile_NullValue_Success", {
fakeUrl <-
"https://accountname.blob.core.windows.net/outputs?se=2017-07-31&sr=c&st=2017-07-12"
@ -25,7 +25,7 @@ test_that("create output file with null path", {
)
})
test_that("create output file with multiple virtual directories", {
test_that("createOutputFile_MultipleVirtualDirectories_Success", {
fakeUrl <-
"https://accountname.blob.core.windows.net/outputs/foo/baz/bar?se=2017-07-31&sr=c&st=2017-07-12"

Просмотреть файл

@ -1,5 +1,5 @@
context("Package Command Line Tests")
test_that("successfully create cran job package command line", {
test_that("getJobPackageInstallationCommand_Cran_Success", {
jobInstallation <-
getJobPackageInstallationCommand("cran", c("hts", "lubridate", "tidyr", "dplyr"))
expect_equal(
@ -8,7 +8,7 @@ test_that("successfully create cran job package command line", {
)
})
test_that("successfully create github job package command line", {
test_that("getJobPackageInstallationCommand_Github_Success", {
jobInstallation <-
getJobPackageInstallationCommand("github", c("Azure/doAzureParallel", "Azure/rAzureBatch"))
expect_equal(
@ -17,7 +17,7 @@ test_that("successfully create github job package command line", {
)
})
test_that("successfully create cran pool package command line", {
test_that("getPoolPackageInstallationCommand_Cran_Success", {
poolInstallation <-
getPoolPackageInstallationCommand("cran", c("hts", "lubridate", "tidyr"))
expect_equal(length(poolInstallation), 1)
@ -36,7 +36,7 @@ test_that("successfully create cran pool package command line", {
expect_equal(poolInstallation, expected)
})
test_that("successfully create github pool package command line", {
test_that("getPoolPackageInstallationCommand_Github_Success", {
poolInstallation <-
getPoolPackageInstallationCommand("github", c("Azure/doAzureParallel", "Azure/rAzureBatch"))
expect_equal(length(poolInstallation), 1)
@ -55,7 +55,7 @@ test_that("successfully create github pool package command line", {
expect_equal(poolInstallation, expected)
})
test_that("successfully create bioconductor pool package command line", {
test_that("getPoolPackageInstallationCommand_Bioconductor_Success", {
poolInstallation <-
getPoolPackageInstallationCommand("bioconductor", c("IRanges", "a4"))

Просмотреть файл

@ -1,7 +1,7 @@
# Run this test for users to make sure the set credentials from json or R object features
# of doAzureParallel are still working
context("set credentials from R object scenario test")
test_that("set credentials/cluster config programmatically scenario test", {
test_that("setCredentials_Sdk_Success", {
testthat::skip("Live test")
testthat::skip_on_travis()
@ -47,7 +47,7 @@ test_that("set credentials/cluster config programmatically scenario test", {
validation$isValidClusterConfig(clusterConfig)
})
test_that("set credentials/cluster config from Json file scenario test", {
test_that("SetCredentials_Json_Success", {
testthat::skip("Live test")
testthat::skip_on_travis()

Просмотреть файл

Просмотреть файл

@ -0,0 +1,10 @@
#!/bin/bash
# Runs the package unit test suite: installs and builds the package, loads
# it via devtools, runs every testthat file in the current directory, and
# exits with status 1 (failing the CI job) if any test failed or errored.
sudo R \
-e "getwd();" \
-e "devtools::install();" \
-e "devtools::build();" \
-e "devtools::load_all();" \
-e "res <- testthat::test_dir('.', reporter='summary');" \
-e "df <- as.data.frame(res);" \
-e "if(sum(df[['failed']]) > 0 || any(df[['error']])) { q(status=1) }"

Просмотреть файл

@ -3,25 +3,7 @@ getSettings <- function(dedicatedMin = 0,
lowPriorityMin = 0,
lowPriorityMax = 2,
poolName = "test-pool"){
list(
credentials = list(
"sharedKey" = list(
"batchAccount" = list(
"name" = Sys.getenv("BATCH_ACCOUNT_NAME"),
"key" = Sys.getenv("BATCH_ACCOUNT_KEY"),
"url" = Sys.getenv("BATCH_ACCOUNT_URL")
),
"storageAccount" = list(
"name" = Sys.getenv("STORAGE_ACCOUNT_NAME"),
"key" = Sys.getenv("STORAGE_ACCOUNT_KEY"),
"endpointSuffix" = "core.windows.net"
)
),
"githubAuthenticationToken" = "",
"dockerAuthentication" = list("username" = "",
"password" = "",
"registry" = "")
),
settings <- list(
clusterConfig = list(
"name" = poolName,
"vmSize" = "Standard_D2_v2",
@ -46,4 +28,32 @@ getSettings <- function(dedicatedMin = 0,
"commandLine" = list()
)
)
# Prefer the credential file generated by the CI pipeline; otherwise build a
# shared-key credentials object from environment variables and register it.
if (file.exists('test_credentials.json')) {
  doAzureParallel::setCredentials("test_credentials.json")
} else {
  # Use `$<-` (whole-element replacement). The original
  # `settings['credentials'] <- list(...)` single-bracket assignment only
  # stores the FIRST element of the RHS (the sharedKey list) under
  # "credentials" and silently drops githubAuthenticationToken and
  # dockerAuthentication.
  settings$credentials <- list(
    "sharedKey" = list(
      "batchAccount" = list(
        "name" = Sys.getenv("BATCH_ACCOUNT_NAME"),
        "key" = Sys.getenv("BATCH_ACCOUNT_KEY"),
        "url" = Sys.getenv("BATCH_ACCOUNT_URL")
      ),
      "storageAccount" = list(
        "name" = Sys.getenv("STORAGE_ACCOUNT_NAME"),
        "key" = Sys.getenv("STORAGE_ACCOUNT_KEY"),
        "endpointSuffix" = "core.windows.net"
      )
    ),
    "githubAuthenticationToken" = "",
    "dockerAuthentication" = list("username" = "",
                                  "password" = "",
                                  "registry" = "")
  )
  doAzureParallel::setCredentials(settings$credentials)
}
return(settings)
}