зеркало из https://github.com/microsoft/lisa.git
Merge pull request #2 from iamshital/master
First working commit of new automation structure
This commit is contained in:
Коммит
afde792af5
|
@ -0,0 +1,8 @@
|
|||
|
||||
XML/AzureSecrets.xml
|
||||
TestResults*
|
||||
iamshital.code-workspace
|
||||
1
|
||||
TestConfiguration.xml
|
||||
Temp*
|
||||
report*
|
|
@ -0,0 +1,245 @@
|
|||
##############################################################################################
|
||||
# AzureAutomationManager.ps1
|
||||
# Description : This script manages all the setup and test operations in Azure environment.
|
||||
# It is an entry script of Azure Automation
|
||||
# Operations :
|
||||
# - Installing AzureSDK
|
||||
# - VHD preparation : Installing packages required by ICA, LIS drivers and waagent
|
||||
#	- Uploading test VHD to cloud
|
||||
# - Invokes azure test suite
|
||||
## Author : v-shisav@microsoft.com
|
||||
## Author : v-ampaw@microsoft.com
|
||||
###############################################################################################
|
||||
##############################################################################################
# Script parameters for AzureAutomationManager.ps1.
# FIX: [CmdletBinding()] must precede the param() block to act as a script attribute;
# in the original it was written *inside* param(), where it is parsed as an attribute
# on the first parameter and has no effect.
##############################################################################################
[CmdletBinding()]
param (
    # Path to the XML test-configuration file driving this run.
    [string] $xmlConfigFile,
    # Send the HTML summary by e-mail when set.
    [switch] $eMail,
    [string] $logFilename = "azure_ica.log",
    [switch] $runtests, [switch]$onCloud,
    [switch] $vhdprep,
    [switch] $upload,
    [switch] $help,
    # Identifier used to select the distro entry and name deployed resources.
    [string] $RGIdentifier,
    # Name of the test cycle (from the XML config) to execute.
    [string] $cycleName,
    # Comma-separated test names to run; all tests in the cycle when empty.
    [string] $RunSelectedTests,
    [string] $TestPriority,
    [string] $osImage,
    # Reuse deployments across tests to save cost.
    [switch] $EconomyMode,
    # Keep VMs after a test finishes (even on PASS) for repro/debugging.
    [switch] $keepReproInact,
    [string] $DebugDistro,
    [switch] $UseAzureResourceManager,
    [string] $OverrideVMSize,
    [switch] $EnableAcceleratedNetworking,
    [string] $customKernel,
    [string] $customLIS,
    [string] $customLISBranch,
    [string] $resizeVMsAfterDeployment,
    [string] $ExistingResourceGroup,
    [switch] $CleanupExistingRG,

    # Experimental Feature
    [switch] $UseManagedDisks,

    # NOTE(review): name keeps the original misspelling ("coure" vs "core") because
    # callers pass parameters by name; renaming would break the external interface.
    [int] $coureCountExceededTimeout = 3600,
    [int] $testIterations = 1,
    [string] $tipSessionId = "",
    [string] $tipCluster = "",
    [switch] $ForceDeleteResources
)
|
||||
# Import every *.psm1 library module under .\Libraries into the global session.
Get-ChildItem .\Libraries -Recurse |
    Where-Object { $_.FullName.EndsWith(".psm1") } |
    ForEach-Object { Import-Module $_.FullName -Force -Global }

# Load deployment credentials from the XML configuration file.
$xmlConfig = [xml](Get-Content $xmlConfigFile)
$user = $xmlConfig.config.Azure.Deployment.Data.UserName
$password = $xmlConfig.config.Azure.Deployment.Data.Password
$sshKey = $xmlConfig.config.Azure.Deployment.Data.sshKey
$sshPublickey = $xmlConfig.config.Azure.Deployment.Data.sshPublicKey

# Publish the values used throughout the framework as global variables.
# (PowerShell variable names are case-insensitive, so $sshPublickey above and
# $sshPublicKey below refer to the same variable.)
$global:user = $user
$global:password = $password
$global:sshKey = $sshKey
$global:sshPublicKey = $sshPublicKey
# NOTE(review): $sshPublicKeyThumbprint is never assigned before this point, so the
# global starts out empty — presumably populated later by a library module; confirm.
$global:sshPublicKeyThumbprint = $sshPublicKeyThumbprint
$global:PublicConfiguration = @()
$global:PrivateConfiguration = @()
$global:CurrentTestData = $CurrentTestData
$global:preserveKeyword = "preserving"
$global:tipSessionId = $tipSessionId
$global:tipCluster = $tipCluster

# Random 4-digit value used to uniquify generated resource names.
$global:global4digitRandom = $(Get-Random -SetSeed $(Get-Random) -Maximum 9999 -Minimum 1111)
$global:coureCountExceededTimeout = $coureCountExceededTimeout
|
||||
# Promote optional switches / overrides to global scope only when supplied,
# matching how the rest of the framework probes for their presence.
if ($EnableAcceleratedNetworking) {
    $global:EnableAcceleratedNetworking = $true
}

if ($ForceDeleteResources) {
    $global:ForceDeleteResources = $true
}
if ($resizeVMsAfterDeployment) {
    $global:resizeVMsAfterDeployment = $resizeVMsAfterDeployment
}

if ($OverrideVMSize) {
    $global:OverrideVMSize = $OverrideVMSize
}
if ($customKernel) {
    $global:customKernel = $customKernel
}
if ($customLIS) {
    $global:customLIS = $customLIS
}
if ($customLISBranch) {
    $global:customLISBranch = $customLISBranch
}
if ($RunSelectedTests) {
    $global:RunSelectedTests = $RunSelectedTests
}
if ($ExistingResourceGroup) {
    # Note: the global is named ExistingRG, not ExistingResourceGroup.
    $global:ExistingRG = $ExistingResourceGroup
}
# Unlike the switches above, these two are always defined globally as $true/$false.
$global:CleanupExistingRG = [bool]$CleanupExistingRG
$global:UseManagedDisks = [bool]$UseManagedDisks

# A "NewStorage_<Type>" value in the config asks the framework to create a new
# storage account of the given type; extract the type and expose it globally.
if ($xmlConfig.config.Azure.General.ARMStorageAccount -imatch "NewStorage_") {
    $global:NewARMStorageAccountType = ($xmlConfig.config.Azure.General.ARMStorageAccount).Replace("NewStorage_", "")
}
|
||||
try
{
    $Platform = $xmlConfig.config.CurrentTestPlatform

    if ( $Platform -eq "Azure" )
    {
        # Create the root results directory on first run.
        $testResults = "TestResults"
        if (! (Test-Path $testResults))
        {
            mkdir $testResults | Out-Null
        }

        # Per-run directory: TestResults\<cycle>-<UTC timestamp>.
        $testStartTime = [DateTime]::Now.ToUniversalTime()
        Set-Variable -Name testStartTime -Value $testStartTime -Scope Global
        $testDir = $testResults + "\" + $cycleName + "-" + $testStartTime.ToString("yyyyMMddHHmmssff")
        mkdir $testDir -ErrorAction SilentlyContinue | Out-Null

        # Reset the HTML report fragments. Assumes .\report already exists;
        # errors are deliberately suppressed if it does not.
        Set-Content -Value "" -Path .\report\testSummary.html -Force -ErrorAction SilentlyContinue | Out-Null
        Set-Content -Value "" -Path .\report\AdditionalInfo.html -Force -ErrorAction SilentlyContinue | Out-Null

        # Main log file for this run, published globally for LogMsg.
        $logFile = $testDir + "\" + "AzureLogs.txt"
        Set-Variable -Name logfile -Value $logFile -Scope Global
        # FIX: the original wrote lastLogDirectory.txt twice with identical
        # arguments; record the log directory once.
        Set-Content -Path .\report\lastLogDirectory.txt -Value $testDir -ErrorAction SilentlyContinue

        Set-Variable -Name Distro -Value $RGIdentifier -Scope Global
        Set-Variable -Name onCloud -Value $onCloud -Scope Global
        Set-Variable -Name xmlConfig -Value $xmlConfig -Scope Global
        Set-Variable -Name vnetIsAllConfigured -Value $false -Scope Global

        # Economy mode reuses deployments between tests; keepReproInact keeps
        # VMs alive after each test regardless of result.
        if($EconomyMode)
        {
            Set-Variable -Name EconomyMode -Value $true -Scope Global
            if($keepReproInact)
            {
                Set-Variable -Name keepReproInact -Value $true -Scope Global
            }
        }
        else
        {
            Set-Variable -Name EconomyMode -Value $false -Scope Global
            if($keepReproInact)
            {
                Set-Variable -Name keepReproInact -Value $true -Scope Global
            }
            else
            {
                Set-Variable -Name keepReproInact -Value $false -Scope Global
            }
        }

        $AzureSetup = $xmlConfig.config.Azure.General
        LogMsg ("Info : AzureAutomationManager.ps1 - LIS on Azure Automation")
        LogMsg ("Info : Created test results directory:", $testDir)
        LogMsg ("Info : Logfile = ", $logfile)
        LogMsg ("Info : Using config file $xmlConfigFile")

        # "ExistingStorage" placeholders in the config are resolved to the
        # region's pre-provisioned accounts from RegionAndStorageAccounts.xml.
        if ( ( $xmlConfig.config.Azure.General.ARMStorageAccount -imatch "ExistingStorage" ) -or ($xmlConfig.config.Azure.General.StorageAccount -imatch "ExistingStorage" ) )
        {
            $regionName = $xmlConfig.config.Azure.General.Location.Replace(" ","").Replace('"',"").ToLower()
            $regionStorageMapping = [xml](Get-Content .\XML\RegionAndStorageAccounts.xml)

            if ( $xmlConfig.config.Azure.General.ARMStorageAccount -imatch "standard")
            {
                $xmlConfig.config.Azure.General.ARMStorageAccount = $regionStorageMapping.AllRegions.$regionName.StandardStorage
                LogMsg "Info : Selecting existing standard storage account in $regionName - $($regionStorageMapping.AllRegions.$regionName.StandardStorage)"
            }
            if ( $xmlConfig.config.Azure.General.ARMStorageAccount -imatch "premium")
            {
                $xmlConfig.config.Azure.General.ARMStorageAccount = $regionStorageMapping.AllRegions.$regionName.PremiumStorage
                LogMsg "Info : Selecting existing premium storage account in $regionName - $($regionStorageMapping.AllRegions.$regionName.PremiumStorage)"
            }
        }

        Set-Variable -Name UseAzureResourceManager -Value $true -Scope Global

        # Select the subscription (retried, since this is a network call) and log
        # the identifiers with the middle GUID sections masked.
        $SelectedSubscription = RetryOperation -operation { Select-AzureRmSubscription -SubscriptionId $AzureSetup.SubscriptionID } -maxRetryCount 5 -description "Selecting subscription..."
        $subIDSplitted = ($SelectedSubscription.Subscription.SubscriptionId).Split("-")
        $userIDSplitted = ($SelectedSubscription.Account.Id).Split("-")
        LogMsg "SubscriptionName : $($SelectedSubscription.Subscription.Name)"
        LogMsg "SubscriptionId : $($subIDSplitted[0])-xxxx-xxxx-xxxx-$($subIDSplitted[4])"
        LogMsg "User : $($userIDSplitted[0])-xxxx-xxxx-xxxx-$($userIDSplitted[4])"
        LogMsg "ServiceEndpoint : $($SelectedSubscription.Environment.ActiveDirectoryServiceEndpointResourceId)"
        LogMsg "CurrentStorageAccount : $($AzureSetup.ARMStorageAccount)"

        if($keepReproInact)
        {
            LogMsg "PLEASE NOTE: keepReproInact is set. VMs will not be deleted after test is finished even if, test gets PASS."
        }

        # Resolve the OS image for the requested debug distro, if any.
        if ($DebugDistro)
        {
            $OsImage = $xmlConfig.config.Azure.Deployment.Data.Distro | ? { $_.name -eq $DebugDistro} | % { $_.OsImage }
            Set-Variable -Name DebugOsImage -Value $OsImage -Scope Global
        }

        # Run the requested cycle via AzureTestSuite.ps1 and collect the summary.
        $testCycle = GetCurrentCycleData -xmlConfig $xmlConfig -cycleName $cycleName
        $testSuiteResultDetails = .\AzureTestSuite.ps1 $xmlConfig -Distro $Distro -cycleName $cycleName -testIterations $testIterations
        $logDirFilename = [System.IO.Path]::GetFilenameWithoutExtension($xmlConfigFile)
        $summaryAll = GetTestSummary -testCycle $testCycle -StartTime $testStartTime -xmlFileName $logDirFilename -distro $Distro -testSuiteResultDetails $testSuiteResultDetails
        $PlainTextSummary += $summaryAll[0]
        $HtmlTextSummary += $summaryAll[1]
        Set-Content -Value $HtmlTextSummary -Path .\report\testSummary.html -Force | Out-Null

        # Strip HTML markup for the console/log version of the summary.
        $PlainTextSummary = $PlainTextSummary.Replace("<br />", "`r`n")
        $PlainTextSummary = $PlainTextSummary.Replace("<pre>", "")
        $PlainTextSummary = $PlainTextSummary.Replace("</pre>", "")
        LogMsg "$PlainTextSummary"

        if($eMail)
        {
            SendEmail $xmlConfig -body $HtmlTextSummary
        }
    }
    else
    {
        LogError "$Platform not supported."
    }
}
catch
{
    ThrowException($_)
}
Finally
{
    # Always terminate the process, success or failure.
    exit
}
|
|
@ -0,0 +1,483 @@
|
|||
#v-shisav : STILL IN BETA VERSION
|
||||
|
||||
# Script arguments: parsed test-configuration XML document, distro name to test,
# test cycle name to execute, and how many times to repeat each test.
param($xmlConfig, [string] $Distro, [string] $cycleName, [int] $testIterations)
|
||||
Function RunTestsOnCycle ($cycleName , $xmlConfig, $Distro, $testIterations )
|
||||
{
|
||||
$StartTime = [Datetime]::Now.ToUniversalTime()
|
||||
LogMsg "Starting the Cycle - $($CycleName.ToUpper())"
|
||||
$executionCount = 0
|
||||
$dbEnvironment = "Azure"
|
||||
$dbTestCycle = $CycleName.Trim()
|
||||
$dbExecutionID = $dbDateTimeUTC = "$($StartTime.Year)-$($StartTime.Month)-$($StartTime.Day) $($StartTime.Hour):$($StartTime.Minute):$($StartTime.Second)"
|
||||
$dbLocation = ($xmlConfig.config.Azure.General.Location).Replace('"','').Replace(" ","").ToLower()
|
||||
$dbOverrideVMSize = $OverrideVMSize
|
||||
if ( $EnableAcceleratedNetworking )
|
||||
{
|
||||
$dbNetworking = "SRIOV"
|
||||
}
|
||||
else
|
||||
{
|
||||
$dbNetworking = "Synthetic"
|
||||
}
|
||||
foreach ( $tempDistro in $xmlConfig.config.Azure.Deployment.Data.Distro )
|
||||
{
|
||||
if ( ($tempDistro.Name).ToUpper() -eq ($Distro).ToUpper() )
|
||||
{
|
||||
if ( $UseAzureResourceManager )
|
||||
{
|
||||
Write-Host $tempDistro.ARMImage
|
||||
if ( ($tempDistro.ARMImage.Publisher -ne $null) -and ($tempDistro.ARMImage.Offer -ne $null) -and ($tempDistro.ARMImage.Sku -ne $null) -and ($tempDistro.ARMImage.Version -ne $null) )
|
||||
{
|
||||
$ARMImage = $tempDistro.ARMImage
|
||||
Set-Variable -Name ARMImage -Value $ARMImage -Scope Global
|
||||
LogMsg "ARMImage name - $($ARMImage.Publisher) : $($ARMImage.Offer) : $($ARMImage.Sku) : $($ARMImage.Version)"
|
||||
$dbARMImage = "$($ARMImage.Publisher) $($ARMImage.Offer) $($ARMImage.Sku) $($ARMImage.Version)"
|
||||
}
|
||||
if ( $tempDistro.OsVHD )
|
||||
{
|
||||
$BaseOsVHD = $tempDistro.OsVHD.Trim()
|
||||
Set-Variable -Name BaseOsVHD -Value $BaseOsVHD -Scope Global
|
||||
LogMsg "Base VHD name - $BaseOsVHD"
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
if ( $tempDistro.OsImage )
|
||||
{
|
||||
$BaseOsImage = $tempDistro.OsImage.Trim()
|
||||
Set-Variable -Name BaseOsImage -Value $BaseOsImage -Scope Global
|
||||
LogMsg "Base image name - $BaseOsImage"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
if (!$BaseOsImage -and !$UseAzureResourceManager)
|
||||
{
|
||||
Throw "Please give ImageName or OsVHD for ASM deployment."
|
||||
}
|
||||
if (!$($ARMImage.Publisher) -and !$BaseOSVHD -and $UseAzureResourceManager)
|
||||
{
|
||||
Throw "Please give ARM Image / VHD for ARM deployment."
|
||||
}
|
||||
|
||||
#If Base OS VHD is present in another storage account, then copy to test storage account first.
|
||||
if ($BaseOsVHD -imatch "/")
|
||||
{
|
||||
#Check if the test storage account is same as VHD's original storage account.
|
||||
$givenVHDStorageAccount = $BaseOsVHD.Replace("https://","").Replace("http://","").Split(".")[0]
|
||||
$ARMStorageAccount = $xmlConfig.config.Azure.General.ARMStorageAccount
|
||||
|
||||
if ($givenVHDStorageAccount -ne $ARMStorageAccount )
|
||||
{
|
||||
LogMsg "Your test VHD is not in target storage account ($ARMStorageAccount)."
|
||||
LogMsg "Your VHD will be copied to $ARMStorageAccount now."
|
||||
$sourceContainer = $BaseOsVHD.Split("/")[$BaseOsVHD.Split("/").Count - 2]
|
||||
$vhdName = $BaseOsVHD.Split("/")[$BaseOsVHD.Split("/").Count - 1]
|
||||
if ($ARMStorageAccount -inotmatch "NewStorage_")
|
||||
{
|
||||
$copyStatus = CopyVHDToAnotherStorageAccount -sourceStorageAccount $givenVHDStorageAccount -sourceStorageContainer $sourceContainer -destinationStorageAccount $ARMStorageAccount -destinationStorageContainer "vhds" -vhdName $vhdName
|
||||
if (!$copyStatus)
|
||||
{
|
||||
Throw "Failed to copy the VHD to $ARMStorageAccount"
|
||||
}
|
||||
else
|
||||
{
|
||||
Set-Variable -Name BaseOsVHD -Value $vhdName -Scope Global
|
||||
LogMsg "New Base VHD name - $vhdName"
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
Throw "Automation only supports copying VHDs to existing storage account."
|
||||
}
|
||||
#Copy the VHD to current storage account.
|
||||
}
|
||||
}
|
||||
|
||||
LogMsg "Loading the cycle Data..."
|
||||
|
||||
$currentCycleData = GetCurrentCycleData -xmlConfig $xmlConfig -cycleName $cycleName
|
||||
|
||||
$xmlElementsToAdd = @("currentTest", "stateTimeStamp", "state", "emailSummary", "htmlSummary", "jobID", "testCaseResults")
|
||||
foreach($element in $xmlElementsToAdd)
|
||||
{
|
||||
if (! $testCycle.${element})
|
||||
{
|
||||
$newElement = $xmlConfig.CreateElement($element)
|
||||
$newElement.set_InnerText("")
|
||||
$results = $testCycle.AppendChild($newElement)
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
$testSuiteLogFile=$logFile
|
||||
$testSuiteResultDetails=@{"totalTc"=0;"totalPassTc"=0;"totalFailTc"=0;"totalAbortedTc"=0}
|
||||
$id = ""
|
||||
|
||||
# Start JUnit XML report logger.
|
||||
$reportFolder = "$pwd/report"
|
||||
if(!(Test-Path $reportFolder))
|
||||
{
|
||||
New-Item -ItemType "Directory" $reportFolder
|
||||
}
|
||||
StartLogReport("$reportFolder/report_$($testCycle.cycleName).xml")
|
||||
$testsuite = StartLogTestSuite "CloudTesting"
|
||||
|
||||
$testCount = $currentCycleData.test.Length
|
||||
if (-not $testCount)
|
||||
{
|
||||
$testCount = 1
|
||||
}
|
||||
|
||||
foreach ($test in $currentCycleData.test)
|
||||
{
|
||||
$originalTest = $test
|
||||
if (-not $test)
|
||||
{
|
||||
$test = $currentCycleData.test
|
||||
$originalTest = $test
|
||||
}
|
||||
if ($RunSelectedTests)
|
||||
{
|
||||
if ($RunSelectedTests.Trim().Replace(" ","").Split(",") -contains $test.Name)
|
||||
{
|
||||
$currentTestData = GetCurrentTestData -xmlConfig $xmlConfig -testName $test.Name
|
||||
$originalTestName = $currentTestData.testName
|
||||
if ( $currentTestData.AdditionalCustomization.Networking -eq "SRIOV" )
|
||||
{
|
||||
Set-Variable -Name EnableAcceleratedNetworking -Value $true -Scope Global
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
LogMsg "Skipping $($test.Name) because it is not in selected tests to run."
|
||||
Continue;
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
$currentTestData = GetCurrentTestData -xmlConfig $xmlConfig -testName $test.Name
|
||||
$originalTestName = $currentTestData.testName
|
||||
}
|
||||
# Generate Unique Test
|
||||
for ( $testIterationCount = 1; $testIterationCount -le $testIterations; $testIterationCount ++ )
|
||||
{
|
||||
if ( $testIterations -ne 1 )
|
||||
{
|
||||
$currentTestData.testName = "$($originalTestName)-$testIterationCount"
|
||||
$test.Name = "$($originalTestName)-$testIterationCount"
|
||||
}
|
||||
$server = $xmlConfig.config.global.ServerEnv.Server
|
||||
$cluster = $xmlConfig.config.global.ClusterEnv.Cluster
|
||||
$rdosVersion = $xmlConfig.config.global.ClusterEnv.RDOSVersion
|
||||
$fabricVersion = $xmlConfig.config.global.ClusterEnv.FabricVersion
|
||||
$Location = $xmlConfig.config.global.ClusterEnv.Location
|
||||
$testId = $currentTestData.TestId
|
||||
$testSetup = $currentTestData.setupType
|
||||
$lisBuild = $xmlConfig.config.global.VMEnv.LISBuild
|
||||
$lisBuildBranch = $xmlConfig.config.global.VMEnv.LISBuildBranch
|
||||
$VMImageDetails = $xmlConfig.config.global.VMEnv.VMImageDetails
|
||||
$waagentBuild=$xmlConfig.config.global.VMEnv.waagentBuild
|
||||
# For the last test running in economy mode, set the IsLastCaseInCycle flag so that the deployments could be cleaned up
|
||||
if ($EconomyMode -and $counter -eq ($testCount - 1))
|
||||
{
|
||||
Set-Variable -Name IsLastCaseInCycle -Value $true -Scope Global
|
||||
}
|
||||
else
|
||||
{
|
||||
Set-Variable -Name IsLastCaseInCycle -Value $false -Scope Global
|
||||
}
|
||||
if ($currentTestData)
|
||||
{
|
||||
|
||||
if (!( $currentTestData.Platform.Contains($xmlConfig.config.CurrentTestPlatform)))
|
||||
{
|
||||
LogMsg "$($currentTestData.testName) does not support $($xmlConfig.config.CurrentTestPlatform) platform."
|
||||
continue;
|
||||
}
|
||||
if(($testPriority -imatch $currentTestData.Priority ) -or (!$testPriority))
|
||||
{
|
||||
$testcase = StartLogTestCase $testsuite "$($test.Name)" "CloudTesting.$($testCycle.cycleName)"
|
||||
$testSuiteResultDetails.totalTc = $testSuiteResultDetails.totalTc +1
|
||||
$stopWatch = SetStopWatch
|
||||
Set-Variable -Name currentTestData -Value $currentTestData -Scope Global
|
||||
mkdir "$testDir\$($currentTestData.testName)" -ErrorAction SilentlyContinue | out-null
|
||||
$testCaseLogFile = $testDir + "\" + $($currentTestData.testName) + "\" + "azure_ica.log"
|
||||
$global:logFile = $testCaseLogFile
|
||||
Set-Content -Value "" -Path $testCaseLogFile -Force | Out-Null
|
||||
if ((!$currentTestData.SubtestValues -and !$currentTestData.TestMode))
|
||||
{
|
||||
#Tests With No subtests and no SubValues will be executed here..
|
||||
try
|
||||
{
|
||||
$testMode = "single"
|
||||
$testResult = ""
|
||||
$LogDir = "$testDir\$($currentTestData.testName)"
|
||||
Set-Variable -Name LogDir -Value $LogDir -Scope Global
|
||||
LogMsg "~~~~~~~~~~~~~~~TEST STARTED : $($currentTestData.testName)~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~"
|
||||
$testScriptPs1 = $currentTestData.PowershellScript
|
||||
$startTime = [Datetime]::Now.ToUniversalTime()
|
||||
$command = ".\Testscripts\Windows\" + $testScriptPs1
|
||||
LogMsg "Starting test $($currentTestData.testName)"
|
||||
$testResult = Invoke-Expression $command
|
||||
}
|
||||
catch
|
||||
{
|
||||
$testResult = "Aborted"
|
||||
$ErrorMessage = $_.Exception.Message
|
||||
LogMsg "EXCEPTION : $ErrorMessage"
|
||||
}
|
||||
finally
|
||||
{
|
||||
$executionCount += 1
|
||||
$testResult = RefineTestResult1 -tempResult $testResult
|
||||
$endTime = [Datetime]::Now.ToUniversalTime()
|
||||
$testRunDuration = GetStopWatchElapasedTime $stopWatch "mm"
|
||||
$testCycle.emailSummary += "$($currentTestData.testName) Execution Time: $testRunDuration minutes<br />"
|
||||
$testCycle.emailSummary += " $($currentTestData.testName) : $testResult <br />"
|
||||
$testResultRow = ""
|
||||
LogMsg "~~~~~~~~~~~~~~~TEST END : $($currentTestData.testName)~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~"
|
||||
$dbTestName = $($currentTestData.testName)
|
||||
$dbTestResult = $testResult
|
||||
}
|
||||
if($testResult -imatch "PASS")
|
||||
{
|
||||
$testSuiteResultDetails.totalPassTc = $testSuiteResultDetails.totalPassTc +1
|
||||
$testResultRow = "<span style='color:green;font-weight:bolder'>PASS</span>"
|
||||
FinishLogTestCase $testcase
|
||||
$testCycle.htmlSummary += "<tr><td><font size=`"3`">$executionCount</font></td><td>$($currentTestData.testName)</td><td>$testRunDuration min</td><td>$testResultRow</td></tr>"
|
||||
}
|
||||
elseif($testResult -imatch "FAIL")
|
||||
{
|
||||
$testSuiteResultDetails.totalFailTc = $testSuiteResultDetails.totalFailTc +1
|
||||
$testResultRow = "<span style='color:red;font-weight:bolder'>FAIL</span>"
|
||||
$caseLog = Get-Content -Raw $testCaseLogFile
|
||||
FinishLogTestCase $testcase "FAIL" "$($test.Name) failed." $caseLog
|
||||
$testCycle.htmlSummary += "<tr><td><font size=`"3`">$executionCount</font></td><td>$($currentTestData.testName)$(AddReproVMDetailsToHtmlReport)</td><td>$testRunDuration min</td><td>$testResultRow</td></tr>"
|
||||
}
|
||||
elseif($testResult -imatch "ABORTED")
|
||||
{
|
||||
$testSuiteResultDetails.totalAbortedTc = $testSuiteResultDetails.totalAbortedTc +1
|
||||
$testResultRow = "<span style='background-color:yellow;font-weight:bolder'>ABORT</span>"
|
||||
$caseLog = Get-Content -Raw $testCaseLogFile
|
||||
FinishLogTestCase $testcase "ERROR" "$($test.Name) is aborted." $caseLog
|
||||
$testCycle.htmlSummary += "<tr><td><font size=`"3`">$executionCount</font></td><td>$($currentTestData.testName)$(AddReproVMDetailsToHtmlReport)</td><td>$testRunDuration min</td><td>$testResultRow</td></tr>"
|
||||
}
|
||||
else
|
||||
{
|
||||
LogErr "Test Result is empty."
|
||||
$testSuiteResultDetails.totalAbortedTc = $testSuiteResultDetails.totalAbortedTc +1
|
||||
$caseLog = Get-Content -Raw $testCaseLogFile
|
||||
$testResultRow = "<span style='background-color:yellow;font-weight:bolder'>ABORT</span>"
|
||||
FinishLogTestCase $testcase "ERROR" "$($test.Name) is aborted." $caseLog
|
||||
$testCycle.htmlSummary += "<tr><td><font size=`"3`">$executionCount</font></td><td>$tempHtmlText$(AddReproVMDetailsToHtmlReport)</td><td>$testRunDuration min</td><td>$testResultRow</td></tr>"
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
try
|
||||
{
|
||||
$testMode = "multi"
|
||||
$testResult = @()
|
||||
$LogDir = "$testDir\$($currentTestData.testName)"
|
||||
Set-Variable -Name LogDir -Value $LogDir -Scope Global
|
||||
LogMsg "~~~~~~~~~~~~~~~TEST STARTED : $($currentTestData.testName)~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~"
|
||||
$testScriptPs1 = $currentTestData.PowershellScript
|
||||
$command = ".\Testscripts\Windows\" + $testScriptPs1
|
||||
LogMsg "$command"
|
||||
LogMsg "Starting multiple tests : $($currentTestData.testName)"
|
||||
$startTime = [Datetime]::Now.ToUniversalTime()
|
||||
$testResult = Invoke-Expression $command
|
||||
}
|
||||
catch
|
||||
{
|
||||
$testResult[0] = "ABORTED"
|
||||
$ErrorMessage = $_.Exception.Message
|
||||
LogMsg "EXCEPTION : $ErrorMessage"
|
||||
}
|
||||
finally
|
||||
{
|
||||
$testResult = RefineTestResult2 -testResult $testResult
|
||||
try {
|
||||
$tempHtmlText = ($testResult[1]).Substring(0,((($testResult[1]).Length)-6))
|
||||
}
|
||||
catch {
|
||||
$tempHtmlText = "Unable to parse the results. Will be fixed shortly."
|
||||
}
|
||||
$executionCount += 1
|
||||
$testRunDuration = GetStopWatchElapasedTime $stopWatch "mm"
|
||||
$testRunDuration = $testRunDuration.ToString()
|
||||
$testCycle.emailSummary += "$($currentTestData.testName) Execution Time: $testRunDuration minutes<br />"
|
||||
$testCycle.emailSummary += " $($currentTestData.testName) : $($testResult[0]) <br />"
|
||||
$testCycle.emailSummary += "$($testResult[1])"
|
||||
$summary = "$($testResult[1])"
|
||||
LogMsg "~~~~~~~~~~~~~~~TEST END : $($currentTestData.testName)~~~~~~~~~~"
|
||||
}
|
||||
if($testResult[0] -imatch "PASS")
|
||||
{
|
||||
$testSuiteResultDetails.totalPassTc = $testSuiteResultDetails.totalPassTc +1
|
||||
$testResultRow = "<span style='color:green;font-weight:bolder'>PASS</span>"
|
||||
FinishLogTestCase $testcase
|
||||
$testCycle.htmlSummary += "<tr><td><font size=`"3`">$executionCount</font></td><td>$tempHtmlText</td><td>$testRunDuration min</td><td>$testResultRow</td></tr>"
|
||||
}
|
||||
elseif($testResult[0] -imatch "FAIL")
|
||||
{
|
||||
$testSuiteResultDetails.totalFailTc = $testSuiteResultDetails.totalFailTc +1
|
||||
$caseLog = Get-Content -Raw $testCaseLogFile
|
||||
$testResultRow = "<span style='color:red;font-weight:bolder'>FAIL</span>"
|
||||
FinishLogTestCase $testcase "FAIL" "$($test.Name) failed." $caseLog
|
||||
$testCycle.htmlSummary += "<tr><td><font size=`"3`">$executionCount</font></td><td>$tempHtmlText$(AddReproVMDetailsToHtmlReport)</td><td>$testRunDuration min</td><td>$testResultRow</td></tr>"
|
||||
}
|
||||
elseif($testResult[0] -imatch "ABORTED")
|
||||
{
|
||||
$testSuiteResultDetails.totalAbortedTc = $testSuiteResultDetails.totalAbortedTc +1
|
||||
$caseLog = Get-Content -Raw $testCaseLogFile
|
||||
$testResultRow = "<span style='background-color:yellow;font-weight:bolder'>ABORT</span>"
|
||||
FinishLogTestCase $testcase "ERROR" "$($test.Name) is aborted." $caseLog
|
||||
$testCycle.htmlSummary += "<tr><td><font size=`"3`">$executionCount</font></td><td>$tempHtmlText$(AddReproVMDetailsToHtmlReport)</td><td>$testRunDuration min</td><td>$testResultRow</td></tr>"
|
||||
}
|
||||
else
|
||||
{
|
||||
LogErr "Test Result is empty."
|
||||
$testSuiteResultDetails.totalAbortedTc = $testSuiteResultDetails.totalAbortedTc +1
|
||||
$caseLog = Get-Content -Raw $testCaseLogFile
|
||||
$testResultRow = "<span style='background-color:yellow;font-weight:bolder'>ABORT</span>"
|
||||
FinishLogTestCase $testcase "ERROR" "$($test.Name) is aborted." $caseLog
|
||||
$testCycle.htmlSummary += "<tr><td><font size=`"3`">$executionCount</font></td><td>$tempHtmlText$(AddReproVMDetailsToHtmlReport)</td><td>$testRunDuration min</td><td>$testResultRow</td></tr>"
|
||||
}
|
||||
}
|
||||
if ($xmlSecrets)
|
||||
{
|
||||
try
|
||||
{
|
||||
$testLogFolder = "TestLogs"
|
||||
$testLogStorageAccount = $xmlSecrets.secrets.testLogsStorageAccount
|
||||
$testLogStorageAccountKey = $xmlSecrets.secrets.testLogsStorageAccountKey
|
||||
$ticks= (Get-Date).Ticks
|
||||
$uploadFileName = ".\temp\$($currentTestData.testName)-$ticks.zip"
|
||||
$out = ZipFiles -zipfilename $uploadFileName -sourcedir $LogDir
|
||||
$uploadLink = .\Extras\UploadFilesToStorageAccount.ps1 -filePaths $uploadFileName -destinationStorageAccount $testLogStorageAccount -destinationContainer "logs" -destinationFolder "$testLogFolder" -destinationStorageKey $testLogStorageAccountKey
|
||||
$utctime = (Get-Date).ToUniversalTime()
|
||||
$dbDateTimeUTC = "$($utctime.Year)-$($utctime.Month)-$($utctime.Day) $($utctime.Hour):$($utctime.Minute):$($utctime.Second)"
|
||||
$dataSource = $xmlSecrets.secrets.DatabaseServer
|
||||
$dbuser = $xmlSecrets.secrets.DatabaseUser
|
||||
$dbpassword = $xmlSecrets.secrets.DatabasePassword
|
||||
$database = $xmlSecrets.secrets.DatabaseName
|
||||
$dataTableName = "AzureTestResultsMasterTable"
|
||||
$dbTestName = $($currentTestData.testName)
|
||||
$SQLQuery = "INSERT INTO $dataTableName (DateTimeUTC,Environment,TestCycle,ExecutionID,TestName,TestResult,ARMImage,OsVHD,KernelVersion,LISVersion,GuestDistro,AzureHost,Location,OverrideVMSize,Networking,LogFile,BuildURL) VALUES "
|
||||
if ($testMode -eq "multi")
|
||||
{
|
||||
$SQLQuery += "('$dbDateTimeUTC','$dbEnvironment','$dbTestCycle','$dbExecutionID','$dbTestName','$($testResult[0])','$dbARMImage','$BaseOsVHD','$finalKernelVersion','$finalLISVersion','$GuestDistro','$HostVersion','$dbLocation','$dbOverrideVMSize','$dbNetworking','$uploadLink', '$env:BUILD_URL`consoleFull'),"
|
||||
foreach ($tempResult in $summary.Split('>'))
|
||||
{
|
||||
if ($tempResult)
|
||||
{
|
||||
$tempResult = $tempResult.Trim().Replace("<br /","").Trim()
|
||||
$subTestResult = $tempResult.Split(":")[$tempResult.Split(":").Count -1 ].Trim()
|
||||
$subTestName = $tempResult.Replace("$subTestResult","").Trim().TrimEnd(":").Trim()
|
||||
$SQLQuery += "('$dbDateTimeUTC','$dbEnvironment','$dbTestCycle','$dbExecutionID','SubTest-$subTestName','$subTestResult','$dbARMImage','$BaseOsVHD','$finalKernelVersion','$finalLISVersion','$GuestDistro','$HostVersion','$dbLocation','$dbOverrideVMSize','$dbNetworking', '$uploadLink', '$env:BUILD_URL`consoleFull'),"
|
||||
}
|
||||
}
|
||||
}
|
||||
elseif ( $testMode -eq "single")
|
||||
{
|
||||
$dbTestResult = $testResult
|
||||
$SQLQuery += "('$dbDateTimeUTC','$dbEnvironment','$dbTestCycle','$dbExecutionID','$dbTestName','$dbTestResult','$dbARMImage','$BaseOsVHD','$finalKernelVersion','$finalLISVersion','$GuestDistro','$HostVersion','$dbLocation','$dbOverrideVMSize','$dbNetworking', '$uploadLink', '$env:BUILD_URL`consoleFull')"
|
||||
}
|
||||
$SQLQuery = $SQLQuery.TrimEnd(',')
|
||||
$connectionString = "Server=$dataSource;uid=$dbuser; pwd=$dbpassword;Database=$database;Encrypt=yes;TrustServerCertificate=no;Connection Timeout=30;"
|
||||
$connection = New-Object System.Data.SqlClient.SqlConnection
|
||||
$connection.ConnectionString = $connectionString
|
||||
$connection.Open()
|
||||
$command = $connection.CreateCommand()
|
||||
$command.CommandText = $SQLQuery
|
||||
$result = $command.executenonquery()
|
||||
$connection.Close()
|
||||
LogMsg "Uploading test results to database : done!!"
|
||||
}
|
||||
catch
|
||||
{
|
||||
LogErr "Uploading test results to database : ERROR"
|
||||
LogMsg $SQLQuery
|
||||
}
|
||||
}
|
||||
Write-Host $testSuiteResultDetails.totalPassTc,$testSuiteResultDetails.totalFailTc,$testSuiteResultDetails.totalAbortedTc
|
||||
#Back to Test Suite Main Logging
|
||||
$global:logFile = $testSuiteLogFile
|
||||
$currentJobs = Get-Job
|
||||
foreach ( $job in $currentJobs )
|
||||
{
|
||||
$jobStatus = Get-Job -Id $job.ID
|
||||
if ( $jobStatus.State -ne "Running" )
|
||||
{
|
||||
Remove-Job -Id $job.ID -Force
|
||||
if ( $? )
|
||||
{
|
||||
LogMsg "Removed $($job.State) background job ID $($job.Id)."
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
LogMsg "$($job.Name) is running."
|
||||
}
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
LogMsg "Skipping $($currentTestData.Priority) test : $($currentTestData.testName)"
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
LogErr "No Test Data found for $($test.Name).."
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
LogMsg "Checking background cleanup jobs.."
|
||||
$cleanupJobList = Get-Job | where { $_.Name -imatch "DeleteResourceGroup"}
|
||||
$isAllCleaned = $false
|
||||
while(!$isAllCleaned)
|
||||
{
|
||||
$runningJobsCount = 0
|
||||
$isAllCleaned = $true
|
||||
$cleanupJobList = Get-Job | where { $_.Name -imatch "DeleteResourceGroup"}
|
||||
foreach ( $cleanupJob in $cleanupJobList )
|
||||
{
|
||||
|
||||
$jobStatus = Get-Job -Id $cleanupJob.ID
|
||||
if ( $jobStatus.State -ne "Running" )
|
||||
{
|
||||
|
||||
$tempRG = $($cleanupJob.Name).Replace("DeleteResourceGroup-","")
|
||||
LogMsg "$tempRG : Delete : $($jobStatus.State)"
|
||||
Remove-Job -Id $cleanupJob.ID -Force
|
||||
}
|
||||
else
|
||||
{
|
||||
LogMsg "$($cleanupJob.Name) is running."
|
||||
$isAllCleaned = $false
|
||||
$runningJobsCount += 1
|
||||
}
|
||||
}
|
||||
if ($runningJobsCount -gt 0)
|
||||
{
|
||||
Write-Host "$runningJobsCount background cleanup jobs still running. Waiting 30 seconds..."
|
||||
sleep -Seconds 30
|
||||
}
|
||||
}
|
||||
Write-Host "All background cleanup jobs finished."
|
||||
$azureContextFiles = Get-Item "$env:TEMP\*.azurecontext"
|
||||
$out = $azureContextFiles | Remove-Item -Force | Out-Null
|
||||
LogMsg "Removed $($azureContextFiles.Count) context files."
|
||||
LogMsg "Cycle Finished.. $($CycleName.ToUpper())"
|
||||
$EndTime = [Datetime]::Now.ToUniversalTime()
|
||||
|
||||
FinishLogTestSuite($testsuite)
|
||||
FinishLogReport
|
||||
|
||||
$testSuiteResultDetails
|
||||
}
|
||||
|
||||
RunTestsOnCycle -cycleName $cycleName -xmlConfig $xmlConfig -Distro $Distro -testIterations $testIterations
|
|
@ -0,0 +1,218 @@
|
|||
|
||||
# Generates tab-separated menu files (JenkinsMenuFile*.txt) that drive the
# Jenkins test-selection drop-downs. Menus are derived from the test-case XML
# definitions under .\XML\TestCases and the region mapping in
# .\XML\TestToRegionMapping.xml.
Param(
    # Folder where the generated menu files are written.
    $DestinationPath = ".\"
)

# Import every library module, then validate all XML files under the tree.
Get-ChildItem .\TestLibs\*.psm1 | ForEach-Object { Import-Module $_.FullName -Force}
# NOTE(review): "ValiateXMLs" looks like a typo for "ValidateXMLs", but the
# function is presumably defined with this spelling in the imported modules - confirm.
ValiateXMLs -ParentFolder ".\"

# Collect the <TestCases> nodes from every test-definition XML file.
$xmlData = @()
foreach ( $file in (Get-ChildItem -Path .\XML\TestCases\*.xml ))
{
    $xmlData += ([xml](Get-Content -Path $file.FullName)).TestCases
}
$TestToRegionMapping = ([xml](Get-Content .\XML\TestToRegionMapping.xml))
#Get Unique Platforms
$Platforms = $xmlData.test.Platform.Split(',') | Sort-Object | Get-Unique
Write-Host $Platforms
$Categories = $xmlData.test.Category | Sort-Object | Get-Unique
Write-Host $Categories
$Areas =$xmlData.test.Area | Sort-Object | Get-Unique
Write-Host $Areas
$Tags =$xmlData.test.Tags.Split(",") | Sort-Object | Get-Unique
Write-Host $Tags
$TestNames = $xmlData.testName | Sort-Object | Get-Unique
Write-Host $TestNames

# ---- File 1: platform / category / area menu ----
$JenkinsMenuFile = "platform`tcategory`tarea`tregion`n"
#Generate Jenkins File
foreach ( $platform in $Platforms )
{
    $Categories = ($xmlData.test | Where-Object { $_.Platform.Contains($platform) }).Category
    foreach ( $category in $Categories)
    {
        # Region resolution: start from the global list, narrow by category,
        # then by area. An area-level override only survives where it
        # intersects the category-level regions.
        $Regions =$TestToRegionMapping.enabledRegions.global.Split(",")
        $Areas = ($xmlData.test | Where-Object { $_.Platform.Contains($platform) } | Where-Object { $_.Category -eq "$category" }).Area
        if ( $TestToRegionMapping.enabledRegions.Category.$category )
        {
            $Regions = ($TestToRegionMapping.enabledRegions.Category.$category).Split(",")
        }
        foreach ($area in $Areas)
        {
            if ( [string]::IsNullOrEmpty($TestToRegionMapping.enabledRegions.Category.$category))
            {
                # No category override: the area override (if any) wins outright.
                if ($TestToRegionMapping.enabledRegions.Area.$area)
                {
                    $Regions = ($TestToRegionMapping.enabledRegions.Area.$area).Split(",")
                }
            }
            else
            {
                $Regions = ($TestToRegionMapping.enabledRegions.Category.$category).Split(",")
                if ( $TestToRegionMapping.enabledRegions.Area.$area )
                {
                    # Keep only area regions that are also category regions.
                    $tempRegions = @()
                    $AreaRegions = ($TestToRegionMapping.enabledRegions.Area.$area).Split(",")
                    foreach ( $arearegion in $AreaRegions )
                    {
                        Write-Host "foreach ( $arearegion in $AreaRegions )"
                        if ( $Regions.Contains($arearegion))
                        {
                            Write-Host "if ( $Regions.Contains($arearegion))"
                            $tempRegions += $arearegion
                        }
                    }
                    if ( $tempRegions.Count -ge 1)
                    {
                        $Regions = $tempRegions
                    }
                    else
                    {
                        # Empty intersection: emit a sentinel region.
                        $Regions = "no_region_available"
                    }
                }
            }
            foreach ( $region in $Regions)
            {
                $JenkinsMenuFile += "$platform`t$category`t$area`t$platform>>$category>>$area>>$region`n"
            }
        }
        # Add an "All" areas entry when the category spans multiple areas.
        if ( $(($Areas | Get-Unique).Count) -gt 1)
        {
            foreach ( $region in $Regions)
            {
                $JenkinsMenuFile += "$platform`t$category`tAll`t$platform>>$category>>All>>$region`n"
            }
        }
    }
    # Add an "All" categories entry when the platform spans multiple categories.
    if ( $(($Categories | Get-Unique).Count) -gt 1)
    {
        foreach ( $region in $Regions)
        {
            $JenkinsMenuFile += "$platform`tAll`tAll`t$platform>>All>>All>>$region`n"
        }
    }
}

Set-Content -Value $JenkinsMenuFile -Path "$DestinationPath\JenkinsMenuFile.txt" -Force
# Strip blank lines from the generated file.
(Get-Content "$DestinationPath\JenkinsMenuFile.txt") | Where-Object {$_.trim() -ne "" } | set-content "$DestinationPath\JenkinsMenuFile.txt"

# ---- File 4: platform / tag menu ----
$tagsFile = "platform`ttag`tregion`n"
foreach ( $platform in $Platforms )
{
    foreach ( $tag in $Tags)
    {
        $Regions =$TestToRegionMapping.enabledRegions.global.Split(",")
        if ( $tag )
        {
            if ( $TestToRegionMapping.enabledRegions.Tag.$tag )
            {
                $Regions = ($TestToRegionMapping.enabledRegions.Tag.$tag).Split(",")
            }
            foreach ( $region in $Regions)
            {
                $tagsFile += "$platform`t$tag`t$platform>>$tag>>$region`n"
            }
        }
    }
}
Set-Content -Value $tagsFile -Path "$DestinationPath\JenkinsMenuFile4.txt" -Force
(Get-Content "$DestinationPath\JenkinsMenuFile4.txt") | Where-Object {$_.trim() -ne "" } | set-content "$DestinationPath\JenkinsMenuFile4.txt"

# ---- File 3: platform / testname menu ----
$testnameFile = "platform`ttestname`tregion`n"
foreach ( $platform in $Platforms )
{
    foreach ( $testname in $TestNames)
    {
        $Regions =$TestToRegionMapping.enabledRegions.global.Split(",")
        if ( $TestToRegionMapping.enabledRegions.TestName.$testname )
        {
            $Regions = ($TestToRegionMapping.enabledRegions.TestName.$testname).Split(",")
        }
        if ( $testname )
        {
            foreach ( $region in $Regions)
            {
                $testnameFile += "$platform`t$testname`t$region`n"
            }
        }
    }
}
Set-Content -Value $testnameFile -Path "$DestinationPath\JenkinsMenuFile3.txt" -Force
(Get-Content "$DestinationPath\JenkinsMenuFile3.txt") | Where-Object {$_.trim() -ne "" } | set-content "$DestinationPath\JenkinsMenuFile3.txt"

# ---- File 2: full platform / category / area / testname menu ----
$JenkinsMenuFile2 = "platform`tcategory`tarea`ttestname`tregion`n"
#Generate Jenkins File
foreach ( $platform in $Platforms )
{
    $Categories = ($xmlData.test | Where-Object { $_.Platform.Contains($platform) }).Category
    foreach ( $category in $Categories)
    {
        $Regions =$TestToRegionMapping.enabledRegions.global.Split(",")
        $Areas = ($xmlData.test | Where-Object { $_.Platform.Contains($platform) } | Where-Object { $_.Category -eq "$category" }).Area
        if ( $TestToRegionMapping.enabledRegions.Category.$category )
        {
            $Regions = ($TestToRegionMapping.enabledRegions.Category.$category).Split(",")
        }
        foreach ($area in $Areas)
        {
            # Same category/area region-narrowing logic as for File 1 above.
            if ( [string]::IsNullOrEmpty($TestToRegionMapping.enabledRegions.Category.$category))
            {
                if ($TestToRegionMapping.enabledRegions.Area.$area)
                {
                    $Regions = ($TestToRegionMapping.enabledRegions.Area.$area).Split(",")
                }
            }
            else
            {
                $Regions = ($TestToRegionMapping.enabledRegions.Category.$category).Split(",")
                if ( $TestToRegionMapping.enabledRegions.Area.$area )
                {
                    $tempRegions = @()
                    $AreaRegions = ($TestToRegionMapping.enabledRegions.Area.$area).Split(",")
                    foreach ( $arearegion in $AreaRegions )
                    {
                        if ( $Regions.Contains($arearegion))
                        {
                            $tempRegions += $arearegion
                        }
                    }
                    if ( $tempRegions.Count -ge 1)
                    {
                        $Regions = $tempRegions
                    }
                    else
                    {
                        $Regions = "no_region_available"
                    }
                }
            }
            $TestNames = ($xmlData.test | Where-Object { $_.Platform.Contains($platform) } | Where-Object { $_.Category -eq "$category" } | Where-Object { $_.Area -eq "$area" } ).TestName
            foreach ( $testname in $TestNames )
            {
                # A per-test override replaces the region list entirely.
                $Regions =$TestToRegionMapping.enabledRegions.global.Split(",")
                if ( $TestToRegionMapping.enabledRegions.TestName.$testname )
                {
                    $Regions = ($TestToRegionMapping.enabledRegions.TestName.$testname).Split(",")
                }
                foreach ( $region in $Regions)
                {
                    #Write-Host "$platform`t$category`t$area`t$testname`t$region"
                    $JenkinsMenuFile2 += "$platform`t$category`t$area`t$testname`t$platform>>$category>>$area>>$testname>>$region`n"
                }
            }
        }
    }
}
Write-Host "Setting Content"
Set-Content -Value $JenkinsMenuFile2 -Path "$DestinationPath\JenkinsMenuFile2.txt" -Force
Write-Host "Replacing whitespaces"
(Get-Content "$DestinationPath\JenkinsMenuFile2.txt") | Where-Object {$_.trim() -ne "" } | set-content "$DestinationPath\JenkinsMenuFile2.txt"
Write-Host "Completed."

exit 0
|
Разница между файлами не показана из-за своего большого размера
Загрузить разницу
Разница между файлами не показана из-за своего большого размера
Загрузить разницу
|
@ -0,0 +1,445 @@
|
|||
function GetTestSummary($testCycle, [DateTime] $StartTime, [string] $xmlFilename, [string] $distro, $testSuiteResultDetails)
{
    <#
    .Synopsis
        Append the summary text from each VM into a single string.

    .Description
        Append the summary text from each VM one long string. The
        string includes line breaks so it can be display on a
        console or included in an e-mail message.

    .Parameter xmlConfig
        The parsed xml from the $xmlFilename file.
        Type : [System.Xml]

    .Parameter startTime
        The date/time the ICA test run was started
        Type : [DateTime]

    .Parameter xmlFilename
        The name of the xml file for the current test run.
        Type : [String]

    .ReturnValue
        A string containing all the summary message from all
        VMs in the current test run.

    .Example
        GetTestSummary $testCycle $myStartTime $myXmlTestFile

    #>

    # Render elapsed wall-clock time as "days:hours:minutes".
    $endTime = [Datetime]::Now.ToUniversalTime()
    $testSuiteRunDuration= $endTime - $StartTime
    $testSuiteRunDuration=$testSuiteRunDuration.Days.ToString() + ":" + $testSuiteRunDuration.hours.ToString() + ":" + $testSuiteRunDuration.minutes.ToString()
    # Plain-text (<br />-separated) summary for console / e-mail body.
    $str = "<br />Test Results Summary<br />"
    $str += "ICA test run on " + $startTime
    # NOTE(review): $BaseOsImage, $BaseOSVHD and $ARMImage are read from the
    # caller's scope (not parameters) - confirm they are set by the entry script.
    if ( $BaseOsImage )
    {
        $str += "<br />Image under test " + $BaseOsImage
    }
    if ( $BaseOSVHD )
    {
        $str += "<br />VHD under test " + $BaseOSVHD
    }
    if ( $ARMImage )
    {
        $str += "<br />ARM Image under test " + "$($ARMImage.Publisher) : $($ARMImage.Offer) : $($ARMImage.Sku) : $($ARMImage.Version)"
    }
    $str += "<br />Total Executed TestCases " + $testSuiteResultDetails.totalTc + " (" + $testSuiteResultDetails.totalPassTc + " Pass" + ", " + $testSuiteResultDetails.totalFailTc + " Fail" + ", " + $testSuiteResultDetails.totalAbortedTc + " Abort)"
    $str += "<br />Total Execution Time(dd:hh:mm) " + $testSuiteRunDuration.ToString()
    $str += "<br />XML file: $xmlFilename<br /><br />"

    # Add information about the host running ICA to the e-mail summary
    $str += "<pre>"
    $str += $testCycle.emailSummary + "<br />"
    $hostName = hostname
    # PowerShell variable names are case-insensitive, so ${hostname} below
    # resolves to the $hostName assigned above.
    $str += "<br />Logs can be found at \\${hostname}\TestResults\" + $xmlFilename + "-" + $StartTime.ToString("yyyyMMdd-HHmmss") + "<br /><br />"
    $str += "</pre>"
    $plainTextSummary = $str
    # Styled HTML report; written to .\temp\CI\index.html at the end.
    $strHtml = "<style type='text/css'>" +
    ".TFtable{width:1024px; border-collapse:collapse; }" +
    ".TFtable td{ padding:7px; border:#4e95f4 1px solid;}" +
    ".TFtable tr{ background: #b8d1f3;}" +
    ".TFtable tr:nth-child(odd){ background: #dbe1e9;}" +
    ".TFtable tr:nth-child(even){background: #ffffff;}</style>" +
    "<Html><head><title>Test Results Summary</title></head>" +
    "<body style = 'font-family:sans-serif;font-size:13px;color:#000000;margin:0px;padding:30px'>" +
    "<br/><h1 style='background-color:lightblue;width:1024'>Test Results Summary</h1>"
    $strHtml += "<h2 style='background-color:lightblue;width:1024'>ICA test run on - " + $startTime + "</h2><span style='font-size: medium'>"
    if ( $BaseOsImage )
    {
        $strHtml += '<p>Image under test - <span style="font-family:courier new,courier,monospace;">' + "$BaseOsImage</span></p>"
    }
    if ( $BaseOSVHD )
    {
        $strHtml += '<p>VHD under test - <span style="font-family:courier new,courier,monospace;">' + "$BaseOsVHD</span></p>"
    }
    if ( $ARMImage )
    {
        $strHtml += '<p>ARM Image under test - <span style="font-family:courier new,courier,monospace;">' + "$($ARMImage.Publisher) : $($ARMImage.Offer) : $($ARMImage.Sku) : $($ARMImage.Version)</span></p>"
    }

    # Pass/fail/abort counts, color-coded (green pass, red fail, highlighted abort).
    $strHtml += '<p>Total Executed TestCases - <strong><span style="font-size:16px;">' + "$($testSuiteResultDetails.totalTc)" + '</span></strong><br />' + '[ <span style="font-size:16px;"><span style="color:#008000;"><strong>' + $testSuiteResultDetails.totalPassTc + ' </strong></span></span> - PASS, <span style="font-size:16px;"><span style="color:#ff0000;"><strong>' + "$($testSuiteResultDetails.totalFailTc)" + '</strong></span></span>- FAIL, <span style="font-size:16px;"><span style="color:#ff0000;"><strong><span style="background-color:#ffff00;">' + "$($testSuiteResultDetails.totalAbortedTc)" +'</span></strong></span></span> - ABORTED ]</p>'
    $strHtml += "<br /><br/>Total Execution Time(dd:hh:mm) " + $testSuiteRunDuration.ToString()
    $strHtml += "<br /><br/>XML file: $xmlFilename<br /><br /></span>"

    # Add information about the host running ICA to the e-mail summary
    $strHtml += "<table border='0' class='TFtable'>"
    $strHtml += $testCycle.htmlSummary
    $strHtml += "</table>"

    $strHtml += "</body></Html>"

    # Persist the HTML report for CI consumption.
    if (-not (Test-Path(".\temp\CI"))) {
        mkdir ".\temp\CI" | Out-Null
    }

    Set-Content ".\temp\CI\index.html" $strHtml
    # Returns a two-element array: (plain-text summary, HTML summary).
    return $plainTextSummary, $strHtml
}
|
||||
|
||||
function SendEmail([XML] $xmlConfig, $body)
{
    <#
    .Synopsis
        Send an e-mail message with test summary information.

    .Description
        Collect the test summary information from each testcycle. Send an
        eMail message with this summary information to emailList defined
        in the xml config file.

    .Parameter xmlConfig
        The parsed XML from the test xml file
        Type : [System.Xml]

    .Parameter body
        HTML body of the message (typically from GetTestSummary).

    .ReturnValue
        none

    .Example
        SendEmail $myConfig
    #>

    # Recipients / sender / transport all come from config.global in the XML.
    $to = $xmlConfig.config.global.emailList.split(",")
    $from = $xmlConfig.config.global.emailSender
    # NOTE(review): $testStartTime and $xmlConfigFile are read from the
    # caller's scope - confirm they are set before SendEmail is invoked.
    $subject = $xmlConfig.config.global.emailSubject + " " + $testStartTime
    $smtpServer = $xmlConfig.config.global.smtpServer
    # NOTE(review): $fname is computed but never used in this function.
    $fname = [System.IO.Path]::GetFilenameWithoutExtension($xmlConfigFile)
    # Highlight the failed tests
    $body = $body.Replace("Aborted", '<em style="background:Yellow; color:Red">Aborted</em>')
    $body = $body.Replace("FAIL", '<em style="background:Yellow; color:Red">Failed</em>')

    Send-mailMessage -to $to -from $from -subject $subject -body $body -smtpserver $smtpServer -BodyAsHtml
}
|
||||
|
||||
function Usage()
{
    # Print command-line usage for AzureAutomationManager.ps1 to the host,
    # framed by blank lines for readability.
    $usageLines = @(
        " Start automation: AzureAutomationManager.ps1 -xmlConfigFile <xmlConfigFile> -runTests -email -Distro <DistroName> -cycleName <TestCycle>"
        ""
        " xmlConfigFile : Specifies the configuration for the test environment."
        " DistroName : Run tests on the distribution OS image defined in Azure->Deployment->Data->Distro"
        " -help : Displays this help message."
    )
    write-host
    foreach ($usageLine in $usageLines)
    {
        write-host $usageLine
    }
    write-host
}
|
||||
Function GetCurrentCycleData($xmlConfig, $cycleName)
{
    # Return the <Cycle> node from config.testCycles whose cycleName matches
    # $cycleName (case-insensitive -eq). Produces no output when no cycle
    # matches.
    #
    # FIX: removed the unreachable 'break' that followed 'return' in the
    # original - 'return' already exits the function.
    foreach ($Cycle in $xmlConfig.config.testCycles.Cycle )
    {
        if($cycle.cycleName -eq $cycleName)
        {
            return $cycle
        }
    }
}
|
||||
Function ThrowException($Exception)
{
    # Print the exception message plus its origin (script path relative to
    # $PWD, and line number), then rethrow as a terminating error that names
    # the calling function.
    $failingLine = $Exception.InvocationInfo.ScriptLineNumber
    $failingScript = ($Exception.InvocationInfo.ScriptName).Replace($PWD,".")
    $failureText = $Exception.Exception.Message
    Write-Host "EXCEPTION : $failureText"
    Write-Host "SOURCE : Line $failingLine in script $failingScript."
    Throw "Calling function - $($MyInvocation.MyCommand)"
}
|
||||
|
||||
<#
|
||||
JUnit XML Report Schema:
|
||||
http://windyroad.com.au/dl/Open%20Source/JUnit.xsd
|
||||
Example:
|
||||
Import-Module .\UtilLibs.psm1 -Force
|
||||
|
||||
StartLogReport("$pwd/report.xml")
|
||||
|
||||
$testsuite = StartLogTestSuite "CloudTesting"
|
||||
|
||||
$testcase = StartLogTestCase $testsuite "BVT" "CloudTesting.BVT"
|
||||
FinishLogTestCase $testcase
|
||||
|
||||
$testcase = StartLogTestCase $testsuite "NETWORK" "CloudTesting.NETWORK"
|
||||
FinishLogTestCase $testcase "FAIL" "NETWORK fail" "Stack trace: XXX"
|
||||
|
||||
$testcase = StartLogTestCase $testsuite "VNET" "CloudTesting.VNET"
|
||||
FinishLogTestCase $testcase "ERROR" "VNET error" "Stack trace: XXX"
|
||||
|
||||
FinishLogTestSuite($testsuite)
|
||||
|
||||
$testsuite = StartLogTestSuite "FCTesting"
|
||||
|
||||
$testcase = StartLogTestCase $testsuite "BVT" "FCTesting.BVT"
|
||||
FinishLogTestCase $testcase
|
||||
|
||||
$testcase = StartLogTestCase $testsuite "NEGATIVE" "FCTesting.NEGATIVE"
|
||||
FinishLogTestCase $testcase "FAIL" "NEGATIVE fail" "Stack trace: XXX"
|
||||
|
||||
FinishLogTestSuite($testsuite)
|
||||
|
||||
FinishLogReport
|
||||
|
||||
report.xml:
|
||||
<testsuites>
|
||||
<testsuite name="CloudTesting" timestamp="2014-07-11T06:37:24" tests="3" failures="1" errors="1" time="0.04">
|
||||
<testcase name="BVT" classname="CloudTesting.BVT" time="0" />
|
||||
<testcase name="NETWORK" classname="CloudTesting.NETWORK" time="0">
|
||||
<failure message="NETWORK fail">Stack trace: XXX</failure>
|
||||
</testcase>
|
||||
<testcase name="VNET" classname="CloudTesting.VNET" time="0">
|
||||
<error message="VNET error">Stack trace: XXX</error>
|
||||
</testcase>
|
||||
</testsuite>
|
||||
<testsuite name="FCTesting" timestamp="2014-07-11T06:37:24" tests="2" failures="1" errors="0" time="0.03">
|
||||
<testcase name="BVT" classname="FCTesting.BVT" time="0" />
|
||||
<testcase name="NEGATIVE" classname="FCTesting.NEGATIVE" time="0">
|
||||
<failure message="NEGATIVE fail">Stack trace: XXX</failure>
|
||||
</testcase>
|
||||
</testsuite>
|
||||
</testsuites>
|
||||
#>
|
||||
|
||||
# Module-level state shared by the JUnit report helpers below.
[xml]$junitReport = $null            # XML document for report.xml being built
[object]$reportRootNode = $null      # the <testsuites> root element
[string]$junitReportPath = ""        # destination file path for the report
[bool]$isGenerateJunitReport=$False  # set by StartLogReport; gates all helpers
|
||||
|
||||
Function StartLogReport([string]$reportPath)
{
    # Initialize the JUnit XML report: create the document with a
    # <testsuites> root, remember the destination path, and enable reporting.
    # Throws if a report is already open. Returns the XmlDocument.
    if(!$junitReport)
    {
        $global:junitReport = new-object System.Xml.XmlDocument
        $newElement = $global:junitReport.CreateElement("testsuites")
        $global:reportRootNode = $global:junitReport.AppendChild($newElement)

        $global:junitReportPath = $reportPath

        # All other Log* helpers no-op until this flag is set.
        $global:isGenerateJunitReport = $True
    }
    else
    {
        throw "CI report has been created."
    }

    return $junitReport
}
|
||||
|
||||
Function FinishLogReport([bool]$isFinal=$True)
{
    # Save the JUnit report to its path. When $isFinal, also reset all module
    # report state so a new report can be started; with $False this acts as an
    # incremental flush (used by FinishLogTestSuite/FinishLogTestCase).
    if(!$global:isGenerateJunitReport)
    {
        # Reporting was never started - nothing to save.
        return
    }

    $global:junitReport.Save($global:junitReportPath)
    if($isFinal)
    {
        $global:junitReport = $null
        $global:reportRootNode = $null
        $global:junitReportPath = ""
        $global:isGenerateJunitReport=$False
    }
}
|
||||
|
||||
Function StartLogTestSuite([string]$testsuiteName)
{
    # Append a <testsuite> element (with zeroed counters and a UTC timestamp)
    # under the report root, and return a PSObject bundling the node with a
    # freshly started stopwatch. No-op when reporting is disabled.
    if(!$global:isGenerateJunitReport)
    {
        return
    }

    $newElement = $global:junitReport.CreateElement("testsuite")
    $newElement.SetAttribute("name", $testsuiteName)
    $newElement.SetAttribute("timestamp", [Datetime]::Now.ToUniversalTime().ToString("yyyy-MM-ddTHH:mm:ss"))
    # Counters are incremented by FinishLogTestCase as cases complete.
    $newElement.SetAttribute("tests", 0)
    $newElement.SetAttribute("failures", 0)
    $newElement.SetAttribute("errors", 0)
    $newElement.SetAttribute("time", 0)
    $testsuiteNode = $global:reportRootNode.AppendChild($newElement)

    $timer = CIStartTimer
    $testsuite = New-Object -TypeName PSObject
    Add-Member -InputObject $testsuite -MemberType NoteProperty -Name testsuiteNode -Value $testsuiteNode -Force
    Add-Member -InputObject $testsuite -MemberType NoteProperty -Name timer -Value $timer -Force

    return $testsuite
}
|
||||
|
||||
Function FinishLogTestSuite([object]$testsuite)
{
    # Close a suite started by StartLogTestSuite: record its elapsed seconds
    # in the "time" attribute and flush the (still open) report to disk.
    if(!$global:isGenerateJunitReport)
    {
        return
    }

    $testsuite.testsuiteNode.Attributes["time"].Value = CIStopTimer $testsuite.timer
    # $False keeps the report open for further suites/cases.
    FinishLogReport $False
}
|
||||
|
||||
Function StartLogTestCase([object]$testsuite, [string]$caseName, [string]$className)
{
    # Append a <testcase> element under the given suite's node and return a
    # PSObject bundling the suite, the new node, and a running stopwatch.
    # No-op when reporting is disabled.
    if(!$global:isGenerateJunitReport)
    {
        return
    }

    $newElement = $global:junitReport.CreateElement("testcase")
    $newElement.SetAttribute("name", $caseName)
    $newElement.SetAttribute("classname", $classname)
    $newElement.SetAttribute("time", 0)

    $testcaseNode = $testsuite.testsuiteNode.AppendChild($newElement)

    $timer = CIStartTimer
    $testcase = New-Object -TypeName PSObject
    # Keep a back-reference to the suite so FinishLogTestCase can bump counters.
    Add-Member -InputObject $testcase -MemberType NoteProperty -Name testsuite -Value $testsuite -Force
    Add-Member -InputObject $testcase -MemberType NoteProperty -Name testcaseNode -Value $testcaseNode -Force
    Add-Member -InputObject $testcase -MemberType NoteProperty -Name timer -Value $timer -Force
    return $testcase
}
|
||||
|
||||
Function FinishLogTestCase([object]$testcase, [string]$result="PASS", [string]$message="", [string]$detail="")
{
    # Close a test case opened by StartLogTestCase: record elapsed seconds,
    # bump the suite's "tests" counter and, for FAIL/ERROR results, attach a
    # <failure>/<error> child carrying $message/$detail and bump the matching
    # suite counter. Flushes the report (non-final) on every call.
    #
    # Improvement: the original had two near-identical copies of the child
    # element / counter logic for FAIL and ERROR; folded into one branch
    # parameterized by element and counter name. Behavior is unchanged
    # ($result can only match one of the two).
    if(!$global:isGenerateJunitReport)
    {
        return
    }

    $testcase.testcaseNode.Attributes["time"].Value = CIStopTimer $testcase.timer

    [int]$testcase.testsuite.testsuiteNode.Attributes["tests"].Value += 1
    if (($result -eq "FAIL") -or ($result -eq "ERROR"))
    {
        # FAIL -> <failure>/"failures", ERROR -> <error>/"errors".
        $childName = if ($result -eq "FAIL") { "failure" } else { "error" }
        $counterName = if ($result -eq "FAIL") { "failures" } else { "errors" }

        $newChildElement = $global:junitReport.CreateElement($childName)
        $newChildElement.InnerText = $detail
        $newChildElement.SetAttribute("message", $message)
        # Deliberately left unassigned, as in the original: AppendChild's
        # return value flows to the function's output stream.
        $testcase.testcaseNode.AppendChild($newChildElement)

        [int]$testcase.testsuite.testsuiteNode.Attributes[$counterName].Value += 1
    }
    FinishLogReport $False
}
|
||||
|
||||
Function CIStartTimer()
{
    # Create and start a fresh stopwatch used to time a suite or test case.
    return [System.Diagnostics.Stopwatch]::StartNew()
}
|
||||
|
||||
Function CIStopTimer([System.Diagnostics.Stopwatch]$timer)
{
    # Halt the stopwatch and report its elapsed time in seconds, rounded to
    # two decimal places.
    $timer.Stop()
    $elapsedSeconds = $timer.Elapsed.TotalSeconds
    return [System.Math]::Round($elapsedSeconds, 2)
}
|
||||
|
||||
Function AddReproVMDetailsToHtmlReport()
{
    # Build an HTML fragment listing the connection details of the VMs kept
    # alive for repro. Reads $UseAzureResourceManager and $allVMData from the
    # caller's scope.
    #
    # FIX 1: the original tested $UserAzureResourceManager - a typo; the
    # switch declared by the entry script is $UseAzureResourceManager, so the
    # ARM (ResourceGroup) branch could never be taken.
    # FIX 2: $reproVMHtmlText is now explicitly initialized instead of
    # relying on '+=' against an undefined variable.
    $reproVMHtmlText = "<br><font size=`"2`"><em>Repro VMs: </em></font>"
    if ( $UseAzureResourceManager )
    {
        foreach ( $vm in $allVMData )
        {
            $reproVMHtmlText += "<br><font size=`"2`">ResourceGroup : $($vm.ResourceGroup), IP : $($vm.PublicIP), SSH : $($vm.SSHPort)</font>"
        }
    }
    else
    {
        # Classic (ASM) deployments are identified by service name instead.
        foreach ( $vm in $allVMData )
        {
            $reproVMHtmlText += "<br><font size=`"2`">ServiceName : $($vm.ServiceName), IP : $($vm.PublicIP), SSH : $($vm.SSHPort)</font>"
        }
    }
    return $reproVMHtmlText
}
|
||||
|
||||
Function GetCurrentCycleData($xmlConfig, $cycleName)
{
    # Return the <Cycle> node whose cycleName equals $cycleName; produces no
    # output when no cycle matches.
    # NOTE(review): this duplicates the GetCurrentCycleData defined earlier in
    # this file; when both are loaded, the later definition wins. Consider
    # removing one copy.
    foreach ($Cycle in $xmlConfig.config.testCycles.Cycle )
    {
        if($cycle.cycleName -eq $cycleName)
        {
            return $cycle
            break   # unreachable: 'return' has already exited the function
        }
    }

}
|
||||
|
||||
Function GetCurrentTestData($xmlConfig, $testName)
{
    # Look up the <test> definition matching $testName, publish it as the
    # global $CurrentTestData, and return it. Produces no output when the
    # test name is not found.
    foreach ($testDefinition in $xmlConfig.config.testsDefinition.test)
    {
        if ($testDefinition.testName -ne $testName)
        {
            continue
        }
        LogMsg "Loading the test data for $($testDefinition.testName)"
        Set-Variable -Name CurrentTestData -Value $testDefinition -Scope Global -Force
        return $testDefinition
    }
}
|
||||
|
||||
Function RefineTestResult2 ($testResult)
{
    # Scan $testResult for the first status token (PASS/FAIL/ABORTED); once
    # found, collapse the result to that token plus the element immediately
    # following it. If no status token exists, the input is returned as-is.
    $statusIndex = 0
    $refined = @()
    foreach ($token in $testResult)
    {
        if (($token -eq "PASS") -or ($token -eq "FAIL") -or ($token -eq "ABORTED"))
        {
            $refined += $testResult[$statusIndex]
            $refined += $testResult[$statusIndex + 1]
            $testResult = $refined
            break
        }
        $statusIndex++
    }
    return $testResult
}
|
||||
|
||||
Function RefineTestResult1 ($tempResult)
{
    # Extract the final word of the last line in $tempResult - the test
    # status (e.g. "PASS" from "Test Result = PASS").
    #
    # FIX: the original condition was 'if($tempResultSplitted.Length > 1)'.
    # In a PowerShell pipeline condition '>' is OUTPUT REDIRECTION, not a
    # comparison: it silently created a file named "1" in the working
    # directory (which this repo's .gitignore even lists) and the condition
    # always evaluated false, so the diagnostic line never printed. '-gt' is
    # the comparison that was intended.
    $lastObject = $null
    foreach ($new in $tempResult)
    {
        $lastObject = $new
    }
    $tempResultSplitted = $lastObject.Split(" ")
    if($tempResultSplitted.Length -gt 1 )
    {
        Write-Host "Test Result = $lastObject" -ForegroundColor Gray
    }
    $lastWord = ($tempResultSplitted.Length - 1)

    return $tempResultSplitted[$lastWord]
}
|
|
@ -0,0 +1,411 @@
|
|||
Param(
|
||||
|
||||
#Do not use. Reserved for Jenkins use.
|
||||
$BuildNumber=$env:BUILD_NUMBER,
|
||||
|
||||
#Required
|
||||
[string] $TestLocation="westeurope",
|
||||
[string] $RGIdentifier = "TEST",
|
||||
[string] $TestPlatform = "Azure",
|
||||
[string] $ARMImageName = "Canonical UbuntuServer 16.04-LTS latest",
|
||||
|
||||
#Optinal
|
||||
[string] $OsVHD, #... Required if -ARMImageName is not provided.
|
||||
[string] $TestCategory = "",
|
||||
[string] $TestArea,
|
||||
[string] $TestTag = "",
|
||||
[string] $TestNames="VERIFY-DEPLOYMENT-PROVISION",
|
||||
[switch] $Verbose,
|
||||
|
||||
|
||||
#Swithces
|
||||
[switch] $keepReproInact
|
||||
)
|
||||
|
||||
#Import the Functinos from Library Files.
|
||||
Get-ChildItem .\Libraries -Recurse | Where-Object { $_.FullName.EndsWith(".psm1") } | ForEach-Object { Import-Module $_.FullName -Force -Global }
|
||||
LogVerbose "Set-Variable -Name WorkingDirectory -Value (Get-Location).Path -Scope Global"
|
||||
|
||||
try
|
||||
{
|
||||
#region Validate Parameters
|
||||
LogVerbose "Set-Variable -Name WorkingDirectory -Value (Get-Location).Path -Scope Global"
|
||||
$ParameterErrors = @()
|
||||
if ( !$TestPlatform )
|
||||
{
|
||||
$ParameterErrors += "-TestPlatform <Azure/AzureStack> is required."
|
||||
}
|
||||
if ( !$ARMImageName -and !$OsVHD )
|
||||
{
|
||||
$ParameterErrors += "-ARMImageName <'Publisher Offer Sku Version'>/ -OsVHD <'VHD_Name.vhd'> is required"
|
||||
}
|
||||
if ( !$TestLocation)
|
||||
{
|
||||
$ParameterErrors += "-TestLocation <Location> is required"
|
||||
}
|
||||
if ( !$RGIdentifier )
|
||||
{
|
||||
$ParameterErrors += "-RGIdentifier <PersonalIdentifier> is required. This string will added to Resources created by Automation."
|
||||
}
|
||||
if ( $ParameterErrors.Count -gt 0)
|
||||
{
|
||||
$ParameterErrors | ForEach-Object { LogError $_ }
|
||||
Throw "Paremeters are not valid."
|
||||
}
|
||||
else
|
||||
{
|
||||
LogMsg "Input parameters are valid"
|
||||
}
|
||||
#endregion
|
||||
|
||||
if ($TestPlatform -eq "Azure")
|
||||
{
|
||||
#TBD Verify if the current PS session is authenticated.
|
||||
#As of now, it expects that PS session is authenticated.
|
||||
#We'll change this behaviour in upcoming commits.
|
||||
}
|
||||
|
||||
#region Static Global Variables
|
||||
Set-Variable -Name WorkingDirectory -Value (Get-Location).Path -Scope Global
|
||||
LogVerbose "Set-Variable -Name WorkingDirectory -Value (Get-Location).Path -Scope Global"
|
||||
Set-Variable -Name shortRandomNumber -Value $(Get-Random -Maximum 99999 -Minimum 11111) -Scope Global
|
||||
LogVerbose "Set-Variable -Name shortRandomNumber -Value $(Get-Random -Maximum 99999 -Minimum 11111) -Scope Global"
|
||||
Set-Variable -Name shortRandomWord -Value $(-join ((65..90) | Get-Random -Count 4 | ForEach-Object {[char]$_})) -Scope Global
|
||||
LogVerbose "Set-Variable -Name shortRandomWord -Value $(-join ((65..90) | Get-Random -Count 4 | ForEach-Object {[char]$_})) -Scope Global"
|
||||
#endregion
|
||||
|
||||
#region Runtime Global Variables
|
||||
if ( $Verbose )
|
||||
{
|
||||
$VerboseCommand = "-Verbose"
|
||||
Set-Variable -Name VerboseCommand -Value "-Verbose" -Scope Global
|
||||
}
|
||||
else
|
||||
{
|
||||
Set-Variable -Name VerboseCommand -Value "" -Scope Global
|
||||
}
|
||||
|
||||
|
||||
#endregion
|
||||
|
||||
#region Local Variables
|
||||
$TestXMLs = Get-ChildItem -Path "$WorkingDirectory\XML\TestCases\*.xml"
|
||||
$SetupTypeXMLs = Get-ChildItem -Path "$WorkingDirectory\XML\VMConfigurations\*.xml"
|
||||
$allTests = @()
|
||||
$ARMImage = $ARMImageName.Split(" ")
|
||||
$xmlFile = "$WorkingDirectory\TestConfiguration.xml"
|
||||
if ( $TestCategory -eq "All")
|
||||
{
|
||||
$TestCategory = ""
|
||||
}
|
||||
if ( $TestArea -eq "All")
|
||||
{
|
||||
$TestArea = ""
|
||||
}
|
||||
if ( $TestNames -eq "All")
|
||||
{
|
||||
$TestNames = ""
|
||||
}
|
||||
if ( $TestTag -eq "All")
|
||||
{
|
||||
$TestTag = $null
|
||||
}
|
||||
#endregion
|
||||
|
||||
#Validate all XML files in working directory.
|
||||
ValiateXMLs -ParentFolder $WorkingDirectory
|
||||
|
||||
#region Collect Tests Data
# Append to $allTests every <test> node (from every test-definition XML) whose
# Platform matches $TestPlatform and which satisfies the supplied filter.
# The filter scriptblock receives the current <test> node as its argument.
# This replaces six near-identical copy-pasted foreach loops.
Function Select-MatchingTests([ScriptBlock] $Filter)
{
    foreach ( $file in $TestXMLs.FullName )
    {
        $currentTests = ([xml]( Get-Content -Path $file)).TestCases
        foreach ( $test in $currentTests.test )
        {
            if ( ($TestPlatform -eq $test.Platform) -and (& $Filter $test) )
            {
                LogMsg "Collected $($test.TestName)"
                $script:allTests += $test
            }
        }
    }
}

# The supported parameter combinations below mirror the original if/elseif
# ladder exactly; any other combination is rejected at the end.
if ( $TestPlatform -and !$TestCategory -and !$TestArea -and !$TestNames -and !$TestTag)
{
    # Platform only: collect everything for the platform.
    Select-MatchingTests { param($test) $true }
}
elseif ( $TestPlatform -and $TestCategory -and (!$TestArea -or $TestArea -eq "default") -and !$TestNames -and !$TestTag)
{
    # Platform + Category ("default" area counts as unspecified).
    Select-MatchingTests { param($test) $TestCategory -eq $test.Category }
}
elseif ( $TestPlatform -and $TestCategory -and ($TestArea -and $TestArea -ne "default") -and !$TestNames -and !$TestTag)
{
    # Platform + Category + explicit Area.
    Select-MatchingTests { param($test) ($TestCategory -eq $test.Category) -and ($TestArea -eq $test.Area) }
}
elseif ( $TestPlatform -and $TestCategory -and $TestNames -and !$TestTag)
{
    # Platform + Category + explicit test names (comma separated).
    # NOTE(review): as in the original, this branch also compares $TestArea, so
    # an empty -TestArea only matches tests whose Area attribute is also empty.
    Select-MatchingTests { param($test) ($TestCategory -eq $test.Category) -and ($TestArea -eq $test.Area) -and ($TestNames.Split(",").Contains($test.TestName)) }
}
elseif ( $TestPlatform -and !$TestCategory -and !$TestArea -and $TestNames -and !$TestTag)
{
    # Platform + explicit test names only.
    Select-MatchingTests { param($test) $TestNames.Split(",").Contains($test.TestName) }
}
elseif ( $TestPlatform -and !$TestCategory -and !$TestArea -and !$TestNames -and $TestTag)
{
    # Platform + tag: the test's Tags attribute is a comma separated list.
    Select-MatchingTests { param($test) $test.Tags.Split(",").Contains($TestTag) }
}
else
{
    Throw "Invalid Test Selection"
}
#endregion
|
||||
|
||||
#region Create Test XML
# Unique list of VM setup types referenced by the collected tests.
$SetupTypes = $allTests.SetupType | Sort-Object | Get-Unique

# $tab[i] is a string of i tab characters, used to indent the generated XML.
# PowerShell string multiplication replaces the original nested counting loop.
$tab = @()
for ( $i = 0; $i -lt 30; $i++)
{
    $tab += "`t" * $i
}
|
||||
|
||||
|
||||
# Subscription / database / credential settings shared by all runs.
$GlobalConfiguration = [xml](Get-content .\XML\GlobalConfigurations.xml)
<##########################################################################
We're following the Indentation of the XML file to make XML creation easier.
##########################################################################>
# The TestConfiguration.xml document is assembled as one big string; $tab[n]
# supplies n tabs of indentation so the output stays human readable.
$xmlContent = ("$($tab[0])" + '<?xml version="1.0" encoding="utf-8"?>')
$xmlContent += ("$($tab[0])" + "<config>`n")
$xmlContent += ("$($tab[0])" + "<CurrentTestPlatform>$TestPlatform</CurrentTestPlatform>`n")
$xmlContent += ("$($tab[1])" + "<Azure>`n")

#region Add Subscription Details
$xmlContent += ("$($tab[2])" + "<General>`n")

# Re-indent each child element of <Subscription> by splitting the InnerXml
# back into one element per line.
foreach ( $line in $GlobalConfiguration.Global.Azure.Subscription.InnerXml.Replace("><",">`n<").Split("`n"))
{
    $xmlContent += ("$($tab[3])" + "$line`n")
}
$xmlContent += ("$($tab[2])" + "<Location>$TestLocation</Location>`n")
$xmlContent += ("$($tab[2])" + "</General>`n")
#endregion

#region Database details
$xmlContent += ("$($tab[2])" + "<database>`n")
foreach ( $line in $GlobalConfiguration.Global.Azure.ResultsDatabase.InnerXml.Replace("><",">`n<").Split("`n"))
{
    $xmlContent += ("$($tab[3])" + "$line`n")
}
$xmlContent += ("$($tab[2])" + "</database>`n")
#endregion

#region Deployment details
# <Deployment> carries the image under test (ARM image tuple or a raw VHD),
# the linux credentials, and a copy of every referenced setup-type definition.
$xmlContent += ("$($tab[2])" + "<Deployment>`n")
$xmlContent += ("$($tab[3])" + "<Data>`n")
$xmlContent += ("$($tab[4])" + "<Distro>`n")
$xmlContent += ("$($tab[5])" + "<Name>$RGIdentifier</Name>`n")
$xmlContent += ("$($tab[5])" + "<ARMImage>`n")
# $ARMImage was produced earlier by splitting "Publisher Offer Sku Version".
$xmlContent += ("$($tab[6])" + "<Publisher>" + "$($ARMImage[0])" + "</Publisher>`n")
$xmlContent += ("$($tab[6])" + "<Offer>" + "$($ARMImage[1])" + "</Offer>`n")
$xmlContent += ("$($tab[6])" + "<Sku>" + "$($ARMImage[2])" + "</Sku>`n")
$xmlContent += ("$($tab[6])" + "<Version>" + "$($ARMImage[3])" + "</Version>`n")
$xmlContent += ("$($tab[5])" + "</ARMImage>`n")
$xmlContent += ("$($tab[5])" + "<OsVHD>" + "$OsVHD" + "</OsVHD>`n")
$xmlContent += ("$($tab[4])" + "</Distro>`n")
$xmlContent += ("$($tab[4])" + "<UserName>" + "$($GlobalConfiguration.Global.Azure.TestCredentials.LinuxUsername)" + "</UserName>`n")
$xmlContent += ("$($tab[4])" + "<Password>" + "$($GlobalConfiguration.Global.Azure.TestCredentials.LinuxPassword)" + "</Password>`n")
$xmlContent += ("$($tab[3])" + "</Data>`n")

# Inline every setup-type definition that the collected tests actually use.
foreach ( $file in $SetupTypeXMLs.FullName)
{
    foreach ( $SetupType in $SetupTypes )
    {
        $CurrentSetupType = ([xml]( Get-Content -Path $file)).TestSetup
        if ( $CurrentSetupType.$SetupType -ne $null)
        {
            $SetupTypeElement = $CurrentSetupType.$SetupType
            $xmlContent += ("$($tab[3])" + "<$SetupType>`n")
            #$xmlContent += ("$($tab[4])" + "$($SetupTypeElement.InnerXml)`n")
            foreach ( $line in $SetupTypeElement.InnerXml.Replace("><",">`n<").Split("`n"))
            {
                $xmlContent += ("$($tab[4])" + "$line`n")
            }

            $xmlContent += ("$($tab[3])" + "</$SetupType>`n")
        }
    }
}
$xmlContent += ("$($tab[2])" + "</Deployment>`n")
#endregion
$xmlContent += ("$($tab[1])" + "</Azure>`n")

#region TestDefinition
# Full definition of every collected test.
$xmlContent += ("$($tab[1])" + "<testsDefinition>`n")
foreach ( $currentTest in $allTests)
{
    $xmlContent += ("$($tab[2])" + "<test>`n")
    foreach ( $line in $currentTest.InnerXml.Replace("><",">`n<").Split("`n"))
    {
        $xmlContent += ("$($tab[3])" + "$line`n")
    }
    $xmlContent += ("$($tab[2])" + "</test>`n")
}
$xmlContent += ("$($tab[1])" + "</testsDefinition>`n")
#endregion

#region TestCycle
# A single cycle named after the random run id, listing every test by name.
$xmlContent += ("$($tab[1])" + "<testCycles>`n")
$xmlContent += ("$($tab[2])" + "<Cycle>`n")
$xmlContent += ("$($tab[3])" + "<cycleName>TC-$shortRandomNumber</cycleName>`n")
foreach ( $currentTest in $allTests)
{
    $line = $currentTest.TestName
    $xmlContent += ("$($tab[3])" + "<test>`n")
    $xmlContent += ("$($tab[4])" + "<Name>$line</Name>`n")
    $xmlContent += ("$($tab[3])" + "</test>`n")
}
$xmlContent += ("$($tab[2])" + "</Cycle>`n")
$xmlContent += ("$($tab[1])" + "</testCycles>`n")
#endregion
$xmlContent += ("$($tab[0])" + "</config>`n")
Set-Content -Value $xmlContent -Path $xmlFile -Force
# Round-trip the file through the XML parser purely as a well-formedness check.
try
{
    $xmlConfig = [xml](Get-Content $xmlFile)
    $xmlConfig.Save("$xmlFile")
    LogMsg "Auto created $xmlFile validated successfully."
}
catch
{
    Throw "Auto created $xmlFile is not valid."
}

#endregion
|
||||
|
||||
#region Download necessary tools.
mkdir -Path .\tools -ErrorAction SilentlyContinue | Out-Null
Import-Module BitsTransfer
# All support binaries live in the same GitHub folder; fetch any that are
# missing locally. This replaces four copy-pasted download stanzas.
$supportFilesUrl = "https://github.com/iamshital/azure-linux-automation-support-files/raw/master/tools"
foreach ( $tool in @("7za.exe", "dos2unix.exe", "plink.exe", "pscp.exe") )
{
    if (!( Test-Path -Path ".\tools\$tool" ))
    {
        Write-Host "Downloading $tool"
        $out = Start-BitsTransfer -Source "$supportFilesUrl/$tool" | Out-Null
    }
}
# Start-BitsTransfer (no -Destination) saves into the current directory;
# stage everything under .\tools afterwards.
Move-Item -Path "*.exe" -Destination .\tools -ErrorAction SilentlyContinue -Force
#endregion
|
||||
|
||||
# Hand the generated TestConfiguration.xml over to the main automation driver.
LogMsg ".\AutomationManager.ps1 -xmlConfigFile '$xmlFile' -cycleName TC-$shortRandomNumber -RGIdentifier $RGIdentifier -runtests -UseAzureResourceManager"
.\AutomationManager.ps1 -xmlConfigFile "$xmlFile" -cycleName "TC-$shortRandomNumber" -RGIdentifier $RGIdentifier -runtests -UseAzureResourceManager -keepReproInact

#TBD Analyse the test result
#TBD Archive the logs
#TBD Email the reports
}
catch
{
    # Report where the failure happened; paths are shown relative to the
    # current directory for readability.
    $line = $_.InvocationInfo.ScriptLineNumber
    $script_name = ($_.InvocationInfo.ScriptName).Replace($PWD,".")
    $ErrorMessage = $_.Exception.Message
    LogMsg "EXCEPTION : $ErrorMessage"
    LogMsg "Source : Line $line in script $script_name."
    $ExitCode = 1
}
finally
{
    # NOTE(review): $ExitCode is presumably initialised to 0 before the try
    # block that starts above this excerpt -- confirm against the full script.
    exit $ExitCode
}
|
|
@ -0,0 +1,19 @@
|
|||
#!/usr/bin/python

from azuremodules import *


def RunTest():
    """Verify that the root account's password is locked/removed in /etc/shadow."""
    UpdateState("TestRunning")
    RunLog.info("Checking if root password is deleted or not...")

    # Anchor the match to the real 'root' entry; the original 'grep root' also
    # matched any other account whose name contains "root".
    passwd_output = Run("grep '^root:' /etc/shadow")
    root_passwd = passwd_output.split(":")[1]
    # A locked/absent password field conventionally contains '*' or '!' ('!!').
    if ('*' in root_passwd or '!' in root_passwd):
        RunLog.info('root password is deleted in /etc/shadow.')
        ResultLog.info('PASS')
    else:
        RunLog.error('root password not deleted.%s', passwd_output)
        ResultLog.error('FAIL')
    UpdateState("TestCompleted")

RunTest()
|
|
@ -0,0 +1,53 @@
|
|||
#!/usr/bin/python

from azuremodules import *


import argparse
import sys
import time
import re
#for error checking
parser = argparse.ArgumentParser()

parser.add_argument('-e', '--expected', help='specify expected hostname', required=True)

args = parser.parse_args()
#if no value specified then stop
expectedHostname = args.expected


def CheckHostName(expectedHost):
    """Return True when `hostname` output contains the expected name (case-insensitive)."""
    RunLog.info("Checking hostname...")
    reported = Run("hostname")
    matched = expectedHost.upper() in reported.upper()
    if matched:
        RunLog.info('Hostname is set successfully to {0}'.format(expectedHost))
    else:
        RunLog.error('Hostname change failed. Current hostname : {0} Expected hostname : {1}'.format(reported, expectedHost))
    return matched


def CheckFQDN(expectedHost):
    """Return True when nslookup resolves the expected host (CoreOS uses a python fallback)."""
    RunLog.info("Checking fqdn...")
    [current_distro, distro_version] = DetectDistro()
    lookup_cmd = "nslookup {0}".format(expectedHost)
    if current_distro == 'coreos':
        lookup_cmd = "python nslookup.py -n {0}".format(expectedHost)
    reply = Run(lookup_cmd)
    if re.search("server can't find", reply) is not None:
        RunLog.error("nslookup failed for: {0}, {1}".format(expectedHost, reply))
        return False
    RunLog.info('nslookup successfully for: {0}'.format(expectedHost))
    return True


def RunTest(expectedHost):
    """PASS only when both the hostname and its DNS resolution check out."""
    UpdateState("TestRunning")
    passed = CheckHostName(expectedHost) and CheckFQDN(expectedHost)
    if passed:
        ResultLog.info('PASS')
    else:
        ResultLog.error('FAIL')
    UpdateState("TestCompleted")


RunTest(expectedHostname)
|
|
@ -0,0 +1,31 @@
|
|||
#!/bin/bash
# Collect basic VM diagnostics (dmesg, waagent log, kernel/LIS/distro info)
# into per-host files named "<hostname>-*.txt" in the current directory.
export PATH="/sbin:/bin:/usr/sbin:/usr/bin"

host=$(hostname)

dmesg > "${host}-dmesg.txt"
cp /var/log/waagent.log "${host}-waagent.log.txt"
uname -r > "${host}-kernelVersion.txt"
uptime -s > "${host}-uptime.txt" || echo "UPTIME_COMMAND_ERROR" > "${host}-uptime.txt"
modinfo hv_netvsc > "${host}-lis.txt"

# Extract the unquoted PRETTY_NAME value from an os-release style file.
pretty_name() {
    grep ^PRETTY_NAME "$1" | sed 's/"//g' | sed 's/PRETTY_NAME=//g'
}

release=$(cat /etc/*release*)
if [ -f /etc/redhat-release ] ; then
    echo "/etc/redhat-release detected"
    # Oracle Linux also ships /etc/redhat-release; prefer its os-release name.
    if [[ "$release" =~ "Oracle" ]] ; then
        pretty_name /etc/os-release > "${host}-distroVersion.txt"
    else
        cat /etc/redhat-release > "${host}-distroVersion.txt"
    fi
elif [ -f /etc/SuSE-release ] ; then
    echo "/etc/SuSE-release detected"
    pretty_name /etc/os-release > "${host}-distroVersion.txt"
elif [[ "$release" =~ "UBUNTU" ]] || [[ "$release" =~ "Ubuntu" ]] || [[ "$release" =~ "Debian" ]]; then
    NAME=$(grep ^NAME= /etc/os-release | sed 's/"//g' | sed 's/NAME=//g')
    VERSION=$(grep ^VERSION= /etc/os-release | sed 's/"//g' | sed 's/VERSION=//g')
    echo "$NAME $VERSION" > "${host}-distroVersion.txt"
elif [ -e /usr/share/clear/version ]; then
    # Clear Linux keeps os-release under /usr/lib.
    NAME=$(grep ^PRETTY_NAME /usr/lib/os-release | sed 's/"//g' | sed 's/PRETTY_NAME=//g')
    VERSION=$(grep ^VERSION= /usr/lib/os-release | sed 's/"//g' | sed 's/VERSION=//g')
    echo "$NAME $VERSION" > "${host}-distroVersion.txt"
else
    echo "unknown" > "${host}-distroVersion.txt"
    echo "$release" > "${host}-unknownDistro.txt"
fi
exit 0
|
|
@ -0,0 +1,19 @@
|
|||
#!/bin/bash
# Check whether the accelerated-networking (SR-IOV) data path is active; on
# Ubuntu, install a udev workaround and request a reboot when it is not.

DISTRO=$(grep -ihs "buntu\|Suse\|Fedora\|Debian\|CentOS\|Red Hat Enterprise Linux" /etc/{issue,*release,*version})
bootLogs=$(dmesg)

if [[ $bootLogs =~ "Data path switched to VF" ]]; then
    echo "DATAPATH_SWITCHED_TO_VF"
elif [[ $DISTRO =~ "Ubuntu" ]]; then
    #A temporary workaround for SRIOV issue.
    macAddr=$(cat /sys/class/net/eth0/address)
    echo "SUBSYSTEM==\"net\", ACTION==\"add\", DRIVERS==\"hv_netvsc\", ATTR{address}==\"${macAddr}\", NAME=\"eth0\"" > /etc/udev/rules.d/70-persistent-net.rules
    echo "SUBSYSTEM==\"net\", ACTION==\"add\", DRIVERS==\"mlx4_core\", ATTR{address}==\"${macAddr}\", NAME=\"vf0\"" >> /etc/udev/rules.d/70-persistent-net.rules
    #sed -i '/rename*/c\vf0' /etc/network/interfaces
    echo "SYSTEM_RESTART_REQUIRED"
fi
exit 0
|
|
@ -0,0 +1,16 @@
|
|||
#!/bin/bash

#Reference: https://docs.microsoft.com/en-us/azure/virtual-network/virtual-network-create-vm-accelerated-networking
# If the VF data path is not yet active, install the upstream bondvf.sh helper
# as a boot-time service and ask the caller to reboot the VM.
bootLogs=$(dmesg)

if [[ $bootLogs =~ "Data path switched to VF" ]]; then
    echo "DATAPATH_SWITCHED_TO_VF"
else
    wget https://raw.githubusercontent.com/torvalds/linux/master/tools/hv/bondvf.sh
    chmod +x ./bondvf.sh
    ./bondvf.sh
    cp bondvf.sh /etc/init.d
    update-rc.d bondvf.sh defaults
    echo "SYSTEM_RESTART_REQUIRED"
fi
exit 0
|
|
@ -0,0 +1,75 @@
|
|||
#!/bin/bash
#V-SHISAV@MICROSOFT.COM
# THIS SCRIPT DETECT FOLLOWING DISTROS:
# UBUNTU [VERSION INDEPENDANT]
# CENTOS [VERSION INDEPENDANT]
# SUSE LINUX ENTERPRISE SERVER [VERSION INDEPENDANT]
# OPENSUSE [VERSION INDEPENDANT]
# REDHAT
# ORACLELINUX
# FEDORA
# Prints an upper-case distro identifier on stdout; returns 0 when the distro
# was recognised and 1 otherwise.
DetectDistro()
{
    # Legacy option handling: leading "-name value" pairs become shell variables.
    while echo $1 | grep ^- > /dev/null; do
        eval $( echo $1 | sed 's/-//g' | tr -d '\012')=$2
        shift
        shift
    done
    # Default to failure. The original left exitVal unset on several paths
    # (e.g. unknown SUSE, non-CoreOS os-release), so `return $exitVal` silently
    # returned the status of the last command instead of an error.
    exitVal=1
    if [ -e /etc/debian_version ]; then
        tmp=$(cat /etc/*-release)
        if [[ "$tmp" == *Ubuntu* ]]; then
            echo "UBUNTU"
            exitVal=0
        else
            echo "DEBIAN"
            exitVal=0
        fi
    elif [ -e /etc/redhat-release ]; then
        tmp=$(cat /etc/redhat-release)
        if [ -e /etc/oracle-release ]; then
            tmp=$(cat /etc/oracle-release)
            if [[ "$tmp" == *Oracle* ]]; then
                echo "ORACLELINUX"
                exitVal=0
            else
                echo "Unknown"
            fi
        elif [[ "$tmp" == *CentOS* ]]; then
            echo "CENTOS"
            exitVal=0
        elif [[ "$tmp" == *Fedora* ]]; then
            echo "FEDORA"
            exitVal=0
        elif [[ "$tmp" == *Red* ]]; then
            echo "REDHAT"
            exitVal=0
        else
            echo "Unknown"
        fi
    elif [ -e /etc/SuSE-release ]; then
        tmp=$(cat /etc/SuSE-release)
        if [[ "$tmp" == *Enterprise* ]]; then
            echo "SLES"
            exitVal=0
        elif [[ "$tmp" == *open* ]]; then
            echo "SUSE"
            exitVal=0
        else
            echo "Unknown"
        fi
    elif [ -e /etc/os-release ]; then
        tmp=$(cat /etc/os-release)
        if [[ "$tmp" == *coreos* ]]; then
            echo "COREOS"
            exitVal=0
        fi
    elif [ -e /usr/share/clear/version ]; then
        tmp=$(cat /usr/share/clear/version)
        echo "CLEARLINUX"
        exitVal=0
    fi
    return $exitVal
}
DetectDistro
|
|
@ -0,0 +1,142 @@
|
|||
#!/bin/bash
|
||||
#Author: Vijay Tripathi <vijayt@microsoft.com>
|
||||
# Append a labelled command transcript to a log file.
#   $1 - human-readable message
#   $2 - command to evaluate (evaluated here, so '$var' in single quotes
#        expands against globals at call time)
#   $3 - destination file (appended to)
# Fixes vs original: the separator string's quoting was broken (the inner
# double quotes ended the assignment), a stray bare `2>/dev/null` line was a
# no-op command, and expansions were unquoted.
Log() {
    msg=$1
    cmd=$2
    file=$3
    separator="---------------------------------------------------"

    echo "$separator" >> "$file"
    echo "$msg" >> "$file"
    echo "Command Used:" "$cmd" >> "$file"
    # Suppress stderr: the dangling 2>/dev/null in the original suggests that
    # command errors were meant to be discarded.
    eval "$cmd" >> "$file" 2>/dev/null
    echo "$separator" >> "$file"
}
|
||||
|
||||
# Prepare the directory that every collector function writes into.
# Sets the globals: currtime (timestamp, kept for future log naming),
# hostnm (currently unused), and dirname (the log directory).
intro() {
    currtime=$(date +"%b%d%Y-%H-%M-%S")
    hostnm=""
    dirname="LIS-Logs"
    mkdir "$dirname"
}
|
||||
|
||||
# Collect Azure Linux agent (waagent) diagnostics into $dirname/Waagent.*.
# Relies on: the Log() helper, $dirname from intro(), and $dist which is set
# by Collect_OS_Logs (this function runs after it -- see the call order at
# the bottom of the file).
Collect_Waagent_Logs() {
    echo "Collecting Waagent Details...."
    Log "Collecting Waagent Details at" 'date' $dirname/Waagent.txt
    Log "Waagent Process Running Status" 'ps -ef | grep waagent' $dirname/Waagent.txt
    # CoreOS ships waagent under /usr/share/oem with its own python interpreter.
    if [ -f /usr/share/oem/bin/waagent ]
    then
        Log "Waagent Version is" '/usr/share/oem/python/bin/python /usr/share/oem/bin/waagent --version' $dirname/Waagent.txt
    else
        Log "Waagent Version is" '/usr/sbin/waagent --version' $dirname/Waagent.txt
    fi
    Log "Root Device Timeout" 'cat /sys/block/sda/device/timeout' $dirname/Waagent.txt
    # Debian-family queries dpkg; everything else is queried through rpm.
    if [[ $dist == *Debian* ]] || [[ $dist == *Ubuntu* ]]
    then
        Log "Waagent Package Details" 'dpkg -p walinuxagent' $dirname/Waagent.txt
    else
        Log "Waagent Package Details" 'rpm -qil WALinuxAgent' $dirname/Waagent.txt
    fi
    Log "Waagent.log file" 'cat /var/log/waagent.log' $dirname/Waagent.log
}
|
||||
|
||||
# Collect general OS information into $dirname/OS.log and companion files.
# Side effect: sets the global $dist (contents of /etc/issue), which
# Collect_Waagent_Logs later uses to choose between dpkg and rpm.
Collect_OS_Logs() {
    echo "Collection Operating System Logs....."
    Log "Collection Operating System Details at" 'date' $dirname/OS.log
    Log "Kernel Version" 'uname -a' $dirname/OS.log
    Log "Distro Release Details" 'cat /etc/issue' $dirname/OS.log
    Log "Additional Kernel Details" 'cat /proc/version' $dirname/OS.log
    Log "Mount Points" 'mount' $dirname/OS.log
    Log "System Limits" 'ulimit -a' $dirname/OS.log
    #Log "NFS Shares on System" 'showmount -e' $dirname/OS.log
    Log "Hosts File Details" 'cat /etc/hosts' $dirname/OS.log
    Log "Locale Details" 'locale' $dirname/OS.log
    Log "Running Process Details" 'ps -auwwx' $dirname/OS.log
    # The grub config lives in different places depending on distro/generation.
    if [ -e /boot/grub/grub.conf ]; then
        Log "Grub File Details" 'cat /boot/grub/grub.conf' $dirname/grub.log
    elif [ -e /boot/grub/menu.lst ]; then
        Log "Grub File Details" 'cat /boot/grub/menu.lst' $dirname/grub.log
    elif [ -e /etc/grub.conf ]; then
        Log "Grub File Details" 'cat /etc/grub.conf' $dirname/grub.log
    fi
    Log "Enviornment Variables Settings" 'env' $dirname/OS.log
    Log "Dmesg File Details" 'dmesg' $dirname/dmesg.txt
    dist=$(cat /etc/issue)
    echo "$dist"
    if [[ $dist == *Debian* ]] || [[ $dist == *Ubuntu* ]]
    then
        # Typo fix: the dpkg branch used to write "KernelPackagess.txt",
        # inconsistent with the rpm branch's "KernelPackages.txt".
        Log "Kernel Loaded Packages" 'dpkg -l | grep kernel' $dirname/KernelPackages.txt
    else
        Log "Kernel Loaded Packages" 'rpm -qa | grep kernel' $dirname/KernelPackages.txt
    fi
    #Log "var log messages saved" 'cat /var/log/messages' $dirname/VarLogMessages.txt
    Log "System has Been up since" 'uptime' $dirname/OS.log
    echo "Operating system Log process finished..."
    Log "I/O Scheduler Details" 'cat /sys/block/sda/queue/scheduler ' $dirname/OS.log
}
|
||||
|
||||
# Collect Hyper-V Linux Integration Services (LIS) module details.
Collect_LIS() {
    echo "Collecting Microsoft Linux Integration Service Data..."
    Log "LIS Modules Loaded" 'lsmod | grep vsc' $dirname/LISDetails.txt
    vmbus=$(lsmod | grep vmbus | cut -d' ' -f1)
    # $vmbus is expanded when Log eval's the command string, so the single
    # quotes here are deliberate.
    Log "LIS Modules version Details" 'modinfo $vmbus' $dirname/LISDetails.txt
    # Typo fix in the status message ("Finsished" -> "Finished").
    echo "Collecting Microsoft Linux Integration Service Data Finished..."
}
|
||||
|
||||
# Collect disk, partition, SCSI and memory information into
# $dirname/Disk.txt and $dirname/Memory.txt. Requires Log() and $dirname.
Collect_DiskandMemory() {
    echo "Collecting Disk and Memory Data"
    Log "Disk Partition Details" 'fdisk -l' $dirname/Disk.txt
    Log "Filesystem details" 'df -k' $dirname/Disk.txt
    Log "Additional Partition Details" 'cat /proc/partitions' $dirname/Disk.txt
    Log "Memory Details" 'cat /proc/meminfo' $dirname/Memory.txt
    Log "Scsi details" 'cat /proc/scsi/scsi' $dirname/Disk.txt
    Log "Memory Usage Details in MB" 'free -m' $dirname/Memory.txt
    Log "I/O Memory details" 'cat /proc/iomem' $dirname/Memory.txt
    echo "Collecting Disk and Memory Data Finished..."
}
|
||||
|
||||
# Collect CPU, interrupt, module and process-activity information.
# Requires Log() and $dirname.
Collect_Processor() {
    echo "Collecting Processor Data..."
    Log "Processor Details" 'cat /proc/cpuinfo' $dirname/Cpuinfo.txt
    Log "Processor Count" 'cat /proc/cpuinfo | grep ^proc' $dirname/Cpuinfo.txt
    Log "Interrurpts details" 'cat /proc/interrupts' $dirname/interrupts.txt
    Log "List of loaded Modules" 'lsmod' $dirname/Modules.txt
    Log "List of IO Ports" 'cat /proc/ioports' $dirname/IOports.txt
    # Five iterations of top give a short activity sample (block-buffered batch mode).
    Log "Processor Real time activity" 'top -b -n 5' $dirname/Top.txt
    Log "Processes consuming most amount of memory" 'ps -eo pcpu,pid,user,args | sort -k 1 -r | head -10' $dirname/Top.txt
    echo "Collecting Processor Data Finished..."
}
|
||||
|
||||
# Collect network interface, socket and routing information.
# Requires Log() and $dirname.
Collect_Network() {
    echo "Collecting Network Data..."
    Log "Network Interface Details" 'ifconfig -a' $dirname/Network.txt
    Log "Network Status Details by interface" 'netstat -i' $dirname/Network.txt
    Log "Network Status Details of all sockets" 'netstat -a' $dirname/Network.txt
    Log "Network Status Details Source and Destinations ips and ports" 'netstat -lan' $dirname/Network.txt
    Log "Routing Table Details" 'route' $dirname/Route.txt
    echo "Collecting Network Data Finished..."
}
|
||||
|
||||
# Bundle the collected log directory into "$dirname.tgz".
# Quoting added so an unexpected value of $dirname cannot word-split.
Create_Compr_Logs() {
    echo "Compressing Logs"
    tar -czf "$dirname.tgz" "$dirname"/*
}
|
||||
|
||||
# Placeholder: uploading the compressed logs is not implemented yet.
Upload_Logs() {
    return;

}
|
||||
# Drive the collection end-to-end. Collect_OS_Logs runs first because it sets
# the global $dist, which Collect_Waagent_Logs uses to pick dpkg vs rpm.
intro
Collect_OS_Logs
Collect_Waagent_Logs
Collect_LIS
Collect_DiskandMemory
Collect_Processor
Collect_Network
Create_Compr_Logs
|
||||
|
|
@ -0,0 +1,847 @@
|
|||
#!/usr/bin/python
#####################################################################################################################################
# THIS FILE CONTAINS ALL THE FUNCTIONS USED IN PYTHON TEST FILES... HANDLE WITH CARE...
# FOR ANY QUERY - V-SHISAV@MICROSOFT.COM
# DO NOT DELETE ANY STATEMENT FROM THE FUNCTION EVEN IF IT IS COMMENTED!!! BECAUSE I'M TRACKING, WHAT I'M DOING...
#####################################################################################################################################

import subprocess
import logging
import string
import os
import time
import os.path
import array
import linecache
import sys
import re

# Python 2's `commands` module was removed in Python 3; subprocess provides a
# compatible getoutput(), so alias it under the old name.
try:
    import commands
except ImportError:
    import subprocess as commands

# Interpreter version string; checked below ("py_ver_str[0] == '3'") to decide
# whether subprocess byte output must be decoded.
py_ver_str = sys.version
print(sys.version)

#THIS LOG WILL COLLECT ALL THE LOGS THAT ARE RUN WHILE THE TEST IS GOING ON...
RunLog = logging.getLogger("RuntimeLog : ")
WRunLog = logging.FileHandler('Runtime.log','w')
RunFormatter = logging.Formatter('%(asctime)s : %(levelname)s : %(message)s', datefmt='%m/%d/%Y %I:%M:%S %p')
WRunLog.setFormatter(RunFormatter)
RunLog.setLevel(logging.DEBUG)
RunScreen = logging.StreamHandler()
RunScreen.setFormatter(RunFormatter)
#RunLog.addHandler(RunScreen)
RunLog.addHandler(WRunLog)

#This will collect Result from every test case :
ResultLog = logging.getLogger("Result : ")
WResultLog = logging.FileHandler('Summary.log','w')
#ResultFormatter = logging.Formatter('%(asctime)s : %(levelname)s : %(message)s', datefmt='%m/%d/%Y %I:%M:%S %p')
ResultFormatter = logging.Formatter('%(message)s')
WResultLog.setFormatter(ResultFormatter)
ResultLog.setLevel(logging.DEBUG)
ResultScreen = logging.StreamHandler()
ResultScreen.setFormatter(ResultFormatter)
#ResultLog.addHandler(ResultScreen)
ResultLog.addHandler(WResultLog)
|
||||
|
||||
def UpdateRepos(current_distro):
    """Refresh package-manager metadata for the given distro identifier.

    Returns True when *current_distro* maps to a known package manager,
    False (after logging) otherwise. RunUpdate() is used because it
    communicate()s with the child and cannot deadlock on a full stdout pipe
    during large repo refreshes.
    """
    # Typo fixes in the log messages ("repositoriy" -> "repository").
    RunLog.info ("\nUpdating the repository information...")
    if (current_distro.find("ubuntu") != -1) or (current_distro.find("debian") != -1):
        RunUpdate("apt-get update")
    elif (current_distro.find("rhel") != -1) or (current_distro.find("Oracle") != -1) or (current_distro.find('centos') != -1):
        RunUpdate("yum -y update")
    elif (current_distro.find("opensuse") != -1) or (current_distro.find("SUSE") != -1) or (current_distro.find("sles") != -1):
        RunUpdate("zypper --non-interactive --gpg-auto-import-keys update")
    else:
        RunLog.info("Repo update failed on:"+current_distro)
        return False

    RunLog.info ("Updating the repository information... [done]")
    return True
|
||||
|
||||
def DownloadUrl(url, destination_folder, output_file=None):
    """Download *url* into *destination_folder* using wget.

    When *output_file* is given it is passed verbatim as the wget -O target
    and *destination_folder* is ignored. Installs wget on demand if the
    first attempt reports it missing. Returns True when wget's output
    contains "100%", False otherwise (logging the output).
    """
    cmd = "wget -P "+destination_folder+" "+url+ " 2>&1"
    if output_file is not None:
        cmd = "wget {0} -O {1} 2>&1".format(url, output_file)

    rtrn = Run(cmd)

    # wget itself may be absent on minimal images; install it and retry once.
    if(rtrn.rfind("wget: command not found") != -1):
        InstallPackage("wget")
        rtrn = Run(cmd)

    # NOTE(review): success is inferred from the progress output containing
    # "100%"; this can misreport with quiet mode or localized wget builds.
    if( rtrn.rfind("100%") != -1):
        return True
    else:
        RunLog.info (rtrn)
        return False
|
||||
|
||||
def DetectDistro():
    """Detect the running distro from /etc/*-release files.

    Returns a two-element list [distribution, version]; either entry is the
    string 'unknown' when it could not be determined. Primary source is the
    os-release style ID= / VERSION_ID= keys; free-form release text is used
    as a fallback for the distribution only.
    """
    distribution = 'unknown'
    version = 'unknown'

    RunLog.info("Detecting Distro ")
    output = Run("cat /etc/*-release")
    outputlist = re.split("\n", output)

    # First pass: structured os-release keys (quotes stripped).
    for line in outputlist:
        line = re.sub('"', '', line)
        if (re.match(r'^ID=(.*)',line,re.M|re.I) ):
            matchObj = re.match( r'^ID=(.*)', line, re.M|re.I)
            distribution = matchObj.group(1)
        elif (re.match(r'^VERSION_ID=(.*)',line,re.M|re.I) ):
            matchObj = re.match( r'^VERSION_ID=(.*)', line, re.M|re.I)
            version = matchObj.group(1)

    # Oracle Linux reports ID=ol; normalise to the name used elsewhere.
    if(distribution.strip() == "ol"):
        distribution = 'Oracle'

    if(distribution == 'unknown'):
        # Finding the Distro
        # Fallback: scan the free-form release text. Order matters: the first
        # matching pattern wins.
        for line in outputlist:
            if (re.match(r'.*Ubuntu.*',line,re.M|re.I) ):
                distribution = 'ubuntu'
                break
            elif (re.match(r'.*SUSE Linux.*',line,re.M|re.I)):
                distribution = 'SUSE'
                break
            elif (re.match(r'.*openSUSE.*',line,re.M|re.I)):
                distribution = 'opensuse'
                break
            elif (re.match(r'.*centos.*',line,re.M|re.I)):
                distribution = 'centos'
                break
            elif (re.match(r'.*Oracle.*',line,re.M|re.I)):
                distribution = 'Oracle'
                break
            elif (re.match(r'.*Red Hat.*',line,re.M|re.I)):
                distribution = 'rhel'
                break
            elif (re.match(r'.*Fedora.*',line,re.M|re.I)):
                distribution = 'fedora'
                break
    return [distribution, version]
|
||||
|
||||
def FileGetContents(filename):
    """Return the full text content of *filename*."""
    handle = open(filename)
    try:
        return handle.read()
    finally:
        handle.close()
|
||||
|
||||
def ExecMultiCmdsLocalSudo(cmd_list):
    """Write *cmd_list* (one shell command per entry) to a temp script, run
    it, and return the script's captured stdout.

    NOTE(review): the redirection '2>&1 > file' sends stderr to Run()'s pipe
    and only stdout to the log file; if the intent was to capture both
    streams in the file it should read '> file 2>&1' -- confirm before
    changing.
    """
    f = open('/tmp/temp_script.sh','w')
    for line in cmd_list:
        f.write(line+'\n')
    f.close()
    Run ("chmod +x /tmp/temp_script.sh")
    Run ("/tmp/temp_script.sh 2>&1 > /tmp/exec_multi_cmds_local_sudo.log")
    return FileGetContents("/tmp/exec_multi_cmds_local_sudo.log")
|
||||
|
||||
def DetectLinuxDistro():
    """Classify the distro family by marker files.

    Returns (True, name) for RedHat / Ubuntu / Debian / Suse, else
    (False, "Unknown"). Checks run in priority order; Ubuntu additionally
    requires the "Ubuntu" marker inside /etc/lsb-release.
    """
    checks = (
        ("/etc/redhat-release", "RedHat", None),
        ("/etc/lsb-release", "Ubuntu", "Ubuntu"),
        ("/etc/debian_version", "Debian", None),
        ("/etc/SuSE-release", "Suse", None),
    )
    for marker, label, needle in checks:
        if not os.path.isfile(marker):
            continue
        if needle is not None and needle not in GetFileContents(marker):
            continue
        return (True, label)
    return (False, "Unknown")
|
||||
|
||||
def IsUbuntu():
    """Report whether /etc/issue identifies this system as Ubuntu."""
    issue_text = Run("cat /etc/issue")
    return "Ubuntu" in issue_text
|
||||
|
||||
def ParseWalaConf2Dict(walaconfpath):
    """Parse a waagent.conf style file into a {key: value} dict.

    Returns None (after logging an error) when the file does not exist.
    Lines starting with '#' or a newline are skipped; only the first
    whitespace-separated token of each remaining line is considered and it
    must look like "Key=Value".
    """
    d = None
    if os.path.exists(walaconfpath):
        d={}
        lines = GetFileContentsByLines(walaconfpath)
        configs_list = [x.strip().split()[0] for x in lines if not x.startswith('#') and not x.startswith('\n')]
        for x in configs_list:
            try:
                k,v=x.split('=')
                d.setdefault(k,v)
            except Exception as e:
                # Tokens without exactly one '=' are skipped on purpose
                # (best-effort parsing of hand-edited config files).
                pass
    else:
        RunLog.error("%s is not exists, please check." % walaconfpath)
    return d
|
||||
|
||||
def GetWalaConfPath():
    """Return the waagent config path; CoreOS keeps it under /usr/share/oem."""
    on_coreos = (
        os.path.exists("/etc/lsb-release")
        and int(Run("cat /etc/lsb-release | grep -i coreos | wc -l")) > 0
    )
    if on_coreos:
        return "/usr/share/oem/waagent.conf"
    return "/etc/waagent.conf"
|
||||
|
||||
def GetResourceDiskMountPoint():
    """Return the mount point of the Azure resource disk.

    cloud-init provisioned images mount it at /mnt; otherwise the value is
    read from the ResourceDisk.MountPoint key in waagent.conf.
    """
    walacfg_path = GetWalaConfPath()
    walacfg_dict = ParseWalaConf2Dict(walacfg_path)

    # These two cloud-init artifacts indicate cloud-init (not waagent) owns
    # resource-disk handling on this image.
    if os.path.exists('/var/log/cloud-init.log') and os.path.islink('/var/lib/cloud/instance'):
        RunLog.info('ResourceDisk handled by cloud-init.')
        return '/mnt'
    else:
        RunLog.info("ResourceDisk handled by waagent.")
        return walacfg_dict['ResourceDisk.MountPoint']
|
||||
|
||||
def _DecodeOutput(raw):
    # Py2/py3-safe bytes->text decode. The original used the py2-only
    # unicode() builtin, which raises NameError on Python 3.
    try:
        return unicode(raw, encoding='utf-8', errors="backslashreplace")
    except NameError:
        return raw.decode('utf-8', errors="backslashreplace")


def RunGetOutput(cmd):
    """Run a shell command and return (exit_code, output_text).

    Fixes over the original:
      * output is decoded portably (no py2-only unicode() call), and
      * the command's real exit status is returned instead of a hard-coded 0
        (Popen never raises CalledProcessError, so the except path was dead
        and every failure was reported as success).
    """
    try:
        proc = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE)
        output = _DecodeOutput(proc.communicate()[0])
    except subprocess.CalledProcessError as e:
        return e.returncode, _DecodeOutput(e.output)
    return proc.returncode, output
|
||||
|
||||
def Run(cmd):
    """Run a shell command, log its stdout, and return the output as text.

    Fix: the original decoded the output only on the success path, so a
    failing command returned raw bytes on Python 3 and broke callers that do
    string processing (e.g. re.split) on the result. The dead
    'exception'/'str_code' bookkeeping is gone as well.
    """
    proc = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE)
    # wait()+read() is kept from the original; note it can deadlock on very
    # large outputs (see RunUpdate for the communicate() variant).
    proc.wait()
    op = proc.stdout.read()
    RunLog.debug(op)
    if isinstance(op, bytes):
        # Python 3: always hand callers text, regardless of exit status.
        op = op.decode('utf-8')
    return op
|
||||
#use method communicate() instead of wait()
|
||||
#This will deadlock when using stdout=PIPE and/or stderr=PIPE and the child process generates enough output to a pipe
|
||||
#such that it blocks waiting for the OS pipe buffer to accept more data. Use communicate() to avoid that.
|
||||
def RunUpdate(cmd):
    """Run a shell command via communicate() and return its output as text.

    Unlike Run(), this uses communicate(), which cannot deadlock when the
    child fills the OS pipe buffer. Fix: output is now decoded on Python 3
    even when the command fails (the original returned raw bytes on the
    failure path); the dead 'exception'/'str_code' bookkeeping is removed.
    """
    proc = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE)
    op = proc.communicate()[0]
    RunLog.debug(op)
    if isinstance(op, bytes):
        op = op.decode('utf-8')
    return op
|
||||
|
||||
def JustRun(cmd):
    """Run a shell command and return its output, trailing newline stripped.

    Fix: the 'commands' module only exists on Python 2; on Python 3 fall
    back to the drop-in subprocess.getoutput().
    """
    try:
        return commands.getoutput(cmd)
    except NameError:
        # Python 3: 'commands' was removed from the stdlib.
        return subprocess.getoutput(cmd)
|
||||
|
||||
def UpdateState(testState):
    """Persist the current test state to state.txt (overwrites the file)."""
    with open('state.txt', 'w') as state_file:
        state_file.write(testState)
|
||||
|
||||
def GetFileContents(filepath):
    """Return the whole contents of filepath, or None if it cannot be opened."""
    try:
        handle = open(filepath)
    except:
        return None
    if handle is None:
        return None
    try:
        return handle.read()
    finally:
        handle.close()
|
||||
|
||||
|
||||
#-----------------------------------------------------------------------------------------------------------------------------------------------------
|
||||
# Installation routines
|
||||
|
||||
def YumPackageInstall(package):
    """Install a package with yum and judge the result from yum's output.

    Returns True on a successful install or when the package is already
    present; False on a hard yum error or when no known marker appears.
    """
    RunLog.info(("\nyum_package_install: " + package))
    output = Run("yum install -y " + package)

    # (pattern, success message prefix, is-success) triples; a non-success
    # match aborts the scan and falls through to the failure report.
    checks = (
        (r'Complete!', package + ": package installed successfully.\n", True),
        (r'.* already installed and latest version', package + ": package is already installed.\n", True),
        (r'^Nothing to do', package + ": package already installed.\n", True),
        (r'^Error: Nothing to do', None, False),
        (r'^No package ' + re.escape(package) + r' available', None, False),
    )
    for line in re.split("\n", output):
        hard_failure = False
        for pattern, message, ok in checks:
            if not re.match(pattern, line, re.M | re.I):
                continue
            if ok:
                RunLog.info((message + line))
                return True
            hard_failure = True
            break
        if hard_failure:
            break

    # None of the success markers matched.
    RunLog.error((package + ": package installation failed!\n" + output))
    return False
|
||||
|
||||
def AptgetPackageInstall(package,dbpasswd = "root"):
    # Install a package with apt-get, non-interactively. mysql-server gets
    # its root password pre-seeded through debconf so no prompt can block
    # the install; dbpasswd is only used for that case.
    # Returns True on success / already-installed, False otherwise.
    RunLog.info("Installing Package: " + package)
    # Identify the package for Ubuntu
    # We Haven't installed mysql-secure_installation for Ubuntu Distro
    if (package == 'mysql-server'):
        RunLog.info( "apt-get function package:" + package)
        cmds = ("export DEBIAN_FRONTEND=noninteractive","echo mysql-server mysql-server/root_password select " + dbpasswd + " | debconf-set-selections", "echo mysql-server mysql-server/root_password_again select " + dbpasswd + "| debconf-set-selections", "apt-get install -y mysql-server")
        output = ExecMultiCmdsLocalSudo(cmds)
    else:
        cmds = ("export DEBIAN_FRONTEND=noninteractive", "apt-get install -y "+package)
        output = ExecMultiCmdsLocalSudo(cmds)

    outputlist = re.split("\n", output)

    # Success requires seeing BOTH an "Unpacking" and a "Setting up" line.
    unpacking = False
    setting_up = False

    for line in outputlist:
        # Already at the newest version counts as success.
        if (re.match(re.escape(package) + r' is already the newest version', line, re.M|re.I)):
            RunLog.info(package + ": package is already installed."+line)
            return True
        # Installation evidence, part 1.
        elif (re.match(r'Unpacking.*'+ re.escape(package) + r'.*', line, re.M|re.I)):
            unpacking = True
        # Installation evidence, part 2.
        elif (re.match(r'Setting up '+ re.escape(package) + r" \(.*" , line, re.M|re.I)):
            setting_up = True
        # Second chain, evaluated for every line: declare success once both
        # markers were seen, or bail out on the two hard apt errors.
        if (setting_up and unpacking):
            RunLog.info(package+": package installed successfully.")
            return True
        # Unknown package name: hard failure.
        elif (re.match(r'E: Unable to locate package '+ re.escape(package), line, re.M|re.I)):
            break
        # Repository/server unreachable: hard failure.
        elif (re.match(r'E: Unable to fetch some archives', line, re.M|re.I)):
            break

    # None of the success markers matched.
    RunLog.info(package + ": package installation failed!\n")
    RunLog.info("Error log: "+output)
    return False
|
||||
|
||||
def ZypperPackageInstall(package):
    """Install a package with zypper and judge the result from its output.

    Returns True on a completed install or when the package (or a provider
    of it) is already present; False otherwise.
    """
    RunLog.info( "\nzypper_package_install: " + package)

    output = Run("zypper --non-interactive in "+package)

    checks = (
        (r'.*Installing: ' + r'.*done', package + ": package installed successfully.\n", True),
        (r'.*\'' + re.escape(package) + r'\' is already installed', package + ": package is already installed.\n", True),
        (r'^No provider of \'' + re.escape(package) + r'\' found', None, False),
    )
    for line in re.split("\n", output):
        hard_failure = False
        for pattern, message, ok in checks:
            if not re.match(pattern, line, re.M | re.I):
                continue
            if ok:
                RunLog.info((message + line))
                return True
            hard_failure = True
            break
        if hard_failure:
            break

    # None of the success markers matched.
    RunLog.error((package + ": package installation failed!\n"+output))
    return False
|
||||
|
||||
def ZypperPackageRemove(package):
    """Remove a package with zypper.

    'Not installed' and 'not found in package names' both count as success
    (the desired end state — package absent — already holds).
    """
    RunLog.info( "\nzypper_package_remove: " + package)

    output = Run("zypper --non-interactive remove "+package)

    # Every matching pattern means success; a None message means the
    # original logged nothing for that case.
    checks = (
        (r'.*Removing ' + re.escape(package) + r'.*done', package + ": package removed successfully.\n"),
        (r'\'' + re.escape(package) + r'\' is not installed', package + ": package is not installed.\n"),
        (r'\'' + re.escape(package) + r'\' not found in package names', None),
    )
    for line in re.split("\n", output):
        for pattern, message in checks:
            if not re.match(pattern, line, re.M | re.I):
                continue
            if message is not None:
                RunLog.info((message + line))
            return True

    # None of the success markers matched.
    RunLog.error((package + ": package remove failed!\n"+output))
    return False
|
||||
|
||||
def InstallPackage(package):
    """Dispatch a package install to the detected distro's package manager.

    Returns the installer's boolean result, or False (with a log entry)
    for unrecognised distributions.
    """
    RunLog.info( "\nInstall_package: "+package)
    [current_distro, distro_version] = DetectDistro()

    apt_families = ("ubuntu", "Debian")
    yum_families = ("rhel", "Oracle", "centos", "fedora")
    zypper_families = ("SUSE", "opensuse", "sles")

    if any(name in current_distro for name in apt_families):
        return AptgetPackageInstall(package)
    if any(name in current_distro for name in yum_families):
        return YumPackageInstall(package)
    if any(name in current_distro for name in zypper_families):
        return ZypperPackageInstall(package)

    RunLog.error((package + ": package installation failed!"))
    RunLog.info((current_distro + ": Unrecognised Distribution OS Linux found!"))
    return False
|
||||
|
||||
def InstallDeb(file_path):
    """Install a local .deb via dpkg; True only when dpkg reports success."""
    RunLog.info( "\nInstalling package: "+file_path)
    output = Run("dpkg -i "+file_path+" 2>&1")
    RunLog.info(output)
    for line in re.split("\n", output):
        if re.match("installation successfully completed", line, re.M | re.I):
            RunLog.info(file_path + ": package installed successfully." + line)
            return True
    RunLog.info(file_path+": Installation failed"+output)
    return False
|
||||
|
||||
def InstallRpm(file_path, package_name):
    """Install a local .rpm with 'rpm -ivh --nodeps' and parse the output.

    Args:
        file_path: path to the .rpm file.
        package_name: package name to match in rpm's progress output (may
            differ from the file's basename).
    Returns True on success or when rpm says the package is already
    installed; False otherwise.
    """
    RunLog.info( "\nInstalling package: "+file_path)
    output = Run("rpm -ivh --nodeps "+file_path+" 2>&1")
    RunLog.info(output)
    outputlist = re.split("\n", output)
    # Derive the package token from the file name, minus the .rpm suffix.
    package = re.split("/", file_path )[-1]
    matchObj = re.match( r'(.*?)\.rpm', package, re.M|re.I)
    package = matchObj.group(1)

    for line in outputlist:
        # Fix: the original pattern had no whitespace between 'package' and
        # the name (r'.*package'+escape(...)), so rpm's actual message
        # "package <name> ... is already installed" could never match.
        if (re.match(r'.*package\s+'+re.escape(package) + r'.*is already installed', line, re.M|re.I)):
            RunLog.info(file_path + ": package is already installed."+line)
            return True
        # rpm prints '<name> ######...' hash-mark progress on success.
        elif(re.match(re.escape(package) + r'.*######', line, re.M|re.I)):
            RunLog.info(package + ": package installed successfully."+line)
            return True
        elif(re.match(re.escape(package_name) + r'.*######', line, re.M|re.I)):
            RunLog.info(package + ": package installed successfully."+line)
            return True

    RunLog.info(file_path+": Installation failed"+output)
    return False
|
||||
|
||||
#-----------------------------------------------------------------------------------------------------------------------------------------------------
|
||||
|
||||
# iperf server
|
||||
|
||||
def GetServerCommand():
    """Build the iperf server command line from CLI arguments.

    Required flags: -p/--port, -i/--interval. Optional: -u/--udp yes|no,
    -m/--maxsegdisplay yes|no, -M/--maxsegset <int>.

    Returns:
        str: the full nohup'd iperf server command.
    """
    import argparse
    import sys
    # Kept from the original for reference; not used by this function.
    validPlatformValues = ["532","540","541", "542", "550"]
    parser = argparse.ArgumentParser()

    parser.add_argument('-u', '--udp', help='switch : starts the server in udp data packets listening mode.', choices=['yes', 'no'] )
    parser.add_argument('-p', '--port', help='specifies which port should be used', required=True, type= int)
    parser.add_argument('-m', '--maxsegdisplay', help='Maximum Segment Size display ', choices=['yes', 'no'])
    parser.add_argument('-M', '--maxsegset', help='Maximum Segment Size Settings', type = int)
    parser.add_argument('-i', '--interval', help='specifies frequency of the output to be displyed on screen', type= int, required = True)
    args = parser.parse_args()

    # Assemble flags in the same order as the original concatenation.
    parts = ['iperf -s', ' -i' + str(args.interval), ' -p' + str(args.port)]
    if args.udp == 'yes':
        parts.append(' -u')
    if args.maxsegset is not None:
        parts.append(' -M' + str(args.maxsegset))
    if args.maxsegdisplay == 'yes':
        parts.append(' -m')

    return 'nohup ' + ''.join(parts) + ' > iperf-server.txt &'
|
||||
|
||||
#_________________________________________________________________________________________________________________________________________________
|
||||
|
||||
def StopServer():
    """Best-effort kill of any running iperf server process."""
    RunLog.info("Killing iperf server if running ..")
    _ = Run("killall iperf")
|
||||
|
||||
def StartServer(server):
    """Launch the given iperf server command and verify it is listening.

    Reads iperf-server.txt for the 'listening' marker, records the result in
    isServerStarted.txt, and sets the Aborted state on failure.
    """
    StopServer()
    RunLog.info("Starting iperf server..")
    _ = Run(server)
    _ = Run("sleep 1")

    status_file = open('iperf-server.txt', 'r')
    output = status_file.read()
    RunLog.info("Checking if server is started..")
    if "listening" in output:
        # One log entry + marker write per 'listening' token, as before.
        for token in str.split(output):
            if token == "listening":
                iperfPID = Run('pidof iperf')
                RunLog.info("Server started successfully. PID : %s", iperfPID)
                Run('echo "yes" > isServerStarted.txt')
    else:
        RunLog.error('Server Failed to start..')
        # NOTE(review): "yes" is written even on failure — this looks like it
        # should be "no"; confirm against whatever reads isServerStarted.txt.
        Run("echo yes > isServerStarted.txt")
        UpdateState('Aborted')
|
||||
|
||||
#_______________________________________________________________________________________________________________________________________________
|
||||
|
||||
def AnalyseClientUpdateResult():
    """Inspect iperf-client.txt and record PASS/FAIL plus the final state.

    A run counts as connected when the output contains 'connected'; each
    known failure marker then increments a failure counter. When never
    connected, the specific error is classified for the logs.

    Fix over the original: the log file handle is closed (with-block)
    instead of being leaked; the duplicated FAIL/UpdateState tail in the
    not-connected branch is consolidated (same log output, same state).
    """
    with open('iperf-client.txt', 'r') as iperfstatus:
        output = iperfstatus.read()

    Failure = 0
    RunLog.info("Checking if client was connected to server..")
    if ("connected" in output):
        if ("TestInComplete" in output):
            RunLog.error('Client was successfully connected but, iperf process failed to exit.')
            Failure = Failure + 1
        if ("failed" in output):
            RunLog.error("Client connected with some failed connections!")
            Failure = Failure + 1
        if ("error" in output):
            RunLog.error("There were some errors in the connections.")
            Failure = Failure + 1
        if ("refused" in output):
            RunLog.error("some connections were refused.")
            Failure = Failure + 1

        if (Failure == 0):
            RunLog.info("Client was successfully connected to server")
            ResultLog.info("PASS")
        else:
            ResultLog.info("FAIL")
        UpdateState("TestCompleted")
    else:
        # Never connected: classify the reason for the logs.
        RunLog.error('Client was not connected to server.')
        if ("No address associated" in output):
            RunLog.error("No address associated with hostname")
        elif ("Connection refused" in output):
            RunLog.error("Connection refused by the server.")
        elif ("Name or service not known" in output):
            RunLog.error("Name or service not known.")
        else:
            RunLog.error("Unlisted error. Check logs for more information...!")
        ResultLog.info('FAIL')
        UpdateState("TestCompleted")
|
||||
|
||||
|
||||
#________________________________________________________________________________________________________________________________________________
|
||||
|
||||
def isProcessRunning(processName):
    """Return the string "True" when processName appears in 'ps -ef'.

    Fix: the original ignored its argument and always counted the literal
    'iperf -c'; the parameter is now honoured (callers passing 'iperf -c'
    see identical behaviour). Note the string return values are preserved
    for existing callers.
    """
    process_listing = Run('ps -ef')
    if process_listing.count(processName) > 0:
        return "True"
    return "False"
|
||||
|
||||
#________________________________________________________________________________________________________________________________________________
|
||||
#
|
||||
#
|
||||
# VNET Library..
|
||||
|
||||
|
||||
#DECLARE GLOBAL VARIBALES HERE FIRST AND THEN ADD THEM TO SetVnetGlobalParametesrs()
|
||||
# Module-level VNET configuration, populated by SetVnetGlobalParameters().
vnetDomain_db_filepath = ''   # path to the DNS zone db file
vnetDomain_rev_filepath = ''  # path to the reverse-lookup zone file
dns_server_ip = ''            # IP address of the DNS server
resolv_conf_filepath = ''     # path to resolv.conf
hosts_filepath = ''           # path to the hosts file
|
||||
def SetVnetGlobalParameters():
    """Parse the VNET CLI arguments into the module-level globals.

    Flags (all required): -d dns_server_ip, -D vnetDomain_db_filepath,
    -r vnetDomain_rev_filepath, -R resolv_conf_filepath, -h hosts_filepath.

    Fix: argparse installs -h/--help by default, which collided with the
    '-h/--hosts_filepath' option and made add_argument raise ArgumentError;
    automatic help is therefore disabled on this parser.
    """
    import argparse
    import sys
    parser = argparse.ArgumentParser(add_help=False)
    parser.add_argument('-d', '--dns_server_ip', help='DNS server IP address',required=True)
    parser.add_argument('-D', '--vnetDomain_db_filepath', help='VNET Domain db filepath', required=True)
    parser.add_argument('-r', '--vnetDomain_rev_filepath', help='VNET rev filepath',required=True)
    parser.add_argument('-R', '--resolv_conf_filepath', help='resolv.conf filepath', required=True)
    parser.add_argument('-h', '--hosts_filepath', help='hosts filepath',required = True)
    args = parser.parse_args()
    global dns_server_ip
    global vnetDomain_db_filepath
    global vnetDomain_rev_filepath
    global resolv_conf_filepath
    global hosts_filepath
    vnetDomain_db_filepath = str(args.vnetDomain_db_filepath)
    vnetDomain_rev_filepath = str(args.vnetDomain_rev_filepath)
    dns_server_ip = str(args.dns_server_ip)
    resolv_conf_filepath = str(args.resolv_conf_filepath)
    hosts_filepath = str(args.hosts_filepath)
|
||||
|
||||
def GetFileContentsByLines(filepath):
    """Return the file's lines (newlines kept), or None if it cannot be opened."""
    try:
        handle = open(filepath, 'r')
    except:
        return None
    if handle is None:
        return None
    try:
        return handle.readlines()
    finally:
        handle.close()
|
||||
|
||||
def RemoveStringMatchLinesFromFile(filepath, matchString):
    """Rewrite filepath, dropping every line that contains matchString.

    Missing/unreadable files are reported on stdout, never raised.
    """
    try:
        original_lines = GetFileContentsByLines(filepath)
        rewritten = open(filepath,'w')
        for candidate in original_lines:
            if matchString in candidate:
                # Dropped line: report it (newline stripped for readability).
                print("removed %s from %s" % (candidate.replace('\n',''), filepath))
            else:
                rewritten.writelines(candidate)
        rewritten.close()
    except:
        print ('File : %s not found.' % filepath)
|
||||
|
||||
def ReplaceStringMatchLinesFromFile(filepath, matchString, newLine):
    """Rewrite filepath, replacing each line containing matchString with newLine.

    A trailing newline is appended to newLine when it has none. Missing
    files are reported on stdout, never raised.
    """
    try:
        original_lines = GetFileContentsByLines(filepath)
        rewritten = open(filepath,'w')
        for candidate in original_lines:
            if matchString not in candidate:
                rewritten.writelines(candidate)
                continue
            replacement = newLine if '\n' in newLine else '%s\n' % newLine
            rewritten.writelines(replacement)
        rewritten.close()
    except:
        print ('File : %s not found.' % filepath)
|
||||
|
||||
def GetStringMatchCount(filepath, matchString):
    """Count the lines of filepath that contain matchString.

    Returns None (after printing a message) when the file cannot be read.
    Fix: the original had its try/except commented out, which left the
    'not found' print unreachable and let a missing file raise an
    unhandled IOError; the evidently intended handler is restored.
    """
    try:
        # Existence/readability probe, as in the original.
        probe = open(filepath,'r')
        probe.close()
        matchCount = 0
        for eachLine in GetFileContentsByLines(filepath):
            if matchString in eachLine:
                matchCount = matchCount + 1
        return matchCount
    except:
        print ('File : %s not found.' % filepath)
|
||||
|
||||
def RemoveICAVMsFromDBfile(vnetDomain_db_filepath):
    """Strip every ICA VM record (lines containing 'ICA-') from the zone db file."""
    RemoveStringMatchLinesFromFile(vnetDomain_db_filepath, 'ICA-')
|
||||
|
||||
def RemoveICAVMsFromREVfile(vnetDomain_rev_filepath):
    """Strip every ICA VM record (lines containing 'ICA-') from the reverse zone file."""
    RemoveStringMatchLinesFromFile(vnetDomain_rev_filepath, 'ICA-')
|
||||
|
||||
|
||||
def RetryOperation(operation, description, expectResult=None, maxRetryCount=18, retryInterval=10):
    """Run a shell operation until it yields expectResult.

    When expectResult is None, any non-raising run is accepted immediately.
    Retries up to maxRetryCount times, sleeping retryInterval seconds
    between attempts.

    Returns:
        The matching (or last) output when expectResult was given; the
        output of the first successful run otherwise; None when every
        attempt raised and no expectation was set.
    """
    attempt = 1
    ret = None
    while True:
        RunLog.info("Attempt : %s : %s", attempt, description)
        ret = None
        try:
            ret = Run(operation)
            if expectResult is None or ret.strip() == expectResult:
                return ret
        except:
            RunLog.info("Retrying Operation")

        if attempt >= maxRetryCount:
            break
        attempt += 1
        time.sleep(retryInterval)

    return ret if expectResult is not None else None
|
||||
|
||||
def AppendTextToFile(filepath, textString):
    """Append textString to an EXISTING file.

    The initial read-mode open enforces existence: nothing is created when
    the file is missing — a message is printed instead.
    """
    try:
        with open(filepath, 'r'):
            pass  # existence check only
        with open(filepath, 'a') as target:
            if '\n' in textString:
                target.writelines(textString)
            else:
                target.write(textString)
    except:
        print('File %s not found' % filepath)
|
||||
|
||||
|
||||
def AddICAVMsToDnsServer(HostnameDIP, vnetDomain_db_filepath, vnetDomain_rev_filepath):
    """Register ICA VMs in the DNS zone db and reverse-lookup files.

    HostnameDIP format: 'host1:ip1^host2:ip2^...'. For each VM an A record
    is appended to the db file and a PTR record to the rev file, then both
    entries are verified.

    Returns:
        0 when every VM was registered, 1 otherwise.
    """
    # Zone name = db file's basename without its '.db' suffix.
    vnetDomain = (vnetDomain_db_filepath.split("/"))[len((vnetDomain_db_filepath.split("/")))-1].replace(".db","")

    vmCounter = 0
    successCount = 0
    for entry in HostnameDIP.split('^'):
        vmCounter = vmCounter + 1
        fields = entry.split(':')
        hostname = fields[0]
        dip = fields[1]
        lastOctet = dip.split('.')[3]

        dbRecord = '%s\tIN\tA\t%s\n' % (hostname, dip)
        print(dbRecord.replace('\n',''))
        AppendTextToFile(vnetDomain_db_filepath, dbRecord)

        revRecord = '%s\tIN\tPTR\t%s.%s.\n' % (lastOctet, hostname, vnetDomain)
        AppendTextToFile(vnetDomain_rev_filepath, revRecord)
        print(revRecord.replace('\n',''))

        # Verify both appends actually landed.
        dbHits = GetStringMatchCount(vnetDomain_db_filepath, dbRecord)
        revHits = GetStringMatchCount(vnetDomain_rev_filepath, revRecord)
        if dbHits >= 1 and revHits >= 1:
            print (vnetDomain_db_filepath + " file edited for " + dip + " : " + hostname)
            print (vnetDomain_rev_filepath + " file edited for " + dip + " : " + hostname)
            successCount = successCount + 1
        else:
            if dbHits != 1:
                print ("Failed to edit " + vnetDomain_db_filepath + " for " + dip + " : " + hostname)
            if revHits != 1:
                print ("Failed to edit " + vnetDomain_rev_filepath + " for " + dip + " : " + hostname)

    return 0 if successCount == vmCounter else 1
|
||||
|
||||
def RemoteUpload(hostIP, hostPassword, hostUsername, hostPort, filesToUpload, remoteLocation):
    # Upload a comma-separated list of local files to remoteLocation on the
    # remote host over SFTP (password auth). All errors are printed, never
    # raised.
    import paramiko
    transport = paramiko.Transport((hostIP,int(hostPort)))
    try:
        # NOTE: the trailing comma is the Python-2 print-statement idiom for
        # suppressing the newline; under Python 3 it just builds a 1-tuple.
        print('Connecting to %s'% hostIP),
        transport.connect(username = hostUsername, password = hostPassword)
        print('...Connected.')
        try:
            sftp = paramiko.SFTPClient.from_transport(transport)
            filesToUpload = filesToUpload.split(',')
            for eachFile in filesToUpload :
                # Derive the bare filename so it can be joined to remoteLocation.
                eachFileName = eachFile.split('/')
                eachFileNameLength = len(eachFileName)
                exactFileName = eachFileName[eachFileNameLength-1]
                if remoteLocation[-1] == '/':
                    newFile = "%s%s" % (remoteLocation,exactFileName)
                else:
                    newFile = "%s/%s" % (remoteLocation,exactFileName)
                try:
                    print ("Uploading %s to %s" % (eachFile, newFile)),
                    sftp.put(eachFile, newFile)
                    print ('...OK!')
                except:
                    # Per-file failure: keep going with the remaining files.
                    print('...Error!')
            transport.close()
        except:
            print("Failed to upload to %s" % hostIP)
    except:
        # Transport/auth failure.
        print("...Failed!")
|
||||
|
||||
def RemoteDownload(hostIP, hostPassword, hostUsername, hostPort, filesToDownload, localLocation):
    # Download a comma-separated list of remote files into localLocation
    # over SFTP (password auth). All errors are printed, never raised.
    import paramiko
    transport = paramiko.Transport((hostIP,int(hostPort)))
    try:
        # NOTE: the trailing comma is the Python-2 print-statement idiom for
        # suppressing the newline; under Python 3 it just builds a 1-tuple.
        print('Connecting to %s'% hostIP),
        transport.connect(username = hostUsername, password = hostPassword)
        print('...Connected.')
        try:
            sftp = paramiko.SFTPClient.from_transport(transport)
            filesToDownload = filesToDownload.split(',')
            for eachFile in filesToDownload :
                # Derive the bare filename so it can be joined to localLocation.
                eachFileName = eachFile.split('/')
                eachFileNameLength = len(eachFileName)
                exactFileName = eachFileName[eachFileNameLength-1]
                if localLocation[-1] == '/':
                    newFile = "%s%s" % (localLocation,exactFileName)
                else:
                    newFile = "%s/%s" % (localLocation,exactFileName)
                try:
                    print ("Downloading %s to %s" % (eachFile, newFile)),
                    sftp.get(eachFile, newFile)
                    print ('...OK!')
                except:
                    # Per-file failure: keep going with the remaining files.
                    print('...Error!')
            transport.close()
        except:
            print("Failed to Download to %s" % hostIP)
    except:
        # Transport/auth failure.
        print("...Failed!")
|
||||
|
||||
|
||||
def ConfigureResolvConf(resolv_conf_filepath, dns_server_ip, vnetDomain):
    """Point resolv.conf's 'search' line at the VNET domain.

    Returns:
        0 on success, 1 when the rewrite did not stick, 2 when the DNS
        server IP is not present in the file at all.
    """
    isDnsEntry = GetStringMatchCount(resolv_conf_filepath, dns_server_ip)
    # Kept from the original for its side effect of running 'hostname'.
    hostName = JustRun('hostname')
    if isDnsEntry != 1:
        print('DNS server IP is not present in ' + resolv_conf_filepath + ' file')
        return 2

    domainReplaceString = "search " + vnetDomain
    ReplaceStringMatchLinesFromFile(resolv_conf_filepath, 'search', domainReplaceString)
    if GetStringMatchCount(resolv_conf_filepath, domainReplaceString) == 1:
        print('Added string "search ' + vnetDomain + '" to ' + resolv_conf_filepath)
        return 0
    print('Failed to add string "search ' + vnetDomain + '" to ' + resolv_conf_filepath)
    return 1
|
||||
|
||||
def ConfigureHostsFile(hosts_filepath):
    """Append a '127.0.0.1 <hostname>' entry to the hosts file.

    Returns:
        0 when the entry is present afterwards, 1 otherwise.
    """
    hostName = JustRun('hostname')
    AppendTextToFile(hosts_filepath, "127.0.0.1 %s\n" % hostName)
    if GetStringMatchCount(hosts_filepath, hostName) >= 1:
        print('Added string "127.0.0.1 ' + hostName + '" to ' + hosts_filepath)
        return 0
    print('Failed to Add string "127.0.0.1 ' + hostName + '" to ' + hosts_filepath)
    return 1
|
||||
|
||||
def GetOSDisk():
    """Return the OS disk ('sda' or 'sdb') by elimination.

    If the resource-disk partition (looked up via /etc/mtab) lives on sda,
    the OS disk must be sdb; otherwise it is sda.
    """
    resourceDiskPartition = JustRun("grep -i '%s' /etc/mtab | awk '{print $1;}' | tr -d '\n'" % GetResourceDiskMountPoint())
    return 'sdb' if 'sda' in resourceDiskPartition else 'sda'
|
|
@ -0,0 +1,395 @@
|
|||
#!/bin/bash
|
||||
#
|
||||
# This script is library for shell scripts used in Azure Linux Automation.
|
||||
# Author: Srikanth Myakam
|
||||
# Email : v-srm@microsoft.com
|
||||
#
|
||||
#
|
||||
|
||||
function get_lis_version ()
{
    # Report the LIS version carried by the hv_vmbus module; fall back to a
    # sentinel when the module has no version tag (in-kernel drivers).
    lis_version=$(modinfo hv_vmbus | grep "^version:" | awk '{print $2}')
    if [ "$lis_version" == "" ]; then
        lis_version="Default_LIS"
    fi
    echo $lis_version
}
|
||||
|
||||
function get_host_version ()
{
    # Extract the Hyper-V "Host Build" number from the kernel ring buffer,
    # stripping the label prefix and the trailing semicolon.
    dmesg | grep "Host Build" | sed "s/.*Host Build://" | awk '{print $1}' | sed "s/;//"
}
|
||||
|
||||
function check_exit_status ()
{
    # Report success/failure of the immediately preceding command.
    #   $1 - human-readable operation name
    #   $2 - optional "exit": abort the script with the failing code
    local exit_status=$?
    local message=$1

    if [ $exit_status -eq 0 ]; then
        echo "$message: Success"
    else
        echo "$message: Failed (exit code: $exit_status)"
        if [ "$2" == "exit" ]; then
            exit $exit_status
        fi
    fi
}
|
||||
|
||||
function detect_linux_ditribution_version()
{
    # Version string from the first release file present; precedence:
    # centos-release > oracle-release > redhat-release > os-release.
    local distro_version="Unknown"
    if [ -f /etc/centos-release ] ; then
        distro_version=$(sed 's/.*release //;s/ .*//' /etc/centos-release)
    elif [ -f /etc/oracle-release ] ; then
        distro_version=$(sed 's/.*release //;s/ .*//' /etc/oracle-release)
    elif [ -f /etc/redhat-release ] ; then
        distro_version=$(sed 's/.*release //;s/ .*//' /etc/redhat-release)
    elif [ -f /etc/os-release ] ; then
        # VERSION_ID field, minus quotes and any stray carriage return.
        distro_version=$(grep "VERSION_ID=" /etc/os-release | sed 's/"//g;s/VERSION_ID=//;s/\r//')
    fi
    echo $distro_version
}
|
||||
|
||||
function detect_linux_ditribution()
{
    # Distro ID from the ID= field of /etc/*release, falling back to keyword
    # sniffing of the raw release text; output is first-letter capitalised.
    local distro_id
    local release_text
    distro_id=$(cat /etc/*release* | sed 's/"//g' | grep "^ID=" | sed 's/ID=//')
    release_text=$(cat /etc/*release*)
    if [ "$distro_id" == "" ]
    then
        if echo "$release_text" | grep -qi "ol"; then
            distro_id='Oracle'
        elif echo "$release_text" | grep -qi "Ubuntu"; then
            distro_id='Ubuntu'
        elif echo "$release_text" | grep -qi "SUSE Linux"; then
            distro_id='SUSE'
        elif echo "$release_text" | grep -qi "openSUSE"; then
            distro_id='OpenSUSE'
        elif echo "$release_text" | grep -qi "centos"; then
            distro_id='CentOS'
        elif echo "$release_text" | grep -qi "Oracle"; then
            distro_id='Oracle'
        elif echo "$release_text" | grep -qi "Red Hat"; then
            distro_id='RHEL'
        else
            distro_id='unknown'
        fi
    fi
    # Capitalise the first character (GNU sed \u).
    echo "$(echo "$distro_id" | sed 's/.*/\u&/')"
}
|
||||
|
||||
function updaterepos()
{
    # Refresh the package metadata cache for the detected distribution.
    ditribution=$(detect_linux_ditribution)
    case "$ditribution" in
        Oracle|RHEL|CentOS)
            yum makecache ;;
        Ubuntu)
            apt-get update ;;
        SUSE|openSUSE|sles)
            zypper refresh ;;
        *)
            echo "Unknown ditribution"
            return 1 ;;
    esac
}
|
||||
|
||||
function install_rpm ()
{
    # Install a local .rpm ($1), skipping dependency checks, then report
    # the result via check_exit_status.
    package_name=$1
    rpm -ivh --nodeps $package_name
    check_exit_status "install_rpm $package_name"
}
|
||||
|
||||
function install_deb ()
{
    # Install a local .deb ($1), then let 'apt-get install -f' pull in any
    # missing dependencies; the result reported is that of the apt-get run.
    package_name=$1
    dpkg -i $package_name
    apt-get install -f
    check_exit_status "install_deb $package_name"
}
|
||||
|
||||
function apt_get_install ()
{
    # Non-interactive apt-get install of package $1; report via
    # check_exit_status.
    package_name=$1
    DEBIAN_FRONTEND=noninteractive apt-get install -y --force-yes $package_name
    check_exit_status "apt_get_install $package_name"
}
|
||||
|
||||
function yum_install ()
{
    # yum install of package $1; report via check_exit_status.
    package_name=$1
    yum install -y $package_name
    check_exit_status "yum_install $package_name"
}
|
||||
|
||||
function zypper_install ()
{
    # Non-interactive zypper install of package $1; report via
    # check_exit_status.
    package_name=$1
    zypper --non-interactive in $package_name
    check_exit_status "zypper_install $package_name"
}
|
||||
|
||||
function install_package ()
{
    # Install each named package ($@) with the distribution's package
    # manager. Fix: the original expanded "${package_name[@]}" on a scalar
    # and never used the loop variable, so all arguments were handed to the
    # installer as one string; iterate the real argument list instead.
    local ditribution
    ditribution=$(detect_linux_ditribution)
    local pkg
    for pkg in "$@"
    do
        case "$ditribution" in
            Oracle|RHEL|CentOS)
                yum_install "$pkg"
                ;;

            Ubuntu)
                apt_get_install "$pkg"
                ;;

            SUSE|OpenSUSE|sles)
                zypper_install "$pkg"
                ;;

            *)
                echo "Unknown ditribution"
                return 1
        esac
    done
}
|
||||
|
||||
function creat_partitions ()
{
    # Create one new primary partition (#2, type fd / Linux raid autodetect)
    # on each listed disk by feeding fdisk its interactive answers.
    disk_list=($@)
    echo "Creating partitions on ${disk_list[@]}"

    local disk
    for disk in "${disk_list[@]}"
    do
        echo $disk
        # n=new, p=primary, 2=partition number, defaults for the sector
        # range, t/fd=set type, w=write table.
        (echo n; echo p; echo 2; echo; echo; echo t; echo fd; echo w;) | fdisk $disk
    done
}
|
||||
|
||||
function remove_partitions ()
{
    # Delete a partition (fdisk 'd') on each listed disk.
    disk_list=($@)
    # Fix: the original announced "Creating partitions" here (copy/paste
    # from creat_partitions).
    echo "Removing partitions on ${disk_list[@]}"

    count=0
    while [ "x${disk_list[count]}" != "x" ]
    do
        echo ${disk_list[$count]}
        # p=print table, d=delete (default partition), w=write table.
        (echo p; echo d; echo w;) | fdisk ${disk_list[$count]}
        count=$(( $count + 1 ))
    done
}
|
||||
|
||||
function create_raid_and_mount()
{
    # Build a RAID0 array from all unused data disks (every /dev/sdX except
    # sda/sdb), format it, and mount it persistently via /etc/fstab.
    # Args (optional, must be given together):
    #   $1 - md device name (default /dev/md1)
    #   $2 - mount point    (default /data-dir)
    #   $3 - filesystem     (default ext4)
    if [[ $# == 3 ]]
    then
        local deviceName=$1
        local mountdir=$2
        local format=$3
    else
        local deviceName="/dev/md1"
        local mountdir=/data-dir
        local format="ext4"
    fi

    local uuid=""
    local list=""

    echo "IO test setup started.."
    # Candidate disks: every disk fdisk reports, minus the OS/resource
    # disks sda and sdb.
    list=(`fdisk -l | grep 'Disk.*/dev/sd[a-z]' |awk '{print $2}' | sed s/://| sort| grep -v "/dev/sd[ab]$" `)

    lsblk
    install_package mdadm
    echo "--- Raid $deviceName creation started ---"
    # 'echo y' answers mdadm's "continue creating array?" prompt.
    (echo y)| mdadm --create $deviceName --level 0 --raid-devices ${#list[@]} ${list[@]}
    check_exit_status "$deviceName Raid creation"

    time mkfs -t $format $deviceName
    check_exit_status "$deviceName Raid format"

    # NOTE(review): mkdir without -p fails when $mountdir already exists,
    # and the second sed in the uuid pipeline ("s/\".*\"//") looks fragile —
    # confirm on a live VM.
    mkdir $mountdir
    uuid=`blkid $deviceName| sed "s/.*UUID=\"//"| sed "s/\".*\"//"`
    echo "UUID=$uuid $mountdir $format defaults 0 2" >> /etc/fstab
    mount $deviceName $mountdir
    check_exit_status "RAID ($deviceName) mount on $mountdir as $format"
}
|
||||
|
||||
# Copy a file to or from a remote host with password authentication.
# Usage: remote_copy -user <u> -passwd <p> -host <ip> -filename <f>
#                    [-remote_path <dir>] [-cmd put|get]
# Default direction is "get"; default remote path is the remote user's home
# ("~" is expanded by the remote side of scp, not locally).
function remote_copy ()
{
    remote_path="~"

    # Generic "-name value" parser: each leading-dash token becomes a shell
    # variable holding the following token (e.g. "-host 1.2.3.4" sets host).
    # NOTE(security): eval executes text taken from argv - callers must be
    # trusted; acceptable here since the test harness builds the arguments.
    while echo $1 | grep -q ^-; do
        eval $( echo $1 | sed 's/^-//' )=$2
        shift
        shift
    done

    # sshpass provides non-interactive password auth for scp.
    if [[ `which sshpass` == "" ]]
    then
        echo "sshpass not installed\n Installing now..."
        install_package "sshpass"
    fi

    if [ "x$host" == "x" ] || [ "x$user" == "x" ] || [ "x$passwd" == "x" ] || [ "x$filename" == "x" ] ; then
        echo "Usage: remote_copy -user <username> -passwd <user password> -host <host ipaddress> -filename <filename> -remote_path <location of the file on remote vm> -cmd <put/get>"
        return
    fi

    # "get" (or no -cmd) pulls the file here; "put" pushes it to the remote.
    if [ "$cmd" == "get" ] || [ "x$cmd" == "x" ]; then
        source_path="$user@$host:$remote_path/$filename"
        destination_path="."
    elif [ "$cmd" == "put" ]; then
        source_path=$filename
        destination_path=$user@$host:$remote_path/
    fi

    # Capture scp's combined output so the caller can parse it.
    status=`sshpass -p $passwd scp -o StrictHostKeyChecking=no $source_path $destination_path 2>&1`
    echo $status
}
|
||||
|
||||
# Run a command on a remote host over ssh with password authentication.
# Usage: remote_exec -user <u> -passwd <p> -host <ip> <command...>
# Everything after the dash options is treated as the remote command.
function remote_exec ()
{
    # Generic "-name value" parser (see remote_copy).
    # NOTE(security): eval executes text taken from argv - trusted callers only.
    while echo $1 | grep -q ^-; do
        eval $( echo $1 | sed 's/^-//' )=$2
        shift
        shift
    done
    # Remaining arguments form the command to execute remotely.
    cmd=$@
    if [[ `which sshpass` == "" ]]
    then
        echo "sshpass not installed\n Installing now..."
        install_package "sshpass"
    fi

    if [ "x$host" == "x" ] || [ "x$user" == "x" ] || [ "x$passwd" == "x" ] || [ "x$cmd" == "x" ] ; then
        echo "Usage: remote_exec -user <username> -passwd <user password> -host <host ipaddress> <onlycommand>"
        return
    fi

    # -t forces a pty so sudo-style commands behave; output is captured.
    status=`sshpass -p $passwd ssh -t -o StrictHostKeyChecking=no $user@$host $cmd 2>&1`
    echo $status
}
|
||||
|
||||
# Set the password for any account (including root) by rewriting its
# /etc/shadow entry.
# Usage:   set_user_password <user> <user_password> <sudo_password>
# Returns: 0 on success path, 1 on bad usage or unknown user.
function set_user_password {
    # This routine can set root or any user's password.
    if [[ $# == 3 ]]
    then
        user=$1
        user_password=$2
        sudo_password=$3
    else
        echo "Usage: user user_password sudo_password"
        # Fixed: 'return -1' is not a valid bash return value; use 1.
        return 1
    fi

    # Generate an MD5-crypt hash for the new password.
    hash=$(openssl passwd -1 $user_password)

    string=`echo $sudo_password | sudo -S cat /etc/shadow | grep $user`

    if [ "x$string" == "x" ]
    then
        echo "$user not found in /etc/shadow"
        return 1
    fi

    # Rebuild the shadow entry with the new hash in field 2.
    IFS=':' read -r -a array <<< "$string"
    line="${array[0]}:$hash:${array[2]}:${array[3]}:${array[4]}:${array[5]}:${array[6]}:${array[7]}:${array[8]}"

    echo $sudo_password | sudo -S sed -i "s#^${array[0]}.*#$line#" /etc/shadow

    # Verify the new entry landed in /etc/shadow.
    # Fixed: the original compared `wc -l` output against "" (always true,
    # so success was always reported) and grepped the hash as a regex;
    # use -F for a fixed-string match and compare the count against 0.
    if [ `echo $sudo_password | sudo -S cat /etc/shadow | grep -F -- "$line" | wc -l` -gt 0 ]
    then
        echo "Password set successfully"
    else
        echo "failed to set password"
    fi
}
|
||||
|
||||
# Collect basic VM properties into a CSV file (default VM_properties.csv).
# $1 - optional output file path.
# Relies on sibling helpers detect_linux_ditribution,
# detect_linux_ditribution_version, get_lis_version and get_host_version.
function collect_VM_properties ()
{
    # This routine collects the information in .csv format.
    # Anyone can expand this with useful details.
    # Better if it can collect details without su permission.

    local output_file=$1

    if [ "x$output_file" == "x" ]
    then
        output_file="VM_properties.csv"
    fi

    # Truncate the file, then append one ",key,value" row per property.
    echo "" > $output_file
    echo ",OS type,"`detect_linux_ditribution` `detect_linux_ditribution_version` >> $output_file
    echo ",Kernel version,"`uname -r` >> $output_file
    echo ",LIS Version,"`get_lis_version` >> $output_file
    echo ",Host Version,"`get_host_version` >> $output_file
    echo ",Total CPU cores,"`nproc` >> $output_file
    echo ",Total Memory,"`free -h|grep Mem|awk '{print $2}'` >> $output_file
    # sdb is the Azure resource (temporary) disk.
    echo ",Resource disks size,"`lsblk|grep "^sdb"| awk '{print $4}'` >> $output_file
    # Count data disks: every sd* device except sda/sdb.
    echo ",Data disks attached,"`lsblk | grep "^sd" | awk '{print $1}' | sort | grep -v "sd[ab]$" | wc -l` >> $output_file
    # NOTE(review): parses legacy "MTU:<n>" ifconfig output; newer net-tools
    # print "mtu <n>" so these fields may come back empty - confirm.
    echo ",eth0 MTU,"`ifconfig eth0|grep MTU|sed "s/.*MTU:\(.*\) .*/\1/"` >> $output_file
    echo ",eth1 MTU,"`ifconfig eth1|grep MTU|sed "s/.*MTU:\(.*\) .*/\1/"` >> $output_file
}
|
||||
|
||||
# Persist a command across reboots by inserting it into the known startup
# files (before their "exit 0" line when present, otherwise appended).
# All arguments together form the command to persist.
# NOTE(review): /etc/SuSE-release is a release-info file, not a startup
# script - confirm it is intentionally in this list.
function keep_cmd_in_startup ()
{
    testcommand=$*
    startup_files="/etc/rc.d/rc.local /etc/rc.local /etc/SuSE-release"
    count=0
    for file in $startup_files
    do
        if [[ -f $file ]]
        then
            # Skip files that already contain the command (idempotent).
            if ! grep -q "${testcommand}" $file
            then
                # Insert the command just before a leading "exit 0" so it
                # actually runs; the command text is spliced into the sed
                # program, so commands containing '/' would break this.
                sed "/^\s*exit 0/i ${testcommand}" $file -i
                # Fallback: if there was no "exit 0" line, append instead.
                if ! grep -q "${testcommand}" $file
                then
                    echo $testcommand >> $file
                fi
                echo "Added $testcommand >> $file"
                ((count++))
            fi
        fi
    done
    if [ $count == 0 ]
    then
        echo "Cannot find $startup_files files"
    fi
}
|
||||
|
||||
# Remove a command previously added by keep_cmd_in_startup from the known
# startup files. All arguments together form the command to remove.
function remove_cmd_from_startup ()
{
    testcommand=$*
    startup_files="/etc/rc.d/rc.local /etc/rc.local /etc/SuSE-release"
    count=0
    for file in $startup_files
    do
        if [[ -f $file ]]
        then
            if grep -q "${testcommand}" $file
            then
                # Blank out the command text in place. The command is
                # spliced into the sed program, so commands containing '/'
                # would break this; a blank line is left behind.
                sed "s/${testcommand}//" $file -i
                ((count++))
                echo "Removed $testcommand from $file"
            fi
        fi
    done
    if [ $count == 0 ]
    then
        echo "Cannot find $testcommand in $startup_files files"
    fi
}
|
||||
|
|
@ -0,0 +1,251 @@
|
|||
#!/bin/bash
|
||||
|
||||
#######################################################################
|
||||
#
|
||||
# Linux on Hyper-V and Azure Test Code, ver. 1.0.0
|
||||
# Copyright (c) Microsoft Corporation
|
||||
#
|
||||
# All rights reserved.
|
||||
# Licensed under the Apache License, Version 2.0 (the ""License"");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS
|
||||
# OF ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION
|
||||
# ANY IMPLIED WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR
|
||||
# PURPOSE, MERCHANTABLITY OR NON-INFRINGEMENT.
|
||||
#
|
||||
# See the Apache Version 2.0 License for specific language governing
|
||||
# permissions and limitations under the License.
|
||||
#
|
||||
#######################################################################
|
||||
|
||||
#######################################################################
|
||||
#
|
||||
#
|
||||
#
|
||||
# Description:
|
||||
#######################################################################
|
||||
|
||||
#HOW TO PARSE THE ARGUMENTS.. SOURCE - http://stackoverflow.com/questions/4882349/parsing-shell-script-arguments

# Generic "-name value" argument parser: each leading-dash token becomes a
# shell variable of that name holding the following token, e.g.
# "-customKernel proposed" sets customKernel=proposed.
# NOTE(security): eval executes text taken from argv; acceptable only
# because the automation harness controls the command line.
while echo $1 | grep ^- > /dev/null; do
    eval $( echo $1 | sed 's/-//g' | tr -d '\012')=$2
    shift
    shift
done
|
||||
#
# Constants/Globals
#
# Test-state strings written to state.txt (via UpdateTestState) and read
# back by the automation harness to decide pass/fail.
ICA_TESTRUNNING="TestRunning"      # The test is running
ICA_TESTCOMPLETED="TestCompleted"  # The test completed successfully
ICA_TESTABORTED="TestAborted"      # Error during the setup of the test
ICA_TESTFAILED="TestFailed"        # Error occurred during the test
||||
|
||||
#######################################################################
|
||||
#
|
||||
# LogMsg()
|
||||
#
|
||||
#######################################################################
|
||||
|
||||
# Require the mandatory -customKernel argument and default -logFolder to
# the caller's home directory when it was not supplied.
if [ -z "$customKernel" ]; then
    echo "Please mention -customKernel next"
    exit 1
fi
if [ -z "$logFolder" ]; then
    # Fixed: was logFolder="~". Tilde is not expanded inside quotes nor
    # after variable expansion, so every later $logFolder/... reference
    # pointed at a literal "~" path component. Use $HOME instead.
    logFolder="$HOME"
    echo "-logFolder is not mentioned. Using ~"
else
    echo "Using Log Folder $logFolder"
fi
|
||||
|
||||
# Print a timestamped message to stdout and append the raw message to the
# build log under $logFolder.
LogMsg()
{
    local msg="${1}"
    # "+%b %d %Y %T" => e.g. "Feb 09 2024 14:05:33"
    echo "$(date "+%b %d %Y %T") : ${msg}"
    echo "${msg}" >> $logFolder/build-customKernel.txt
}
|
||||
|
||||
# Publish the current test state for the harness by overwriting state.txt.
UpdateTestState()
{
    printf '%s\n' "${1}" > $logFolder/state.txt
}
|
||||
|
||||
|
||||
# Make sure the build log exists before the first LogMsg append.
touch $logFolder/build-customKernel.txt
|
||||
|
||||
# Block until no other dpkg instance is running, polling every 10 seconds.
# NOTE(review): waits by recursing rather than looping, so a package
# operation that never finishes would grow the call stack - confirm an
# upper bound is acceptable here.
CheckInstallLockUbuntu()
{
    dpkgPID=$(pidof dpkg)
    # pidof exits 0 only while at least one dpkg process exists.
    if [ $? -eq 0 ];then
        LogMsg "Another install is in progress. Waiting 10 seconds."
        sleep 10
        CheckInstallLockUbuntu
    else
        LogMsg "No lock on dpkg present."
    fi
}
|
||||
|
||||
|
||||
#######################################
# Install the kernel selected by $customKernel. Supported values:
#   linuxnext / netnext  - clone and build the upstream git tree (Ubuntu only)
#   proposed             - upgrade from Ubuntu's -proposed pocket
#   latest               - plain distro upgrade kernel
#   *.deb / *.rpm        - install a package from a URL or "localfile:<path>"
# Globals read:  customKernel, logFolder, ICA_* state strings
# Globals set:   kernelInstallStatus, kernelSource, sourceDir, DISTRO
# Outputs: progress via LogMsg; harness state via UpdateTestState; command
#          output appended to $logFolder/build-customKernel.txt.
#######################################
InstallKernel()
{
    sleep 10
    if [ "${customKernel}" == "linuxnext" ]; then
        kernelSource="https://git.kernel.org/pub/scm/linux/kernel/git/next/linux-next.git"
        sourceDir="linux-next"
    elif [ "${customKernel}" == "proposed" ]; then
        DISTRO=`grep -ihs "buntu\|Suse\|Fedora\|Debian\|CentOS\|Red Hat Enterprise Linux" /etc/{issue,*release,*version}`
        if [[ $DISTRO =~ "Xenial" ]];
        then
            LogMsg "Enabling proposed repositry..."
            echo "deb http://archive.ubuntu.com/ubuntu/ xenial-proposed restricted main multiverse universe" >> /etc/apt/sources.list
            rm -rf /etc/apt/preferences.d/proposed-updates
            LogMsg "Installing linux-image-generic from proposed repository."
            apt -y update >> $logFolder/build-customKernel.txt 2>&1
            apt -y --fix-missing upgrade >> $logFolder/build-customKernel.txt 2>&1
            kernelInstallStatus=$?
        elif [[ $DISTRO =~ "Trusty" ]];
        then
            LogMsg "Enabling proposed repositry..."
            echo "deb http://archive.ubuntu.com/ubuntu/ trusty-proposed restricted main multiverse universe" >> /etc/apt/sources.list
            rm -rf /etc/apt/preferences.d/proposed-updates
            LogMsg "Installing linux-image-generic from proposed repository."
            apt -y update >> $logFolder/build-customKernel.txt 2>&1
            apt -y --fix-missing upgrade >> $logFolder/build-customKernel.txt 2>&1
            kernelInstallStatus=$?
        fi
        UpdateTestState $ICA_TESTCOMPLETED
        if [ $kernelInstallStatus -ne 0 ]; then
            LogMsg "CUSTOM_KERNEL_FAIL"
            UpdateTestState $ICA_TESTFAILED
        else
            LogMsg "CUSTOM_KERNEL_SUCCESS"
            UpdateTestState $ICA_TESTCOMPLETED
        fi
    elif [ "${customKernel}" == "latest" ]; then
        DISTRO=`grep -ihs "buntu\|Suse\|Fedora\|Debian\|CentOS\|Red Hat Enterprise Linux" /etc/{issue,*release,*version}`
        if [[ $DISTRO =~ "Ubuntu" ]];
        then
            LogMsg "Installing linux-image-generic from repository."
            apt -y update >> $logFolder/build-customKernel.txt 2>&1
            apt -y --fix-missing upgrade >> $logFolder/build-customKernel.txt 2>&1
            LogMsg "Installing linux-image-generic from proposed repository."
            apt -y update >> $logFolder/build-customKernel.txt 2>&1
            apt -y --fix-missing upgrade >> $logFolder/build-customKernel.txt 2>&1
            kernelInstallStatus=$?
        fi
        UpdateTestState $ICA_TESTCOMPLETED
        if [ $kernelInstallStatus -ne 0 ]; then
            LogMsg "CUSTOM_KERNEL_FAIL"
            UpdateTestState $ICA_TESTFAILED
        else
            LogMsg "CUSTOM_KERNEL_SUCCESS"
            UpdateTestState $ICA_TESTCOMPLETED
        fi
    elif [ "${customKernel}" == "netnext" ]; then
        kernelSource="https://git.kernel.org/pub/scm/linux/kernel/git/davem/net-next.git"
        sourceDir="net-next"
    elif [[ $customKernel == *.deb ]]; then
        LogMsg "Custom Kernel:$customKernel"
        apt-get update
        if [[ $customKernel =~ "http" ]];then
            CheckInstallLockUbuntu
            apt-get install wget
            LogMsg "Debian package web link detected. Downloading $customKernel"
            wget $customKernel
            LogMsg "Installing ${customKernel##*/}"
            dpkg -i "${customKernel##*/}" >> $logFolder/build-customKernel.txt 2>&1
            kernelInstallStatus=$?
        else
            CheckInstallLockUbuntu
            # Strip the "localfile:" prefix to get the on-disk package path.
            prefix="localfile:"
            LogMsg "Installing ${customKernel#$prefix}"
            dpkg -i "${customKernel#$prefix}" >> $logFolder/build-customKernel.txt 2>&1
            kernelInstallStatus=$?
        fi

        UpdateTestState $ICA_TESTCOMPLETED
        if [ $kernelInstallStatus -ne 0 ]; then
            LogMsg "CUSTOM_KERNEL_FAIL"
            UpdateTestState $ICA_TESTFAILED
        else
            LogMsg "CUSTOM_KERNEL_SUCCESS"
            UpdateTestState $ICA_TESTCOMPLETED
        fi
    elif [[ $customKernel == *.rpm ]]; then
        LogMsg "Custom Kernel:$customKernel"

        if [[ $customKernel =~ "http" ]];then
            yum -y install wget
            LogMsg "RPM package web link detected. Downloading $customKernel"
            wget $customKernel
            LogMsg "Installing ${customKernel##*/}"
            rpm -ivh "${customKernel##*/}" >> $logFolder/build-customKernel.txt 2>&1
            kernelInstallStatus=$?
        else
            prefix="localfile:"
            LogMsg "Installing ${customKernel#$prefix}"
            rpm -ivh "${customKernel#$prefix}" >> $logFolder/build-customKernel.txt 2>&1
            kernelInstallStatus=$?
        fi
        UpdateTestState $ICA_TESTCOMPLETED
        if [ $kernelInstallStatus -ne 0 ]; then
            LogMsg "CUSTOM_KERNEL_FAIL"
            UpdateTestState $ICA_TESTFAILED
        else
            LogMsg "CUSTOM_KERNEL_SUCCESS"
            UpdateTestState $ICA_TESTCOMPLETED
            # Boot the freshly installed kernel on the next reboot.
            grub2-set-default 0
        fi
    fi
    # linuxnext/netnext: build the kernel from the source tree chosen above.
    if [[ ${customKernel} == "linuxnext" ]] || [[ ${customKernel} == "netnext" ]]; then
        LogMsg "Custom Kernel:$customKernel"
        chmod +x $logFolder/DetectLinuxDistro.sh
        LinuxDistro=`$logFolder/DetectLinuxDistro.sh`
        if [ $LinuxDistro == "SLES" -o $LinuxDistro == "SUSE" ]; then
            #zypper update
            zypper --non-interactive install git-core make tar gcc bc patch dos2unix wget xz
            #TBD
        elif [ $LinuxDistro == "CENTOS" -o $LinuxDistro == "REDHAT" -o $LinuxDistro == "FEDORA" -o $LinuxDistro == "ORACLELINUX" ]; then
            #yum update
            yum install -y git make tar gcc bc patch dos2unix wget xz
            #TBD
        elif [ $LinuxDistro == "UBUNTU" ]; then
            # Force non-interactive package configuration so the build
            # never blocks on a conffile prompt.
            unset UCF_FORCE_CONFFOLD
            export UCF_FORCE_CONFFNEW=YES
            export DEBIAN_FRONTEND=noninteractive
            ucf --purge /etc/kernel-img.conf
            export DEBIAN_FRONTEND=noninteractive
            LogMsg "Updating distro..."
            CheckInstallLockUbuntu
            apt-get update
            LogMsg "Installing packages git make tar gcc bc patch dos2unix wget ..."
            apt-get install -y git make tar gcc bc patch dos2unix wget >> $logFolder/build-customKernel.txt 2>&1
            LogMsg "Installing kernel-package ..."
            apt-get -o Dpkg::Options::="--force-confnew" -y install kernel-package >> $logFolder/build-customKernel.txt 2>&1
            rm -rf linux-next
            LogMsg "Downloading kernel source..."
            git clone ${kernelSource} >> $logFolder/build-customKernel.txt 2>&1
            cd ${sourceDir}
            #Download kernel build shell script...
            wget https://raw.githubusercontent.com/simonxiaoss/linux_performance_test/master/git_bisect/build-ubuntu.sh
            chmod +x build-ubuntu.sh
            #Start installing kernel
            LogMsg "Building and Installing kernel..."
            ./build-ubuntu.sh >> $logFolder/build-customKernel.txt 2>&1
            kernelInstallStatus=$?
            if [ $kernelInstallStatus -eq 0 ]; then
                LogMsg "CUSTOM_KERNEL_SUCCESS"
                # Fixed: this success branch previously reported
                # $ICA_TESTFAILED, so every successful source build was
                # recorded as a failed test.
                UpdateTestState $ICA_TESTCOMPLETED
            else
                LogMsg "CUSTOM_KERNEL_FAIL"
                UpdateTestState $ICA_TESTFAILED
            fi
        fi
    fi
    # NOTE(review): this unconditionally overwrites any earlier FAILED state
    # with COMPLETED; the harness appears to rely on the CUSTOM_KERNEL_*
    # log markers for pass/fail - confirm before tightening.
    UpdateTestState $ICA_TESTCOMPLETED
    return $kernelInstallStatus
}
InstallKernel
exit 0
|
|
@ -0,0 +1,166 @@
|
|||
#!/bin/bash
|
||||
|
||||
#######################################################################
|
||||
#
|
||||
# Description: It install the LIS using given LIS source file (.tar.gz or lis-next)
|
||||
# Usage: ./customLISInstall.sh -customLIS lisnext or tar file link -LISbranch a specific branch or default is master
|
||||
# Author: Sivakanth Rebba
|
||||
# Email : v-sirebb@microsoft.com
|
||||
#
|
||||
#######################################################################
|
||||
|
||||
#HOW TO PARSE THE ARGUMENTS.. SOURCE - http://stackoverflow.com/questions/4882349/parsing-shell-script-arguments

# Generic "-name value" argument parser: each leading-dash token becomes a
# shell variable of that name holding the following token, e.g.
# "-customLIS lisnext" sets customLIS=lisnext.
# NOTE(security): eval executes text taken from argv; acceptable only
# because the automation harness controls the command line.
while echo $1 | grep ^- > /dev/null; do
    eval $( echo $1 | sed 's/-//g' | tr -d '\012')=$2
    shift
    shift
done
#
# Constants/Globals
#
# Test-state strings written to ~/state.txt and read by the harness.
ICA_TESTRUNNING="TestRunning"      # The test is running
ICA_TESTCOMPLETED="TestCompleted"  # The test completed successfully
ICA_TESTABORTED="TestAborted"      # Error during the setup of the test
ICA_TESTFAILED="TestFailed"        # Error occurred during the test
||||
|
||||
#######################################################################
|
||||
#
|
||||
# LogMsg()
|
||||
#
|
||||
#######################################################################
|
||||
# Timestamped console message, also appended (without the timestamp) to the
# persistent log in the home directory.
LogMsg()
{
    local message="${1}"
    # "+%b %d %Y %T" => e.g. "Feb 09 2024 14:05:33"
    echo "$(date "+%b %d %Y %T") : ${message}"
    echo "${message}" >> ~/build-customLIS.txt
}
|
||||
|
||||
# Publish the current state to ~/state.txt for the automation harness.
UpdateTestState()
{
    printf '%s\n' "${1}" > ~/state.txt
}
|
||||
|
||||
# Require -customLIS; default -LISbranch to master when not supplied.
if [ -z "$customLIS" ]; then
    echo "Please mention -customLIS next"
    exit 1
fi
if [ -z "$LISbranch" ]; then
    echo "Not mentioned LIS branch, Use Master branch"
    LISbranch="master"
fi
# Make sure the log file exists before the first LogMsg append.
touch ~/build-customLIS.txt
|
||||
|
||||
# Detect the distribution family and version; used below to pick the right
# package manager and LIS source subdirectory.
# NOTE(review): CentOS also ships /etc/redhat-release, so CentOS hosts are
# classified as REDHAT by the first test - confirm that is intended.
DistroName="Unknown"
DistroVersion="Unknown"
if [ -f /etc/redhat-release ] ; then
    DistroName='REDHAT'
    DistroVersion=`cat /etc/redhat-release | sed s/.*release\ // | sed s/\ .*//`
elif [ -f /etc/centos-release ] ; then
    # Fixed: dropped a stray "DistroName==`cat ...`" line that assigned a
    # junk "=..." value which the next line immediately overwrote.
    DistroName='CENTOS'
    DistroVersion=`cat /etc/centos-release | sed s/.*release\ // | sed s/\ .*//`
elif [ -f /etc/SuSE-release ] ; then
    DistroName=`cat /etc/SuSE-release | tr "\n" ' '| sed s/VERSION.*//`
    DistroVersion=`cat /etc/SuSE-release | tr "\n" ' ' | sed s/.*=\ //`
elif [ -f /etc/debian_version ] ; then
    DistroName="Debian `cat /etc/debian_version`"
    DistroVersion=""
fi
if [ -f /etc/UnitedLinux-release ] ; then
    DistroName="${DistroName}[`cat /etc/UnitedLinux-release | tr "\n" ' ' | sed s/VERSION.*//`]"
fi
# Record environment details in the build log for post-mortem analysis.
LogMsg "*****OS Info*****"
cat /etc/*-release >> ~/build-customLIS.txt 2>&1
LogMsg "*****Kernel Info*****"
uname -r >> ~/build-customLIS.txt 2>&1
LogMsg "*****LIS Info*****"
modinfo hv_vmbus >> ~/build-customLIS.txt 2>&1
kernel=`uname -r`
|
||||
# Select and install the requested LIS, driven by $customLIS:
#   lisnext / netnext  - build hv drivers from the corresponding git tree
#   *.rpm              - install a pre-built LIS rpm from a URL
#   *.tar.gz           - install from the official LIS ISO tarball
# Globals read: customLIS, LISbranch, DistroName, DistroVersion, kernel,
#               ICA_* state strings.
if [ "${customLIS}" == "lisnext" ]; then
    LISSource="https://github.com/LIS/lis-next.git"
    sourceDir="lis-next"
elif [ "${customLIS}" == "netnext" ]; then
    LISSource="https://git.kernel.org/pub/scm/linux/kernel/git/davem/net-next.git"
    sourceDir="net-next"
elif [[ $customLIS == *.rpm ]]; then
    LogMsg "Custom LIS:$customLIS"
    # Re-enable any kernel packages excluded in yum.conf.
    sed -i '/^exclude/c\#exclude' /etc/yum.conf
    yum install -y wget tar
    # Fixed: this message previously said "Debian package" in the rpm path.
    LogMsg "RPM package web link detected. Downloading $customLIS"
    wget $customLIS
    LogMsg "Installing ${customLIS##*/}"
    rpm -ivh "${customLIS##*/}" >> ~/build-customLIS.txt 2>&1
    LISInstallStatus=$?
    UpdateTestState $ICA_TESTCOMPLETED
    if [ $LISInstallStatus -ne 0 ]; then
        LogMsg "CUSTOM_LIS_FAIL"
        UpdateTestState $ICA_TESTFAILED
    else
        LogMsg "CUSTOM_LIS_SUCCESS"
        UpdateTestState $ICA_TESTCOMPLETED
    fi
    exit 0
elif [[ $customLIS == *.tar.gz ]]; then
    LogMsg "Custom LIS:$customLIS"
    sed -i '/^exclude/c\#exclude' /etc/yum.conf
    yum install -y git make tar gcc bc patch dos2unix wget xz >> ~/build-customLIS.txt 2>&1
    LogMsg "LIS tar file web link detected. Downloading $customLIS"
    wget $customLIS
    LogMsg "Extracting ${customLIS##*/}"
    tar -xvzf "${customLIS##*/}"
    LogMsg "Installing ${customLIS##*/}"
    cd LISISO
    ./install.sh >> ~/build-customLIS.txt 2>&1
    LISInstallStatus=$?
    UpdateTestState $ICA_TESTCOMPLETED
    # Log the freshly loaded driver version for post-mortem analysis.
    modinfo hv_vmbus >> ~/build-customLIS.txt 2>&1
    if [ $LISInstallStatus -ne 0 ]; then
        LogMsg "CUSTOM_LIS_FAIL"
        UpdateTestState $ICA_TESTFAILED
    else
        LogMsg "CUSTOM_LIS_SUCCESS"
        UpdateTestState $ICA_TESTCOMPLETED
    fi
    exit 0
fi
LogMsg "Custom LIS:$customLIS"

# Source-build path (lisnext/netnext): only RHEL-family distros supported.
if [ $DistroName == "SLES" -o $DistroName == "SUSE" ]; then
    zypper --non-interactive install git-core make tar gcc bc patch dos2unix wget xz
    LogMsg "LIS doesn't support for $DistroName distro..."
elif [ $DistroName == "CENTOS" -o $DistroName == "REDHAT" -o $DistroName == "FEDORA" -o $DistroName == "ORACLELINUX" ]; then
    LogMsg "Installing packages git make tar gcc bc patch dos2unix wget ..."
    sed -i '/^exclude/c\#exclude' /etc/yum.conf
    yum install -y git make tar gcc bc patch dos2unix wget xz >> ~/build-customLIS.txt 2>&1
    LogMsg "Downloading LIS source from ${LISSource}..."
    git clone ${LISSource} >> ~/build-customLIS.txt 2>&1
    cd ${sourceDir}
    git checkout ${LISbranch}
    LogMsg "Downloaded LIS from this ${LISbranch} branch..."
    # Pick the driver tree matching the RHEL major version.
    if [[ $DistroVersion == *"5."* ]]; then
        LISsourceDir=hv-rhel5.x/hv
    elif [[ $DistroVersion == *"6."* ]]; then
        LISsourceDir=hv-rhel6.x/hv
    elif [[ $DistroVersion == *"7."* ]]; then
        LISsourceDir=hv-rhel7.x/hv
    fi
    cd $LISsourceDir
    LISDir=`pwd`
    LogMsg "Installing kernel-devel-${kernel} for LIS..."
    # Fixed: the log file used to be passed to yum as a package argument
    # because the '>>' redirection operator was missing before it.
    yum install -y "https://konkasoftpackages.blob.core.windows.net/linuxbinaries/kernel-devel-${kernel}.rpm" >> ~/build-customLIS.txt 2>&1
    LogMsg "LIS is installing from this ${LISDir} branch..."
    ./*-hv-driver-install >> ~/build-customLIS.txt 2>&1
    if [ $? -ne 0 ]; then
        LogMsg "CUSTOM_LIS_FAIL"
        UpdateTestState $ICA_TESTFAILED
        exit 0
    fi
elif [ $DistroName == "UBUNTU" ]; then
    LogMsg "LIS doesn't support for $DistroName distro..."
fi
UpdateTestState $ICA_TESTCOMPLETED
sleep 10
LogMsg "CUSTOM_LIS_SUCCESS"
sleep 10
exit 0
|
|
@ -0,0 +1,12 @@
|
|||
#!/usr/bin/python
# Resolve a hostname (or FQDN) to its IPv4 address and print it.

import argparse
import sys
import socket

parser = argparse.ArgumentParser()

parser.add_argument('-n', '--hostname', help='hostname or fqdn', required=True)
args = parser.parse_args()

# Fixed: was a Python-2-only "print" statement; the function form works
# under both Python 2 and Python 3, so the generic shebang is safe.
print(socket.gethostbyname(args.hostname))
|
|
@ -0,0 +1,346 @@
|
|||
#!/bin/bash
|
||||
#
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
#
|
||||
# Sample script to run sysbench.
|
||||
# In this script, we want to bench-mark device IO performance on a mounted folder.
|
||||
# You can adapt this script to other situations easily like for stripe disks as RAID0.
|
||||
# The only thing to keep in mind is that each different configuration you're testing
|
||||
# must log its output to a different directory.
|
||||
#
|
||||
|
||||
HOMEDIR="/root"

# Echo a timestamped message and append the same line to the run log.
LogMsg()
{
    # tee -a writes the line to stdout and the log file in one pass.
    echo "[$(date +"%x %r %Z")] ${1}" | tee -a "${HOMEDIR}/runlog.txt"
}
|
||||
LogMsg "Sleeping 10 seconds.."
sleep 10

#export PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/share/oem/bin:/usr/share/oem/python/bin:/opt/bin
CONSTANTS_FILE="$HOMEDIR/constants.sh"
# Test-state strings written to state.txt and read by the harness.
ICA_TESTRUNNING="TestRunning"      # The test is running
ICA_TESTCOMPLETED="TestCompleted"  # The test completed successfully
ICA_TESTABORTED="TestAborted"      # Error during the setup of the test
ICA_TESTFAILED="TestFailed"        # Error occurred during the test
touch ./fioTest.log

# Record the harness-visible test state. Fixed: this was defined only
# AFTER the constants-file check below, so the abort path's call to
# UpdateTestState hit "command not found"; define it first.
UpdateTestState()
{
    echo "${1}" > $HOMEDIR/state.txt
}

# Load the per-run test parameters (modes, fileSize, startIO, maxIO,
# startThread, maxThread, ioruntime, LOGDIR, ...) or abort the test.
if [ -e ${CONSTANTS_FILE} ]; then
    . ${CONSTANTS_FILE}
else
    errMsg="Error: missing ${CONSTANTS_FILE} file"
    LogMsg "${errMsg}"
    UpdateTestState $ICA_TESTABORTED
    exit 10
fi
|
||||
|
||||
# Install fio and its measurement companions (sysstat, blktrace, mdadm, ...)
# using the package manager matching the detected distribution.
# Calls sibling helpers LogMsg, UpdateTestState and UpdateSummary
# (UpdateSummary is not defined in this file - presumably sourced from
# constants.sh or a shared utils script; verify).
InstallFIO() {
    # Fingerprint the distro from its release files.
    DISTRO=`grep -ihs "buntu\|Suse\|Fedora\|Debian\|CentOS\|Red Hat Enterprise Linux\|clear-linux-os" /etc/{issue,*release,*version} /usr/lib/os-release`

    if [[ $DISTRO =~ "Ubuntu" ]] || [[ $DISTRO =~ "Debian" ]];
    then
        LogMsg "Detected UBUNTU/Debian. Installing required packages"
        # NOTE(review): the loop condition is "dpkg ...; sleep 10", i.e. the
        # status of sleep (always 0), so the until-body never runs and dpkg
        # executes exactly once - confirm the intended retry semantics.
        until dpkg --force-all --configure -a; sleep 10; do echo 'Trying again...'; done
        apt-get update
        apt-get install -y pciutils gawk mdadm
        apt-get install -y wget sysstat blktrace bc fio
        if [ $? -ne 0 ]; then
            LogMsg "Error: Unable to install fio"
            exit 1
        fi
        # blktrace needs debugfs mounted.
        mount -t debugfs none /sys/kernel/debug

    elif [[ $DISTRO =~ "Red Hat Enterprise Linux Server release 6" ]];
    then
        LogMsg "Detected RHEL 6.x; Installing required packages"
        # fio lives in EPEL on RHEL/CentOS.
        rpm -ivh https://dl.fedoraproject.org/pub/epel/epel-release-latest-6.noarch.rpm
        yum -y --nogpgcheck install wget sysstat mdadm blktrace libaio fio
        mount -t debugfs none /sys/kernel/debug

    elif [[ $DISTRO =~ "Red Hat Enterprise Linux Server release 7" ]];
    then
        LogMsg "Detected RHEL 7.x; Installing required packages"
        rpm -ivh https://dl.fedoraproject.org/pub/epel/epel-release-latest-7.noarch.rpm
        yum -y --nogpgcheck install wget sysstat mdadm blktrace libaio fio
        mount -t debugfs none /sys/kernel/debug

    elif [[ $DISTRO =~ "CentOS Linux release 6" ]] || [[ $DISTRO =~ "CentOS release 6" ]];
    then
        LogMsg "Detected CentOS 6.x; Installing required packages"
        rpm -ivh https://dl.fedoraproject.org/pub/epel/epel-release-latest-6.noarch.rpm
        yum -y --nogpgcheck install wget sysstat mdadm blktrace libaio fio
        mount -t debugfs none /sys/kernel/debug

    elif [[ $DISTRO =~ "CentOS Linux release 7" ]];
    then
        LogMsg "Detected CentOS 7.x; Installing required packages"
        rpm -ivh https://dl.fedoraproject.org/pub/epel/epel-release-latest-7.noarch.rpm
        yum -y --nogpgcheck install wget sysstat mdadm blktrace libaio fio
        mount -t debugfs none /sys/kernel/debug

    elif [[ $DISTRO =~ "SUSE Linux Enterprise Server 12" ]];
    then
        LogMsg "Detected SLES12. Installing required packages"
        # fio comes from the benchmark backports repo on SLES12.
        zypper addrepo http://download.opensuse.org/repositories/benchmark/SLE_12_SP2_Backports/benchmark.repo
        zypper --no-gpg-checks --non-interactive --gpg-auto-import-keys refresh
        zypper --no-gpg-checks --non-interactive --gpg-auto-import-keys remove gettext-runtime-mini-0.19.2-1.103.x86_64
        zypper --no-gpg-checks --non-interactive --gpg-auto-import-keys install sysstat
        zypper --no-gpg-checks --non-interactive --gpg-auto-import-keys install grub2
        zypper --no-gpg-checks --non-interactive --gpg-auto-import-keys install wget mdadm blktrace libaio1 fio
    elif [[ $DISTRO =~ "clear-linux-os" ]];
    then
        LogMsg "Detected Clear Linux OS. Installing required packages"
        swupd bundle-add dev-utils-dev sysadmin-basic performance-tools os-testsuite-phoronix network-basic openssh-server dev-utils os-core os-core-dev

    else
        LogMsg "Unknown Distro"
        UpdateTestState "TestAborted"
        UpdateSummary "Unknown Distro, test aborted"
        return 1
    fi
}
|
||||
|
||||
# Run the fio benchmark matrix: for every mode in $modes, sweep the queue
# depth from $startIO to $maxIO (multiplied by $io_increment) and the thread
# count from $startThread to $maxThread (doubling), capturing iostat and
# per-iteration JSON results, then tar up $LOGDIR.
# Globals read: fileSize, modes, startIO, maxIO, startThread, maxThread,
#               ioruntime, LOGDIR, HOMEDIR (all from constants.sh).
RunFIO()
{
    # Fixed: both UpdateTestState calls in this function were missing the
    # '$', so the literal variable NAME (e.g. "ICA_TESTRUNNING") was
    # written to state.txt instead of the state string the harness expects.
    UpdateTestState $ICA_TESTRUNNING
    FILEIO="--size=${fileSize} --direct=1 --ioengine=libaio --filename=fiodata --overwrite=1 "

    ####################################
    #All run config set here
    #

    #Log Config
    mkdir $HOMEDIR/FIOLog/jsonLog
    mkdir $HOMEDIR/FIOLog/iostatLog
    mkdir $HOMEDIR/FIOLog/blktraceLog

    #LOGDIR="${HOMEDIR}/FIOLog"
    JSONFILELOG="${LOGDIR}/jsonLog"
    IOSTATLOGDIR="${LOGDIR}/iostatLog"
    BLKTRACELOGDIR="${LOGDIR}/blktraceLog"
    LOGFILE="${LOGDIR}/fio-test.log.txt"

    # Redirect blktrace files onto the resource disk (mounted from sdb1).
    Resource_mount=$(mount -l | grep /sdb1 | awk '{print$3}')
    blk_base="${Resource_mount}/blk-$(date +"%m%d%Y-%H%M%S")"
    mkdir $blk_base

    #All possible values for file-test-mode are randread randwrite read write
    #modes='randread randwrite read write'
    iteration=0
    # Queue depth multiplier between sweep steps.
    io_increment=128

    ####################################
    echo "Test log created at: ${LOGFILE}"
    echo "===================================== Starting Run $(date +"%x %r %Z") ================================"
    echo "===================================== Starting Run $(date +"%x %r %Z") script generated 2/9/2015 4:24:44 PM ================================" >> $LOGFILE

    chmod 666 $LOGFILE
    echo "Preparing Files: $FILEIO"
    echo "Preparing Files: $FILEIO" >> $LOGFILE
    LogMsg "Preparing Files: $FILEIO"
    # Remove any old files from prior runs (to be safe), then prepare a set of new files.
    rm fiodata
    # Snapshot the environment into the log for post-mortem analysis.
    echo "--- Kernel Version Information ---" >> $LOGFILE
    uname -a >> $LOGFILE
    cat /proc/version >> $LOGFILE
    cat /etc/*-release >> $LOGFILE
    echo "--- PCI Bus Information ---" >> $LOGFILE
    lspci >> $LOGFILE
    echo "--- Drive Mounting Information ---" >> $LOGFILE
    mount >> $LOGFILE
    echo "--- Disk Usage Before Generating New Files ---" >> $LOGFILE
    df -h >> $LOGFILE
    fio --cpuclock-test >> $LOGFILE
    # Lay down the test file once so the measured runs hit existing data.
    fio $FILEIO --readwrite=read --bs=1M --runtime=1 --iodepth=128 --numjobs=8 --name=prepare
    echo "--- Disk Usage After Generating New Files ---" >> $LOGFILE
    df -h >> $LOGFILE
    echo "=== End Preparation $(date +"%x %r %Z") ===" >> $LOGFILE
    LogMsg "Preparing Files: $FILEIO: Finished."
    ####################################
    #Trigger run from here
    for testmode in $modes; do
        io=$startIO
        while [ $io -le $maxIO ]
        do
            Thread=$startThread
            while [ $Thread -le $maxThread ]
            do
                # Cap fio worker processes at 8; deeper queues come from
                # --iodepth rather than more jobs.
                if [ $Thread -ge 8 ]
                then
                    numjobs=8
                else
                    numjobs=$Thread
                fi
                iostatfilename="${IOSTATLOGDIR}/iostat-fio-${testmode}-${io}K-${Thread}td.txt"
                # Sample device utilization in the background during the run.
                nohup iostat -x 5 -t -y > $iostatfilename &
                #capture blktrace output during test
                #LogMsg "INFO: start blktrace for 40 sec on device sdd and sdf"
                #blk_operation="${blk_base}/blktrace-fio-${testmode}-${io}K-${Thread}td/"
                #mkdir $blk_operation
                #blktrace -w 40 -d /dev/sdf -D $blk_operation &
                #blktrace -w 40 -d /dev/sdm -D $blk_operation &
                echo "-- iteration ${iteration} ----------------------------- ${testmode} test, ${io}K bs, ${Thread} threads, ${numjobs} jobs, 5 minutes ------------------ $(date +"%x %r %Z") ---" >> $LOGFILE
                LogMsg "Running ${testmode} test, ${io}K bs, ${Thread} threads ..."
                jsonfilename="${JSONFILELOG}/fio-result-${testmode}-${io}K-${Thread}td.json"
                fio $FILEIO --readwrite=$testmode --bs=${io}K --runtime=$ioruntime --iodepth=$Thread --numjobs=$numjobs --output-format=json --output=$jsonfilename --name="iteration"${iteration} >> $LOGFILE
                #fio $FILEIO --readwrite=$testmode --bs=${io}K --runtime=$ioruntime --iodepth=$Thread --numjobs=$numjobs --name="iteration"${iteration} --group_reporting >> $LOGFILE
                # Stop the background iostat sampler for this iteration.
                iostatPID=`ps -ef | awk '/iostat/ && !/awk/ { print $2 }'`
                kill -9 $iostatPID
                Thread=$(( Thread*2 ))
                iteration=$(( iteration+1 ))
            done
            io=$(( io * io_increment ))
        done
    done
    ####################################
    echo "===================================== Completed Run $(date +"%x %r %Z") script generated 2/9/2015 4:24:44 PM ================================" >> $LOGFILE
    rm fiodata

    # Bundle every log for collection by the harness.
    compressedFileName="${HOMEDIR}/FIOTest-$(date +"%m%d%Y-%H%M%S").tar.gz"
    LogMsg "INFO: Please wait...Compressing all results to ${compressedFileName}..."
    tar -cvzf $compressedFileName $LOGDIR/

    echo "Test logs are located at ${LOGDIR}"
    UpdateTestState $ICA_TESTCOMPLETED
}
|
||||
|
||||
|
||||
# Build a RAID0 array from all /dev/sd[c-z] data disks, format it with the
# given filesystem, and mount it on ${mountDir}.
# Arguments: $1 - filesystem type passed to mkfs (e.g. ext4)
# Globals:   mdVolume (md device path, set in main), mountDir (mount point)
# Exits:     1 if the array cannot be created or mounted
CreateRAID0()
{
    # Enumerate candidate data disks sdc..sdz; sda/sdb are the OS/resource
    # disks on Azure and must not be touched.
    disks=$(ls -l /dev | grep 'sd[c-z]$' | awk '{print $10}')

    LogMsg "INFO: Check and remove RAID first"
    # Tear down any leftover md array from a previous run so a fresh one
    # can be assembled on the same partitions.
    mdvol=$(grep "active raid" /proc/mdstat | awk '{print $1}')
    if [ -n "$mdvol" ]; then
        echo "/dev/${mdvol} already exist...removing first"
        umount "/dev/${mdvol}"
        mdadm --stop "/dev/${mdvol}"
        mdadm --remove "/dev/${mdvol}"
        mdadm --zero-superblock /dev/sd[c-z][1-5]
    fi

    LogMsg "INFO: Creating Partitions"
    count=0
    for disk in ${disks}
    do
        echo "formatting disk /dev/${disk}"
        # Scripted fdisk: delete, create primary partition 1 spanning the
        # whole disk, set type fd (Linux raid autodetect), write.
        (echo d; echo n; echo p; echo 1; echo; echo; echo t; echo fd; echo w;) | fdisk "/dev/${disk}"
        count=$(( count + 1 ))
        sleep 1
    done
    LogMsg "INFO: Creating RAID of ${count} devices."
    sleep 1
    # Fail fast if array assembly fails; continuing would format/mount a
    # nonexistent device and produce a confusing downstream error.
    if ! mdadm --create "${mdVolume}" --level 0 --raid-devices "${count}" /dev/sd[c-z][1-5]; then
        LogMsg "Error: Unable to create raid ${mdVolume}"
        exit 1
    fi
    sleep 1
    time mkfs -t "$1" -F "${mdVolume}"
    # -p: do not fail if the mount point survives from a previous run.
    mkdir -p "${mountDir}"
    sleep 1
    # Original code reported mount failure as "Unable to create raid";
    # report the actual failing step instead.
    if ! mount -o nobarrier "${mdVolume}" "${mountDir}"; then
        LogMsg "Error: Unable to mount ${mdVolume} on ${mountDir}"
        exit 1
    else
        LogMsg "${mdVolume} mounted to ${mountDir} successfully."
    fi

    #LogMsg "INFO: adding fstab entry"
    #echo "${mdVolume} ${mountDir} ext4 defaults 1 1" >> /etc/fstab
}
|
||||
|
||||
# Build a striped LVM logical volume across all /dev/sd[c-z] data disks,
# format it with the given filesystem, and mount it on ${mountDir}.
# Arguments: $1 - filesystem type passed to mkfs (e.g. ext4)
# Globals:   vggroup (volume group name), mountDir (mount point)
# Exits:     1 if the logical volume cannot be mounted
CreateLVM()
{
    # Enumerate candidate data disks sdc..sdz; sda/sdb are the OS/resource
    # disks on Azure and must not be touched.
    disks=$(ls -l /dev | grep 'sd[c-z]$' | awk '{print $10}')

    # Tear down any leftover volume group from a previous run.
    vgExist=$(vgdisplay)
    if [ -n "$vgExist" ]; then
        umount "${mountDir}"
        lvremove -A n -f "/dev/${vggroup}/lv1"
        vgremove "${vggroup}" -f
    fi

    LogMsg "INFO: Creating Partition"
    count=0
    for disk in ${disks}
    do
        echo "formatting disk /dev/${disk}"
        # Scripted fdisk: delete, create primary partition 1 spanning the
        # whole disk, set type fd, write.
        (echo d; echo n; echo p; echo 1; echo; echo; echo t; echo fd; echo w;) | fdisk "/dev/${disk}"
        count=$(( count + 1 ))
    done

    LogMsg "INFO: Creating LVM with all data disks"
    pvcreate /dev/sd[c-z][1-5]
    vgcreate "${vggroup}" /dev/sd[c-z][1-5]
    # Bug fix: stripe across the actual number of attached data disks
    # instead of the hard-coded 12 (lvcreate fails when fewer than 12
    # disks are attached). -I 64 keeps the original 64K stripe size.
    lvcreate -l 100%FREE -i "${count}" -I 64 "${vggroup}" -n lv1
    time mkfs -t "$1" -F "/dev/${vggroup}/lv1"
    # -p: do not fail if the mount point survives from a previous run.
    mkdir -p "${mountDir}"
    if ! mount -o nobarrier "/dev/${vggroup}/lv1" "${mountDir}"; then
        LogMsg "Error: Unable to create LVM "
        exit 1
    fi

    #LogMsg "INFO: adding fstab entry"
    #echo "/dev/${vggroup}/lv1 ${mountDir} ext4 defaults 1 1" >> /etc/fstab
}
|
||||
|
||||
############################################################
# Main body
############################################################

HOMEDIR=$HOME
# Archive any previous run's logs under a timestamped name, then start a
# fresh log directory for this run.
mv $HOMEDIR/FIOLog/ $HOMEDIR/FIOLog-$(date +"%m%d%Y-%H%M%S")/
mkdir $HOMEDIR/FIOLog
LOGDIR="${HOMEDIR}/FIOLog"
# Identify the distro by scanning the release/issue files.
DISTRO=`grep -ihs "buntu\|Suse\|Fedora\|Debian\|CentOS\|Red Hat Enterprise Linux" /etc/{issue,*release,*version}`
# SLES 12 mdadm creates named arrays under /dev/md/; other distros expose
# the classic /dev/md0 node.
if [[ $DISTRO =~ "SUSE Linux Enterprise Server 12" ]];
then
    mdVolume="/dev/md/mdauto0"
else
    mdVolume="/dev/md0"
fi
vggroup="vg1"
mountDir="/data"
cd ${HOMEDIR}

InstallFIO

#Creating RAID before triggering test
CreateRAID0 ext4
#CreateLVM ext4

#Run test from here
LogMsg "*********INFO: Starting test execution*********"
cd ${mountDir}
# FIO test files are generated under this directory on the RAID volume.
mkdir sampleDIR
RunFIO
LogMsg "*********INFO: Script execution reach END. Completed !!!*********"
|
|
@ -0,0 +1,295 @@
|
|||
#!/bin/bash
|
||||
|
||||
#######################################################################
|
||||
#
|
||||
# Linux on Hyper-V and Azure Test Code, ver. 1.0.0
|
||||
# Copyright (c) Microsoft Corporation
|
||||
#
|
||||
# All rights reserved.
|
||||
# Licensed under the Apache License, Version 2.0 (the ""License"");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS
|
||||
# OF ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION
|
||||
# ANY IMPLIED WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR
|
||||
# PURPOSE, MERCHANTABLITY OR NON-INFRINGEMENT.
|
||||
#
|
||||
# See the Apache Version 2.0 License for specific language governing
|
||||
# permissions and limitations under the License.
|
||||
#
|
||||
#######################################################################
|
||||
|
||||
#######################################################################
|
||||
#
|
||||
# perf_IPERF3.sh
|
||||
# Author : SHITAL SAVEKAR <v-shisav@microsoft.com>
|
||||
#
|
||||
# Description:
|
||||
# Download and run IPERF3 network performance tests.
|
||||
# This script needs to be run on client VM.
|
||||
#
|
||||
# Supported Distros:
|
||||
# Ubuntu 16.04
|
||||
#######################################################################
|
||||
|
||||
# Per-test parameters (server, client, testType, connections, ...) are
# supplied by the automation harness through this file.
CONSTANTS_FILE="./constants.sh"
# Harness-visible test states written to ./state.txt by UpdateTestState.
ICA_TESTRUNNING="TestRunning"      # The test is running
ICA_TESTCOMPLETED="TestCompleted"  # The test completed successfully
ICA_TESTABORTED="TestAborted"      # Error during the setup of the test
ICA_TESTFAILED="TestFailed"        # Error occurred during the test
# Create the log file up front so appends in LogMsg always succeed.
touch ./IPERF3Test.log
|
||||
|
||||
# Install iperf3 and its helper tools on the remote host ${1} over ssh.
# Arguments: $1 - ssh target (host or user@host) to configure
# Globals:   IPversion (read; from constants.sh) - 6 triggers extra IPv6 setup
# Returns:   1 (after marking the test aborted) on an unrecognized distro
# NOTE(review): the distro is detected on the LOCAL VM, not on ${1} —
# assumes client and server run the same image; confirm.
InstallIPERF3()
{
    DISTRO=`grep -ihs "buntu\|Suse\|Fedora\|Debian\|CentOS\|Red Hat Enterprise Linux\|clear-linux-os" /etc/{issue,*release,*version} /usr/lib/os-release`
    if [[ $DISTRO =~ "Ubuntu" ]];
    then

        LogMsg "Detected Ubuntu"
        # NOTE(review): the 'until ...; sleep 10' condition ends with sleep,
        # which always succeeds, so the loop body never runs — effectively a
        # single dpkg --configure attempt; confirm intent.
        ssh ${1} "until dpkg --force-all --configure -a; sleep 10; do echo 'Trying again...'; done"
        ssh ${1} "apt-get update"
        ssh ${1} "apt-get -y install iperf3 sysstat bc psmisc"
        # IPv6 runs need additional interface configuration on the remote.
        if [ $IPversion -eq 6 ]; then
            scp ConfigureUbuntu1604IPv6.sh ${1}:
            ssh ${1} "chmod +x ConfigureUbuntu1604IPv6.sh"
            ssh ${1} "./ConfigureUbuntu1604IPv6.sh"
        fi
    elif [[ $DISTRO =~ "Red Hat Enterprise Linux Server release 6" ]] || [[ $DISTRO =~ "CentOS Linux release 6" ]] || [[ $DISTRO =~ "CentOS release 6" ]];
    then
        LogMsg "Detected Redhat/CentOS 6.x"
        # iperf3 is only available from EPEL on RHEL/CentOS.
        ssh ${1} "rpm -ivh https://dl.fedoraproject.org/pub/epel/epel-release-latest-6.noarch.rpm"
        ssh ${1} "yum -y --nogpgcheck install iperf3 sysstat bc psmisc"
        # Flush firewall rules so the iperf3 test ports are reachable.
        ssh ${1} "iptables -F"

    elif [[ $DISTRO =~ "Red Hat Enterprise Linux Server release 7" ]] || [[ $DISTRO =~ "CentOS Linux release 7" ]];
    then
        LogMsg "Detected Redhat/CentOS 7.x"
        ssh ${1} "rpm -ivh https://dl.fedoraproject.org/pub/epel/epel-release-latest-7.noarch.rpm"
        ssh ${1} "yum -y --nogpgcheck install iperf3 sysstat bc psmisc"
        ssh ${1} "iptables -F"
    elif [[ $DISTRO =~ "clear-linux-os" ]];
    then
        LogMsg "Detected Clear Linux OS. Installing required packages"
        ssh ${1} "swupd bundle-add dev-utils-dev sysadmin-basic performance-tools os-testsuite-phoronix network-basic openssh-server dev-utils os-core os-core-dev"
        ssh ${1} "iptables -F"

    else
        LogMsg "Unknown Distro"
        UpdateTestState "TestAborted"
        UpdateSummary "Unknown Distro, test aborted"
        return 1
    fi
}
|
||||
|
||||
|
||||
# Write "<timestamp> : <message>" to stdout and append the raw message to
# the local iperf3 test log.
LogMsg()
{
    local stamp
    stamp=$(date "+%b %d %Y %T")
    echo "${stamp}" : "${1}"
    echo "${1}" >> ./IPERF3Test.log
}
|
||||
|
||||
# Record the current test state in ./state.txt for the harness to poll.
# Overwrites the file so only the latest state is visible.
UpdateTestState()
{
    local new_state="${1}"
    echo "${new_state}" > ./state.txt
}
|
||||
|
||||
# Load harness-supplied parameters; abort early if they are missing.
if [ -e ${CONSTANTS_FILE} ]; then
    source ${CONSTANTS_FILE}
else
    errMsg="Error: missing ${CONSTANTS_FILE} file"
    LogMsg "${errMsg}"
    UpdateTestState $ICA_TESTABORTED
    exit 10
fi

# Validate each required constant; abort with a hint if any is unset.
if [ ! ${server} ]; then
    errMsg="Please add/provide value for server in constants.sh. server=<server ip>"
    LogMsg "${errMsg}"
    echo "${errMsg}" >> ./summary.log
    UpdateTestState $ICA_TESTABORTED
    exit 1
fi
if [ ! ${client} ]; then
    errMsg="Please add/provide value for client in constants.sh. client=<client ip>"
    LogMsg "${errMsg}"
    echo "${errMsg}" >> ./summary.log
    UpdateTestState $ICA_TESTABORTED
    exit 1
fi

if [ ! ${testDuration} ]; then
    errMsg="Please add/provide value for testDuration in constants.sh. testDuration=60"
    LogMsg "${errMsg}"
    echo "${errMsg}" >> ./summary.log
    UpdateTestState $ICA_TESTABORTED
    exit 1
fi

if [ ! ${testType} ]; then
    errMsg="Please add/provide value for testType in constants.sh. testType=tcp/udp"
    LogMsg "${errMsg}"
    echo "${errMsg}" >> ./summary.log
    UpdateTestState $ICA_TESTABORTED
    exit 1
fi

if [ ! ${max_parallel_connections_per_instance} ]; then
    errMsg="Please add/provide value for max_parallel_connections_per_instance in constants.sh. max_parallel_connections_per_instance=60"
    LogMsg "${errMsg}"
    echo "${errMsg}" >> ./summary.log
    UpdateTestState $ICA_TESTABORTED
    exit 1
fi

if [ ! ${connections} ]; then
    errMsg="Please add/provide value for connections in constants.sh. connections=(1 2 4 8 ....)"
    LogMsg "${errMsg}"
    echo "${errMsg}" >> ./summary.log
    UpdateTestState $ICA_TESTABORTED
    exit 1
fi

if [ ! ${bufferLengths} ]; then
    errMsg="Please add/provide value for bufferLengths in constants.sh. bufferLengths=(1 8). Note buffer lenghs are in Bytest"
    LogMsg "${errMsg}"
    echo "${errMsg}" >> ./summary.log
    UpdateTestState $ICA_TESTABORTED
    exit 1
fi

if [ ! ${IPversion} ]; then
    errMsg="Please add/provide value for IPversion in constants.sh. IPversion=4/6."
    LogMsg "${errMsg}"
    echo "${errMsg}" >> ./summary.log
    UpdateTestState $ICA_TESTABORTED
    exit 1
fi

# IPv6 runs additionally need explicit IPv6 addresses for both endpoints.
if [ $IPversion -eq 6 ]; then
    if [ ! ${serverIpv6} ]; then
        errMsg="Please add/provide value for serverIpv6 in constants.sh"
        LogMsg "${errMsg}"
        echo "${errMsg}" >> ./summary.log
        UpdateTestState $ICA_TESTABORTED
        exit 1
    fi
    if [ ! ${clientIpv6} ]; then
        errMsg="Please add/provide value for clientIpv6 in constants.sh."
        LogMsg "${errMsg}"
        echo "${errMsg}" >> ./summary.log
        UpdateTestState $ICA_TESTABORTED
        exit 1
    fi

fi


#connections=(64 128)
#BufferLenghts are in Bytes
#max_parallel_connections_per_instance=64
#Make & build IPERF3 on client and server Machine

LogMsg "Configuring client ${client}..."
InstallIPERF3 ${client}

LogMsg "Configuring server ${server}..."
InstallIPERF3 ${server}

# Remove result files from any previous run.
ssh ${server} "rm -rf iperf-server-*"
ssh ${client} "rm -rf iperf-client-*"
ssh ${client} "rm -rf iperf-server-*"


#connections=(1 2 4 8 16 32 64 128 256 512 1024)
#BufferLenghts are in K
#bufferLenghs=(1 8)

# Sweep every (buffer length, total connections) combination. Each total
# is split into num_threads_n iperf3 instances of num_threads_P parallel
# streams each, capped by max_parallel_connections_per_instance.
for current_buffer in "${bufferLengths[@]}"
do
    for current_test_connections in "${connections[@]}"
    do
        if [ $current_test_connections -lt $max_parallel_connections_per_instance ]
        then
            num_threads_P=$current_test_connections
            num_threads_n=1
        else
            num_threads_P=$max_parallel_connections_per_instance
            num_threads_n=$(($current_test_connections / $num_threads_P))
        fi

        # Kill leftovers from the previous iteration on both ends.
        ssh ${server} "killall iperf3"
        ssh ${client} "killall iperf3"
        LogMsg "Starting $num_threads_n iperf3 server instances on $server.."
        # One server instance per port, starting at 750; -1 makes each
        # server exit after a single client session.
        startPort=750
        currentPort=$startPort
        currentIperfInstanses=0
        while [ $currentIperfInstanses -lt $num_threads_n ]
        do
            currentIperfInstanses=$(($currentIperfInstanses+1))
            serverCommand="iperf3 -s -1 -J -i10 -f g -p ${currentPort} > iperf-server-${testType}-IPv${IPversion}-buffer-${current_buffer}-conn-$current_test_connections-instance-${currentIperfInstanses}.txt 2>&1"
            ssh ${server} $serverCommand &
            LogMsg "Executed: $serverCommand"
            currentPort=$(($currentPort+1))
            sleep 0.1
        done

        LogMsg "$num_threads_n iperf server instances started on $server.."
        sleep 5
        LogMsg "Starting client.."
        startPort=750
        currentPort=$startPort
        currentIperfInstanses=0
        # For IPv6 the server must be addressed by its IPv6 address.
        if [ $IPversion -eq 4 ]; then
            testServer=$server
        else
            testServer=$serverIpv6
        fi
        #ssh ${client} "./sar-top.sh ${testDuration} $current_test_connections root" &
        #ssh ${server} "./sar-top.sh ${testDuration} $current_test_connections root" &
        # Launch one client per server instance on matching ports;
        # -b 0 means unlimited bandwidth, output is JSON (-J).
        while [ $currentIperfInstanses -lt $num_threads_n ]
        do
            currentIperfInstanses=$(($currentIperfInstanses+1))

            if [[ "$testType" == "udp" ]];
            then
                clientCommand="iperf3 -c $testServer -u -b 0 -J -f g -i10 -l ${current_buffer} -t ${testDuration} -p ${currentPort} -P $num_threads_P -${IPversion} > iperf-client-${testType}-IPv${IPversion}-buffer-${current_buffer}-conn-$current_test_connections-instance-${currentIperfInstanses}.txt 2>&1"
            fi
            if [[ "$testType" == "tcp" ]];
            then
                clientCommand="iperf3 -c $testServer -b 0 -J -f g -i10 -l ${current_buffer} -t ${testDuration} -p ${currentPort} -P $num_threads_P -${IPversion} > iperf-client-${testType}-IPv${IPversion}-buffer-${current_buffer}-conn-$current_test_connections-instance-${currentIperfInstanses}.txt 2>&1"
            fi

            ssh ${client} $clientCommand &
            LogMsg "Executed: $clientCommand"
            currentPort=$(($currentPort+1))
            sleep 0.1
        done
        LogMsg "Iperf3 running buffer ${current_buffer}Bytes $num_threads_P X $num_threads_n ..."
        sleep ${testDuration}
        # Wait up to ~900s more for local iperf3 clients to finish, then
        # kill whatever is left.
        timeoutSeconds=900
        sleep 5
        var=`ps -C "iperf3 -c" --no-headers | wc -l`
        echo $var
        while [[ $var -gt 0 ]];
        do
            timeoutSeconds=`expr $timeoutSeconds - 1`
            if [ $timeoutSeconds -eq 0 ]; then
                LogMsg "Iperf3 running buffer ${current_buffer}K $num_threads_P X $num_threads_n. Timeout."
                LogMsg "killing all iperf3 client threads."
                killall iperf3
                sleep 1
            else
                sleep 1
                var=`ps -C "iperf3 -c" --no-headers | wc -l`
                LogMsg "Iperf3 running buffer ${current_buffer}K $num_threads_P X $num_threads_n. Waiting to finish $var instances."
            fi
        done
        #Sleep extra 5 seconds.
        sleep 5
        LogMsg "Iperf3 Finished buffer ${current_buffer} $num_threads_P X $num_threads_n ..."
    done
done
# Collect server-side result files locally for the harness to parse.
scp ${server}:iperf-server-* ./
# NOTE(review): passes the literal string "ICA_TESTCOMPLETED" rather than
# $ICA_TESTCOMPLETED ("TestCompleted") — confirm which value the harness
# expects in state.txt before changing.
UpdateTestState ICA_TESTCOMPLETED
|
|
@ -0,0 +1,189 @@
|
|||
#!/bin/bash
|
||||
|
||||
#######################################################################
|
||||
#
|
||||
# Linux on Hyper-V and Azure Test Code, ver. 1.0.0
|
||||
# Copyright (c) Microsoft Corporation
|
||||
#
|
||||
# All rights reserved.
|
||||
# Licensed under the Apache License, Version 2.0 (the ""License"");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS
|
||||
# OF ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION
|
||||
# ANY IMPLIED WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR
|
||||
# PURPOSE, MERCHANTABLITY OR NON-INFRINGEMENT.
|
||||
#
|
||||
# See the Apache Version 2.0 License for specific language governing
|
||||
# permissions and limitations under the License.
|
||||
#
|
||||
#######################################################################
|
||||
|
||||
#######################################################################
|
||||
#
|
||||
# perf_lagscope.sh
|
||||
# Author : SHITAL SAVEKAR <v-shisav@microsoft.com>
|
||||
#
|
||||
# Description:
|
||||
# Download and run lagscope latency tests.
|
||||
# This script needs to be run on client VM.
|
||||
#
|
||||
# Supported Distros:
|
||||
# Ubuntu 16.04
|
||||
#######################################################################
|
||||
|
||||
# Per-test parameters (server, client, pingIteration) are supplied by the
# automation harness through this file.
CONSTANTS_FILE="./constants.sh"
# Harness-visible test states written to ./state.txt by UpdateTestState.
ICA_TESTRUNNING="TestRunning"      # The test is running
ICA_TESTCOMPLETED="TestCompleted"  # The test completed successfully
ICA_TESTABORTED="TestAborted"      # Error during the setup of the test
ICA_TESTFAILED="TestFailed"        # Error occurred during the test
# Create the log file up front so appends always succeed.
touch ./lagscopeTest.log
|
||||
|
||||
|
||||
# Install lagscope (built from source) and its prerequisites on the remote
# host ${1} over ssh.
# Arguments: $1 - ssh target (host or user@host) to configure
# Returns:   1 (after marking the test aborted) on an unrecognized distro
# NOTE(review): the distro is detected on the LOCAL VM, not on ${1} —
# assumes client and server run the same image; confirm.
InstallLAGSCOPE() {
    DISTRO=`grep -ihs "buntu\|Suse\|Fedora\|Debian\|CentOS\|Red Hat Enterprise Linux\|clear-linux-os" /etc/{issue,*release,*version} /usr/lib/os-release`

    if [[ $DISTRO =~ "Ubuntu" ]];
    then
        LogMsg "Detected UBUNTU"
        LogMsg "Configuring ${1} for lagscope test..."
        # NOTE(review): the 'until ...; sleep 10' condition ends with sleep,
        # which always succeeds, so the loop body never runs — effectively a
        # single dpkg --configure attempt; confirm intent.
        ssh ${1} "until dpkg --force-all --configure -a; sleep 10; do echo 'Trying again...'; done"
        ssh ${1} "apt-get update"
        ssh ${1} "apt-get -y install libaio1 sysstat git bc make gcc"
        ssh ${1} "rm -rf lagscope"
        ssh ${1} "git clone https://github.com/Microsoft/lagscope"
        ssh ${1} "cd lagscope/src && make && make install"

    elif [[ $DISTRO =~ "Red Hat Enterprise Linux Server release 6" ]];
    then
        LogMsg "Detected Redhat 6.x"
        ssh ${1} "rpm -ivh https://dl.fedoraproject.org/pub/epel/epel-release-latest-6.noarch.rpm"
        ssh ${1} "yum -y --nogpgcheck install libaio1 sysstat git bc make gcc"
        ssh ${1} "yum -y --nogpgcheck install gcc-c++"

        # RHEL 6 ships an older glibc; build 2.14 from source for lagscope.
        # NOTE(review): $LD_LIBRARY_PATH inside double quotes is expanded on
        # the LOCAL shell before ssh, and the export does not persist past
        # this ssh session anyway — confirm whether this step is effective.
        ssh ${1} "wget http://ftp.heanet.ie/mirrors/gnu/libc/glibc-2.14.1.tar.gz"
        ssh ${1} "tar xvf glibc-2.14.1.tar.gz"
        ssh ${1} "mv glibc-2.14.1 glibc-2.14 && cd glibc-2.14 && mkdir build && cd build && ../configure --prefix=/opt/glibc-2.14 && make && make install && export LD_LIBRARY_PATH=/opt/glibc-2.14/lib:$LD_LIBRARY_PATH"

        ssh ${1} "rm -rf lagscope"
        ssh ${1} "git clone https://github.com/Microsoft/lagscope"
        ssh ${1} "cd lagscope/src && make && make install"
        # Flush firewall rules so the lagscope test port is reachable.
        ssh ${1} "iptables -F"

    elif [[ $DISTRO =~ "Red Hat Enterprise Linux Server release 7" ]];
    then
        LogMsg "Detected Redhat 7.x"
        ssh ${1} "rpm -ivh https://dl.fedoraproject.org/pub/epel/epel-release-latest-7.noarch.rpm"
        ssh ${1} "yum -y --nogpgcheck install libaio1 sysstat git bc make gcc"
        ssh ${1} "rm -rf lagscope"
        ssh ${1} "git clone https://github.com/Microsoft/lagscope"
        ssh ${1} "cd lagscope/src && make && make install"
        ssh ${1} "iptables -F"

    elif [[ $DISTRO =~ "CentOS Linux release 6" ]];
    then
        LogMsg "Detected CentOS 6.x"
        ssh ${1} "rpm -ivh https://dl.fedoraproject.org/pub/epel/epel-release-latest-6.noarch.rpm"
        ssh ${1} "yum -y --nogpgcheck install libaio1 sysstat git bc make gcc"
        ssh ${1} "yum -y --nogpgcheck install gcc-c++"
        ssh ${1} "rm -rf lagscope"
        ssh ${1} "git clone https://github.com/Microsoft/lagscope"
        ssh ${1} "cd lagscope/src && make && make install"
        ssh ${1} "iptables -F"

    elif [[ $DISTRO =~ "CentOS Linux release 7" ]];
    then
        LogMsg "Detected CentOS 7.x"
        ssh ${1} "rpm -ivh https://dl.fedoraproject.org/pub/epel/epel-release-latest-7.noarch.rpm"
        ssh ${1} "yum -y --nogpgcheck install libaio1 sysstat git bc make gcc"
        ssh ${1} "rm -rf lagscope"
        ssh ${1} "git clone https://github.com/Microsoft/lagscope"
        ssh ${1} "cd lagscope/src && make && make install"
        ssh ${1} "iptables -F"

    elif [[ $DISTRO =~ "SUSE Linux Enterprise Server 12" ]];
    then
        LogMsg "Detected SLES12"
        # gettext-runtime-mini conflicts with packages pulled in below.
        ssh ${1} "zypper --no-gpg-checks --non-interactive --gpg-auto-import-keys remove gettext-runtime-mini*"
        ssh ${1} "zypper --no-gpg-checks --non-interactive --gpg-auto-import-keys install sysstat git bc make gcc grub2"
        ssh ${1} "rm -rf lagscope"
        ssh ${1} "git clone https://github.com/Microsoft/lagscope"
        ssh ${1} "cd lagscope/src && make && make install"
        ssh ${1} "iptables -F"
    elif [[ $DISTRO =~ "clear-linux-os" ]];
    then
        LogMsg "Detected Clear Linux OS. Installing required packages"
        ssh ${1} "swupd bundle-add dev-utils-dev sysadmin-basic performance-tools os-testsuite-phoronix network-basic openssh-server dev-utils os-core os-core-dev"
        ssh ${1} "iptables -F"

    else
        LogMsg "Unknown Distro"
        UpdateTestState "TestAborted"
        UpdateSummary "Unknown Distro, test aborted"
        return 1
    fi
}
|
||||
# Write "<timestamp> : <message>" to stdout and append the raw message to
# the local lagscope test log.
# Bug fix: this script creates ./lagscopeTest.log at startup, but log
# lines were appended to ./ntttcpTest.log (copy-paste from the ntttcp
# script); append to the lagscope log instead.
LogMsg()
{
    echo `date "+%b %d %Y %T"` : "${1}" # Add the time stamp to the log message
    echo "${1}" >> ./lagscopeTest.log
}
|
||||
|
||||
# Persist the harness-visible test state in ./state.txt, replacing any
# previous state.
UpdateTestState()
{
    # Truncate first, then write the new state as the only line.
    : > ./state.txt
    echo "${1}" >> ./state.txt
}
|
||||
|
||||
# Load harness-supplied parameters; abort early if they are missing.
if [ -e ${CONSTANTS_FILE} ]; then
    source ${CONSTANTS_FILE}
else
    errMsg="Error: missing ${CONSTANTS_FILE} file"
    LogMsg "${errMsg}"
    UpdateTestState $ICA_TESTABORTED
    exit 10
fi

# Validate each required constant; abort with a hint if any is unset.
if [ ! ${server} ]; then
    errMsg="Please add/provide value for server in constants.sh. server=<server ip>"
    LogMsg "${errMsg}"
    echo "${errMsg}" >> ./summary.log
    UpdateTestState $ICA_TESTABORTED
    exit 1
fi
if [ ! ${client} ]; then
    errMsg="Please add/provide value for client in constants.sh. client=<client ip>"
    LogMsg "${errMsg}"
    echo "${errMsg}" >> ./summary.log
    UpdateTestState $ICA_TESTABORTED
    exit 1
fi

if [ ! ${pingIteration} ]; then
    errMsg="Please add/provide value for pingIteration in constants.sh. pingIteration=1000000"
    LogMsg "${errMsg}"
    echo "${errMsg}" >> ./summary.log
    UpdateTestState $ICA_TESTABORTED
    exit 1
fi

#Make & build ntttcp on client and server Machine

LogMsg "Configuring client ${client}..."
InstallLAGSCOPE ${client}

LogMsg "Configuring server ${server}..."
InstallLAGSCOPE ${server}

#Now, start the ntttcp client on client VM.

LogMsg "Now running Lagscope test"
LogMsg "Starting server."
# -r: receiver mode; -D: run as daemon so the ssh command returns.
ssh root@${server} "lagscope -r -D"
sleep 1
LogMsg "lagscope client running..."
# -i0: no interval output; -n: ping iterations; -H: print histogram.
# Results are left on the client in lagscope-n<N>-output.txt.
ssh root@${client} "lagscope -s${server} -i0 -n${pingIteration} -H > lagscope-n${pingIteration}-output.txt"
# NOTE(review): "finsished" typo below is a runtime log string — left as-is.
LogMsg "Test finsished."
# NOTE(review): passes the literal string "ICA_TESTCOMPLETED" rather than
# $ICA_TESTCOMPLETED ("TestCompleted") — confirm which value the harness
# expects in state.txt before changing.
UpdateTestState ICA_TESTCOMPLETED
|
||||
|
|
@ -0,0 +1,222 @@
|
|||
#!/bin/bash
|
||||
|
||||
#######################################################################
|
||||
#
|
||||
# Linux on Hyper-V and Azure Test Code, ver. 1.0.0
|
||||
# Copyright (c) Microsoft Corporation
|
||||
#
|
||||
# All rights reserved.
|
||||
# Licensed under the Apache License, Version 2.0 (the ""License"");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS
|
||||
# OF ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION
|
||||
# ANY IMPLIED WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR
|
||||
# PURPOSE, MERCHANTABLITY OR NON-INFRINGEMENT.
|
||||
#
|
||||
# See the Apache Version 2.0 License for specific language governing
|
||||
# permissions and limitations under the License.
|
||||
#
|
||||
#######################################################################
|
||||
|
||||
#######################################################################
|
||||
#
|
||||
# perf_ntttcp.sh
|
||||
# Author : SHITAL SAVEKAR <v-shisav@microsoft.com>
|
||||
#
|
||||
# Description:
|
||||
# Download and run ntttcp network performance tests.
|
||||
# This script needs to be run on client VM.
|
||||
#
|
||||
# Supported Distros:
|
||||
# Ubuntu 16.04
|
||||
#######################################################################
|
||||
|
||||
# Per-test parameters (server, client, testDuration, nicName, ...) are
# supplied by the automation harness through this file.
CONSTANTS_FILE="./constants.sh"
# Harness-visible test states written to ./state.txt by UpdateTestState.
ICA_TESTRUNNING="TestRunning"      # The test is running
ICA_TESTCOMPLETED="TestCompleted"  # The test completed successfully
ICA_TESTABORTED="TestAborted"      # Error during the setup of the test
ICA_TESTFAILED="TestFailed"        # Error occurred during the test
# Create the log file up front so appends in LogMsg always succeed.
touch ./ntttcpTest.log
|
||||
|
||||
|
||||
# Install ntttcp-for-linux (pinned to a known-good commit) and lagscope,
# both built from source, on the remote host ${1} over ssh.
# Arguments: $1 - ssh target (host or user@host) to configure
# Returns:   1 (after marking the test aborted) on an unrecognized distro
# NOTE(review): the distro is detected on the LOCAL VM, not on ${1} —
# assumes client and server run the same image; confirm.
InstallNTTTCP() {
    DISTRO=`grep -ihs "buntu\|Suse\|Fedora\|Debian\|CentOS\|Red Hat Enterprise Linux\|clear-linux-os" /etc/{issue,*release,*version} /usr/lib/os-release`

    if [[ $DISTRO =~ "Ubuntu" ]];
    then
        LogMsg "Detected UBUNTU"
        LogMsg "Configuring ${1} for ntttcp test..."
        # NOTE(review): the 'until ...; sleep 10' condition ends with sleep,
        # which always succeeds, so the loop body never runs — effectively a
        # single dpkg --configure attempt; confirm intent.
        ssh ${1} "until dpkg --force-all --configure -a; sleep 10; do echo 'Trying again...'; done"
        ssh ${1} "apt-get update"
        ssh ${1} "apt-get -y install libaio1 sysstat git bc make gcc dstat psmisc"
        # Pin ntttcp to a fixed commit for reproducible results.
        ssh ${1} "git clone https://github.com/Microsoft/ntttcp-for-linux.git"
        ssh ${1} "cd ntttcp-for-linux/ && git checkout 7a5017b00a603cfaf2ae2a83a6d6b688b2f9dbaa"
        ssh ${1} "cd ntttcp-for-linux/src/ && make && make install"
        ssh ${1} "cp ntttcp-for-linux/src/ntttcp ."
        ssh ${1} "rm -rf lagscope"
        ssh ${1} "git clone https://github.com/Microsoft/lagscope"
        ssh ${1} "cd lagscope/src && make && make install"

    elif [[ $DISTRO =~ "Red Hat Enterprise Linux Server release 6" ]];
    then
        LogMsg "Detected Redhat 6.x"
        ssh ${1} "rpm -ivh https://dl.fedoraproject.org/pub/epel/epel-release-latest-6.noarch.rpm"
        ssh ${1} "yum -y --nogpgcheck install libaio1 sysstat git bc make gcc dstat psmisc"
        ssh ${1} "yum -y --nogpgcheck install gcc-c++"

        # RHEL 6 ships an older glibc; build 2.14 from source.
        # NOTE(review): $LD_LIBRARY_PATH inside double quotes is expanded on
        # the LOCAL shell before ssh, and the export does not persist past
        # this ssh session anyway — confirm whether this step is effective.
        ssh ${1} "wget http://ftp.heanet.ie/mirrors/gnu/libc/glibc-2.14.1.tar.gz"
        ssh ${1} "tar xvf glibc-2.14.1.tar.gz"
        ssh ${1} "mv glibc-2.14.1 glibc-2.14 && cd glibc-2.14 && mkdir build && cd build && ../configure --prefix=/opt/glibc-2.14 && make && make install && export LD_LIBRARY_PATH=/opt/glibc-2.14/lib:$LD_LIBRARY_PATH"

        ssh ${1} "git clone https://github.com/Microsoft/ntttcp-for-linux.git"
        ssh ${1} "cd ntttcp-for-linux/ && git checkout 7a5017b00a603cfaf2ae2a83a6d6b688b2f9dbaa"

        ssh ${1} "cd ntttcp-for-linux/src/ && make && make install"
        ssh ${1} "cp ntttcp-for-linux/src/ntttcp ."
        ssh ${1} "rm -rf lagscope"
        ssh ${1} "git clone https://github.com/Microsoft/lagscope"
        ssh ${1} "cd lagscope/src && make && make install"
        # Flush firewall rules so the test ports are reachable.
        ssh ${1} "iptables -F"

    elif [[ $DISTRO =~ "Red Hat Enterprise Linux Server release 7" ]];
    then
        LogMsg "Detected Redhat 7.x"
        ssh ${1} "rpm -ivh https://dl.fedoraproject.org/pub/epel/epel-release-latest-7.noarch.rpm"
        ssh ${1} "yum -y --nogpgcheck install libaio1 sysstat git bc make gcc dstat psmisc"
        ssh ${1} "git clone https://github.com/Microsoft/ntttcp-for-linux.git"
        ssh ${1} "cd ntttcp-for-linux/ && git checkout 7a5017b00a603cfaf2ae2a83a6d6b688b2f9dbaa"
        ssh ${1} "cd ntttcp-for-linux/src/ && make && make install"
        ssh ${1} "cp ntttcp-for-linux/src/ntttcp ."
        ssh ${1} "rm -rf lagscope"
        ssh ${1} "git clone https://github.com/Microsoft/lagscope"
        ssh ${1} "cd lagscope/src && make && make install"
        ssh ${1} "iptables -F"

    elif [[ $DISTRO =~ "CentOS Linux release 6" ]] || [[ $DISTRO =~ "CentOS release 6" ]];
    then
        LogMsg "Detected CentOS 6.x"
        ssh ${1} "rpm -ivh https://dl.fedoraproject.org/pub/epel/epel-release-latest-6.noarch.rpm"
        ssh ${1} "yum -y --nogpgcheck install libaio1 sysstat git bc make gcc dstat psmisc"
        ssh ${1} "yum -y --nogpgcheck install gcc-c++"
        ssh ${1} "git clone https://github.com/Microsoft/ntttcp-for-linux.git"
        ssh ${1} "cd ntttcp-for-linux/ && git checkout 7a5017b00a603cfaf2ae2a83a6d6b688b2f9dbaa"
        ssh ${1} "cd ntttcp-for-linux/src/ && make && make install"
        ssh ${1} "cp ntttcp-for-linux/src/ntttcp ."
        ssh ${1} "rm -rf lagscope"
        ssh ${1} "git clone https://github.com/Microsoft/lagscope"
        ssh ${1} "cd lagscope/src && make && make install"
        ssh ${1} "iptables -F"

    elif [[ $DISTRO =~ "CentOS Linux release 7" ]];
    then
        LogMsg "Detected CentOS 7.x"
        ssh ${1} "rpm -ivh https://dl.fedoraproject.org/pub/epel/epel-release-latest-7.noarch.rpm"
        ssh ${1} "yum -y --nogpgcheck install libaio1 sysstat git bc make gcc dstat psmisc"
        ssh ${1} "git clone https://github.com/Microsoft/ntttcp-for-linux.git"
        ssh ${1} "cd ntttcp-for-linux/ && git checkout 7a5017b00a603cfaf2ae2a83a6d6b688b2f9dbaa"
        ssh ${1} "cd ntttcp-for-linux/src/ && make && make install"
        ssh ${1} "cp ntttcp-for-linux/src/ntttcp ."
        ssh ${1} "rm -rf lagscope"
        ssh ${1} "git clone https://github.com/Microsoft/lagscope"
        ssh ${1} "cd lagscope/src && make && make install"
        ssh ${1} "iptables -F"

    elif [[ $DISTRO =~ "SUSE Linux Enterprise Server 12" ]];
    then
        LogMsg "Detected SLES12"
        # gettext-runtime-mini conflicts with packages pulled in below.
        ssh ${1} "zypper --no-gpg-checks --non-interactive --gpg-auto-import-keys remove gettext-runtime-mini*"
        ssh ${1} "zypper --no-gpg-checks --non-interactive --gpg-auto-import-keys install sysstat git bc make gcc grub2 dstat psmisc"
        ssh ${1} "git clone https://github.com/Microsoft/ntttcp-for-linux.git"
        ssh ${1} "cd ntttcp-for-linux/ && git checkout 7a5017b00a603cfaf2ae2a83a6d6b688b2f9dbaa"
        ssh ${1} "cd ntttcp-for-linux/src/ && make && make install"
        ssh ${1} "cp ntttcp-for-linux/src/ntttcp ."
        ssh ${1} "rm -rf lagscope"
        ssh ${1} "git clone https://github.com/Microsoft/lagscope"
        ssh ${1} "cd lagscope/src && make && make install"
        ssh ${1} "iptables -F"
    elif [[ $DISTRO =~ "clear-linux-os" ]];
    then
        LogMsg "Detected Clear Linux OS. Installing required packages"
        ssh ${1} "swupd bundle-add dev-utils-dev sysadmin-basic performance-tools os-testsuite-phoronix network-basic openssh-server dev-utils os-core os-core-dev"
        ssh ${1} "iptables -F"

    else
        LogMsg "Unknown Distro"
        UpdateTestState "TestAborted"
        UpdateSummary "Unknown Distro, test aborted"
        return 1
    fi
}
|
||||
# Log helper: timestamped line to stdout, raw message appended to the
# local ntttcp test log.
LogMsg()
{
    printf '%s : %s\n' "$(date "+%b %d %Y %T")" "${1}"
    echo "${1}" >> ./ntttcpTest.log
}
|
||||
|
||||
# Write the harness-visible test state to ./state.txt (overwrites any
# previous state so only the latest is reported).
UpdateTestState()
{
    printf '%s\n' "${1}" > ./state.txt
}
|
||||
|
||||
# Load harness-supplied parameters; abort early if they are missing.
if [ -e ${CONSTANTS_FILE} ]; then
    source ${CONSTANTS_FILE}
else
    errMsg="Error: missing ${CONSTANTS_FILE} file"
    LogMsg "${errMsg}"
    UpdateTestState $ICA_TESTABORTED
    exit 10
fi

# Validate each required constant; abort with a hint if any is unset.
if [ ! ${server} ]; then
    errMsg="Please add/provide value for server in constants.sh. server=<server ip>"
    LogMsg "${errMsg}"
    echo "${errMsg}" >> ./summary.log
    UpdateTestState $ICA_TESTABORTED
    exit 1
fi
if [ ! ${client} ]; then
    errMsg="Please add/provide value for client in constants.sh. client=<client ip>"
    LogMsg "${errMsg}"
    echo "${errMsg}" >> ./summary.log
    UpdateTestState $ICA_TESTABORTED
    exit 1
fi

if [ ! ${testDuration} ]; then
    errMsg="Please add/provide value for testDuration in constants.sh. testDuration=60"
    LogMsg "${errMsg}"
    echo "${errMsg}" >> ./summary.log
    UpdateTestState $ICA_TESTABORTED
    exit 1
fi

if [ ! ${nicName} ]; then
    errMsg="Please add/provide value for nicName in constants.sh. nicName=eth0/bond0"
    LogMsg "${errMsg}"
    echo "${errMsg}" >> ./summary.log
    UpdateTestState $ICA_TESTABORTED
    exit 1
fi
#Make & build ntttcp on client and server Machine

LogMsg "Configuring client ${client}..."
InstallNTTTCP ${client}

LogMsg "Configuring server ${server}..."
InstallNTTTCP ${server}

#Now, start the ntttcp client on client VM.

# Fetch the external run/report driver scripts onto the client.
ssh root@${client} "wget https://raw.githubusercontent.com/iamshital/linux_performance_test/master/run_ntttcp-for-linux/run-ntttcp-and-tcping.sh"
ssh root@${client} "wget https://raw.githubusercontent.com/iamshital/linux_performance_test/master/run_ntttcp-for-linux/report-ntttcp-and-tcping.sh"
ssh root@${client} "chmod +x run-ntttcp-and-tcping.sh && chmod +x report-ntttcp-and-tcping.sh"
LogMsg "Now running NTTTCP test"
ssh root@${client} "rm -rf ntttcp-test-logs"
# NOTE(review): ${testConnections} is read from constants.sh but is not
# validated above like the other parameters — confirm it is always set.
ssh root@${client} "./run-ntttcp-and-tcping.sh ntttcp-test-logs ${server} root ${testDuration} ${nicName} '$testConnections'"
ssh root@${client} "./report-ntttcp-and-tcping.sh ntttcp-test-logs '$testConnections'"
# Copy results to the client home dir where the harness collects them.
ssh root@${client} "cp ntttcp-test-logs/* ."

# NOTE(review): passes the literal string "ICA_TESTCOMPLETED" rather than
# $ICA_TESTCOMPLETED ("TestCompleted") — confirm which value the harness
# expects in state.txt before changing.
UpdateTestState ICA_TESTCOMPLETED
|
|
@ -0,0 +1,54 @@
|
|||
# Deploys VMs, uploads and runs a single Linux test script on the VM,
# then downloads its state/summary logs and derives the test verdict.
# Relies on harness helpers (DeployVMS, RemoteCopy, RunLinuxCmd, LogMsg,
# GetFinalResultHeader, DoTestCleanUp) and ambient variables such as
# $currentTestData, $xmlConfig, $Distro, $AllVMData, $user, $password,
# $python_cmd and $LogDir — all defined outside this script.
$result = ""
$testResult = ""
$resultArr = @()

$isDeployed = DeployVMS -setupType $currentTestData.setupType -Distro $Distro -xmlConfig $xmlConfig
if ($isDeployed)
{
    try
    {
        # Push the test payload and make every uploaded file executable.
        RemoteCopy -uploadTo $AllVMData.PublicIP -port $AllVMData.SSHPort -files $currentTestData.files -username $user -password $password -upload
        $out = RunLinuxCmd -username $user -password $password -ip $AllVMData.PublicIP -port $AllVMData.SSHPort -command "chmod +x *" -runAsSudo

        LogMsg "Executing : $($currentTestData.testScript)"
        RunLinuxCmd -username $user -password $password -ip $AllVMData.PublicIP -port $AllVMData.SSHPort -command "$python_cmd $($currentTestData.testScript)" -runAsSudo
        # Rename the generic runtime log after the test script so the
        # downloaded copy is unambiguous.
        RunLinuxCmd -username $user -password $password -ip $AllVMData.PublicIP -port $AllVMData.SSHPort -command "mv Runtime.log $($currentTestData.testScript).log" -runAsSudo
        RemoteCopy -download -downloadFrom $AllVMData.PublicIP -files "/home/$user/state.txt, /home/$user/Summary.log, /home/$user/$($currentTestData.testScript).log" -downloadTo $LogDir -port $AllVMData.SSHPort -username $user -password $password
        # Summary.log holds the guest-reported verdict; state.txt holds
        # the guest-side state machine value.
        $testResult = Get-Content $LogDir\Summary.log
        $testStatus = Get-Content $LogDir\state.txt
        LogMsg "Test result : $testResult"

        if ($testStatus -eq "TestCompleted")
        {
            LogMsg "Test Completed"
        }
    }

    catch
    {
        $ErrorMessage = $_.Exception.Message
        LogMsg "EXCEPTION : $ErrorMessage"
    }
    Finally
    {
        # No verdict recorded (exception or early failure) => Aborted.
        if (!$testResult)
        {
            $testResult = "Aborted"
        }
        $resultArr += $testResult
    }
}

else
{
    $testResult = "Aborted"
    $resultArr += $testResult
}

$result = GetFinalResultHeader -resultarr $resultArr

#Clean up the setup
DoTestCleanUp -result $result -testName $currentTestData.testName -deployedServices $isDeployed -ResourceGroups $isDeployed

#Return the result and summery to the test suite script..
return $result
|
|
@ -0,0 +1,72 @@
|
|||
# Deploys VMs, checks the guest kernel logs for call traces, reboots all
# deployments, and re-checks the kernel logs after the reboot. PASS only
# if both checks come back clean and the restart succeeds.
# Relies on harness helpers (DeployVMS, CheckKernelLogs,
# RestartAllDeployments, LogMsg, GetFinalResultHeader, DoTestCleanUp) and
# ambient variables ($currentTestData, $xmlConfig, $Distro, $allVMData).
$result = ""
$testResult = ""
$resultArr = @()

$isDeployed = DeployVMS -setupType $currentTestData.setupType -Distro $Distro -xmlConfig $xmlConfig
if ($isDeployed)
{
    try
    {
        # Give the guest time to settle before scanning its logs.
        LogMsg "Check 1: Checking call tracess again after 30 seconds sleep"
        Start-Sleep 30
        $noIssues = CheckKernelLogs -allVMData $allVMData
        if ($noIssues)
        {
            $RestartStatus = RestartAllDeployments -allVMData $allVMData
            # NOTE(review): compares against the string "True" — confirm
            # RestartAllDeployments returns a string rather than a [bool].
            if($RestartStatus -eq "True")
            {
                LogMsg "Check 2: Checking call tracess again after Reboot > 30 seconds sleep"
                Start-Sleep 30
                $noIssues = CheckKernelLogs -allVMData $allVMData
                if ($noIssues)
                {
                    LogMsg "Test Result : PASS."
                    $testResult = "PASS"
                }
                else
                {
                    LogMsg "Test Result : FAIL."
                    $testResult = "FAIL"
                }
            }
            else
            {
                # Restart failed.
                LogMsg "Test Result : FAIL."
                $testResult = "FAIL"
            }
        }
        else
        {
            # Pre-reboot kernel logs already contain issues.
            LogMsg "Test Result : FAIL."
            $testResult = "FAIL"
        }
    }
    catch
    {
        $ErrorMessage = $_.Exception.Message
        LogMsg "EXCEPTION : $ErrorMessage"
    }
    Finally
    {
        $metaData = ""
        # No verdict recorded (exception) => Aborted.
        if (!$testResult)
        {
            $testResult = "Aborted"
        }
        $resultArr += $testResult
    }
}

else
{
    # NOTE(review): sibling scripts record "Aborted" when deployment
    # fails; this one records "FAIL" — confirm the difference is intended.
    $testResult = "FAIL"
    $resultArr += $testResult
}

$result = GetFinalResultHeader -resultarr $resultArr

#Clean up the setup
DoTestCleanUp -result $result -testName $currentTestData.testName -deployedServices $isDeployed -ResourceGroups $isDeployed

#Return the result and summery to the test suite script..
return $result
|
|
@ -0,0 +1,63 @@
|
|||
# Deploys a VM, uploads the test payload, and runs the test script with
# the VM's hostname passed via "-e", then downloads the resulting logs
# and derives the verdict. Relies on harness helpers (DeployVMS,
# RemoteCopy, RunLinuxCmd, LogMsg, GetFinalResultHeader, DoTestCleanUp)
# and ambient variables ($currentTestData, $xmlConfig, $Distro,
# $AllVMData, $user, $password, $python_cmd, $LogDir).
$result = ""
$testResult = ""
$resultArr = @()

$isDeployed = DeployVMS -setupType $currentTestData.setupType -Distro $Distro -xmlConfig $xmlConfig
if ($isDeployed)
{
    try
    {
        # Cache the deployed VM's endpoints for readability below.
        $hs1VIP = $AllVMData.PublicIP
        $hs1vm1sshport = $AllVMData.SSHPort
        $hs1ServiceUrl = $AllVMData.URL
        $hs1vm1Dip = $AllVMData.InternalIP
        $hs1vm1Hostname = $AllVMData.RoleName

        RemoteCopy -uploadTo $hs1VIP -port $hs1vm1sshport -files $currentTestData.files -username $user -password $password -upload
        RunLinuxCmd -username $user -password $password -ip $hs1VIP -port $hs1vm1sshport -command "chmod +x *" -runAsSudo


        LogMsg "Executing : $($currentTestData.testScript)"
        # "-e <hostname>" hands the expected hostname to the guest-side script.
        RunLinuxCmd -username $user -password $password -ip $hs1VIP -port $hs1vm1sshport -command "$python_cmd $($currentTestData.testScript) -e $hs1vm1Hostname" -runAsSudo
        RunLinuxCmd -username $user -password $password -ip $hs1VIP -port $hs1vm1sshport -command "mv Runtime.log $($currentTestData.testScript).log" -runAsSudo
        RemoteCopy -download -downloadFrom $hs1VIP -files "/home/$user/state.txt, /home/$user/Summary.log, /home/$user/$($currentTestData.testScript).log" -downloadTo $LogDir -port $hs1vm1sshport -username $user -password $password
        # Summary.log holds the guest-reported verdict; state.txt the state machine.
        $testResult = Get-Content $LogDir\Summary.log
        $testStatus = Get-Content $LogDir\state.txt
        LogMsg "Test result : $testResult"

        if ($testStatus -eq "TestCompleted")
        {
            LogMsg "Test Completed"
        }
    }

    catch
    {
        $ErrorMessage = $_.Exception.Message
        LogMsg "EXCEPTION : $ErrorMessage"
    }
    Finally
    {
        $metaData = ""
        # No verdict recorded (exception or early failure) => Aborted.
        if (!$testResult)
        {
            $testResult = "Aborted"
        }
        $resultArr += $testResult
        #$resultSummary += CreateResultSummary -testResult $testResult -metaData $metaData -checkValues "PASS,FAIL,ABORTED" -testName $currentTestData.testName# if you want to publish all result then give here all test status possibilites. if you want just failed results, then give here just "FAIL". You can use any combination of PASS FAIL ABORTED and corresponding test results will be published!
    }
}

else
{
    $testResult = "Aborted"
    $resultArr += $testResult
}

$result = GetFinalResultHeader -resultarr $resultArr

#Clean up the setup
DoTestCleanUp -result $result -testName $currentTestData.testName -deployedServices $isDeployed -ResourceGroups $isDeployed

#Return the result and summery to the test suite script..
return $result
|
|
@ -0,0 +1,230 @@
|
|||
# Lagscope latency test driver: deploys a client/server VM pair, runs
# perf_lagscope.sh on the client, parses min/max/avg latency from the
# lagscope report, and optionally uploads the results to a SQL database.
# Relies on harness helpers (DeployVMS, ProvisionVMsForLisa, RemoteCopy,
# RunLinuxCmd, LogMsg/LogErr, WaitFor, CreateResultSummary,
# GetFinalResultHeader, DoTestCleanUp) and ambient variables
# ($currentTestData, $xmlConfig, $Distro, $allVMData, $password, $LogDir,
# $UseAzureResourceManager, $EnableAcceleratedNetworking).
$result = ""
$testResult = ""
$resultArr = @()

$isDeployed = DeployVMS -setupType $currentTestData.setupType -Distro $Distro -xmlConfig $xmlConfig
if ($isDeployed)
{
    try
    {
        # Identify the client and server VMs by role name.
        $noClient = $true
        $noServer = $true
        foreach ( $vmData in $allVMData )
        {
            if ( $vmData.RoleName -imatch "client" )
            {
                $clientVMData = $vmData
                $noClient = $false
            }
            elseif ( $vmData.RoleName -imatch "server" )
            {
                # FIX: was "$noServer = $fase" — an undefined variable that
                # silently evaluated to $null instead of $false.
                $noServer = $false
                $serverVMData = $vmData
            }
        }
        # FIX: the original error text referenced "*master*"/"*slave*"
        # patterns, but the matching above uses "client"/"server".
        if ( $noClient )
        {
            Throw "No client VM defined. Be sure that the client VM role name matches the pattern `"*client*`". Aborting Test."
        }
        if ( $noServer )
        {
            Throw "No server VM defined. Be sure that the server VM role name matches the pattern `"*server*`". Aborting Test."
        }
        #region CONFIGURE VMS
        LogMsg "CLIENT VM details :"
        LogMsg "  RoleName : $($clientVMData.RoleName)"
        LogMsg "  Public IP : $($clientVMData.PublicIP)"
        LogMsg "  SSH Port : $($clientVMData.SSHPort)"
        LogMsg "SERVER VM details :"
        LogMsg "  RoleName : $($serverVMData.RoleName)"
        LogMsg "  Public IP : $($serverVMData.PublicIP)"
        LogMsg "  SSH Port : $($serverVMData.SSHPort)"

        #
        # PROVISION VMS FOR LISA WILL ENABLE ROOT USER AND WILL MAKE ENABLE PASSWORDLESS AUTHENTICATION ACROSS ALL VMS IN SAME HOSTED SERVICE.
        #
        ProvisionVMsForLisa -allVMData $allVMData -installPackagesOnRoleNames "none"

        #endregion

        # Build the constants.sh consumed by the guest-side test script.
        LogMsg "Generating constansts.sh ..."
        $constantsFile = "$LogDir\constants.sh"
        Set-Content -Value "#Generated by Azure Automation." -Path $constantsFile
        Add-Content -Value "server=$($serverVMData.InternalIP)" -Path $constantsFile
        Add-Content -Value "client=$($clientVMData.InternalIP)" -Path $constantsFile
        foreach ( $param in $currentTestData.TestParameters.param)
        {
            # Remember the ping iteration count; it names the output file below.
            if ($param -imatch "pingIteration")
            {
                $pingIteration=$param.Trim().Replace("pingIteration=","")
            }
            Add-Content -Value "$param" -Path $constantsFile
        }
        LogMsg "constanst.sh created successfully..."
        LogMsg (Get-Content -Path $constantsFile)
        #endregion


        #region EXECUTE TEST
        # Wrapper script run on the client: executes the test, then
        # collects VM properties for the result database.
        $myString = @"
cd /root/
./perf_lagscope.sh &> lagscopeConsoleLogs.txt
. azuremodules.sh
collect_VM_properties
"@
        Set-Content "$LogDir\StartLagscopeTest.sh" $myString
        RemoteCopy -uploadTo $clientVMData.PublicIP -port $clientVMData.SSHPort -files ".\$constantsFile,.\Testscripts\Linux\azuremodules.sh,.\Testscripts\Linux\perf_lagscope.sh,.\$LogDir\StartLagscopeTest.sh" -username "root" -password $password -upload
        RemoteCopy -uploadTo $clientVMData.PublicIP -port $clientVMData.SSHPort -files $currentTestData.files -username "root" -password $password -upload

        $out = RunLinuxCmd -ip $clientVMData.PublicIP -port $clientVMData.SSHPort -username "root" -password $password -command "chmod +x *.sh"
        $testJob = RunLinuxCmd -ip $clientVMData.PublicIP -port $clientVMData.SSHPort -username "root" -password $password -command "/root/StartLagscopeTest.sh" -RunInBackground
        #endregion

        #region MONITOR TEST
        # Poll the guest console log until the background job finishes.
        while ( (Get-Job -Id $testJob).State -eq "Running" )
        {
            $currentStatus = RunLinuxCmd -ip $clientVMData.PublicIP -port $clientVMData.SSHPort -username "root" -password $password -command "tail -1 lagscopeConsoleLogs.txt"
            LogMsg "Current Test Staus : $currentStatus"
            WaitFor -seconds 20
        }
        $finalStatus = RunLinuxCmd -ip $clientVMData.PublicIP -port $clientVMData.SSHPort -username "root" -password $password -command "cat /root/state.txt"
        RemoteCopy -downloadFrom $clientVMData.PublicIP -port $clientVMData.SSHPort -username "root" -password $password -download -downloadTo $LogDir -files "lagscope-n$pingIteration-output.txt"
        RemoteCopy -downloadFrom $clientVMData.PublicIP -port $clientVMData.SSHPort -username "root" -password $password -download -downloadTo $LogDir -files "VM_properties.csv"

        $testSummary = $null
        $lagscopeReportLog = Get-Content -Path "$LogDir\lagscope-n$pingIteration-output.txt"
        LogMsg $lagscopeReportLog

        # Parse the "Average" line of the lagscope report into
        # min/max/avg latency values (in microseconds).
        try
        {
            $matchLine= (Select-String -Path "$LogDir\lagscope-n$pingIteration-output.txt" -Pattern "Average").Line
            $minimumLat = $matchLine.Split(",").Split("=").Trim().Replace("us","")[1]
            $maximumLat = $matchLine.Split(",").Split("=").Trim().Replace("us","")[3]
            $averageLat = $matchLine.Split(",").Split("=").Trim().Replace("us","")[5]

            $resultSummary +=  CreateResultSummary -testResult $minimumLat -metaData "Minimum Latency" -checkValues "PASS,FAIL,ABORTED" -testName $currentTestData.testName
            $resultSummary +=  CreateResultSummary -testResult $maximumLat -metaData "Maximum Latency" -checkValues "PASS,FAIL,ABORTED" -testName $currentTestData.testName
            $resultSummary +=  CreateResultSummary -testResult $averageLat -metaData "Average Latency" -checkValues "PASS,FAIL,ABORTED" -testName $currentTestData.testName
        }
        catch
        {
            $resultSummary +=  CreateResultSummary -testResult "Error in parsing logs." -metaData "LAGSCOPE" -checkValues "PASS,FAIL,ABORTED" -testName $currentTestData.testName
        }
        #endregion

        # Map the guest-side state machine to the harness verdict.
        if ( $finalStatus -imatch "TestFailed")
        {
            LogErr "Test failed. Last known status : $currentStatus."
            $testResult = "FAIL"
        }
        elseif ( $finalStatus -imatch "TestAborted")
        {
            LogErr "Test Aborted. Last known status : $currentStatus."
            $testResult = "ABORTED"
        }
        elseif ( $finalStatus -imatch "TestCompleted")
        {
            LogMsg "Test Completed."
            $testResult = "PASS"
        }
        elseif ( $finalStatus -imatch "TestRunning")
        {
            LogMsg "Powershell backgroud job for test is completed but VM is reporting that test is still running. Please check $LogDir\zkConsoleLogs.txt"
            LogMsg "Contests of summary.log : $testSummary"
            $testResult = "PASS"
        }
        LogMsg "Test result : $testResult"
        LogMsg "Test Completed"

        # Upload the parsed latencies to the results database, if one is
        # configured in the XML config.
        LogMsg "Uploading the test results.."
        $dataSource = $xmlConfig.config.Azure.database.server
        $user = $xmlConfig.config.Azure.database.user
        $password = $xmlConfig.config.Azure.database.password
        $database = $xmlConfig.config.Azure.database.dbname
        $dataTableName = $xmlConfig.config.Azure.database.dbtable
        $TestCaseName = $xmlConfig.config.Azure.database.testTag
        if ($dataSource -And $user -And $password -And $database -And $dataTableName)
        {
            $GuestDistro = cat "$LogDir\VM_properties.csv" | Select-String "OS type"| %{$_ -replace ",OS type,",""}

            #$TestCaseName = "LINUX-NEXT-UPSTREAM-TEST"
            if ( $UseAzureResourceManager )
            {
                $HostType = "Azure-ARM"
            }
            else
            {
                $HostType = "Azure"
            }
            $HostBy = ($xmlConfig.config.Azure.General.Location).Replace('"','')
            $HostOS = cat "$LogDir\VM_properties.csv" | Select-String "Host Version"| %{$_ -replace ",Host Version,",""}
            $GuestOSType = "Linux"
            $GuestDistro = cat "$LogDir\VM_properties.csv" | Select-String "OS type"| %{$_ -replace ",OS type,",""}
            $GuestSize = $clientVMData.InstanceSize
            $KernelVersion = cat "$LogDir\VM_properties.csv" | Select-String "Kernel version"| %{$_ -replace ",Kernel version,",""}
            $IPVersion = "IPv4"
            $ProtocolType = "TCP"
            if($EnableAcceleratedNetworking)
            {
                $DataPath = "SRIOV"
            }
            else
            {
                $DataPath = "Synthetic"
            }
            $connectionString = "Server=$dataSource;uid=$user; pwd=$password;Database=$database;Encrypt=yes;TrustServerCertificate=no;Connection Timeout=30;"

            $SQLQuery = "INSERT INTO $dataTableName (TestCaseName,TestDate,HostType,HostBy,HostOS,GuestOSType,GuestDistro,GuestSize,KernelVersion,IPVersion,ProtocolType,DataPath,MaxLatency_us,AverageLatency_us,MinLatency_us,Latency95Percentile_us,Latency99Percentile_us) VALUES "

            #Percentile Values are not calculated yet. will be added in future.
            $Latency95Percentile_us = 0
            $Latency99Percentile_us = 0

            $SQLQuery += "('$TestCaseName','$(Get-Date -Format yyyy-MM-dd)','$HostType','$HostBy','$HostOS','$GuestOSType','$GuestDistro','$GuestSize','$KernelVersion','$IPVersion','$ProtocolType','$DataPath','$maximumLat','$averageLat','$minimumLat','$Latency95Percentile_us','$Latency99Percentile_us'),"

            $SQLQuery = $SQLQuery.TrimEnd(',')
            LogMsg $SQLQuery
            $connection = New-Object System.Data.SqlClient.SqlConnection
            $connection.ConnectionString = $connectionString
            $connection.Open()

            $command = $connection.CreateCommand()
            $command.CommandText = $SQLQuery
            $result = $command.executenonquery()
            $connection.Close()
            LogMsg "Uploading the test results done!!"
        }
        else
        {
            LogMsg "Invalid database details. Failed to upload result to database!"
        }
    }
    catch
    {
        $ErrorMessage = $_.Exception.Message
        LogMsg "EXCEPTION : $ErrorMessage"
    }
    Finally
    {
        $metaData = "LAGSCOPE RESULT"
        # No verdict recorded (exception or early failure) => Aborted.
        if (!$testResult)
        {
            $testResult = "Aborted"
        }
        $resultArr += $testResult
    }
}

else
{
    $testResult = "Aborted"
    $resultArr += $testResult
}

$result = GetFinalResultHeader -resultarr $resultArr

#Clean up the setup
DoTestCleanUp -result $result -testName $currentTestData.testName -deployedServices $isDeployed -ResourceGroups $isDeployed

#Return the result and summery to the test suite script..
return $result, $resultSummary
|
|
@ -0,0 +1,279 @@
|
|||
# NTTTCP throughput test driver: deploys a client/server VM pair, detects
# the active (or SRIOV) NIC on both sides, runs perf_ntttcp.sh on the
# client, parses per-connection throughput/latency from report.log and
# optionally uploads the results to a SQL database.
# Relies on harness helpers (DeployVMS, ProvisionVMsForLisa, RemoteCopy,
# RunLinuxCmd, LogMsg/LogErr, WaitFor, CreateResultSummary,
# GetFinalResultHeader, DoTestCleanUp) and ambient variables
# ($currentTestData, $xmlConfig, $Distro, $allVMData, $password, $LogDir,
# $UseAzureResourceManager, $EnableAcceleratedNetworking).
$result = ""
$testResult = ""
$resultArr = @()

$isDeployed = DeployVMS -setupType $currentTestData.setupType -Distro $Distro -xmlConfig $xmlConfig
if ($isDeployed)
{
    try
    {
        # Identify the client and server VMs by role name.
        $noClient = $true
        $noServer = $true
        foreach ( $vmData in $allVMData )
        {
            if ( $vmData.RoleName -imatch "client" )
            {
                $clientVMData = $vmData
                $noClient = $false
            }
            elseif ( $vmData.RoleName -imatch "server" )
            {
                # FIX: was "$noServer = $fase" — an undefined variable that
                # silently evaluated to $null instead of $false.
                $noServer = $false
                $serverVMData = $vmData
            }
        }
        # FIX: the original error text referenced "*master*"/"*slave*"
        # patterns, but the matching above uses "client"/"server".
        if ( $noClient )
        {
            Throw "No client VM defined. Be sure that the client VM role name matches the pattern `"*client*`". Aborting Test."
        }
        if ( $noServer )
        {
            Throw "No server VM defined. Be sure that the server VM role name matches the pattern `"*server*`". Aborting Test."
        }
        #region CONFIGURE VMS
        LogMsg "CLIENT VM details :"
        LogMsg "  RoleName : $($clientVMData.RoleName)"
        LogMsg "  Public IP : $($clientVMData.PublicIP)"
        LogMsg "  SSH Port : $($clientVMData.SSHPort)"
        LogMsg "SERVER VM details :"
        LogMsg "  RoleName : $($serverVMData.RoleName)"
        LogMsg "  Public IP : $($serverVMData.PublicIP)"
        LogMsg "  SSH Port : $($serverVMData.SSHPort)"

        #
        # PROVISION VMS FOR LISA WILL ENABLE ROOT USER AND WILL MAKE ENABLE PASSWORDLESS AUTHENTICATION ACROSS ALL VMS IN SAME HOSTED SERVICE.
        #
        ProvisionVMsForLisa -allVMData $allVMData -installPackagesOnRoleNames "none"

        #endregion

        # Determine the NIC both sides will use; SRIOV vs synthetic only
        # affects logging and the DataPath column in the database.
        if($EnableAcceleratedNetworking)
        {
            $DataPath = "SRIOV"
            LogMsg "Getting SRIOV NIC Name."
            $clientNicName = (RunLinuxCmd -ip $clientVMData.PublicIP -port $clientVMData.SSHPort -username "root" -password $password -command "route | grep '^default' | grep -o '[^ ]*$'").Trim()
            LogMsg "CLIENT SRIOV NIC: $clientNicName"
            $serverNicName = (RunLinuxCmd -ip $clientVMData.PublicIP -port $serverVMData.SSHPort -username "root" -password $password -command "route | grep '^default' | grep -o '[^ ]*$'").Trim()
            LogMsg "SERVER SRIOV NIC: $serverNicName"
            if ( $serverNicName -eq $clientNicName)
            {
                $nicName = $clientNicName
            }
            else
            {
                Throw "Server and client SRIOV NICs are not same."
            }
        }
        else
        {
            $DataPath = "Synthetic"
            LogMsg "Getting Active NIC Name."
            $clientNicName = (RunLinuxCmd -ip $clientVMData.PublicIP -port $clientVMData.SSHPort -username "root" -password $password -command "route | grep '^default' | grep -o '[^ ]*$'").Trim()
            LogMsg "CLIENT NIC: $clientNicName"
            $serverNicName = (RunLinuxCmd -ip $clientVMData.PublicIP -port $serverVMData.SSHPort -username "root" -password $password -command "route | grep '^default' | grep -o '[^ ]*$'").Trim()
            LogMsg "SERVER NIC: $serverNicName"
            if ( $serverNicName -eq $clientNicName)
            {
                $nicName = $clientNicName
            }
            else
            {
                Throw "Server and client NICs are not same."
            }
        }

        # Build the constants.sh consumed by the guest-side test script.
        LogMsg "Generating constansts.sh ..."
        $constantsFile = "$LogDir\constants.sh"
        Set-Content -Value "#Generated by Azure Automation." -Path $constantsFile
        Add-Content -Value "server=$($serverVMData.InternalIP)" -Path $constantsFile
        Add-Content -Value "client=$($clientVMData.InternalIP)" -Path $constantsFile
        Add-Content -Value "nicName=$nicName" -Path $constantsFile
        foreach ( $param in $currentTestData.TestParameters.param)
        {
            Add-Content -Value "$param" -Path $constantsFile
        }
        LogMsg "constanst.sh created successfully..."
        LogMsg (Get-Content -Path $constantsFile)
        #endregion


        #region EXECUTE TEST
        # Wrapper script run on the client: executes the test, then
        # collects VM properties for the result database.
        $myString = @"
cd /root/
./perf_ntttcp.sh &> ntttcpConsoleLogs.txt
. azuremodules.sh
collect_VM_properties
"@
        Set-Content "$LogDir\StartNtttcpTest.sh" $myString
        RemoteCopy -uploadTo $clientVMData.PublicIP -port $clientVMData.SSHPort -files ".\$constantsFile,.\Testscripts\Linux\azuremodules.sh,.\Testscripts\Linux\perf_ntttcp.sh,.\$LogDir\StartNtttcpTest.sh" -username "root" -password $password -upload
        RemoteCopy -uploadTo $clientVMData.PublicIP -port $clientVMData.SSHPort -files $currentTestData.files -username "root" -password $password -upload

        $out = RunLinuxCmd -ip $clientVMData.PublicIP -port $clientVMData.SSHPort -username "root" -password $password -command "chmod +x *.sh"
        $testJob = RunLinuxCmd -ip $clientVMData.PublicIP -port $clientVMData.SSHPort -username "root" -password $password -command "/root/StartNtttcpTest.sh" -RunInBackground
        #endregion

        #region MONITOR TEST
        # Poll the guest console log until the background job finishes.
        while ( (Get-Job -Id $testJob).State -eq "Running" )
        {
            $currentStatus = RunLinuxCmd -ip $clientVMData.PublicIP -port $clientVMData.SSHPort -username "root" -password $password -command "tail -2 ntttcpConsoleLogs.txt | head -1"
            LogMsg "Current Test Staus : $currentStatus"
            WaitFor -seconds 20
        }
        $finalStatus = RunLinuxCmd -ip $clientVMData.PublicIP -port $clientVMData.SSHPort -username "root" -password $password -command "cat /root/state.txt"
        RemoteCopy -downloadFrom $clientVMData.PublicIP -port $clientVMData.SSHPort -username "root" -password $password -download -downloadTo $LogDir -files "/root/ntttcpConsoleLogs.txt"
        RemoteCopy -downloadFrom $clientVMData.PublicIP -port $clientVMData.SSHPort -username "root" -password $password -download -downloadTo $LogDir -files "lagscope-ntttcp-*"
        RemoteCopy -downloadFrom $clientVMData.PublicIP -port $clientVMData.SSHPort -username "root" -password $password -download -downloadTo $LogDir -files "ntttcp-sender-p*"
        RemoteCopy -downloadFrom $clientVMData.PublicIP -port $clientVMData.SSHPort -username "root" -password $password -download -downloadTo $LogDir -files "mpstat-sender-p*"
        RemoteCopy -downloadFrom $clientVMData.PublicIP -port $clientVMData.SSHPort -username "root" -password $password -download -downloadTo $LogDir -files "dstat-sender-p*"
        RemoteCopy -downloadFrom $clientVMData.PublicIP -port $clientVMData.SSHPort -username "root" -password $password -download -downloadTo $LogDir -files "sar-sender-p*"
        RemoteCopy -downloadFrom $clientVMData.PublicIP -port $clientVMData.SSHPort -username "root" -password $password -download -downloadTo $LogDir -files "report.log"
        RemoteCopy -downloadFrom $clientVMData.PublicIP -port $clientVMData.SSHPort -username "root" -password $password -download -downloadTo $LogDir -files "VM_properties.csv"

        # Parse report.log: one line per connection count, columns are
        # <connections> <throughput Gbps> <cycles/byte> <avg TCP latency>.
        # NOTE(review): $uploadResults stays unset if report.log contains
        # only the header line — the TestCompleted branch below then does
        # not set PASS; confirm that is the intended outcome.
        $testSummary = $null
        $ntttcpReportLog = Get-Content -Path "$LogDir\report.log"
        foreach ( $line in $ntttcpReportLog )
        {
            if ( $line -imatch "test_connections" )
            {
                continue;
            }
            try
            {
                $uploadResults = $true
                # FIX: replaced seven chained .Replace("  "," ") calls per
                # field with a single whitespace split, matching the
                # "-split '\s+'" idiom used in the SQL section below.
                $reportFields = $line.Trim() -split '\s+'
                $test_connections = $reportFields[0]
                $throughput_gbps = $reportFields[1]
                $cycle_per_byte = $reportFields[2]
                $average_tcp_latency = $reportFields[3]
                $metadata = "Connections=$test_connections"
                $connResult = "throughput=$throughput_gbps`Gbps cyclePerBytet=$cycle_per_byte Avg_TCP_lat=$average_tcp_latency"
                $resultSummary +=  CreateResultSummary -testResult $connResult -metaData $metaData -checkValues "PASS,FAIL,ABORTED" -testName $currentTestData.testName
                # Zero throughput for any connection count fails the test
                # and suppresses the database upload.
                if ([string]$throughput_gbps -imatch "0.00")
                {
                    $uploadResults = $false
                    $testResult = "FAIL"
                }
            }
            catch
            {
                $resultSummary +=  CreateResultSummary -testResult "Error in parsing logs." -metaData "NTTTCP" -checkValues "PASS,FAIL,ABORTED" -testName $currentTestData.testName
            }
        }
        #endregion

        # Map the guest-side state machine to the harness verdict.
        if ( $finalStatus -imatch "TestFailed")
        {
            LogErr "Test failed. Last known status : $currentStatus."
            $testResult = "FAIL"
        }
        elseif ( $finalStatus -imatch "TestAborted")
        {
            LogErr "Test Aborted. Last known status : $currentStatus."
            $testResult = "ABORTED"
        }
        elseif ( ($finalStatus -imatch "TestCompleted") -and $uploadResults )
        {
            LogMsg "Test Completed."
            $testResult = "PASS"
        }
        elseif ( $finalStatus -imatch "TestRunning")
        {
            LogMsg "Powershell backgroud job for test is completed but VM is reporting that test is still running. Please check $LogDir\zkConsoleLogs.txt"
            LogMsg "Contests of summary.log : $testSummary"
            $testResult = "PASS"
        }

        LogMsg "Test Completed"

        # Upload the per-connection results to the database, if configured.
        LogMsg "Uploading the test results.."
        $dataSource = $xmlConfig.config.Azure.database.server
        $user = $xmlConfig.config.Azure.database.user
        $password = $xmlConfig.config.Azure.database.password
        $database = $xmlConfig.config.Azure.database.dbname
        $dataTableName = $xmlConfig.config.Azure.database.dbtable
        $TestCaseName = $xmlConfig.config.Azure.database.testTag
        if ($dataSource -And $user -And $password -And $database -And $dataTableName)
        {
            $GuestDistro = cat "$LogDir\VM_properties.csv" | Select-String "OS type"| %{$_ -replace ",OS type,",""}

            #$TestCaseName = "LINUX-NEXT-UPSTREAM-TEST"
            if ( $UseAzureResourceManager )
            {
                $HostType = "Azure-ARM"
            }
            else
            {
                $HostType = "Azure"
            }
            $HostBy = ($xmlConfig.config.Azure.General.Location).Replace('"','')
            $HostOS = cat "$LogDir\VM_properties.csv" | Select-String "Host Version"| %{$_ -replace ",Host Version,",""}
            $GuestOSType = "Linux"
            $GuestDistro = cat "$LogDir\VM_properties.csv" | Select-String "OS type"| %{$_ -replace ",OS type,",""}
            $GuestSize = $clientVMData.InstanceSize
            $KernelVersion = cat "$LogDir\VM_properties.csv" | Select-String "Kernel version"| %{$_ -replace ",Kernel version,",""}
            $IPVersion = "IPv4"
            $ProtocolType = "TCP"
            $connectionString = "Server=$dataSource;uid=$user; pwd=$password;Database=$database;Encrypt=yes;TrustServerCertificate=no;Connection Timeout=30;"
            $LogContents = Get-Content -Path "$LogDir\report.log"
            $SQLQuery = "INSERT INTO $dataTableName (TestCaseName,TestDate,HostType,HostBy,HostOS,GuestOSType,GuestDistro,GuestSize,KernelVersion,IPVersion,ProtocolType,DataPath,NumberOfConnections,Throughput_Gbps,Latency_ms) VALUES "

            # Skip the header line (index 0); one VALUES tuple per data row.
            for($i = 1; $i -lt $LogContents.Count; $i++)
            {
                $Line = $LogContents[$i].Trim() -split '\s+'
                $SQLQuery += "('$TestCaseName','$(Get-Date -Format yyyy-MM-dd)','$HostType','$HostBy','$HostOS','$GuestOSType','$GuestDistro','$GuestSize','$KernelVersion','$IPVersion','$ProtocolType','$DataPath',$($Line[0]),$($Line[1]),$($Line[2])),"
            }
            $SQLQuery = $SQLQuery.TrimEnd(',')
            LogMsg $SQLQuery
            if ($uploadResults)
            {
                $connection = New-Object System.Data.SqlClient.SqlConnection
                $connection.ConnectionString = $connectionString
                $connection.Open()

                $command = $connection.CreateCommand()
                $command.CommandText = $SQLQuery
                $result = $command.executenonquery()
                $connection.Close()
                LogMsg "Uploading the test results done!!"
            }
            else
            {
                LogErr "Uploading the test results cancelled due to zero throughput for some connections!!"
                $testResult = "FAIL"
            }

        }
        else
        {
            LogMsg "Invalid database details. Failed to upload result to database!"
        }
        LogMsg "Test result : $testResult"
    }
    catch
    {
        $ErrorMessage = $_.Exception.Message
        LogMsg "EXCEPTION : $ErrorMessage"
    }
    Finally
    {
        $metaData = "NTTTCP RESULT"
        # No verdict recorded (exception or early failure) => Aborted.
        if (!$testResult)
        {
            $testResult = "Aborted"
        }
        $resultArr += $testResult
    }
}

else
{
    $testResult = "Aborted"
    $resultArr += $testResult
}

$result = GetFinalResultHeader -resultarr $resultArr

#Clean up the setup
DoTestCleanUp -result $result -testName $currentTestData.testName -deployedServices $isDeployed -ResourceGroups $isDeployed

#Return the result and summery to the test suite script..
return $result, $resultSummary
|
|
@ -0,0 +1,435 @@
|
|||
$result = ""
|
||||
$testResult = ""
|
||||
$resultArr = @()
|
||||
|
||||
$isDeployed = DeployVMS -setupType $currentTestData.setupType -Distro $Distro -xmlConfig $xmlConfig
|
||||
if ($isDeployed)
|
||||
{
|
||||
try
|
||||
{
|
||||
$noClient = $true
|
||||
$noServer = $true
|
||||
foreach ( $vmData in $allVMData )
|
||||
{
|
||||
if ( $vmData.RoleName -imatch "client" )
|
||||
{
|
||||
$clientVMData = $vmData
|
||||
$noClient = $false
|
||||
}
|
||||
elseif ( $vmData.RoleName -imatch "server" )
|
||||
{
|
||||
$noServer = $fase
|
||||
$serverVMData = $vmData
|
||||
}
|
||||
}
|
||||
if ( $noClient )
|
||||
{
|
||||
Throw "No any master VM defined. Be sure that, Client VM role name matches with the pattern `"*master*`". Aborting Test."
|
||||
}
|
||||
if ( $noServer )
|
||||
{
|
||||
Throw "No any slave VM defined. Be sure that, Server machine role names matches with pattern `"*slave*`" Aborting Test."
|
||||
}
|
||||
#region CONFIGURE VM FOR TERASORT TEST
|
||||
LogMsg "CLIENT VM details :"
|
||||
LogMsg " RoleName : $($clientVMData.RoleName)"
|
||||
LogMsg " Public IP : $($clientVMData.InternalIP)"
|
||||
LogMsg " SSH Port : $($clientVMData.SSHPort)"
|
||||
LogMsg "SERVER VM details :"
|
||||
LogMsg " RoleName : $($serverVMData.RoleName)"
|
||||
LogMsg " Public IP : $($serverVMData.InternalIP)"
|
||||
LogMsg " SSH Port : $($serverVMData.SSHPort)"
|
||||
|
||||
#
|
||||
# PROVISION VMS FOR LISA WILL ENABLE ROOT USER AND WILL MAKE ENABLE PASSWORDLESS AUTHENTICATION ACROSS ALL VMS IN SAME HOSTED SERVICE.
|
||||
#
|
||||
ProvisionVMsForLisa -allVMData $allVMData -installPackagesOnRoleNames "none"
|
||||
|
||||
#endregion
|
||||
|
||||
# Determine the data path (SRIOV vs. synthetic) and resolve the default-route
# NIC name on both VMs; the test requires both to use the same interface.
# NOTE(review): the server NIC lookup goes through $clientVMData.PublicIP with
# the server's SSH port — presumably both VMs share one public IP behind
# NAT'ed ports; confirm against the deployment layout.
$defaultNicCmd = "route | grep '^default' | grep -o '[^ ]*$'"
if($EnableAcceleratedNetworking)
{
    $DataPath = "SRIOV"
    LogMsg "Getting SRIOV NIC Name."
    $clientNicName = (RunLinuxCmd -ip $clientVMData.PublicIP -port $clientVMData.SSHPort -username "root" -password $password -command $defaultNicCmd).Trim()
    LogMsg "CLIENT SRIOV NIC: $clientNicName"
    $serverNicName = (RunLinuxCmd -ip $clientVMData.PublicIP -port $serverVMData.SSHPort -username "root" -password $password -command $defaultNicCmd).Trim()
    LogMsg "SERVER SRIOV NIC: $serverNicName"
    if ( $serverNicName -eq $clientNicName)
    {
        $nicName = $clientNicName
    }
    else
    {
        Throw "Server and client SRIOV NICs are not same."
    }
}
else
{
    $DataPath = "Synthetic"
    LogMsg "Getting Active NIC Name."
    $clientNicName = (RunLinuxCmd -ip $clientVMData.PublicIP -port $clientVMData.SSHPort -username "root" -password $password -command $defaultNicCmd).Trim()
    LogMsg "CLIENT NIC: $clientNicName"
    $serverNicName = (RunLinuxCmd -ip $clientVMData.PublicIP -port $serverVMData.SSHPort -username "root" -password $password -command $defaultNicCmd).Trim()
    LogMsg "SERVER NIC: $serverNicName"
    if ( $serverNicName -eq $clientNicName)
    {
        $nicName = $clientNicName
    }
    else
    {
        Throw "Server and client NICs are not same."
    }
}
|
||||
|
||||
LogMsg "Generating constansts.sh ..."
|
||||
$constantsFile = "$LogDir\constants.sh"
|
||||
Set-Content -Value "#Generated by Azure Automation." -Path $constantsFile
|
||||
Add-Content -Value "server=$($serverVMData.InternalIP)" -Path $constantsFile
|
||||
Add-Content -Value "client=$($clientVMData.InternalIP)" -Path $constantsFile
|
||||
foreach ( $param in $currentTestData.TestParameters.param)
|
||||
{
|
||||
Add-Content -Value "$param" -Path $constantsFile
|
||||
if ($param -imatch "bufferLengths=")
|
||||
{
|
||||
$testBuffers= $param.Replace("bufferLengths=(","").Replace(")","").Split(" ")
|
||||
}
|
||||
if ($param -imatch "connections=" )
|
||||
{
|
||||
$testConnections = $param.Replace("connections=(","").Replace(")","").Split(" ")
|
||||
}
|
||||
}
|
||||
LogMsg "constanst.sh created successfully..."
|
||||
LogMsg (Get-Content -Path $constantsFile)
|
||||
#endregion
|
||||
|
||||
|
||||
#region EXECUTE TEST
|
||||
$myString = @"
|
||||
cd /root/
|
||||
./perf_iperf3.sh &> iperf3udpConsoleLogs.txt
|
||||
. azuremodules.sh
|
||||
collect_VM_properties
|
||||
"@
|
||||
Set-Content "$LogDir\Startiperf3udpTest.sh" $myString
|
||||
RemoteCopy -uploadTo $clientVMData.PublicIP -port $clientVMData.SSHPort -files ".\$constantsFile,.\Testscripts\Linux\azuremodules.sh,.\Testscripts\Linux\perf_iperf3.sh,.\$LogDir\Startiperf3udpTest.sh" -username "root" -password $password -upload
|
||||
RemoteCopy -uploadTo $clientVMData.PublicIP -port $clientVMData.SSHPort -files $currentTestData.files -username "root" -password $password -upload
|
||||
|
||||
$out = RunLinuxCmd -ip $clientVMData.PublicIP -port $clientVMData.SSHPort -username "root" -password $password -command "chmod +x *.sh"
|
||||
$testJob = RunLinuxCmd -ip $clientVMData.PublicIP -port $clientVMData.SSHPort -username "root" -password $password -command "/root/Startiperf3udpTest.sh" -RunInBackground
|
||||
#endregion
|
||||
|
||||
#region MONITOR TEST
|
||||
while ( (Get-Job -Id $testJob).State -eq "Running" )
|
||||
{
|
||||
$currentStatus = RunLinuxCmd -ip $clientVMData.PublicIP -port $clientVMData.SSHPort -username "root" -password $password -command "tail -1 iperf3udpConsoleLogs.txt"
|
||||
LogMsg "Current Test Staus : $currentStatus"
|
||||
WaitFor -seconds 20
|
||||
}
|
||||
$finalStatus = RunLinuxCmd -ip $clientVMData.PublicIP -port $clientVMData.SSHPort -username "root" -password $password -command "cat /root/state.txt"
|
||||
RemoteCopy -downloadFrom $clientVMData.PublicIP -port $clientVMData.SSHPort -username "root" -password $password -download -downloadTo $LogDir -files "/root/iperf3udpConsoleLogs.txt"
|
||||
$iperf3LogDir = "$LogDir\iperf3Data"
|
||||
New-Item -itemtype directory -path $iperf3LogDir -Force -ErrorAction SilentlyContinue | Out-Null
|
||||
RemoteCopy -downloadFrom $clientVMData.PublicIP -port $clientVMData.SSHPort -username "root" -password $password -download -downloadTo $iperf3LogDir -files "iperf-client-udp*"
|
||||
RemoteCopy -downloadFrom $clientVMData.PublicIP -port $clientVMData.SSHPort -username "root" -password $password -download -downloadTo $iperf3LogDir -files "iperf-server-udp*"
|
||||
RemoteCopy -downloadFrom $clientVMData.PublicIP -port $clientVMData.SSHPort -username "root" -password $password -download -downloadTo $LogDir -files "VM_properties.csv"
|
||||
|
||||
$testSummary = $null
|
||||
|
||||
#region START UDP ANALYSIS
|
||||
$clientfolder = $iperf3LogDir
|
||||
$serverfolder = $iperf3LogDir
|
||||
|
||||
#clientData
|
||||
$files = Get-ChildItem -Path $clientfolder
|
||||
$FinalClientThroughputArr=@()
|
||||
$FinalServerThroughputArr=@()
|
||||
$FinalClientUDPLossArr=@()
|
||||
$FinalServerUDPLossArr=@()
|
||||
$FinalServerClientUDPResultObjArr = @()
|
||||
|
||||
function GetUDPDataObject()
{
    # Build an empty result record that holds one buffer/connection measurement.
    # All fields start as $null and are filled in by the analysis loop.
    $fields = @{
        BufferSize            = $null   # send buffer size (KB, after /1024)
        Connections           = $null   # number of parallel iperf3 connections
        ClientTxGbps          = $null   # client-side average throughput (Gbps)
        ServerRxGbps          = $null   # server-side average throughput (Gbps)
        ThroughputDropPercent = $null   # client-to-server throughput drop (%)
        ClientUDPLoss         = $null   # client-reported datagram loss (%)
        ServerUDPLoss         = $null   # server-reported datagram loss (%)
    }
    return New-Object -TypeName PSObject -Property $fields
}
|
||||
|
||||
foreach ( $Buffer in $testBuffers )
|
||||
{
|
||||
foreach ( $connection in $testConnections )
|
||||
{
|
||||
$currentResultObj = GetUDPDataObject
|
||||
|
||||
$currentConnectionClientTxGbps = 0
|
||||
$currentConnectionClientTxGbpsArr = @()
|
||||
$currentConnectionClientUDPLoss = 0
|
||||
$currentConnectionClientUDPLossArr = @()
|
||||
|
||||
$currentConnectionserverTxGbps = 0
|
||||
$currentConnectionserverTxGbpsArr = @()
|
||||
$currentConnectionserverUDPLoss = 0
|
||||
$currentConnectionserverUDPLossArr = @()
|
||||
|
||||
foreach ( $file in $files )
|
||||
{
|
||||
#region Get Client data...
# Parse one client-side iperf3 JSON log for this buffer/connection combination
# and accumulate its UDP datagram loss and average throughput.
if ( $file.Name -imatch "iperf-client-udp-IPv4-buffer-$($Buffer)-conn-$connection-instance-*" )
{
    $currentInstanceclientJsonText = $null
    $currentInstanceclientJsonObj = $null
    $currentInstanceClientPacketLoss = @()
    $currentInstanceClientThroughput = $null
    $fileName = $file.Name
    try
    {
        # iperf3 can emit "-nan" values which break JSON parsing; normalize to 0.
        $currentInstanceclientJsonText = ([string]( Get-Content "$clientfolder\$fileName")).Replace("-nan","0")
        # Strip any "warning" lines iperf3 interleaves with the JSON payload.
        $errorLines = (Select-String -Path $clientfolder\$fileName -Pattern "warning")
        if ($errorLines)
        {
            foreach ($errorLine in $errorLines)
            {
                $currentInstanceclientJsonText = $currentInstanceclientJsonText.Replace($errorLine.Line,'')
            }
        }
        $currentInstanceclientJsonObj = ConvertFrom-Json -InputObject $currentInstanceclientJsonText
    }
    catch
    {
        LogErr " $fileName : RETURNED NULL"
    }
    # BUGFIX: the second operand previously checked
    # $currentInstanceserverJsonObj.end.sum.packets (the *server* object) inside
    # this *client* block; the server block below uses the server object for
    # both operands, so this one must use the client object for both.
    if ( $currentInstanceclientJsonObj.end.sum.lost_percent -or $currentInstanceclientJsonObj.end.sum.packets)
    {
        $currentConnectionClientUDPLossArr += $currentInstanceclientJsonObj.end.sum.lost_percent

        # Average the per-interval throughput, converting bits/s to Gbps.
        $currentConnCurrentInstanceAllIntervalThroughputArr = @()
        foreach ( $interval in $currentInstanceclientJsonObj.intervals )
        {
            $currentConnCurrentInstanceAllIntervalThroughputArr += $interval.sum.bits_per_second
        }
        $currentInstanceClientThroughput = (((($currentConnCurrentInstanceAllIntervalThroughputArr | Measure-Object -Average).Average))/1000000000)
        $outOfOrderPackats = ([regex]::Matches($currentInstanceclientJsonText, "OUT OF ORDER" )).count
        if ( $outOfOrderPackats -gt 0 )
        {
            LogErr " $fileName : ERROR: $outOfOrderPackats PACKETS ARRIVED OUT OF ORDER"
        }
        LogMsg " $fileName : Data collected successfully."
    }
    else
    {
        # No usable end-of-run summary in this log; skip it.
        $currentInstanceClientThroughput = $null
        #Write-Host " $($currentJsonObj.error) $currentFileClientThroughput "
    }
    if($currentInstanceClientThroughput)
    {
        $currentConnectionClientTxGbpsArr += $currentInstanceClientThroughput
    }
}
#endregion
|
||||
|
||||
#region Get Server data...
|
||||
if ( $file.Name -imatch "iperf-server-udp-IPv4-buffer-$($Buffer)-conn-$connection-instance-*" )
|
||||
{
|
||||
$currentInstanceserverJsonText = $null
|
||||
$currentInstanceserverJsonObj = $null
|
||||
$currentInstanceserverPacketLoss = @()
|
||||
$currentInstanceserverThroughput = $null
|
||||
$fileName = $file.Name
|
||||
try
|
||||
{
|
||||
$currentInstanceserverJsonText = ([string]( Get-Content "$serverfolder\$fileName")).Replace("-nan","0")
|
||||
$currentInstanceserverJsonObj = ConvertFrom-Json -InputObject $currentInstanceserverJsonText
|
||||
}
|
||||
catch
|
||||
{
|
||||
LogErr " $fileName : RETURNED NULL"
|
||||
}
|
||||
if ( $currentInstanceserverJsonObj.end.sum.lost_percent -or $currentInstanceserverJsonObj.end.sum.packets )
|
||||
{
|
||||
$currentConnectionserverUDPLossArr += $currentInstanceserverJsonObj.end.sum.lost_percent
|
||||
|
||||
$currentConnCurrentInstanceAllIntervalThroughputArr = @()
|
||||
foreach ( $interval in $currentInstanceserverJsonObj.intervals )
|
||||
{
|
||||
$currentConnCurrentInstanceAllIntervalThroughputArr += $interval.sum.bits_per_second
|
||||
}
|
||||
$currentInstanceserverThroughput = (((($currentConnCurrentInstanceAllIntervalThroughputArr | Measure-Object -Average).Average))/1000000000)
|
||||
|
||||
$outOfOrderPackats = ([regex]::Matches($currentInstanceserverJsonText, "OUT OF ORDER" )).count
|
||||
if ( $outOfOrderPackats -gt 0 )
|
||||
{
|
||||
LogErr " $fileName : ERROR: $outOfOrderPackats PACKETS ARRIVED OUT OF ORDER"
|
||||
}
|
||||
LogMsg " $fileName : Data collected successfully."
|
||||
}
|
||||
else
|
||||
{
|
||||
$currentInstanceserverThroughput = $null
|
||||
LogErr " $fileName : $($currentInstanceserverJsonObj.error)"
|
||||
}
|
||||
if($currentInstanceserverThroughput)
|
||||
{
|
||||
$currentConnectionserverTxGbpsArr += $currentInstanceserverThroughput
|
||||
}
|
||||
}
|
||||
#endregion
|
||||
}
|
||||
|
||||
$currentConnectionClientTxGbps = [math]::Round((($currentConnectionClientTxGbpsArr | Measure-Object -Average).Average),2)
|
||||
$currentConnectionClientUDPLoss = [math]::Round((($currentConnectionClientUDPLossArr | Measure-Object -Average).Average),2)
|
||||
Write-Host "Client: $Buffer . $connection . $currentConnectionClientTxGbps .$currentConnectionClientUDPLoss"
|
||||
$FinalClientThroughputArr += $currentConnectionClientTxGbps
|
||||
$FinalClientUDPLossArr += $currentConnectionClientUDPLoss
|
||||
|
||||
$currentConnectionserverTxGbps = [math]::Round((($currentConnectionserverTxGbpsArr | Measure-Object -Average).Average),2)
|
||||
$currentConnectionserverUDPLoss = [math]::Round((($currentConnectionserverUDPLossArr | Measure-Object -Average).Average),2)
|
||||
Write-Host "Server: $Buffer . $connection . $currentConnectionserverTxGbps .$currentConnectionserverUDPLoss"
|
||||
$FinalServerThroughputArr += $currentConnectionserverTxGbps
|
||||
$FinalServerUDPLossArr += $currentConnectionserverUDPLoss
|
||||
$currentResultObj.BufferSize = $Buffer/1024
|
||||
$currentResultObj.Connections = $connection
|
||||
$currentResultObj.ClientTxGbps = $currentConnectionClientTxGbps
|
||||
$currentResultObj.ClientUDPLoss = $currentConnectionClientUDPLoss
|
||||
if ( $currentConnectionClientTxGbps -ne 0 )
|
||||
{
|
||||
if ( $currentConnectionClientTxGbps -ge $currentConnectionserverTxGbps )
|
||||
{
|
||||
$currentResultObj.ThroughputDropPercent = [math]::Round(((($currentConnectionClientTxGbps-$currentConnectionserverTxGbps)*100)/$currentConnectionClientTxGbps),2)
|
||||
}
|
||||
else
|
||||
{
|
||||
$currentResultObj.ThroughputDropPercent = 0
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
$currentResultObj.ThroughputDropPercent = 0
|
||||
}
|
||||
$currentResultObj.ServerRxGbps = $currentConnectionserverTxGbps
|
||||
$currentResultObj.ServerUDPLoss = $currentConnectionserverUDPLoss
|
||||
$FinalServerClientUDPResultObjArr += $currentResultObj
|
||||
Write-Host "-------------------------------"
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
#endregion
|
||||
|
||||
foreach ( $udpResultObject in $FinalServerClientUDPResultObjArr )
|
||||
{
|
||||
$connResult="ClientTxGbps=$($udpResultObject.ClientTxGbps) ServerRxGbps=$($udpResultObject.ServerRxGbps) UDPLoss=$($udpResultObject.ClientUDPLoss)%"
|
||||
$metaData = "Buffer=$($udpResultObject.BufferSize)K Connections=$($udpResultObject.Connections)"
|
||||
$resultSummary += CreateResultSummary -testResult $connResult -metaData $metaData -checkValues "PASS,FAIL,ABORTED" -testName $currentTestData.testName
|
||||
}
|
||||
if ( $finalStatus -imatch "TestFailed")
|
||||
{
|
||||
LogErr "Test failed. Last known status : $currentStatus."
|
||||
$testResult = "FAIL"
|
||||
}
|
||||
elseif ( $finalStatus -imatch "TestAborted")
|
||||
{
|
||||
LogErr "Test Aborted. Last known status : $currentStatus."
|
||||
$testResult = "ABORTED"
|
||||
}
|
||||
elseif ( $finalStatus -imatch "TestCompleted")
|
||||
{
|
||||
LogMsg "Test Completed."
|
||||
$testResult = "PASS"
|
||||
}
|
||||
elseif ( $finalStatus -imatch "TestRunning")
|
||||
{
|
||||
LogMsg "Powershell backgroud job for test is completed but VM is reporting that test is still running. Please check $LogDir\zkConsoleLogs.txt"
|
||||
LogMsg "Contests of summary.log : $testSummary"
|
||||
$testResult = "PASS"
|
||||
}
|
||||
LogMsg "Test result : $testResult"
|
||||
LogMsg "Test Completed"
|
||||
|
||||
|
||||
LogMsg "Uploading the test results to DB STARTED.."
|
||||
$dataSource = $xmlConfig.config.Azure.database.server
|
||||
$dbuser = $xmlConfig.config.Azure.database.user
|
||||
$dbpassword = $xmlConfig.config.Azure.database.password
|
||||
$database = $xmlConfig.config.Azure.database.dbname
|
||||
$dataTableName = $xmlConfig.config.Azure.database.dbtable
|
||||
$TestCaseName = $xmlConfig.config.Azure.database.testTag
|
||||
# Upload one row per buffer/connection measurement to the results database.
if ($dataSource -And $dbuser -And $dbpassword -And $database -And $dataTableName)
{
    if ( $UseAzureResourceManager )
    {
        $HostType = "Azure-ARM"
    }
    else
    {
        $HostType = "Azure"
    }
    $HostBy = ($xmlConfig.config.Azure.General.Location).Replace('"','')
    $HostOS = cat "$LogDir\VM_properties.csv" | Select-String "Host Version"| %{$_ -replace ",Host Version,",""}
    $GuestOSType = "Linux"
    # BUGFIX: $GuestDistro was previously computed twice with the identical
    # expression (once before the ARM check, once here); computed once now.
    $GuestDistro = cat "$LogDir\VM_properties.csv" | Select-String "OS type"| %{$_ -replace ",OS type,",""}
    $GuestSize = $clientVMData.InstanceSize
    $KernelVersion = cat "$LogDir\VM_properties.csv" | Select-String "Kernel version"| %{$_ -replace ",Kernel version,",""}
    $IPVersion = "IPv4"
    $ProtocolType = $($currentTestData.TestType)

    $connectionString = "Server=$dataSource;uid=$dbuser; pwd=$dbpassword;Database=$database;Encrypt=yes;TrustServerCertificate=no;Connection Timeout=30;"
    $SQLQuery = "INSERT INTO $dataTableName (TestCaseName,TestDate,HostType,HostBy,HostOS,GuestOSType,GuestDistro,GuestSize,KernelVersion,IPVersion,ProtocolType,DataPath,SendBufSize_KBytes,NumberOfConnections,TxThroughput_Gbps,RxThroughput_Gbps,DatagramLoss) VALUES "

    # One VALUES tuple per measurement object.
    foreach ( $udpResultObject in $FinalServerClientUDPResultObjArr )
    {
        $SQLQuery += "('$TestCaseName','$(Get-Date -Format yyyy-MM-dd)','$HostType','$HostBy','$HostOS','$GuestOSType','$GuestDistro','$GuestSize','$KernelVersion','$IPVersion','UDP','$DataPath','$($udpResultObject.BufferSize)','$($udpResultObject.Connections)','$($udpResultObject.ClientTxGbps)','$($udpResultObject.ServerRxGbps)','$($udpResultObject.ClientUDPLoss)'),"
    }

    $SQLQuery = $SQLQuery.TrimEnd(',')
    LogMsg $SQLQuery
    $connection = New-Object System.Data.SqlClient.SqlConnection
    $connection.ConnectionString = $connectionString
    $connection.Open()
    try
    {
        $command = $connection.CreateCommand()
        $command.CommandText = $SQLQuery
        $result = $command.executenonquery()
    }
    finally
    {
        # BUGFIX: the connection was not closed if ExecuteNonQuery threw;
        # release it on every exit path.
        $connection.Close()
    }
    LogMsg "Uploading the test results to DB DONE!!"
}
|
||||
else
|
||||
{
|
||||
LogMsg "Invalid database details. Failed to upload result to database!"
|
||||
}
|
||||
}
|
||||
catch
|
||||
{
|
||||
$ErrorMessage = $_.Exception.Message
|
||||
LogMsg "EXCEPTION : $ErrorMessage"
|
||||
}
|
||||
Finally
|
||||
{
|
||||
$metaData = "iperf3udp RESULT"
|
||||
if (!$testResult)
|
||||
{
|
||||
$testResult = "Aborted"
|
||||
}
|
||||
$resultArr += $testResult
|
||||
}
|
||||
}
|
||||
|
||||
else
|
||||
{
|
||||
$testResult = "Aborted"
|
||||
$resultArr += $testResult
|
||||
}
|
||||
|
||||
$result = GetFinalResultHeader -resultarr $resultArr
|
||||
|
||||
#Clean up the setup
|
||||
DoTestCleanUp -result $result -testName $currentTestData.testName -deployedServices $isDeployed -ResourceGroups $isDeployed
|
||||
|
||||
#Return the result and summery to the test suite script..
|
||||
return $result, $resultSummary
|
|
@ -0,0 +1,247 @@
|
|||
$result = ""
|
||||
$testResult = ""
|
||||
$resultArr = @()
|
||||
|
||||
$isDeployed = DeployVMS -setupType $currentTestData.setupType -Distro $Distro -xmlConfig $xmlConfig
|
||||
if ($isDeployed)
|
||||
{
|
||||
try
|
||||
{
|
||||
$testVMData = $allVMData
|
||||
ProvisionVMsForLisa -allVMData $allVMData -installPackagesOnRoleNames "none"
|
||||
LogMsg "Generating constansts.sh ..."
|
||||
$constantsFile = "$LogDir\constants.sh"
|
||||
Set-Content -Value "#Generated by Azure Automation." -Path $constantsFile
|
||||
foreach ( $param in $currentTestData.TestParameters.param)
|
||||
{
|
||||
Add-Content -Value "$param" -Path $constantsFile
|
||||
LogMsg "$param added to constants.sh"
|
||||
if ( $param -imatch "startThread" )
|
||||
{
|
||||
$startThread = [int]($param.Replace("startThread=",""))
|
||||
}
|
||||
if ( $param -imatch "maxThread" )
|
||||
{
|
||||
$maxThread = [int]($param.Replace("maxThread=",""))
|
||||
}
|
||||
}
|
||||
LogMsg "constanst.sh created successfully..."
|
||||
#endregion
|
||||
|
||||
#region EXECUTE TEST
|
||||
$myString = @"
|
||||
chmod +x perf_fio.sh
|
||||
./perf_fio.sh &> fioConsoleLogs.txt
|
||||
. azuremodules.sh
|
||||
collect_VM_properties
|
||||
"@
|
||||
|
||||
$myString2 = @"
|
||||
wget https://konkaciwestus1.blob.core.windows.net/scriptfiles/JSON.awk
|
||||
wget https://konkaciwestus1.blob.core.windows.net/scriptfiles/gawk
|
||||
wget https://konkaciwestus1.blob.core.windows.net/scriptfiles/fio_jason_parser.sh
|
||||
chmod +x *.sh
|
||||
cp fio_jason_parser.sh gawk JSON.awk /root/FIOLog/jsonLog/
|
||||
cd /root/FIOLog/jsonLog/
|
||||
./fio_jason_parser.sh
|
||||
cp perf_fio.csv /root
|
||||
chmod 666 /root/perf_fio.csv
|
||||
"@
|
||||
Set-Content "$LogDir\StartFioTest.sh" $myString
|
||||
Set-Content "$LogDir\ParseFioTestLogs.sh" $myString2
|
||||
RemoteCopy -uploadTo $testVMData.PublicIP -port $testVMData.SSHPort -files ".\$constantsFile,.\Testscripts\Linux\azuremodules.sh,.\Testscripts\Linux\perf_fio.sh,.\$LogDir\StartFioTest.sh,.\$LogDir\ParseFioTestLogs.sh" -username "root" -password $password -upload
|
||||
RemoteCopy -uploadTo $testVMData.PublicIP -port $testVMData.SSHPort -files $currentTestData.files -username "root" -password $password -upload
|
||||
$out = RunLinuxCmd -ip $testVMData.PublicIP -port $testVMData.SSHPort -username "root" -password $password -command "chmod +x *.sh" -runAsSudo
|
||||
$testJob = RunLinuxCmd -ip $testVMData.PublicIP -port $testVMData.SSHPort -username "root" -password $password -command "./StartFioTest.sh" -RunInBackground -runAsSudo
|
||||
|
||||
#endregion
|
||||
|
||||
#region MONITOR TEST
# Poll the background job and surface the VM-side progress log until the test ends.
while ( (Get-Job -Id $testJob).State -eq "Running" )
{
    # BUGFIX: there was no space between the -command argument string and
    # -runAsSudo ("tail -1 runlog.txt"-runAsSudo), corrupting RunLinuxCmd's
    # parameter binding.
    $currentStatus = RunLinuxCmd -ip $testVMData.PublicIP -port $testVMData.SSHPort -username "root" -password $password -command "tail -1 runlog.txt" -runAsSudo
    LogMsg "Current Test Staus : $currentStatus"
    WaitFor -seconds 20
}
|
||||
|
||||
$finalStatus = RunLinuxCmd -ip $testVMData.PublicIP -port $testVMData.SSHPort -username "root" -password $password -command "cat state.txt"
|
||||
RemoteCopy -downloadFrom $testVMData.PublicIP -port $testVMData.SSHPort -username "root" -password $password -download -downloadTo $LogDir -files "FIOTest-*.tar.gz"
|
||||
RemoteCopy -downloadFrom $testVMData.PublicIP -port $testVMData.SSHPort -username "root" -password $password -download -downloadTo $LogDir -files "VM_properties.csv"
|
||||
|
||||
$testSummary = $null
|
||||
|
||||
#endregion
|
||||
|
||||
# Map the VM-reported final state (state.txt) to the harness test result.
if ( $finalStatus -imatch "TestFailed")
{
    LogErr "Test failed. Last known status : $currentStatus."
    $testResult = "FAIL"
}
elseif ( $finalStatus -imatch "TestAborted")
{
    LogErr "Test Aborted. Last known status : $currentStatus."
    $testResult = "ABORTED"
}
elseif ( $finalStatus -imatch "TestCompleted")
{
    # Parse the fio logs on the VM and pull back the aggregated CSV.
    $out = RunLinuxCmd -ip $testVMData.PublicIP -port $testVMData.SSHPort -username "root" -password $password -command "/root/ParseFioTestLogs.sh"
    RemoteCopy -downloadFrom $testVMData.PublicIP -port $testVMData.SSHPort -username "root" -password $password -download -downloadTo $LogDir -files "perf_fio.csv"
    LogMsg "Test Completed."
    $testResult = "PASS"
}
elseif ( $finalStatus -imatch "TestRunning")
{
    # BUGFIX(log text): messages read "backgroud" and "Contests of summary.log";
    # corrected to "background" / "Contents".
    LogMsg "Powershell background job for test is completed but VM is reporting that test is still running. Please check $LogDir\zkConsoleLogs.txt"
    LogMsg "Contents of summary.log : $testSummary"
    $testResult = "PASS"
}
|
||||
LogMsg "Test result : $testResult"
|
||||
LogMsg "Test Completed"
|
||||
$resultSummary += CreateResultSummary -testResult $testResult -metaData "" -checkValues "PASS,FAIL,ABORTED" -testName $currentTestData.testName
|
||||
|
||||
try
|
||||
{
|
||||
foreach($line in (Get-Content "$LogDir\perf_fio.csv"))
|
||||
{
|
||||
if ( $line -imatch "Max IOPS of each mode" )
|
||||
{
|
||||
$maxIOPSforMode = $true
|
||||
$maxIOPSforBlockSize = $false
|
||||
$fioData = $false
|
||||
}
|
||||
if ( $line -imatch "Max IOPS of each BlockSize" )
|
||||
{
|
||||
$maxIOPSforMode = $false
|
||||
$maxIOPSforBlockSize = $true
|
||||
$fioData = $false
|
||||
}
|
||||
if ( $line -imatch "Iteration,TestType,BlockSize" )
|
||||
{
|
||||
$maxIOPSforMode = $false
|
||||
$maxIOPSforBlockSize = $false
|
||||
$fioData = $true
|
||||
}
|
||||
if ( $maxIOPSforMode )
|
||||
{
|
||||
Add-Content -Value $line -Path $LogDir\maxIOPSforMode.csv
|
||||
}
|
||||
if ( $maxIOPSforBlockSize )
|
||||
{
|
||||
Add-Content -Value $line -Path $LogDir\maxIOPSforBlockSize.csv
|
||||
}
|
||||
if ( $fioData )
|
||||
{
|
||||
Add-Content -Value $line -Path $LogDir\fioData.csv
|
||||
}
|
||||
}
|
||||
$maxIOPSforModeCsv = Import-Csv -Path $LogDir\maxIOPSforMode.csv
|
||||
$maxIOPSforBlockSizeCsv = Import-Csv -Path $LogDir\maxIOPSforBlockSize.csv
|
||||
$fioDataCsv = Import-Csv -Path $LogDir\fioData.csv
|
||||
|
||||
|
||||
LogMsg "Uploading the test results.."
|
||||
$dataSource = $xmlConfig.config.Azure.database.server
|
||||
$DBuser = $xmlConfig.config.Azure.database.user
|
||||
$DBpassword = $xmlConfig.config.Azure.database.password
|
||||
$database = $xmlConfig.config.Azure.database.dbname
|
||||
$dataTableName = $xmlConfig.config.Azure.database.dbtable
|
||||
$TestCaseName = $xmlConfig.config.Azure.database.testTag
|
||||
if ($dataSource -And $DBuser -And $DBpassword -And $database -And $dataTableName)
|
||||
{
|
||||
$GuestDistro = cat "$LogDir\VM_properties.csv" | Select-String "OS type"| %{$_ -replace ",OS type,",""}
|
||||
if ( $UseAzureResourceManager )
|
||||
{
|
||||
$HostType = "Azure-ARM"
|
||||
}
|
||||
else
|
||||
{
|
||||
$HostType = "Azure"
|
||||
}
|
||||
|
||||
$HostBy = ($xmlConfig.config.Azure.General.Location).Replace('"','')
|
||||
$HostOS = cat "$LogDir\VM_properties.csv" | Select-String "Host Version"| %{$_ -replace ",Host Version,",""}
|
||||
$GuestOSType = "Linux"
|
||||
$GuestDistro = cat "$LogDir\VM_properties.csv" | Select-String "OS type"| %{$_ -replace ",OS type,",""}
|
||||
$GuestSize = $testVMData.InstanceSize
|
||||
$KernelVersion = cat "$LogDir\VM_properties.csv" | Select-String "Kernel version"| %{$_ -replace ",Kernel version,",""}
|
||||
|
||||
$connectionString = "Server=$dataSource;uid=$DBuser; pwd=$DBpassword;Database=$database;Encrypt=yes;TrustServerCertificate=no;Connection Timeout=30;"
|
||||
|
||||
$SQLQuery = "INSERT INTO $dataTableName (TestCaseName,TestDate,HostType,HostBy,HostOS,GuestOSType,GuestDistro,GuestSize,KernelVersion,DiskSetup,BlockSize_KB,QDepth,seq_read_iops,seq_read_lat_usec,rand_read_iops,rand_read_lat_usec,seq_write_iops,seq_write_lat_usec,rand_write_iops,rand_write_lat_usec) VALUES "
|
||||
|
||||
for ( $QDepth = $startThread; $QDepth -le $maxThread; $QDepth *= 2 )
|
||||
{
|
||||
$seq_read_iops = ($fioDataCsv | where { $_.TestType -eq "read" -and $_.Threads -eq "$QDepth"} | Select ReadIOPS).ReadIOPS
|
||||
$seq_read_lat_usec = ($fioDataCsv | where { $_.TestType -eq "read" -and $_.Threads -eq "$QDepth"} | Select MaxOfReadMeanLatency).MaxOfReadMeanLatency
|
||||
|
||||
$rand_read_iops = ($fioDataCsv | where { $_.TestType -eq "randread" -and $_.Threads -eq "$QDepth"} | Select ReadIOPS).ReadIOPS
|
||||
$rand_read_lat_usec = ($fioDataCsv | where { $_.TestType -eq "randread" -and $_.Threads -eq "$QDepth"} | Select MaxOfReadMeanLatency).MaxOfReadMeanLatency
|
||||
|
||||
$seq_write_iops = ($fioDataCsv | where { $_.TestType -eq "write" -and $_.Threads -eq "$QDepth"} | Select WriteIOPS).WriteIOPS
|
||||
$seq_write_lat_usec = ($fioDataCsv | where { $_.TestType -eq "write" -and $_.Threads -eq "$QDepth"} | Select MaxOfWriteMeanLatency).MaxOfWriteMeanLatency
|
||||
|
||||
$rand_write_iops = ($fioDataCsv | where { $_.TestType -eq "randwrite" -and $_.Threads -eq "$QDepth"} | Select WriteIOPS).WriteIOPS
|
||||
$rand_write_lat_usec= ($fioDataCsv | where { $_.TestType -eq "randwrite" -and $_.Threads -eq "$QDepth"} | Select MaxOfWriteMeanLatency).MaxOfWriteMeanLatency
|
||||
|
||||
$BlockSize_KB= (($fioDataCsv | where { $_.Threads -eq "$QDepth"} | Select BlockSize)[0].BlockSize).Replace("K","")
|
||||
|
||||
$SQLQuery += "('$TestCaseName','$(Get-Date -Format yyyy-MM-dd)','$HostType','$HostBy','$HostOS','$GuestOSType','$GuestDistro','$GuestSize','$KernelVersion','RAID0:12xP30','$BlockSize_KB','$QDepth','$seq_read_iops','$seq_read_lat_usec','$rand_read_iops','$rand_read_lat_usec','$seq_write_iops','$seq_write_lat_usec','$rand_write_iops','$rand_write_lat_usec'),"
|
||||
LogMsg "Collected performace data for $QDepth QDepth."
|
||||
}
|
||||
|
||||
$SQLQuery = $SQLQuery.TrimEnd(',')
|
||||
Write-Host $SQLQuery
|
||||
$connection = New-Object System.Data.SqlClient.SqlConnection
|
||||
$connection.ConnectionString = $connectionString
|
||||
$connection.Open()
|
||||
|
||||
$command = $connection.CreateCommand()
|
||||
$command.CommandText = $SQLQuery
|
||||
|
||||
$result = $command.executenonquery()
|
||||
$connection.Close()
|
||||
LogMsg "Uploading the test results done!!"
|
||||
}
|
||||
else
|
||||
{
|
||||
LogMsg "Invalid database details. Failed to upload result to database!"
|
||||
}
|
||||
|
||||
}
|
||||
catch
|
||||
{
|
||||
$ErrorMessage = $_.Exception.Message
|
||||
LogErr "EXCEPTION : $ErrorMessage"
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
catch
|
||||
{
|
||||
$ErrorMessage = $_.Exception.Message
|
||||
LogMsg "EXCEPTION : $ErrorMessage"
|
||||
}
|
||||
Finally
{
    # BUGFIX: metadata previously said "NTTTCP RESULT" — a copy/paste from the
    # ntttcp suite; this script runs the FIO storage-performance test.
    $metaData = "FIO RESULT"
    if (!$testResult)
    {
        # No status was ever assigned (e.g. an exception before the status
        # mapping ran) — record the run as aborted.
        $testResult = "Aborted"
    }
    $resultArr += $testResult
}
|
||||
}
|
||||
|
||||
else
|
||||
{
|
||||
$testResult = "Aborted"
|
||||
$resultArr += $testResult
|
||||
}
|
||||
|
||||
$result = GetFinalResultHeader -resultarr $resultArr
|
||||
|
||||
#Clean up the setup
|
||||
DoTestCleanUp -result $result -testName $currentTestData.testName -deployedServices $isDeployed -ResourceGroups $isDeployed
|
||||
|
||||
#Return the result and summery to the test suite script..
|
||||
return $result, $resultSummary
|
|
@ -0,0 +1,207 @@
|
|||
$result = ""
|
||||
$testResult = ""
|
||||
$resultArr = @()
|
||||
|
||||
Set-Variable -Name OverrideVMSize -Value $currentTestData.OverrideVMSize -Scope Global
|
||||
$isDeployed = DeployVMS -setupType $currentTestData.setupType -Distro $Distro -xmlConfig $xmlConfig
|
||||
if ($isDeployed)
|
||||
{
|
||||
try
|
||||
{
|
||||
$testResult = $null
|
||||
$clientVMData = $allVMData
|
||||
#region CONFIGURE VM FOR N SERIES GPU TEST
|
||||
LogMsg "Test VM details :"
|
||||
LogMsg " RoleName : $($clientVMData.RoleName)"
|
||||
LogMsg " Public IP : $($clientVMData.PublicIP)"
|
||||
LogMsg " SSH Port : $($clientVMData.SSHPort)"
|
||||
#
|
||||
# PROVISION VMS FOR LISA WILL ENABLE ROOT USER AND WILL MAKE ENABLE PASSWORDLESS AUTHENTICATION ACROSS ALL VMS IN SAME HOSTED SERVICE.
|
||||
#
|
||||
|
||||
ProvisionVMsForLisa -allVMData $allVMData -installPackagesOnRoleNames "none"
|
||||
|
||||
#endregion
|
||||
|
||||
#region Install N-Vidia Drivers and reboot.
|
||||
$myString = @"
|
||||
cd /root/
|
||||
./GPU_Test.sh -logFolder /root &> GPUConsoleLogs.txt
|
||||
. azuremodules.sh
|
||||
collect_VM_properties
|
||||
"@
|
||||
$StartScriptName = "StartGPUDriverInstall.sh"
|
||||
Set-Content "$LogDir\$StartScriptName" $myString
|
||||
RemoteCopy -uploadTo $clientVMData.PublicIP -port $clientVMData.SSHPort -files ".\Testscripts\Linux\azuremodules.sh,.\Testscripts\Linux\GPU_Test.sh,.\$LogDir\$StartScriptName" -username "root" -password $password -upload
|
||||
$out = RunLinuxCmd -ip $clientVMData.PublicIP -port $clientVMData.SSHPort -username "root" -password $password -command "chmod +x *.sh"
|
||||
$testJob = RunLinuxCmd -ip $clientVMData.PublicIP -port $clientVMData.SSHPort -username "root" -password $password -command "/root/$StartScriptName" -RunInBackground
|
||||
#endregion
|
||||
|
||||
#region MONITOR TEST
|
||||
while ( (Get-Job -Id $testJob).State -eq "Running" )
|
||||
{
|
||||
$currentStatus = RunLinuxCmd -ip $clientVMData.PublicIP -port $clientVMData.SSHPort -username "root" -password $password -command "tail -n 1 GPUConsoleLogs.txt"
|
||||
LogMsg "Current Test Staus : $currentStatus"
|
||||
WaitFor -seconds 20
|
||||
}
|
||||
|
||||
RemoteCopy -downloadFrom $clientVMData.PublicIP -port $clientVMData.SSHPort -username "root" -password $password -download -downloadTo $LogDir -files "VM_properties.csv"
|
||||
RemoteCopy -downloadFrom $clientVMData.PublicIP -port $clientVMData.SSHPort -username "root" -password $password -download -downloadTo $LogDir -files "GPUConsoleLogs.txt"
|
||||
|
||||
|
||||
$GPUDriverInstallLogs = RunLinuxCmd -ip $clientVMData.PublicIP -port $clientVMData.SSHPort -username "root" -password $password -command "cat GPU_Test_Logs.txt"
|
||||
|
||||
if ($GPUDriverInstallLogs -imatch "GPU_DRIVER_INSTALLATION_SUCCESSFUL")
|
||||
{
|
||||
#Reboot VM.
|
||||
LogMsg "*********************************************************"
|
||||
LogMsg "GPU Drivers installed successfully. Restarting VM now..."
|
||||
LogMsg "*********************************************************"
|
||||
$restartStatus = RestartAllDeployments -allVMData $clientVMData
|
||||
if ($restartStatus -eq "True")
|
||||
{
|
||||
if (($clientVMData.InstanceSize -eq "Standard_NC6") -or ($clientVMData.InstanceSize -eq "Standard_NC6s_v2") -or ($clientVMData.InstanceSize -eq "Standard_NV6"))
|
||||
{
|
||||
$expectedCount = 1
|
||||
}
|
||||
elseif (($clientVMData.InstanceSize -eq "Standard_NC12") -or ($clientVMData.InstanceSize -eq "Standard_NC12s_v2") -or ($clientVMData.InstanceSize -eq "Standard_NV12"))
|
||||
{
|
||||
$expectedCount = 2
|
||||
}
|
||||
elseif (($clientVMData.InstanceSize -eq "Standard_NC24") -or ($clientVMData.InstanceSize -eq "Standard_NC24s_v2") -or ($clientVMData.InstanceSize -eq "Standard_NV24"))
|
||||
{
|
||||
$expectedCount = 4
|
||||
}
|
||||
elseif (($clientVMData.InstanceSize -eq "Standard_NC24r") -or ($clientVMData.InstanceSize -eq "Standard_NC24rs_v2"))
|
||||
{
|
||||
$expectedCount = 4
|
||||
}
|
||||
LogMsg "Test VM Size: $($clientVMData.InstanceSize). Expected GPU Adapters : $expectedCount"
|
||||
$errorCount = 0
|
||||
#Adding sleep of 180 seconds, giving time to load nvidia drivers.
|
||||
LogMsg "Waiting 3 minutes. (giving time to load nvidia drivers)"
|
||||
Start-Sleep -Seconds 180
|
||||
#region PCI Express pass-through
|
||||
$PCIExpress = RunLinuxCmd -ip $clientVMData.PublicIP -port $clientVMData.SSHPort -username "root" -password $password -command "lsvmbus" -ignoreLinuxExitCode
|
||||
Set-Content -Value $PCIExpress -Path $LogDir\PIC-Express-pass-through.txt -Force
|
||||
if ( (Select-String -Path $LogDir\PIC-Express-pass-through.txt -Pattern "PCI Express pass-through").Matches.Count -eq $expectedCount )
|
||||
{
|
||||
LogMsg "Expected `"PCI Express pass-through`" count: $expectedCount. Observed Count: $expectedCount"
|
||||
$resultSummary += CreateResultSummary -testResult "PASS" -metaData "PCI Express pass-through" -checkValues "PASS,FAIL,ABORTED" -testName $currentTestData.testName
|
||||
}
|
||||
else
|
||||
{
|
||||
$errorCount += 1
|
||||
LogErr "Error in lsvmbus Outoput."
|
||||
LogErr "$PCIExpress"
|
||||
$resultSummary += CreateResultSummary -testResult "FAIL" -metaData "PCI Express pass-through" -checkValues "PASS,FAIL,ABORTED" -testName $currentTestData.testName
|
||||
}
|
||||
#endregion
|
||||
|
||||
#region lspci
|
||||
$lspci = RunLinuxCmd -ip $clientVMData.PublicIP -port $clientVMData.SSHPort -username "root" -password $password -command "lspci" -ignoreLinuxExitCode
|
||||
Set-Content -Value $lspci -Path $LogDir\lspci.txt -Force
|
||||
if ( (Select-String -Path $LogDir\lspci.txt -Pattern "NVIDIA Corporation").Matches.Count -eq $expectedCount )
|
||||
{
|
||||
LogMsg "Expected `"3D controller: NVIDIA Corporation`" count: $expectedCount. Observed Count: $expectedCount"
|
||||
$resultSummary += CreateResultSummary -testResult "PASS" -metaData "lspci" -checkValues "PASS,FAIL,ABORTED" -testName $currentTestData.testName
|
||||
}
|
||||
else
|
||||
{
|
||||
$errorCount += 1
|
||||
LogErr "Error in lspci Outoput."
|
||||
LogErr "$lspci"
|
||||
$resultSummary += CreateResultSummary -testResult "FAIL" -metaData "lspci" -checkValues "PASS,FAIL,ABORTED" -testName $currentTestData.testName
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region PCI lshw -c video
|
||||
$lshw = RunLinuxCmd -ip $clientVMData.PublicIP -port $clientVMData.SSHPort -username "root" -password $password -command "lshw -c video" -ignoreLinuxExitCode
|
||||
Set-Content -Value $lshw -Path $LogDir\lshw-c-video.txt -Force
|
||||
if ( ((Select-String -Path $LogDir\lshw-c-video.txt -Pattern "product: NVIDIA Corporation").Matches.Count -eq $expectedCount) -or ((Select-String -Path $LogDir\lshw-c-video.txt -Pattern "vendor: NVIDIA Corporation").Matches.Count -eq $expectedCount) )
|
||||
{
|
||||
LogMsg "Expected Display adapters: $expectedCount. Observed adapters: $expectedCount"
|
||||
$resultSummary += CreateResultSummary -testResult "PASS" -metaData "lshw -c video" -checkValues "PASS,FAIL,ABORTED" -testName $currentTestData.testName
|
||||
}
|
||||
else
|
||||
{
|
||||
$errorCount += 1
|
||||
LogErr "Error in display adapters."
|
||||
LogErr "$lshw"
|
||||
$resultSummary += CreateResultSummary -testResult "FAIL" -metaData "lshw -c video" -checkValues "PASS,FAIL,ABORTED" -testName $currentTestData.testName
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region PCI nvidia-smi
|
||||
$nvidiasmi = RunLinuxCmd -ip $clientVMData.PublicIP -port $clientVMData.SSHPort -username "root" -password $password -command "nvidia-smi" -ignoreLinuxExitCode
|
||||
Set-Content -Value $nvidiasmi -Path $LogDir\nvidia-smi.txt -Force
|
||||
if ( (Select-String -Path $LogDir\nvidia-smi.txt -Pattern "Tesla ").Matches.Count -eq $expectedCount )
|
||||
{
|
||||
LogMsg "Expected Tesla count: $expectedCount. Observed count: $expectedCount"
|
||||
$resultSummary += CreateResultSummary -testResult "PASS" -metaData "nvidia-smi" -checkValues "PASS,FAIL,ABORTED" -testName $currentTestData.testName
|
||||
}
|
||||
else
|
||||
{
|
||||
$errorCount += 1
|
||||
LogErr "Error in nvidia-smi."
|
||||
LogErr "$nvidiasmi"
|
||||
$resultSummary += CreateResultSummary -testResult "FAIL" -metaData "nvidia-smi" -checkValues "PASS,FAIL,ABORTED" -testName $currentTestData.testName
|
||||
}
|
||||
#endregion
|
||||
}
|
||||
else
|
||||
{
|
||||
LogErr "Unable to connect to test VM after restart"
|
||||
$testResult = "FAIL"
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
|
||||
}
|
||||
#endregion
|
||||
|
||||
if ( ($errorCount -ne 0))
|
||||
{
|
||||
LogErr "Test failed. : $summary."
|
||||
$testResult = "FAIL"
|
||||
}
|
||||
elseif ( $errorCount -eq 0)
|
||||
{
|
||||
LogMsg "Test Completed."
|
||||
$testResult = "PASS"
|
||||
}
|
||||
LogMsg "Test result : $testResult"
|
||||
LogMsg "Test Completed"
|
||||
}
|
||||
catch
|
||||
{
|
||||
$ErrorMessage = $_.Exception.Message
|
||||
LogMsg "EXCEPTION : $ErrorMessage"
|
||||
}
|
||||
Finally
|
||||
{
|
||||
$metaData = "GPU Verification"
|
||||
if (!$testResult)
|
||||
{
|
||||
$testResult = "Aborted"
|
||||
}
|
||||
$resultArr += $testResult
|
||||
}
|
||||
}
|
||||
|
||||
else
|
||||
{
|
||||
$testResult = "Aborted"
|
||||
$resultArr += $testResult
|
||||
}
|
||||
|
||||
$result = GetFinalResultHeader -resultarr $resultArr
|
||||
|
||||
#Clean up the setup
|
||||
DoTestCleanUp -result $result -testName $currentTestData.testName -deployedServices $isDeployed -ResourceGroups $isDeployed
|
||||
|
||||
#Return the result and summery to the test suite script..
|
||||
return $result, $resultSummary
|
Двоичный файл не отображается.
Двоичный файл не отображается.
Двоичный файл не отображается.
Двоичный файл не отображается.
|
@ -0,0 +1,74 @@
|
|||
<#
|
||||
.SYNOPSIS
|
||||
This script authenticates PS sessing using Azure principal account.
|
||||
|
||||
.DESCRIPTION
|
||||
This script authenticates PS sessing using Azure principal account.
|
||||
|
||||
.PARAMETER -customSecretsFilePath
|
||||
Type: string
|
||||
Required: Optinal.
|
||||
|
||||
.INPUTS
|
||||
AzureSecrets.xml file. If you are running this script in Jenkins, then make sure to add a secret file with ID: Azure_Secrets_File
|
||||
If you are running the file locally, then pass secrets file path to -customSecretsFilePath parameter.
|
||||
|
||||
.NOTES
|
||||
Version: 1.0
|
||||
Author: Shital Savekar <v-shisav@microsoft.com>
|
||||
Creation Date: 14th December 2017
|
||||
Purpose/Change: Initial script development
|
||||
|
||||
.EXAMPLE
|
||||
.\AddAzureRmAccountFromSecretsFile.ps1 -customSecretsFilePath .\AzureSecrets.xml
|
||||
#>
|
||||
|
||||
param
|
||||
(
|
||||
[string]$customSecretsFilePath = $null
|
||||
)
|
||||
|
||||
#---------------------------------------------------------[Initializations]--------------------------------------------------------
|
||||
|
||||
if ( $customSecretsFilePath ) {
|
||||
$secretsFile = $customSecretsFilePath
|
||||
Write-Host "Using provided secrets file: $($secretsFile | Split-Path -Leaf)"
|
||||
}
|
||||
if ($env:Azure_Secrets_File) {
|
||||
$secretsFile = $env:Azure_Secrets_File
|
||||
Write-Host "Using predefined secrets file: $($secretsFile | Split-Path -Leaf) in Jenkins Global Environments."
|
||||
}
|
||||
if ( $secretsFile -eq $null ) {
|
||||
Write-Host "ERROR: Azure Secrets file not found in Jenkins / user not provided -customSecretsFilePath" -ForegroundColor Red -BackgroundColor Black
|
||||
exit 1
|
||||
}
|
||||
|
||||
#---------------------------------------------------------[Script Start]--------------------------------------------------------
|
||||
|
||||
if ( Test-Path $secretsFile ) {
|
||||
Write-Host "$($secretsFile | Split-Path -Leaf) found."
|
||||
Write-Host "---------------------------------"
|
||||
Write-Host "Authenticating Azure PS session.."
|
||||
$xmlSecrets = [xml](Get-Content $secretsFile)
|
||||
$ClientID = $xmlSecrets.secrets.SubscriptionServicePrincipalClientID
|
||||
$TenantID = $xmlSecrets.secrets.SubscriptionServicePrincipalTenantID
|
||||
$Key = $xmlSecrets.secrets.SubscriptionServicePrincipalKey
|
||||
$pass = ConvertTo-SecureString $key -AsPlainText -Force
|
||||
$mycred = New-Object System.Management.Automation.PSCredential ($ClientID, $pass)
|
||||
$out = Add-AzureRmAccount -ServicePrincipal -Tenant $TenantID -Credential $mycred
|
||||
$subIDSplitted = ($xmlSecrets.secrets.SubscriptionID).Split("-")
|
||||
$selectedSubscription = Select-AzureRmSubscription -SubscriptionId $xmlSecrets.secrets.SubscriptionID
|
||||
if ( $selectedSubscription.Subscription.Id -eq $xmlSecrets.secrets.SubscriptionID ) {
|
||||
Write-Host "Current Subscription : $($subIDSplitted[0])-xxxx-xxxx-xxxx-$($subIDSplitted[4])."
|
||||
Write-Host "---------------------------------"
|
||||
}
|
||||
else {
|
||||
Write-Host "There was error selecting $($subIDSplitted[0])-xxxx-xxxx-xxxx-$($subIDSplitted[4])."
|
||||
Write-Host "---------------------------------"
|
||||
}
|
||||
}
|
||||
else {
|
||||
Write-Host "$($secretsFile | Spilt-Path -Leaf) file is not added in Jenkins Global Environments OR it is not bound to 'Azure_Secrets_File' variable." -ForegroundColor Red -BackgroundColor Black
|
||||
Write-Host "Aborting."-ForegroundColor Red -BackgroundColor Black
|
||||
exit 1
|
||||
}
|
|
@ -1,7 +1,7 @@
|
|||
<Global>
|
||||
<Azure>
|
||||
<Subscription>
|
||||
<SubscriptionID>2cd20493-fe97-42ef-9ace-ab95b63d82c4</SubscriptionID>
|
||||
<SubscriptionID>xxxxxxxx-xxxx-xxxx-xxxxx-xxxxxxxxxxxx</SubscriptionID>
|
||||
<SubscriptionName>YOUR_SUBSCRIPTION_NAME</SubscriptionName>
|
||||
<ManagementEndpoint>https://management.core.windows.net</ManagementEndpoint>
|
||||
<Environment>AzureCloud</Environment>
|
||||
|
@ -17,7 +17,7 @@
|
|||
</ResultsDatabase>
|
||||
<TestCredentials>
|
||||
<LinuxUsername>YOUR_USERNAME</LinuxUsername>
|
||||
<LinuxPassword>Skynet@is@c0ming</LinuxPassword>
|
||||
<LinuxPassword>xxxxxxxxxxxxxxxxxxxxxxxxx</LinuxPassword>
|
||||
</TestCredentials>
|
||||
</Azure>
|
||||
</Global>
|
|
@ -1,15 +1,54 @@
|
|||
<TestCases>
|
||||
<test>
|
||||
<testName>VERIFY-NVIDIA-CUDA-DRIVER-FOR-NC6-VM</testName>
|
||||
<PowershellScript>ICA-N-Series-GPU-TEST.ps1</PowershellScript>
|
||||
<PowershellScript>VERIFY-NVIDIA-CUDA-DRIVER-INSTALLATION.ps1</PowershellScript>
|
||||
<setupType>SingleVM</setupType>
|
||||
<OverrideVMSize>Standard_NC6</OverrideVMSize>
|
||||
<SubtestValues>SOME,TEXTS,NEEDS,TO,BE,PRESENT,HERE,FOR,PRINTING,TEST,SUMMARY</SubtestValues>
|
||||
<files>.\remote-scripts\azuremodules.sh</files>
|
||||
<files>.\Testscripts\Linux\azuremodules.sh</files>
|
||||
<Platform>Azure</Platform>
|
||||
<Category>Functional</Category>
|
||||
<Area>default</Area>
|
||||
<Tags>gpu,pci_hyperv</Tags>
|
||||
<TestID>Functional_001</TestID>
|
||||
<TestID>Functional_GPU_001</TestID>
|
||||
</test>
|
||||
<test>
|
||||
<testName>VERIFY-NVIDIA-CUDA-DRIVER-FOR-NC12-VM</testName>
|
||||
<PowershellScript>VERIFY-NVIDIA-CUDA-DRIVER-INSTALLATION.ps1</PowershellScript>
|
||||
<setupType>SingleVM</setupType>
|
||||
<OverrideVMSize>Standard_NC12</OverrideVMSize>
|
||||
<SubtestValues>SOME,TEXTS,NEEDS,TO,BE,PRESENT,HERE,FOR,PRINTING,TEST,SUMMARY</SubtestValues>
|
||||
<files>.\Testscripts\Linux\azuremodules.sh</files>
|
||||
<Platform>Azure</Platform>
|
||||
<Category>Functional</Category>
|
||||
<Area>default</Area>
|
||||
<Tags>gpu,pci_hyperv</Tags>
|
||||
<TestID>Functional_GPU_002</TestID>
|
||||
</test>
|
||||
<test>
|
||||
<testName>VERIFY-NVIDIA-CUDA-DRIVER-FOR-NC24-VM</testName>
|
||||
<PowershellScript>VERIFY-NVIDIA-CUDA-DRIVER-INSTALLATION.ps1</PowershellScript>
|
||||
<setupType>SingleVM</setupType>
|
||||
<OverrideVMSize>Standard_NC24</OverrideVMSize>
|
||||
<SubtestValues>SOME,TEXTS,NEEDS,TO,BE,PRESENT,HERE,FOR,PRINTING,TEST,SUMMARY</SubtestValues>
|
||||
<files>.\Testscripts\Linux\azuremodules.sh</files>
|
||||
<Platform>Azure</Platform>
|
||||
<Category>Functional</Category>
|
||||
<Area>default</Area>
|
||||
<Tags>gpu,pci_hyperv</Tags>
|
||||
<TestID>Functional_GPU_003</TestID>
|
||||
</test>
|
||||
<test>
|
||||
<testName>VERIFY-NVIDIA-CUDA-DRIVER-FOR-NC24r-VM</testName>
|
||||
<PowershellScript>VERIFY-NVIDIA-CUDA-DRIVER-INSTALLATION.ps1</PowershellScript>
|
||||
<setupType>SingleVM</setupType>
|
||||
<OverrideVMSize>Standard_NC24r</OverrideVMSize>
|
||||
<SubtestValues>SOME,TEXTS,NEEDS,TO,BE,PRESENT,HERE,FOR,PRINTING,TEST,SUMMARY</SubtestValues>
|
||||
<files>.\Testscripts\Linux\azuremodules.sh</files>
|
||||
<Platform>Azure</Platform>
|
||||
<Category>Functional</Category>
|
||||
<Area>default</Area>
|
||||
<Tags>gpu,pci_hyperv</Tags>
|
||||
<TestID>Functional_GPU_004</TestID>
|
||||
</test>
|
||||
</TestCases>
|
|
@ -1,11 +1,11 @@
|
|||
<TestCases>
|
||||
<test>
|
||||
<testName>PERF-NETWORK-TCP-MULTICONNECTION</testName>
|
||||
<testName>PERF-NETWORK-TCP-THROUGHPUT-MULTICONNECTION</testName>
|
||||
<testScript></testScript>
|
||||
<PowershellScript>ICA-PERF-NTTTCP-TEST.ps1</PowershellScript>
|
||||
<PowershellScript>PERF-NETWORK-TCP-THROUGHPUT-MULTICONNECTION-NTTTCP.ps1</PowershellScript>
|
||||
<setupType>M1S1</setupType>
|
||||
<SubtestValues>This-tag-will-be-removed</SubtestValues>
|
||||
<files>.\remote-scripts\azuremodules.sh</files>
|
||||
<files>.\Testscripts\Linux\azuremodules.sh</files>
|
||||
<TestParameters>
|
||||
<param>testDuration=300</param>
|
||||
<param>testConnections="1 2 4 8 16 32 64 128 256 512 1024 2048 4096 6144 8192 10240"</param>
|
||||
|
@ -13,8 +13,90 @@
|
|||
<Platform>Azure</Platform>
|
||||
<Category>Performance</Category>
|
||||
<Area>Network</Area>
|
||||
<Tags>tcp,network</Tags>
|
||||
<TestID>Perf_001</TestID>
|
||||
<Tags>tcp,network,hv_netvsc</Tags>
|
||||
<TestID>Perf_Network_001</TestID>
|
||||
</test>
|
||||
<test>
|
||||
<testName>PERF-NETWORK-TCP-LATENCY-MULTICONNECTION</testName>
|
||||
<testScript></testScript>
|
||||
<PowershellScript>PERF-NETWORK-TCP-LATENCY-MULTICONNECTION-LAGSCOPE.ps1</PowershellScript>
|
||||
<setupType>M1S1</setupType>
|
||||
<SubtestValues>This-tag-will-be-removed</SubtestValues>
|
||||
<files>.\Testscripts\Linux\azuremodules.sh</files>
|
||||
<TestParameters>
|
||||
<param>pingIteration=1000000</param>
|
||||
</TestParameters>
|
||||
<Platform>Azure</Platform>
|
||||
<Category>Performance</Category>
|
||||
<Area>Network</Area>
|
||||
<Tags>tcp,network,hv_netvsc</Tags>
|
||||
<TestID>Perf_Network_002</TestID>
|
||||
</test>
|
||||
<test>
|
||||
<testName>PERF-NETWORK-UDP-THROUGHPUT-MULTICONNECTION</testName>
|
||||
<testScript></testScript>
|
||||
<PowershellScript>PERF-NETWORK-UDP-THROUGHPUT-MULTICONNECTION-IPERF3.ps1</PowershellScript>
|
||||
<setupType>M1S1</setupType>
|
||||
<SubtestValues>This-tag-will-be-removed</SubtestValues>
|
||||
<files>.\Testscripts\Linux\azuremodules.sh</files>
|
||||
<TestParameters>
|
||||
<param>testDuration=300</param>
|
||||
<param>testType=udp</param>
|
||||
<param>max_parallel_connections_per_instance=64</param>
|
||||
<param>connections=(1 2 4 8 16 32 64 128 256 512 1024)</param>
|
||||
<param>bufferLengths=(1024 8192)</param>
|
||||
<param>IPversion=4</param>
|
||||
</TestParameters>
|
||||
<Platform>Azure</Platform>
|
||||
<Category>Performance</Category>
|
||||
<Area>Network</Area>
|
||||
<Tags>udp,network,hv_netvsc</Tags>
|
||||
<TestID>Perf_Network_003</TestID>
|
||||
</test>
|
||||
<test>
|
||||
<testName>PERF-STORAGE-4K-IO</testName>
|
||||
<testScript></testScript>
|
||||
<PowershellScript>PERF-STORAGE-MULTIDISK-RAID0-FIO.ps1</PowershellScript>
|
||||
<setupType>DS14DISK12</setupType>
|
||||
<SubtestValues>This-tag-will-be-removed</SubtestValues>
|
||||
<files>.\Testscripts\Linux\azuremodules.sh</files>
|
||||
<TestParameters>
|
||||
<param>modes='randread randwrite read write'</param>
|
||||
<param>startThread=1</param>
|
||||
<param>maxThread=1024</param>
|
||||
<param>startIO=4</param>
|
||||
<param>numjobs=1</param>
|
||||
<param>ioruntime=300</param>
|
||||
<param>maxIO=4</param>
|
||||
<param>fileSize=1023G</param>
|
||||
</TestParameters>
|
||||
<Platform>Azure</Platform>
|
||||
<Category>Performance</Category>
|
||||
<Area>Storage</Area>
|
||||
<Tags>hv_storvsc,storage</Tags>
|
||||
<TestID>Perf_Storage_001</TestID>
|
||||
</test>
|
||||
<test>
|
||||
<testName>PERF-STORAGE-1024K-IO</testName>
|
||||
<testScript></testScript>
|
||||
<PowershellScript>PERF-STORAGE-MULTIDISK-RAID0-FIO.ps1</PowershellScript>
|
||||
<setupType>DS14DISK12</setupType>
|
||||
<SubtestValues>This-tag-will-be-removed</SubtestValues>
|
||||
<files>.\Testscripts\Linux\azuremodules.sh</files>
|
||||
<TestParameters>
|
||||
<param>modes='randread randwrite read write'</param>
|
||||
<param>startThread=1</param>
|
||||
<param>maxThread=1024</param>
|
||||
<param>startIO=4</param>
|
||||
<param>numjobs=1</param>
|
||||
<param>ioruntime=300</param>
|
||||
<param>maxIO=1024</param>
|
||||
<param>fileSize=1023G</param>
|
||||
</TestParameters>
|
||||
<Platform>Azure</Platform>
|
||||
<Category>Performance</Category>
|
||||
<Area>Storage</Area>
|
||||
<Tags>hv_storvsc,storage</Tags>
|
||||
<TestID>Perf_Storage_002</TestID>
|
||||
</test>
|
||||
|
||||
</TestCases>
|
|
@ -1,37 +1,3 @@
|
|||
<TestCases>
|
||||
<test>
|
||||
<TestName>VERIFY-DEPLOYMENT-PROVISION</TestName>
|
||||
<PowershellScript>BVT-VERIFY-DEPLOYMENT-PROVISION.ps1</PowershellScript>
|
||||
<files></files>
|
||||
<setupType>SingleVM</setupType>
|
||||
<Platform>Azure</Platform>
|
||||
<Category>Regression</Category>
|
||||
<Area>default</Area>
|
||||
<Tags>bvt</Tags>
|
||||
<TestID>Regression_001</TestID>
|
||||
</test>
|
||||
<test>
|
||||
<TestName>VERIFY-HOSTNAME</TestName>
|
||||
<testScript>BVT-VERIFY-HOSTNAME.py</testScript>
|
||||
<PowershellScript>BVT-VERIFY-HOSTNAME.ps1</PowershellScript>
|
||||
<files>.\remote-scripts\nslookup.py,.\remote-scripts\BVT-VERIFY-HOSTNAME.py,.\remote-scripts\azuremodules.py</files>
|
||||
<setupType>SingleVM</setupType>
|
||||
<Platform>Azure</Platform>
|
||||
<Category>Regression</Category>
|
||||
<Area>default</Area>
|
||||
<Tags>bvt</Tags>
|
||||
<TestID>Regression_002</TestID>
|
||||
</test>
|
||||
<test>
|
||||
<TestName>VERIFY-ROOT-PASSWORD-DELETED</TestName>
|
||||
<testScript>BVT-IS-ROOT-PASSWORD-DELETED.py</testScript>
|
||||
<PowershellScript>BVT-IS-ROOT-PASSWORD-DELETED.ps1</PowershellScript>
|
||||
<files>.\remote-scripts\BVT-IS-ROOT-PASSWORD-DELETED.py,.\remote-scripts\azuremodules.py</files>
|
||||
<setupType>SingleVM</setupType>
|
||||
<Platform>Azure</Platform>
|
||||
<Category>Regression</Category>
|
||||
<Area>default</Area>
|
||||
<Tags>bvt</Tags>
|
||||
<TestID>Regression_003</TestID>
|
||||
</test>
|
||||
|
||||
</TestCases>
|
|
@ -14,7 +14,7 @@
|
|||
<TestName>VERIFY-HOSTNAME</TestName>
|
||||
<testScript>BVT-VERIFY-HOSTNAME.py</testScript>
|
||||
<PowershellScript>BVT-VERIFY-HOSTNAME.ps1</PowershellScript>
|
||||
<files>.\remote-scripts\nslookup.py,.\remote-scripts\BVT-VERIFY-HOSTNAME.py,.\remote-scripts\azuremodules.py</files>
|
||||
<files>.\Testscripts\Linux\nslookup.py,.\Testscripts\Linux\BVT-VERIFY-HOSTNAME.py,.\Testscripts\Linux\azuremodules.py</files>
|
||||
<setupType>SingleVM</setupType>
|
||||
<Platform>Azure</Platform>
|
||||
<Category>Smoke</Category>
|
||||
|
@ -26,7 +26,7 @@
|
|||
<TestName>VERIFY-ROOT-PASSWORD-DELETED</TestName>
|
||||
<testScript>BVT-IS-ROOT-PASSWORD-DELETED.py</testScript>
|
||||
<PowershellScript>BVT-IS-ROOT-PASSWORD-DELETED.ps1</PowershellScript>
|
||||
<files>.\remote-scripts\BVT-IS-ROOT-PASSWORD-DELETED.py,.\remote-scripts\azuremodules.py</files>
|
||||
<files>.\Testscripts\Linux\BVT-IS-ROOT-PASSWORD-DELETED.py,.\Testscripts\Linux\azuremodules.py</files>
|
||||
<setupType>SingleVM</setupType>
|
||||
<Platform>Azure</Platform>
|
||||
<Category>Smoke</Category>
|
||||
|
|
|
@ -1,19 +1,3 @@
|
|||
<TestCases>
|
||||
<test>
|
||||
<testName>PERF-NETWORK-TCP-MULTICONNECTION</testName>
|
||||
<testScript></testScript>
|
||||
<PowershellScript>ICA-PERF-NTTTCP-TEST.ps1</PowershellScript>
|
||||
<setupType>M1S1</setupType>
|
||||
<SubtestValues>This-tag-will-be-removed</SubtestValues>
|
||||
<files>.\remote-scripts\azuremodules.sh</files>
|
||||
<TestParameters>
|
||||
<param>testDuration=300</param>
|
||||
<param>testConnections="1 2 4 8 16 32 64 128 256 512 1024 2048 4096 6144 8192 10240"</param>
|
||||
</TestParameters>
|
||||
<Platform>Azure</Platform>
|
||||
<Category>Stress</Category>
|
||||
<Area>Network</Area>
|
||||
<Tags>tcp,network</Tags>
|
||||
<TestID>Stress_001</TestID>
|
||||
</test>
|
||||
|
||||
</TestCases>
|
|
@ -26,101 +26,6 @@
|
|||
</VirtualMachine>
|
||||
</HostedService>
|
||||
</M1S1>
|
||||
<NetPerf1HS2VM>
|
||||
<isDeployed>NO</isDeployed>
|
||||
<HostedService>
|
||||
<VirtualMachine>
|
||||
<state></state>
|
||||
<ARMInstanceSize>Standard_G5</ARMInstanceSize>
|
||||
<InstanceSize>Standard_G5</InstanceSize>
|
||||
<!-- <InstanceSize>A9</InstanceSize>
|
||||
<ARMInstanceSize>A9</ARMInstanceSize> -->
|
||||
<EndPoints>
|
||||
<Name>SSH</Name>
|
||||
<Protocol>tcp</Protocol>
|
||||
<LocalPort>22</LocalPort>
|
||||
<PublicPort>22</PublicPort>
|
||||
</EndPoints>
|
||||
<EndPoints>
|
||||
<Name>TCPtest</Name>
|
||||
<Protocol>tcp</Protocol>
|
||||
<LocalPort>750</LocalPort>
|
||||
<PublicPort>750</PublicPort>
|
||||
<LoadBalanced>False</LoadBalanced>
|
||||
</EndPoints>
|
||||
<EndPoints>
|
||||
<Name>UDPtest</Name>
|
||||
<Protocol>udp</Protocol>
|
||||
<LocalPort>990</LocalPort>
|
||||
<PublicPort>990</PublicPort>
|
||||
<LoadBalanced>False</LoadBalanced>
|
||||
</EndPoints>
|
||||
<DataDisk></DataDisk>
|
||||
</VirtualMachine>
|
||||
<VirtualMachine>
|
||||
<state></state>
|
||||
<ARMInstanceSize>Standard_G5</ARMInstanceSize>
|
||||
<InstanceSize>Standard_G5</InstanceSize>
|
||||
<!-- <InstanceSize>A9</InstanceSize>
|
||||
<ARMInstanceSize>A9</ARMInstanceSize> -->
|
||||
<EndPoints>
|
||||
<Name>SSH</Name>
|
||||
<Protocol>tcp</Protocol>
|
||||
<LocalPort>22</LocalPort>
|
||||
<PublicPort>222</PublicPort>
|
||||
</EndPoints>
|
||||
<EndPoints>
|
||||
<Name>TCPtest</Name>
|
||||
<Protocol>tcp</Protocol>
|
||||
<LocalPort>1750</LocalPort>
|
||||
<PublicPort>1750</PublicPort>
|
||||
<LoadBalanced>False</LoadBalanced>
|
||||
</EndPoints>
|
||||
<EndPoints>
|
||||
<Name>UDPtest</Name>
|
||||
<Protocol>udp</Protocol>
|
||||
<LocalPort>1990</LocalPort>
|
||||
<PublicPort>1990</PublicPort>
|
||||
<LoadBalanced>False</LoadBalanced>
|
||||
</EndPoints>
|
||||
<DataDisk></DataDisk>
|
||||
</VirtualMachine>
|
||||
</HostedService>
|
||||
</NetPerf1HS2VM>
|
||||
|
||||
<DBPerf1HS2VM>
|
||||
<isDeployed>NO</isDeployed>
|
||||
<HostedService>
|
||||
<VirtualMachine>
|
||||
<state></state>
|
||||
<ARMInstanceSize>Standard_D14</ARMInstanceSize>
|
||||
<InstanceSize>Standard_D14</InstanceSize>
|
||||
<!-- <InstanceSize>A9</InstanceSize>
|
||||
<ARMInstanceSize>A9</ARMInstanceSize> -->
|
||||
<EndPoints>
|
||||
<Name>SSH</Name>
|
||||
<Protocol>tcp</Protocol>
|
||||
<LocalPort>22</LocalPort>
|
||||
<PublicPort>22</PublicPort>
|
||||
</EndPoints>
|
||||
<DataDisk></DataDisk>
|
||||
</VirtualMachine>
|
||||
<VirtualMachine>
|
||||
<state></state>
|
||||
<ARMInstanceSize>Standard_D14</ARMInstanceSize>
|
||||
<InstanceSize>Standard_D14</InstanceSize>
|
||||
<!-- <InstanceSize>A9</InstanceSize>
|
||||
<ARMInstanceSize>A9</ARMInstanceSize> -->
|
||||
<EndPoints>
|
||||
<Name>SSH</Name>
|
||||
<Protocol>tcp</Protocol>
|
||||
<LocalPort>22</LocalPort>
|
||||
<PublicPort>222</PublicPort>
|
||||
</EndPoints>
|
||||
<DataDisk></DataDisk>
|
||||
</VirtualMachine>
|
||||
</HostedService>
|
||||
</DBPerf1HS2VM>
|
||||
<S1C15>
|
||||
<isDeployed>NO</isDeployed>
|
||||
<HostedService>
|
||||
|
@ -334,503 +239,4 @@
|
|||
</VirtualMachine>
|
||||
</HostedService>
|
||||
</S1C15>
|
||||
<RDMA2VMA8>
|
||||
<isDeployed>NO</isDeployed>
|
||||
<HostedService>
|
||||
<VirtualMachine>
|
||||
<state></state>
|
||||
<InstanceSize>Standard_A8</InstanceSize>
|
||||
<ARMInstanceSize>Standard_A8</ARMInstanceSize>
|
||||
<RoleName>server-vm</RoleName>
|
||||
<EndPoints>
|
||||
<Name>SSH</Name>
|
||||
<Protocol>tcp</Protocol>
|
||||
<LocalPort>22</LocalPort>
|
||||
<PublicPort>1111</PublicPort>
|
||||
</EndPoints>
|
||||
<DataDisk></DataDisk>
|
||||
</VirtualMachine>
|
||||
<VirtualMachine>
|
||||
<state></state>
|
||||
<InstanceSize>Standard_A8</InstanceSize>
|
||||
<ARMInstanceSize>Standard_A8</ARMInstanceSize>
|
||||
<RoleName>client-vm</RoleName>
|
||||
<EndPoints>
|
||||
<Name>SSH</Name>
|
||||
<Protocol>tcp</Protocol>
|
||||
<LocalPort>22</LocalPort>
|
||||
<PublicPort>1112</PublicPort>
|
||||
</EndPoints>
|
||||
<DataDisk></DataDisk>
|
||||
</VirtualMachine>
|
||||
</HostedService>
|
||||
</RDMA2VMA8>
|
||||
<RDMA2VMA9>
|
||||
<isDeployed>NO</isDeployed>
|
||||
<HostedService>
|
||||
<VirtualMachine>
|
||||
<state></state>
|
||||
<InstanceSize>Standard_A9</InstanceSize>
|
||||
<ARMInstanceSize>Standard_A9</ARMInstanceSize>
|
||||
<RoleName>server-vm</RoleName>
|
||||
<EndPoints>
|
||||
<Name>SSH</Name>
|
||||
<Protocol>tcp</Protocol>
|
||||
<LocalPort>22</LocalPort>
|
||||
<PublicPort>1111</PublicPort>
|
||||
</EndPoints>
|
||||
<DataDisk></DataDisk>
|
||||
</VirtualMachine>
|
||||
<VirtualMachine>
|
||||
<state></state>
|
||||
<InstanceSize>Standard_A9</InstanceSize>
|
||||
<ARMInstanceSize>Standard_A9</ARMInstanceSize>
|
||||
<RoleName>client-vm</RoleName>
|
||||
<EndPoints>
|
||||
<Name>SSH</Name>
|
||||
<Protocol>tcp</Protocol>
|
||||
<LocalPort>22</LocalPort>
|
||||
<PublicPort>1112</PublicPort>
|
||||
</EndPoints>
|
||||
<DataDisk></DataDisk>
|
||||
</VirtualMachine>
|
||||
</HostedService>
|
||||
</RDMA2VMA9>
|
||||
<RDMA2VMH16r>
|
||||
<isDeployed>NO</isDeployed>
|
||||
<HostedService>
|
||||
<VirtualMachine>
|
||||
<state></state>
|
||||
<InstanceSize>Standard_H16r</InstanceSize>
|
||||
<ARMInstanceSize>Standard_H16r</ARMInstanceSize>
|
||||
<RoleName>server-vm</RoleName>
|
||||
<EndPoints>
|
||||
<Name>SSH</Name>
|
||||
<Protocol>tcp</Protocol>
|
||||
<LocalPort>22</LocalPort>
|
||||
<PublicPort>1111</PublicPort>
|
||||
</EndPoints>
|
||||
<DataDisk></DataDisk>
|
||||
</VirtualMachine>
|
||||
<VirtualMachine>
|
||||
<state></state>
|
||||
<InstanceSize>Standard_H16r</InstanceSize>
|
||||
<ARMInstanceSize>Standard_H16r</ARMInstanceSize>
|
||||
<RoleName>client-vm</RoleName>
|
||||
<EndPoints>
|
||||
<Name>SSH</Name>
|
||||
<Protocol>tcp</Protocol>
|
||||
<LocalPort>22</LocalPort>
|
||||
<PublicPort>1112</PublicPort>
|
||||
</EndPoints>
|
||||
<DataDisk></DataDisk>
|
||||
</VirtualMachine>
|
||||
</HostedService>
|
||||
</RDMA2VMH16r>
|
||||
<RDMA2VMH16mr>
|
||||
<isDeployed>NO</isDeployed>
|
||||
<HostedService>
|
||||
<VirtualMachine>
|
||||
<state></state>
|
||||
<InstanceSize>Standard_H16mr</InstanceSize>
|
||||
<ARMInstanceSize>Standard_H16mr</ARMInstanceSize>
|
||||
<RoleName>server-vm</RoleName>
|
||||
<EndPoints>
|
||||
<Name>SSH</Name>
|
||||
<Protocol>tcp</Protocol>
|
||||
<LocalPort>22</LocalPort>
|
||||
<PublicPort>1111</PublicPort>
|
||||
</EndPoints>
|
||||
<DataDisk></DataDisk>
|
||||
</VirtualMachine>
|
||||
<VirtualMachine>
|
||||
<state></state>
|
||||
<InstanceSize>Standard_H16mr</InstanceSize>
|
||||
<ARMInstanceSize>Standard_H16mr</ARMInstanceSize>
|
||||
<RoleName>client-vm</RoleName>
|
||||
<EndPoints>
|
||||
<Name>SSH</Name>
|
||||
<Protocol>tcp</Protocol>
|
||||
<LocalPort>22</LocalPort>
|
||||
<PublicPort>1112</PublicPort>
|
||||
</EndPoints>
|
||||
<DataDisk></DataDisk>
|
||||
</VirtualMachine>
|
||||
</HostedService>
|
||||
</RDMA2VMH16mr>
|
||||
<M1S5>
|
||||
<!-- This setup type defines One Master and Three slave machines. Hence name is M(aster)1S(lave)3. -->
|
||||
<isDeployed>NO</isDeployed>
|
||||
<HostedService>
|
||||
<VirtualMachine>
|
||||
<state></state>
|
||||
<InstanceSize>Standard_G5</InstanceSize>
|
||||
<ARMInstanceSize>Standard_G5</ARMInstanceSize>
|
||||
<RoleName>master</RoleName>
|
||||
<EndPoints>
|
||||
<Name>SSH</Name>
|
||||
<Protocol>tcp</Protocol>
|
||||
<LocalPort>22</LocalPort>
|
||||
<PublicPort>1111</PublicPort>
|
||||
</EndPoints>
|
||||
<DataDisk></DataDisk>
|
||||
</VirtualMachine>
|
||||
<VirtualMachine>
|
||||
<state></state>
|
||||
<InstanceSize>Standard_G5</InstanceSize>
|
||||
<ARMInstanceSize>Standard_G5</ARMInstanceSize>
|
||||
<RoleName>slave1</RoleName>
|
||||
<EndPoints>
|
||||
<Name>SSH</Name>
|
||||
<Protocol>tcp</Protocol>
|
||||
<LocalPort>22</LocalPort>
|
||||
<PublicPort>1112</PublicPort>
|
||||
</EndPoints>
|
||||
<DataDisk></DataDisk>
|
||||
</VirtualMachine>
|
||||
<VirtualMachine>
|
||||
<state></state>
|
||||
<InstanceSize>Standard_G5</InstanceSize>
|
||||
<ARMInstanceSize>Standard_G5</ARMInstanceSize>
|
||||
<RoleName>slave2</RoleName>
|
||||
<EndPoints>
|
||||
<Name>SSH</Name>
|
||||
<Protocol>tcp</Protocol>
|
||||
<LocalPort>22</LocalPort>
|
||||
<PublicPort>1113</PublicPort>
|
||||
</EndPoints>
|
||||
<DataDisk></DataDisk>
|
||||
</VirtualMachine>
|
||||
<VirtualMachine>
|
||||
<state></state>
|
||||
<InstanceSize>Standard_G5</InstanceSize>
|
||||
<ARMInstanceSize>Standard_G5</ARMInstanceSize>
|
||||
<RoleName>slave3</RoleName>
|
||||
<EndPoints>
|
||||
<Name>SSH</Name>
|
||||
<Protocol>tcp</Protocol>
|
||||
<LocalPort>22</LocalPort>
|
||||
<PublicPort>1114</PublicPort>
|
||||
</EndPoints>
|
||||
<DataDisk></DataDisk>
|
||||
</VirtualMachine>
|
||||
<VirtualMachine>
|
||||
<state></state>
|
||||
<InstanceSize>Standard_G5</InstanceSize>
|
||||
<ARMInstanceSize>Standard_G5</ARMInstanceSize>
|
||||
<RoleName>slave4</RoleName>
|
||||
<EndPoints>
|
||||
<Name>SSH</Name>
|
||||
<Protocol>tcp</Protocol>
|
||||
<LocalPort>22</LocalPort>
|
||||
<PublicPort>1115</PublicPort>
|
||||
</EndPoints>
|
||||
<DataDisk></DataDisk>
|
||||
</VirtualMachine>
|
||||
<VirtualMachine>
|
||||
<state></state>
|
||||
<InstanceSize>Standard_G5</InstanceSize>
|
||||
<ARMInstanceSize>Standard_G5</ARMInstanceSize>
|
||||
<RoleName>slave5</RoleName>
|
||||
<EndPoints>
|
||||
<Name>SSH</Name>
|
||||
<Protocol>tcp</Protocol>
|
||||
<LocalPort>22</LocalPort>
|
||||
<PublicPort>1116</PublicPort>
|
||||
</EndPoints>
|
||||
<DataDisk></DataDisk>
|
||||
</VirtualMachine>
|
||||
</HostedService>
|
||||
</M1S5>
|
||||
<M1S15>
|
||||
<isDeployed>NO</isDeployed>
|
||||
<HostedService>
|
||||
<VirtualMachine>
|
||||
<state></state>
|
||||
<InstanceSize>Large</InstanceSize>
|
||||
<ARMInstanceSize>Standard_D2</ARMInstanceSize>
|
||||
<RoleName>master-vm</RoleName>
|
||||
<EndPoints>
|
||||
<Name>SSH</Name>
|
||||
<Protocol>tcp</Protocol>
|
||||
<LocalPort>22</LocalPort>
|
||||
<PublicPort>1111</PublicPort>
|
||||
</EndPoints>
|
||||
<DataDisk></DataDisk>
|
||||
</VirtualMachine>
|
||||
<VirtualMachine>
|
||||
<state></state>
|
||||
<InstanceSize>Large</InstanceSize>
|
||||
<ARMInstanceSize>Standard_D2</ARMInstanceSize>
|
||||
<RoleName>slave-vm-1</RoleName>
|
||||
<EndPoints>
|
||||
<Name>SSH</Name>
|
||||
<Protocol>tcp</Protocol>
|
||||
<LocalPort>22</LocalPort>
|
||||
<PublicPort>1112</PublicPort>
|
||||
</EndPoints>
|
||||
<DataDisk></DataDisk>
|
||||
</VirtualMachine>
|
||||
<VirtualMachine>
|
||||
<state></state>
|
||||
<InstanceSize>Large</InstanceSize>
|
||||
<ARMInstanceSize>Standard_D2</ARMInstanceSize>
|
||||
<RoleName>slave-vm-2</RoleName>
|
||||
<EndPoints>
|
||||
<Name>SSH</Name>
|
||||
<Protocol>tcp</Protocol>
|
||||
<LocalPort>22</LocalPort>
|
||||
<PublicPort>1113</PublicPort>
|
||||
</EndPoints>
|
||||
<DataDisk></DataDisk>
|
||||
</VirtualMachine>
|
||||
<VirtualMachine>
|
||||
<state></state>
|
||||
<InstanceSize>Large</InstanceSize>
|
||||
<ARMInstanceSize>Standard_D2</ARMInstanceSize>
|
||||
<RoleName>slave-vm-3</RoleName>
|
||||
<EndPoints>
|
||||
<Name>SSH</Name>
|
||||
<Protocol>tcp</Protocol>
|
||||
<LocalPort>22</LocalPort>
|
||||
<PublicPort>1114</PublicPort>
|
||||
</EndPoints>
|
||||
<DataDisk></DataDisk>
|
||||
</VirtualMachine>
|
||||
<VirtualMachine>
|
||||
<state></state>
|
||||
<InstanceSize>Large</InstanceSize>
|
||||
<ARMInstanceSize>Standard_D2</ARMInstanceSize>
|
||||
<RoleName>slave-vm-4</RoleName>
|
||||
<EndPoints>
|
||||
<Name>SSH</Name>
|
||||
<Protocol>tcp</Protocol>
|
||||
<LocalPort>22</LocalPort>
|
||||
<PublicPort>1115</PublicPort>
|
||||
</EndPoints>
|
||||
<DataDisk></DataDisk>
|
||||
</VirtualMachine>
|
||||
<VirtualMachine>
|
||||
<state></state>
|
||||
<InstanceSize>Large</InstanceSize>
|
||||
<ARMInstanceSize>Standard_D2</ARMInstanceSize>
|
||||
<RoleName>slave-vm-5</RoleName>
|
||||
<EndPoints>
|
||||
<Name>SSH</Name>
|
||||
<Protocol>tcp</Protocol>
|
||||
<LocalPort>22</LocalPort>
|
||||
<PublicPort>1116</PublicPort>
|
||||
</EndPoints>
|
||||
<DataDisk></DataDisk>
|
||||
</VirtualMachine>
|
||||
<VirtualMachine>
|
||||
<state></state>
|
||||
<InstanceSize>Large</InstanceSize>
|
||||
<ARMInstanceSize>Standard_D2</ARMInstanceSize>
|
||||
<RoleName>slave-vm-6</RoleName>
|
||||
<EndPoints>
|
||||
<Name>SSH</Name>
|
||||
<Protocol>tcp</Protocol>
|
||||
<LocalPort>22</LocalPort>
|
||||
<PublicPort>1117</PublicPort>
|
||||
</EndPoints>
|
||||
<DataDisk></DataDisk>
|
||||
</VirtualMachine>
|
||||
<VirtualMachine>
|
||||
<state></state>
|
||||
<InstanceSize>Large</InstanceSize>
|
||||
<ARMInstanceSize>Standard_D2</ARMInstanceSize>
|
||||
<RoleName>slave-vm-7</RoleName>
|
||||
<EndPoints>
|
||||
<Name>SSH</Name>
|
||||
<Protocol>tcp</Protocol>
|
||||
<LocalPort>22</LocalPort>
|
||||
<PublicPort>1118</PublicPort>
|
||||
</EndPoints>
|
||||
<DataDisk></DataDisk>
|
||||
</VirtualMachine>
|
||||
<VirtualMachine>
|
||||
<state></state>
|
||||
<InstanceSize>Large</InstanceSize>
|
||||
<ARMInstanceSize>Standard_D2</ARMInstanceSize>
|
||||
<RoleName>slave-vm-8</RoleName>
|
||||
<EndPoints>
|
||||
<Name>SSH</Name>
|
||||
<Protocol>tcp</Protocol>
|
||||
<LocalPort>22</LocalPort>
|
||||
<PublicPort>1119</PublicPort>
|
||||
</EndPoints>
|
||||
<DataDisk></DataDisk>
|
||||
</VirtualMachine>
|
||||
<VirtualMachine>
|
||||
<state></state>
|
||||
<InstanceSize>Large</InstanceSize>
|
||||
<ARMInstanceSize>Standard_D2</ARMInstanceSize>
|
||||
<RoleName>slave-vm-9</RoleName>
|
||||
<EndPoints>
|
||||
<Name>SSH</Name>
|
||||
<Protocol>tcp</Protocol>
|
||||
<LocalPort>22</LocalPort>
|
||||
<PublicPort>11110</PublicPort>
|
||||
</EndPoints>
|
||||
<DataDisk></DataDisk>
|
||||
</VirtualMachine>
|
||||
<VirtualMachine>
|
||||
<state></state>
|
||||
<InstanceSize>Large</InstanceSize>
|
||||
<ARMInstanceSize>Standard_D2</ARMInstanceSize>
|
||||
<RoleName>slave-vm-10</RoleName>
|
||||
<EndPoints>
|
||||
<Name>SSH</Name>
|
||||
<Protocol>tcp</Protocol>
|
||||
<LocalPort>22</LocalPort>
|
||||
<PublicPort>11111</PublicPort>
|
||||
</EndPoints>
|
||||
<DataDisk></DataDisk>
|
||||
</VirtualMachine>
|
||||
<VirtualMachine>
|
||||
<state></state>
|
||||
<InstanceSize>Large</InstanceSize>
|
||||
<ARMInstanceSize>Standard_D2</ARMInstanceSize>
|
||||
<RoleName>slave-vm-11</RoleName>
|
||||
<EndPoints>
|
||||
<Name>SSH</Name>
|
||||
<Protocol>tcp</Protocol>
|
||||
<LocalPort>22</LocalPort>
|
||||
<PublicPort>11112</PublicPort>
|
||||
</EndPoints>
|
||||
<DataDisk></DataDisk>
|
||||
</VirtualMachine>
|
||||
<VirtualMachine>
|
||||
<state></state>
|
||||
<InstanceSize>Large</InstanceSize>
|
||||
<ARMInstanceSize>Standard_D2</ARMInstanceSize>
|
||||
<RoleName>slave-vm-12</RoleName>
|
||||
<EndPoints>
|
||||
<Name>SSH</Name>
|
||||
<Protocol>tcp</Protocol>
|
||||
<LocalPort>22</LocalPort>
|
||||
<PublicPort>11113</PublicPort>
|
||||
</EndPoints>
|
||||
<DataDisk></DataDisk>
|
||||
</VirtualMachine>
|
||||
<VirtualMachine>
|
||||
<state></state>
|
||||
<InstanceSize>Large</InstanceSize>
|
||||
<ARMInstanceSize>Standard_D2</ARMInstanceSize>
|
||||
<RoleName>slave-vm-13</RoleName>
|
||||
<EndPoints>
|
||||
<Name>SSH</Name>
|
||||
<Protocol>tcp</Protocol>
|
||||
<LocalPort>22</LocalPort>
|
||||
<PublicPort>11114</PublicPort>
|
||||
</EndPoints>
|
||||
<DataDisk></DataDisk>
|
||||
</VirtualMachine>
|
||||
<VirtualMachine>
|
||||
<state></state>
|
||||
<InstanceSize>Large</InstanceSize>
|
||||
<ARMInstanceSize>Standard_D2</ARMInstanceSize>
|
||||
<RoleName>slave-vm-14</RoleName>
|
||||
<EndPoints>
|
||||
<Name>SSH</Name>
|
||||
<Protocol>tcp</Protocol>
|
||||
<LocalPort>22</LocalPort>
|
||||
<PublicPort>11115</PublicPort>
|
||||
</EndPoints>
|
||||
<DataDisk></DataDisk>
|
||||
</VirtualMachine>
|
||||
<VirtualMachine>
|
||||
<state></state>
|
||||
<InstanceSize>Large</InstanceSize>
|
||||
<ARMInstanceSize>Standard_D2</ARMInstanceSize>
|
||||
<RoleName>slave-vm-15</RoleName>
|
||||
<EndPoints>
|
||||
<Name>SSH</Name>
|
||||
<Protocol>tcp</Protocol>
|
||||
<LocalPort>22</LocalPort>
|
||||
<PublicPort>11116</PublicPort>
|
||||
</EndPoints>
|
||||
<DataDisk></DataDisk>
|
||||
</VirtualMachine>
|
||||
</HostedService>
|
||||
</M1S15>
|
||||
<M1D1S1>
|
||||
<isDeployed>NO</isDeployed>
|
||||
<HostedService>
|
||||
<VirtualMachine>
|
||||
<state></state>
|
||||
<InstanceSize>Standard_GS5</InstanceSize>
|
||||
<ARMInstanceSize>Standard_GS5</ARMInstanceSize>
|
||||
<RoleName>server-vm</RoleName>
|
||||
<EndPoints>
|
||||
<Name>SSH</Name>
|
||||
<Protocol>tcp</Protocol>
|
||||
<LocalPort>22</LocalPort>
|
||||
<PublicPort>1111</PublicPort>
|
||||
</EndPoints>
|
||||
<DataDisk>
|
||||
<LUN>0</LUN>
|
||||
<DiskSizeInGB>1023</DiskSizeInGB>
|
||||
<HostCaching>None</HostCaching>
|
||||
</DataDisk>
|
||||
</VirtualMachine>
|
||||
<VirtualMachine>
|
||||
<state></state>
|
||||
<InstanceSize>Standard_GS5</InstanceSize>
|
||||
<ARMInstanceSize>Standard_GS5</ARMInstanceSize>
|
||||
<RoleName>client-vm</RoleName>
|
||||
<EndPoints>
|
||||
<Name>SSH</Name>
|
||||
<Protocol>tcp</Protocol>
|
||||
<LocalPort>22</LocalPort>
|
||||
<PublicPort>1112</PublicPort>
|
||||
</EndPoints>
|
||||
</VirtualMachine>
|
||||
</HostedService>
|
||||
</M1D1S1>
|
||||
<S2C1>
|
||||
<isDeployed>NO</isDeployed>
|
||||
<HostedService>
|
||||
<VirtualMachine>
|
||||
<state></state>
|
||||
<InstanceSize>Standard_G5</InstanceSize>
|
||||
<ARMInstanceSize>Standard_G5</ARMInstanceSize>
|
||||
<RoleName>Server1</RoleName>
|
||||
<EndPoints>
|
||||
<Name>SSH</Name>
|
||||
<Protocol>tcp</Protocol>
|
||||
<LocalPort>22</LocalPort>
|
||||
<PublicPort>1111</PublicPort>
|
||||
</EndPoints>
|
||||
<DataDisk></DataDisk>
|
||||
</VirtualMachine>
|
||||
<VirtualMachine>
|
||||
<state></state>
|
||||
<InstanceSize>Standard_G5</InstanceSize>
|
||||
<ARMInstanceSize>Standard_G5</ARMInstanceSize>
|
||||
<RoleName>Server2</RoleName>
|
||||
<EndPoints>
|
||||
<Name>SSH</Name>
|
||||
<Protocol>tcp</Protocol>
|
||||
<LocalPort>22</LocalPort>
|
||||
<PublicPort>1112</PublicPort>
|
||||
</EndPoints>
|
||||
<DataDisk></DataDisk>
|
||||
</VirtualMachine>
|
||||
<VirtualMachine>
|
||||
<state></state>
|
||||
<InstanceSize>Standard_G5</InstanceSize>
|
||||
<ARMInstanceSize>Standard_G5</ARMInstanceSize>
|
||||
<RoleName>Client</RoleName>
|
||||
<EndPoints>
|
||||
<Name>SSH</Name>
|
||||
<Protocol>tcp</Protocol>
|
||||
<LocalPort>22</LocalPort>
|
||||
<PublicPort>1113</PublicPort>
|
||||
</EndPoints>
|
||||
<DataDisk></DataDisk>
|
||||
</VirtualMachine>
|
||||
</HostedService>
|
||||
</S2C1>
|
||||
</TestSetup>
|
||||
|
|
|
@ -1,38 +1,2 @@
|
|||
<TestSetup>
|
||||
<SingleVM>
|
||||
<isDeployed>NO</isDeployed>
|
||||
<HostedService>
|
||||
<VirtualMachine>
|
||||
<state></state>
|
||||
<InstanceSize>Standard_DS1</InstanceSize>
|
||||
<ARMInstanceSize>Standard_DS1</ARMInstanceSize>
|
||||
<RoleName></RoleName>
|
||||
<EndPoints>
|
||||
<Name>SSH</Name>
|
||||
<Protocol>tcp</Protocol>
|
||||
<LocalPort>22</LocalPort>
|
||||
<PublicPort>22</PublicPort>
|
||||
</EndPoints>
|
||||
<DataDisk></DataDisk>
|
||||
</VirtualMachine>
|
||||
</HostedService>
|
||||
</SingleVM>
|
||||
<BVTDeployment>
|
||||
<isDeployed>NO</isDeployed>
|
||||
<HostedService>
|
||||
<VirtualMachine>
|
||||
<state></state>
|
||||
<InstanceSize>Standard_DS1</InstanceSize>
|
||||
<ARMInstanceSize>Standard_DS1</ARMInstanceSize>
|
||||
<RoleName></RoleName>
|
||||
<EndPoints>
|
||||
<Name>SSH</Name>
|
||||
<Protocol>tcp</Protocol>
|
||||
<LocalPort>22</LocalPort>
|
||||
<PublicPort>22</PublicPort>
|
||||
</EndPoints>
|
||||
<DataDisk></DataDisk>
|
||||
</VirtualMachine>
|
||||
</HostedService>
|
||||
</BVTDeployment>
|
||||
</TestSetup>
|
||||
|
|
|
@ -1,29 +1,2 @@
|
|||
<TestSetup>
|
||||
<M1S1>
|
||||
<isDeployed>NO</isDeployed>
|
||||
<HostedService>
|
||||
<VirtualMachine>
|
||||
<ARMInstanceSize>Standard_D15_v2</ARMInstanceSize>
|
||||
<RoleName>server-vm</RoleName>
|
||||
<EndPoints>
|
||||
<Name>SSH</Name>
|
||||
<Protocol>tcp</Protocol>
|
||||
<LocalPort>22</LocalPort>
|
||||
<PublicPort>1111</PublicPort>
|
||||
</EndPoints>
|
||||
<DataDisk></DataDisk>
|
||||
</VirtualMachine>
|
||||
<VirtualMachine>
|
||||
<ARMInstanceSize>Standard_D15_v2</ARMInstanceSize>
|
||||
<RoleName>client-vm</RoleName>
|
||||
<EndPoints>
|
||||
<Name>SSH</Name>
|
||||
<Protocol>tcp</Protocol>
|
||||
<LocalPort>22</LocalPort>
|
||||
<PublicPort>1112</PublicPort>
|
||||
</EndPoints>
|
||||
<DataDisk></DataDisk>
|
||||
</VirtualMachine>
|
||||
</HostedService>
|
||||
</M1S1>
|
||||
</TestSetup>
|
||||
|
|
Загрузка…
Ссылка в новой задаче