Mirror of https://github.com/microsoft/lisa.git
Merge branch 'master' of https://github.com/iamshital/LISAv2
Commit 6acbba0eb1
@@ -9,7 +9,6 @@ try
$ExitCode = 1
#region Update All ARM Images
$tab = " "
$OutputFilePath = "."
$Location = "northeurope"
$allRMPubs = $Publishers.Split(",") | Sort-Object
$ARMImages = "Publisher Offer SKU Version`n"
@@ -56,6 +55,7 @@ try
	}
}
$ARMImages = $ARMImages.TrimEnd("`n")
LogMsg "Saving ARM images to $OutputFilePath ..."
Set-Content -Value $ARMImages -Path $OutputFilePath -Force -NoNewline
LogMsg "$OutputFilePath saved successfully."
$ExitCode = 0
@@ -1149,7 +1149,7 @@ Function RemoteCopy($uploadTo, $downloadFrom, $downloadTo, $port, $files, $usern
{
	LogMsg "Uploading $tarFileName to $username : $uploadTo, port $port using Password authentication"
	$curDir = $PWD
	$uploadStatusRandomFile = "UploadStatusFile" + (Get-Random -Maximum 9999 -Minimum 1111) + ".txt"
	$uploadStatusRandomFile = ".\Temp\UploadStatusFile" + (Get-Random -Maximum 9999 -Minimum 1111) + ".txt"
	$uploadStartTime = Get-Date
	$uploadJob = Start-Job -ScriptBlock { cd $args[0]; Write-Host $args; Set-Content -Value "1" -Path $args[6]; $username = $args[4]; $uploadTo = $args[5]; echo y | .\tools\pscp -v -pw $args[1] -q -P $args[2] $args[3] $username@${uploadTo}: ; Set-Content -Value $LASTEXITCODE -Path $args[6];} -ArgumentList $curDir,$password,$port,$tarFileName,$username,${uploadTo},$uploadStatusRandomFile
	sleep -Milliseconds 100
@@ -1234,7 +1234,7 @@ Function RemoteCopy($uploadTo, $downloadFrom, $downloadTo, $port, $files, $usern
{
	LogMsg "Uploading $testFile to $username : $uploadTo, port $port using Password authentication"
	$curDir = $PWD
	$uploadStatusRandomFile = "UploadStatusFile" + (Get-Random -Maximum 9999 -Minimum 1111) + ".txt"
	$uploadStatusRandomFile = ".\Temp\UploadStatusFile" + (Get-Random -Maximum 9999 -Minimum 1111) + ".txt"
	$uploadStartTime = Get-Date
	$uploadJob = Start-Job -ScriptBlock { cd $args[0]; Write-Host $args; Set-Content -Value "1" -Path $args[6]; $username = $args[4]; $uploadTo = $args[5]; echo y | .\tools\pscp -v -pw $args[1] -q -P $args[2] $args[3] $username@${uploadTo}: ; Set-Content -Value $LASTEXITCODE -Path $args[6];} -ArgumentList $curDir,$password,$port,$testFile,$username,${uploadTo},$uploadStatusRandomFile
	sleep -Milliseconds 100
@@ -1303,7 +1303,7 @@ Function RemoteCopy($uploadTo, $downloadFrom, $downloadTo, $port, $files, $usern
{
	LogMsg "Downloading $testFile from $username : $downloadFrom, port $port to $downloadTo using PrivateKey authentication"
	$curDir = $PWD
	$downloadStatusRandomFile = "DownloadStatusFile" + (Get-Random -Maximum 9999 -Minimum 1111) + ".txt"
	$downloadStatusRandomFile = ".\Temp\DownloadStatusFile" + (Get-Random -Maximum 9999 -Minimum 1111) + ".txt"
	$downloadStartTime = Get-Date
	$downloadJob = Start-Job -ScriptBlock { $curDir=$args[0];$sshKey=$args[1];$port=$args[2];$testFile=$args[3];$username=$args[4];${downloadFrom}=$args[5];$downloadTo=$args[6];$downloadStatusRandomFile=$args[7]; cd $curDir; Set-Content -Value "1" -Path $downloadStatusRandomFile; echo y | .\tools\pscp -i .\ssh\$sshKey -q -P $port $username@${downloadFrom}:$testFile $downloadTo; Set-Content -Value $LASTEXITCODE -Path $downloadStatusRandomFile;} -ArgumentList $curDir,$sshKey,$port,$testFile,$username,${downloadFrom},$downloadTo,$downloadStatusRandomFile
	sleep -Milliseconds 100
@@ -1330,8 +1330,8 @@ Function RemoteCopy($uploadTo, $downloadFrom, $downloadTo, $port, $files, $usern
{
	LogMsg "Downloading $testFile from $username : $downloadFrom, port $port to $downloadTo using Password authentication"
	$curDir = (Get-Item -Path ".\" -Verbose).FullName
	$downloadStatusRandomFile = "DownloadStatusFile" + (Get-Random -Maximum 9999 -Minimum 1111) + ".txt"
	Set-Content -Value "1" -Path $downloadStatusRandomFile;
	$downloadStatusRandomFile = ".\Temp\DownloadStatusFile" + (Get-Random -Maximum 9999 -Minimum 1111) + ".txt"
	Set-Content -Value "1" -Path $downloadStatusRandomFile
	$downloadStartTime = Get-Date
	$downloadJob = Start-Job -ScriptBlock {
		$curDir=$args[0];
@@ -0,0 +1,107 @@
#!/bin/bash

DISTRO=`grep -ihs "buntu\|Suse\|Fedora\|Debian\|CentOS\|Red Hat Enterprise Linux" /etc/{issue,*release,*version}`
if [[ $DISTRO =~ "SUSE Linux Enterprise Server 12" ]];
then
	mdVolume="/dev/md/mdauto0"
else
	mdVolume="/dev/md0"
fi
mountDir="/data"
raidFileSystem="ext4"

# Install required packages.
DISTRO=`grep -ihs "buntu\|Suse\|Fedora\|Debian\|CentOS\|Red Hat Enterprise Linux\|clear-linux-os" /etc/{issue,*release,*version} /usr/lib/os-release`

if [[ $DISTRO =~ "Ubuntu" ]] || [[ $DISTRO =~ "Debian" ]];
then
	echo "Detected Ubuntu/Debian. Installing required packages"
	until dpkg --force-all --configure -a; do echo 'Trying again...'; sleep 10; done
	apt-get update
	apt-get install -y mdadm
	if [ $? -ne 0 ]; then
		echo "Error: Unable to install mdadm"
		exit 1
	fi

elif [[ $DISTRO =~ "Red Hat Enterprise Linux Server release 6" ]];
then
	echo "Detected RHEL 6.x; Installing required packages"
	rpm -ivh https://dl.fedoraproject.org/pub/epel/epel-release-latest-6.noarch.rpm
	yum -y --nogpgcheck install mdadm

elif [[ $DISTRO =~ "Red Hat Enterprise Linux Server release 7" ]];
then
	echo "Detected RHEL 7.x; Installing required packages"
	rpm -ivh https://dl.fedoraproject.org/pub/epel/epel-release-latest-7.noarch.rpm
	yum -y --nogpgcheck install mdadm
	mount -t debugfs none /sys/kernel/debug

elif [[ $DISTRO =~ "CentOS Linux release 6" ]] || [[ $DISTRO =~ "CentOS release 6" ]];
then
	echo "Detected CentOS 6.x; Installing required packages"
	rpm -ivh https://dl.fedoraproject.org/pub/epel/epel-release-latest-6.noarch.rpm
	yum -y --nogpgcheck install mdadm
	mount -t debugfs none /sys/kernel/debug

elif [[ $DISTRO =~ "CentOS Linux release 7" ]];
then
	echo "Detected CentOS 7.x; Installing required packages"
	rpm -ivh https://dl.fedoraproject.org/pub/epel/epel-release-latest-7.noarch.rpm
	yum -y --nogpgcheck install mdadm

elif [[ $DISTRO =~ "SUSE Linux Enterprise Server 12" ]];
then
	echo "Detected SLES12. Installing required packages"
	zypper addrepo http://download.opensuse.org/repositories/benchmark/SLE_12_SP2_Backports/benchmark.repo
	zypper --no-gpg-checks --non-interactive --gpg-auto-import-keys refresh
	zypper --no-gpg-checks --non-interactive --gpg-auto-import-keys remove gettext-runtime-mini-0.19.2-1.103.x86_64
	zypper --no-gpg-checks --non-interactive --gpg-auto-import-keys install sysstat
	zypper --no-gpg-checks --non-interactive --gpg-auto-import-keys install grub2
	zypper --no-gpg-checks --non-interactive --gpg-auto-import-keys install mdadm
elif [[ $DISTRO =~ "clear-linux-os" ]];
then
	echo "Detected Clear Linux OS. Installing required packages"
	swupd bundle-add dev-utils-dev sysadmin-basic performance-tools os-testsuite-phoronix network-basic openssh-server dev-utils os-core os-core-dev
else
	echo "Unknown Distro"
	exit 1
fi

# Create a RAID of all available data disks.
umount /data
disks=$(ls -l /dev | grep sd[c-z]$ | awk '{print $10}')
echo "INFO: Check and remove any active RAID first"
mdvol=$(cat /proc/mdstat | grep "active raid" | awk {'print $1'})
if [ -n "$mdvol" ]; then
	echo "/dev/${mdvol} already exists... removing it first"
	umount /dev/${mdvol}
	mdadm --stop /dev/${mdvol}
	mdadm --remove /dev/${mdvol}
	mdadm --zero-superblock /dev/sd[c-z][1-5]
fi
echo "INFO: Creating partitions"
count=0
for disk in ${disks}
do
	echo "formatting disk /dev/${disk}"
	(echo d; echo n; echo p; echo 1; echo; echo; echo t; echo fd; echo w;) | fdisk /dev/${disk}
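	# The echo pipeline above feeds fdisk its interactive answers: d (delete any
	# existing partition), n/p/1 (new primary partition 1), two empty echoes
	# (accept the default first/last sectors), t/fd (set partition type fd,
	# Linux raid autodetect), w (write the partition table).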
	count=$(( $count + 1 ))
	sleep 1
done
echo "INFO: Creating RAID of ${count} devices."
sleep 1
mdadm --create ${mdVolume} --level 0 --raid-devices ${count} /dev/sd[c-z][1-5]
sleep 1
time mkfs -t $raidFileSystem -F ${mdVolume}
mkdir ${mountDir}
sleep 1
mount -o nobarrier ${mdVolume} ${mountDir}
if [ $? -ne 0 ]; then
	echo "Error: Unable to create RAID"
	exit 1
else
	echo "${mdVolume} mounted to ${mountDir} successfully."
	exit 0
fi
@@ -0,0 +1,249 @@
# #!/usr/bin/awk -f
#
# Software: JSON.awk - a practical JSON parser written in awk
# Version: 1.11
# Author: step- on github.com
# License: This software is licensed under the MIT or the Apache 2 license.
# Project home: https://github.com/step-/JSON.awk.git
# Credits: This software includes major portions of JSON.sh, a pipeable JSON
#   parser written in Bash, retrieved on 20130313
#   https://github.com/dominictarr/JSON.sh
#

BEGIN { #{{{
	# option_BRIEF(1) - parse() omits printing non-leaf nodes
	BRIEF=1;
	# option_STREAM(0) - parse() omits stdout and stores jpaths in JPATHS[]
	STREAM=1;
	# for each input file:
	#   TOKENS[], NTOKENS, ITOKENS - tokens after tokenize()
	#   JPATHS[], NJPATHS - parsed data (when STREAM=0)
	# at script exit:
	#   FAILS[] - maps names of invalid files to logged error lines
	delete FAILS

	# filepathnames from stdin
	# usage: echo -e "file1\nfile2\n" | awk -f JSON.awk
	# usage: { echo -; echo; cat; } | awk -f JSON.awk
	while (getline ARGV[++ARGC] < "/dev/stdin") {
		if (ARGV[ARGC] == "")
			break
	}
	# set file slurping mode
	srand(); RS="n/o/m/a/t/c/h" rand()
}
#}}}

{ # main loop: process each file in turn {{{
	reset() # See important application note in reset()

	tokenize($0) # while(get_token()) {print TOKEN}
	if (0 == parse()) {
		apply(JPATHS, NJPATHS)
	}
}
#}}}

END { # process invalid files {{{
	for(name in FAILS) {
		print "invalid: " name
		print FAILS[name]
	}
}
#}}}

function apply (ary, size, i) { # stub {{{
	for (i=1; i<size; i++)
		print ary[i]
}
#}}}

function get_token() { #{{{
	# usage: {tokenize($0); while(get_token()) {print TOKEN}}

	# return getline TOKEN # for external tokenizer

	TOKEN = TOKENS[++ITOKENS] # for internal tokenize()
	return ITOKENS < NTOKENS
}
#}}}

function parse_array(a1, idx,ary,ret) { #{{{
	idx=0
	ary=""
	get_token()
	#scream("parse_array(" a1 ") TOKEN=" TOKEN)
	if (TOKEN != "]") {
		while (1) {
			if (ret = parse_value(a1, idx)) {
				return ret
			}
			idx=idx+1
			ary=ary VALUE
			get_token()
			if (TOKEN == "]") {
				break
			} else if (TOKEN == ",") {
				ary = ary ","
			} else {
				report(", or ]", TOKEN ? TOKEN : "EOF")
				return 2
			}
			get_token()
		}
	}
	if (1 != BRIEF) {
		VALUE=sprintf("[%s]", ary)
	} else {
		VALUE=""
	}
	return 0
}
#}}}

function parse_object(a1, key,obj) { #{{{
	obj=""
	get_token()
	#scream("parse_object(" a1 ") TOKEN=" TOKEN)
	if (TOKEN != "}") {
		while (1) {
			if (TOKEN ~ /^".*"$/) {
				key=TOKEN
			} else {
				report("string", TOKEN ? TOKEN : "EOF")
				return 3
			}
			get_token()
			if (TOKEN != ":") {
				report(":", TOKEN ? TOKEN : "EOF")
				return 4
			}
			get_token()
			if (parse_value(a1, key)) {
				return 5
			}
			obj=obj key ":" VALUE
			get_token()
			if (TOKEN == "}") {
				break
			} else if (TOKEN == ",") {
				obj=obj ","
			} else {
				report(", or }", TOKEN ? TOKEN : "EOF")
				return 6
			}
			get_token()
		}
	}
	if (1 != BRIEF) {
		VALUE=sprintf("{%s}", obj)
	} else {
		VALUE=""
	}
	return 0
}
#}}}

function parse_value(a1, a2, jpath,ret,x) { #{{{
	jpath=(a1!="" ? a1 "," : "") a2 # "${1:+$1,}$2"
	#scream("parse_value(" a1 "," a2 ") TOKEN=" TOKEN ", jpath=" jpath)
	if (TOKEN == "{") {
		if (parse_object(jpath)) {
			return 7
		}
	} else if (TOKEN == "[") {
		if (ret = parse_array(jpath)) {
			return ret
		}
	} else if (TOKEN == "") { #test case 20150410 #4
		report("value", "EOF")
		return 9
	} else if (TOKEN ~ /^([^0-9])$/) {
		# At this point, the only valid single-character tokens are digits.
		report("value", TOKEN)
		return 9
	} else {
		VALUE=TOKEN
	}
	if (! (1 == BRIEF && ("" == jpath || "" == VALUE))) {
		x=sprintf("[%s]\t%s", jpath, VALUE)
		if(0 == STREAM) {
			JPATHS[++NJPATHS] = x
		} else {
			print x
		}
	}
	return 0
}
#}}}

function parse( ret) { #{{{
	get_token()
	if (ret = parse_value()) {
		return ret
	}
	if (get_token()) {
		report("EOF", TOKEN)
		return 11
	}
	return 0
}
#}}}

function report(expected, got, i,from,to,context) { #{{{
	from = ITOKENS - 10; if (from < 1) from = 1
	to = ITOKENS + 10; if (to > NTOKENS) to = NTOKENS
	for (i = from; i < ITOKENS; i++)
		context = context sprintf("%s ", TOKENS[i])
	context = context "<<" got ">> "
	for (i = ITOKENS + 1; i <= to; i++)
		context = context sprintf("%s ", TOKENS[i])
	scream("expected <" expected "> but got <" got "> at input token " ITOKENS "\n" context)
}
#}}}

function reset() { #{{{
	# Application Note:
	# If you need to build JPATHS[] incrementally from multiple input files:
	# 1) Comment out below: delete JPATHS; NJPATHS=0
	#    otherwise each new input file would reset JPATHS[].
	# 2) Move the call to apply() from the main loop to the END statement.
	# 3) In the main loop consider adding code that deletes partial JPATHS[]
	#    elements that would result from parsing invalid JSON files.

	TOKEN=""; delete TOKENS; NTOKENS=ITOKENS=0
	delete JPATHS; NJPATHS=0
	VALUE=""
}
#}}}
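
# A minimal sketch of the incremental variant described in the note above
# (assumes apply() has been moved to END per step 2; illustrative only):
#   function reset() { TOKEN=""; delete TOKENS; NTOKENS=ITOKENS=0; VALUE="" }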

function scream(msg) { #{{{
	FAILS[FILENAME] = FAILS[FILENAME] (FAILS[FILENAME]!="" ? "\n" : "") msg
	msg = FILENAME ": " msg
	print msg >"/dev/stderr"
}
#}}}

function tokenize(a1, pq,pb,ESCAPE,CHAR,STRING,NUMBER,KEYWORD,SPACE) { #{{{
	# usage A: {for(i=1; i<=tokenize($0); i++) print TOKENS[i]}
	# see also get_token()

	# POSIX character classes (gawk) - contact me for non-[:class:] notation
	# Replaced regex constant for string constant, see https://github.com/step-/JSON.awk/issues/1
	# ESCAPE="(\\[^u[:cntrl:]]|\\u[0-9a-fA-F]{4})"
	# CHAR="[^[:cntrl:]\\\"]"
	# STRING="\"" CHAR "*(" ESCAPE CHAR "*)*\""
	# NUMBER="-?(0|[1-9][0-9]*)([.][0-9]*)?([eE][+-]?[0-9]*)?"
	# KEYWORD="null|false|true"
	SPACE="[[:space:]]+"

	# gsub(STRING "|" NUMBER "|" KEYWORD "|" SPACE "|.", "\n&", a1)
	gsub(/\"[^[:cntrl:]\"\\]*((\\[^u[:cntrl:]]|\\u[0-9a-fA-F]{4})[^[:cntrl:]\"\\]*)*\"|-?(0|[1-9][0-9]*)([.][0-9]*)?([eE][+-]?[0-9]*)?|null|false|true|[[:space:]]+|./, "\n&", a1)
	gsub("\n" SPACE, "\n", a1)
	sub(/^\n/, "", a1)
	ITOKENS=0 # get_token() helper
	return NTOKENS = split(a1, TOKENS, /\n/)
}
#}}}

# vim:fdm=marker:
@@ -0,0 +1,70 @@
csv_file=perf_fio.csv
csv_file_tmp=output_tmp.csv
echo $file_name
echo $csv_file
rm -rf $csv_file
echo "Iteration,TestType,BlockSize,Threads,Jobs,TotalIOPS,ReadIOPS,MaxOfReadMeanLatency,ReadMaxLatency,ReadBw,WriteIOPS,MaxOfWriteMeanLatency,WriteMaxLatency,WriteBw" > $csv_file_tmp

json_list=(`ls *.json`)
count=0
while [ "x${json_list[$count]}" != "x" ]
do
	file_name=${json_list[$count]}
	Iteration=`echo -e $file_name |gawk -f JSON.awk|grep '"jobname"'| tail -1| sed 's/.*]//'| sed 's/[[:blank:]]//g'| sed 's/"iteration\(.*\)"/\1/'`
	Jobs=`echo -e $file_name |awk -f JSON.awk|grep '"jobname"'| wc -l`
	ReadIOPS=`echo -e $file_name |awk -f JSON.awk|grep '"read","iops"'| sed 's/.*]//' | paste -sd+ - | bc`
	MaxOfReadMeanLatency=`echo -e $file_name |awk -f JSON.awk|grep '"read","lat","mean"'| sed 's/.*]//'| sed 's/[[:blank:]]//g'|sort -g|tail -1`
	ReadMaxLatency=`echo -e $file_name |awk -f JSON.awk|grep '"read","lat","max"'| sed 's/.*]//'| sed 's/[[:blank:]]//g'|sort -g|tail -1`
	ReadBw=`echo -e $file_name |awk -f JSON.awk|grep '"read","bw"'| sed 's/.*]//'| sed 's/[[:blank:]]//g'| paste -sd+ - | bc`
	WriteIOPS=`echo -e $file_name |awk -f JSON.awk|grep '"write","iops"'| sed 's/.*]//' | paste -sd+ - | bc`
	MaxOfWriteMeanLatency=`echo -e $file_name |awk -f JSON.awk|grep '"write","lat","mean"'| sed 's/.*]//'| sed 's/[[:blank:]]//g'|sort -g|tail -1`
	WriteMaxLatency=`echo -e $file_name |awk -f JSON.awk|grep '"write","lat","max"'| sed 's/.*]//'| sed 's/[[:blank:]]//g'|sort -g|tail -1`
	WriteBw=`echo -e $file_name |awk -f JSON.awk|grep '"write","bw"'| sed 's/.*]//'| sed 's/[[:blank:]]//g'| paste -sd+ - | bc`
	IFS='-' read -r -a array <<< "$file_name"
	TestType=${array[2]}
	BlockSize=${array[3]}
	Threads=`echo "${array[4]}"| sed "s/td\.json//"`
	TotalIOPS=`echo $ReadIOPS $WriteIOPS | awk '{printf "%d\n", $1+$2}'`
	echo "$Iteration,$TestType,$BlockSize,$Threads,$Jobs,$TotalIOPS,$ReadIOPS,$MaxOfReadMeanLatency,$ReadMaxLatency,$ReadBw,$WriteIOPS,$MaxOfWriteMeanLatency,$WriteMaxLatency,$WriteBw" >> $csv_file_tmp
	((count++))
done

echo ",Max IOPS of each mode," >> $csv_file
echo ",Test Mode,Max IOPS (BSize-iodepth)," >> $csv_file
modes='randread randwrite read write'
for testmode in $modes
do
	max_iops=`cat $csv_file_tmp | grep ",$testmode" | awk '{split($0,arr,","); print arr[6]}'| sort -g|tail -1`
	max_bs=`cat $csv_file_tmp | grep ",$testmode"| grep ",$max_iops" | awk '{split($0,arr,","); print arr[3]}'`
	max_iodepth=`cat $csv_file_tmp | grep ",$testmode"| grep ",$max_iops" | awk '{split($0,arr,","); print arr[4]}'`
	if [ "x$max_iops" != "x" ]
	then
		echo ",$testmode,$max_iops ($max_bs-$max_iodepth)," >> $csv_file
	fi
done

echo "" >> $csv_file
echo ",Max IOPS of each BlockSize," >> $csv_file
modes='randread randwrite read write'
block_sizes='1K 2K 4K 8K 16K 32K 64K 128K 256K 512K 1024K 2048K'
echo ",Test Mode,Block Size,iodepth,Max IOPS (BSize-iodepth)," >> $csv_file
for testmode in $modes
do
	for block in $block_sizes
	do
		max_iops=`cat $csv_file_tmp | grep ",$testmode" | grep ",$block" | awk '{split($0,arr,","); print arr[6]}'| sort -g|tail -1`

		max_bs=`cat $csv_file_tmp | grep ",$testmode"| grep ",$block"| grep ",$max_iops" | awk '{split($0,arr,","); print arr[3]}'`
		max_iodepth=`cat $csv_file_tmp | grep ",$testmode"| grep ",$block"| grep ",$max_iops" | awk '{split($0,arr,","); print arr[4]}'`

		if [ "x$max_iops" != "x" ]
		then
			echo ",$testmode,$block,$max_iodepth,$max_iops ($max_bs-$max_iodepth)," >> $csv_file
		fi
	done
done
echo "" >> $csv_file
cat $csv_file_tmp >> $csv_file
rm -rf $csv_file_tmp
echo "Parsing completed!"
exit 0
@@ -0,0 +1,255 @@
#!/bin/bash
#
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
#
# Sample script to run FIO.
# In this script, we benchmark device IO performance on a mounted folder.
# You can easily adapt this script to other situations, such as striped disks in RAID0.
# The only thing to keep in mind is that each different configuration you're testing
# must log its output to a different directory.
#

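# e.g. a hypothetical RAID0 variant of this script would point LOGDIR at its own
# directory, such as "${HOMEDIR}/FIOLog-raid0", so results from different setups never mix.
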
HOMEDIR="/root"
LogMsg()
{
	echo "[$(date +"%x %r %Z")] ${1}"
	echo "[$(date +"%x %r %Z")] ${1}" >> "${HOMEDIR}/runlog.txt"
}

CONSTANTS_FILE="$HOMEDIR/constants.sh"
ICA_TESTRUNNING="TestRunning"      # The test is running
ICA_TESTCOMPLETED="TestCompleted"  # The test completed successfully
ICA_TESTABORTED="TestAborted"      # Error during the setup of the test
ICA_TESTFAILED="TestFailed"        # Error occurred during the test
touch ./fioTest.log

UpdateTestState()
{
	echo "${1}" > $HOMEDIR/state.txt
}

if [ -e ${CONSTANTS_FILE} ]; then
	. ${CONSTANTS_FILE}
else
	errMsg="Error: missing ${CONSTANTS_FILE} file"
	LogMsg "${errMsg}"
	UpdateTestState $ICA_TESTABORTED
	exit 10
fi

InstallFIO()
{
	DISTRO=`grep -ihs "buntu\|Suse\|Fedora\|Debian\|CentOS\|Red Hat Enterprise Linux\|clear-linux-os" /etc/{issue,*release,*version} /usr/lib/os-release`

	if [[ $DISTRO =~ "Ubuntu" ]] || [[ $DISTRO =~ "Debian" ]];
	then
		LogMsg "Detected Ubuntu/Debian. Installing required packages"
		until dpkg --force-all --configure -a; do echo 'Trying again...'; sleep 10; done
		apt-get update
		apt-get install -y pciutils gawk mdadm
		apt-get install -y wget sysstat blktrace bc fio nfs-common
		if [ $? -ne 0 ]; then
			LogMsg "Error: Unable to install fio"
			exit 1
		fi
		mount -t debugfs none /sys/kernel/debug

	elif [[ $DISTRO =~ "Red Hat Enterprise Linux Server release 6" ]];
	then
		LogMsg "Detected RHEL 6.x; Installing required packages"
		rpm -ivh https://dl.fedoraproject.org/pub/epel/epel-release-latest-6.noarch.rpm
		yum -y --nogpgcheck install wget sysstat mdadm blktrace libaio fio nfs-common
		mount -t debugfs none /sys/kernel/debug

	elif [[ $DISTRO =~ "Red Hat Enterprise Linux Server release 7" ]];
	then
		LogMsg "Detected RHEL 7.x; Installing required packages"
		rpm -ivh https://dl.fedoraproject.org/pub/epel/epel-release-latest-7.noarch.rpm
		yum -y --nogpgcheck install wget sysstat mdadm blktrace libaio fio nfs-common
		mount -t debugfs none /sys/kernel/debug

	elif [[ $DISTRO =~ "CentOS Linux release 6" ]] || [[ $DISTRO =~ "CentOS release 6" ]];
	then
		LogMsg "Detected CentOS 6.x; Installing required packages"
		rpm -ivh https://dl.fedoraproject.org/pub/epel/epel-release-latest-6.noarch.rpm
		yum -y --nogpgcheck install wget sysstat mdadm blktrace libaio fio nfs-common
		mount -t debugfs none /sys/kernel/debug

	elif [[ $DISTRO =~ "CentOS Linux release 7" ]];
	then
		LogMsg "Detected CentOS 7.x; Installing required packages"
		rpm -ivh https://dl.fedoraproject.org/pub/epel/epel-release-latest-7.noarch.rpm
		yum -y --nogpgcheck install wget sysstat mdadm blktrace libaio fio nfs-common
		mount -t debugfs none /sys/kernel/debug

	elif [[ $DISTRO =~ "SUSE Linux Enterprise Server 12" ]];
	then
		LogMsg "Detected SLES12. Installing required packages"
		zypper addrepo http://download.opensuse.org/repositories/benchmark/SLE_12_SP2_Backports/benchmark.repo
		zypper --no-gpg-checks --non-interactive --gpg-auto-import-keys refresh
		zypper --no-gpg-checks --non-interactive --gpg-auto-import-keys remove gettext-runtime-mini-0.19.2-1.103.x86_64
		zypper --no-gpg-checks --non-interactive --gpg-auto-import-keys install sysstat
		zypper --no-gpg-checks --non-interactive --gpg-auto-import-keys install grub2
		zypper --no-gpg-checks --non-interactive --gpg-auto-import-keys install wget mdadm blktrace libaio1 fio nfs-common
	elif [[ $DISTRO =~ "clear-linux-os" ]];
	then
		LogMsg "Detected Clear Linux OS. Installing required packages"
		swupd bundle-add dev-utils-dev sysadmin-basic performance-tools os-testsuite-phoronix network-basic openssh-server dev-utils os-core os-core-dev

	else
		LogMsg "Unknown Distro"
		UpdateTestState "TestAborted"
		UpdateSummary "Unknown Distro, test aborted"
		return 1
	fi
}

RunFIO()
{
	UpdateTestState $ICA_TESTRUNNING
	FILEIO="--size=${fileSize} --direct=1 --ioengine=libaio --filename=fiodata --overwrite=1 "

	####################################
	# All run config is set here
	#

	# Log config

	mkdir $HOMEDIR/FIOLog/jsonLog
	mkdir $HOMEDIR/FIOLog/iostatLog
	mkdir $HOMEDIR/FIOLog/blktraceLog

	# LOGDIR="${HOMEDIR}/FIOLog"
	JSONFILELOG="${LOGDIR}/jsonLog"
	IOSTATLOGDIR="${LOGDIR}/iostatLog"
	BLKTRACELOGDIR="${LOGDIR}/blktraceLog"
	LOGFILE="${LOGDIR}/fio-test.log.txt"

	# Redirect blktrace files to the resource-disk directory
	Resource_mount=$(mount -l | grep /sdb1 | awk '{print$3}')
	blk_base="${Resource_mount}/blk-$(date +"%m%d%Y-%H%M%S")"
	mkdir $blk_base
	io_increment=128

	####################################
	echo "Test log created at: ${LOGFILE}"
	echo "===================================== Starting Run $(date +"%x %r %Z") ================================"
	echo "===================================== Starting Run $(date +"%x %r %Z") script generated 2/9/2015 4:24:44 PM ================================" >> $LOGFILE

	chmod 666 $LOGFILE
	echo "Preparing Files: $FILEIO"
	echo "Preparing Files: $FILEIO" >> $LOGFILE
	LogMsg "Preparing Files: $FILEIO"
	# Remove any old files from prior runs (to be safe), then prepare a set of new files.
	rm fiodata
	echo "--- Kernel Version Information ---" >> $LOGFILE
	uname -a >> $LOGFILE
	cat /proc/version >> $LOGFILE
	cat /etc/*-release >> $LOGFILE
	echo "--- PCI Bus Information ---" >> $LOGFILE
	lspci >> $LOGFILE
	echo "--- Drive Mounting Information ---" >> $LOGFILE
	mount >> $LOGFILE
	echo "--- Disk Usage Before Generating New Files ---" >> $LOGFILE
	df -h >> $LOGFILE
	fio --cpuclock-test >> $LOGFILE
	fio $FILEIO --readwrite=read --bs=1M --runtime=1 --iodepth=128 --numjobs=8 --name=prepare
	echo "--- Disk Usage After Generating New Files ---" >> $LOGFILE
	df -h >> $LOGFILE
	echo "=== End Preparation $(date +"%x %r %Z") ===" >> $LOGFILE
	LogMsg "Preparing Files: $FILEIO: Finished."
	####################################
	# Trigger the run from here
	for testmode in $modes; do
		io=$startIO
		while [ $io -le $maxIO ]
		do
			Thread=$startThread
			while [ $Thread -le $maxThread ]
			do
				if [ $Thread -ge 8 ]
				then
					numjobs=8
				else
					numjobs=$Thread
				fi
				iostatfilename="${IOSTATLOGDIR}/iostat-fio-${testmode}-${io}K-${Thread}td.txt"
				nohup iostat -x 5 -t -y > $iostatfilename &
				echo "-- iteration ${iteration} ----------------------------- ${testmode} test, ${io}K bs, ${Thread} threads, ${numjobs} jobs, 5 minutes ------------------ $(date +"%x %r %Z") ---" >> $LOGFILE
				LogMsg "Running ${testmode} test, ${io}K bs, ${Thread} threads ..."
				jsonfilename="${JSONFILELOG}/fio-result-${testmode}-${io}K-${Thread}td.json"
				fio $FILEIO --readwrite=$testmode --bs=${io}K --runtime=$ioruntime --iodepth=$Thread --numjobs=$numjobs --output-format=json --output=$jsonfilename --name="iteration"${iteration} >> $LOGFILE
				iostatPID=`ps -ef | awk '/iostat/ && !/awk/ { print $2 }'`
				kill -9 $iostatPID
				Thread=$(( Thread*2 ))
				iteration=$(( iteration+1 ))
			done
			io=$(( io * io_increment ))
		done
	done
	####################################
	echo "===================================== Completed Run $(date +"%x %r %Z") script generated 2/9/2015 4:24:44 PM ================================" >> $LOGFILE
	rm fiodata

	compressedFileName="${HOMEDIR}/FIOTest-$(date +"%m%d%Y-%H%M%S").tar.gz"
	LogMsg "INFO: Please wait... Compressing all results to ${compressedFileName}..."
	tar -cvzf $compressedFileName $LOGDIR/

	echo "Test logs are located at ${LOGDIR}"
	UpdateTestState $ICA_TESTCOMPLETED
}

############################################################
# Main body
############################################################

# Create the RAID on the NFS server before triggering the test
scp /root/CreateRaid.sh root@nfs-server-vm:
ssh root@nfs-server-vm "chmod +x /root/CreateRaid.sh"
ssh root@nfs-server-vm "/root/CreateRaid.sh"

if [ $? -eq 0 ]; then
	HOMEDIR=$HOME
	mv $HOMEDIR/FIOLog/ $HOMEDIR/FIOLog-$(date +"%m%d%Y-%H%M%S")/
	mkdir $HOMEDIR/FIOLog
	LOGDIR="${HOMEDIR}/FIOLog"
	DISTRO=`grep -ihs "buntu\|Suse\|Fedora\|Debian\|CentOS\|Red Hat Enterprise Linux" /etc/{issue,*release,*version}`
	if [[ $DISTRO =~ "SUSE Linux Enterprise Server 12" ]];
	then
		mdVolume="/dev/md/mdauto0"
	else
		mdVolume="/dev/md0"
	fi
	mountDir="/data"
	cd ${HOMEDIR}
	InstallFIO

	# Start the NFS server
	ssh root@nfs-server-vm "apt update"
	ssh root@nfs-server-vm "apt install -y nfs-kernel-server"
	ssh root@nfs-server-vm "echo '/data nfs-client-vm(rw,sync,no_root_squash)' >> /etc/exports"
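	# Export options: rw grants read-write access, sync commits writes to disk
	# before replying, and no_root_squash lets the client's root act as root on
	# the share (needed because fio runs as root on the client).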
ssh root@nfs-server-vm "service nfs-kernel-server restart"
|
||||
#Mount NFS Directory.
|
||||
mkdir -p ${mountDir}
|
||||
mount -t nfs -o proto=${nfsprotocol},vers=3 nfs-server-vm:${mountDir} ${mountDir}
|
||||
if [ $? -eq 0 ]; then
|
||||
LogMsg "*********INFO: Starting test execution*********"
|
||||
cd ${mountDir}
|
||||
mkdir sampleDIR
|
||||
RunFIO
|
||||
LogMsg "*********INFO: Script execution reach END. Completed !!!*********"
|
||||
else
|
||||
LogMsg "Failed to mount NSF directory."
|
||||
fi
|
||||
#Run test from here
|
||||
|
||||
else
|
||||
LogMsg "Error: Unable to Create RAID on NSF server"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
|
|
@@ -8,9 +8,25 @@ if ($isDeployed)
{
	ProvisionVMsForLisa -allVMData $allVMData -installPackagesOnRoleNames "none"
	RemoteCopy -uploadTo $allVMData.PublicIP -port $allVMData.SSHPort -files $currentTestData.files -username "root" -password $password -upload

	$constantsFile = ".\Temp\xfstests-config.config"
	LogMsg "Generating $constantsFile ..."
	Set-Content -Value "" -Path $constantsFile -NoNewline
	foreach ( $param in $currentTestData.TestParameters.param)
	{
		if ( $param -imatch "FSTYP=" )
		{
			$TestFileSystem = ($param.Replace("FSTYP=",""))
			Add-Content -Value "[$TestFileSystem]" -Path $constantsFile
			LogMsg "[$TestFileSystem] added to $constantsFile"
		}
		Add-Content -Value "$param" -Path $constantsFile
		LogMsg "$param added to $constantsFile"
	}
	LogMsg "$constantsFile created successfully..."
	RemoteCopy -uploadTo $allVMData.PublicIP -port $allVMData.SSHPort -files $constantsFile -username "root" -password $password -upload

	$out = RunLinuxCmd -ip $allVMData.PublicIP -port $allVMData.SSHPort -username "root" -password $password -command "chmod +x *.sh"
	$TestFileSystem = $currentTestData.TestParameters.param.Replace("TestFileSystem=","")
	$testJob = RunLinuxCmd -ip $allVMData.PublicIP -port $allVMData.SSHPort -username "root" -password $password -command "/root/perf_xfstesting.sh -TestFileSystem $TestFileSystem" -RunInBackground
	#endregion
@@ -0,0 +1,282 @@
$result = ""
$testResult = ""
$resultArr = @()

$isDeployed = DeployVMS -setupType $currentTestData.setupType -Distro $Distro -xmlConfig $xmlConfig
if ($isDeployed)
{
	try
	{
		$noClient = $true
		$noServer = $true
		foreach ( $vmData in $allVMData )
		{
			if ( $vmData.RoleName -imatch "client" )
			{
				$clientVMData = $vmData
				$noClient = $false
			}
			elseif ( $vmData.RoleName -imatch "server" )
			{
				$noServer = $false
				$serverVMData = $vmData
			}
		}
		if ( $noClient )
		{
			Throw "No client VM defined. Make sure the client VM role name matches the pattern `"*client*`". Aborting test."
		}
		if ( $noServer )
		{
			Throw "No server VM defined. Make sure the server VM role name matches the pattern `"*server*`". Aborting test."
		}
		#region CONFIGURE VMs FOR TEST
		LogMsg "NFS Client details :"
		LogMsg "  RoleName : $($clientVMData.RoleName)"
		LogMsg "  Public IP : $($clientVMData.PublicIP)"
		LogMsg "  SSH Port : $($clientVMData.SSHPort)"
		LogMsg "NFS Server details :"
		LogMsg "  RoleName : $($serverVMData.RoleName)"
		LogMsg "  Public IP : $($serverVMData.PublicIP)"
		LogMsg "  SSH Port : $($serverVMData.SSHPort)"

		$testVMData = $clientVMData

		ProvisionVMsForLisa -allVMData $allVMData -installPackagesOnRoleNames "none"

		LogMsg "Generating constants.sh ..."
		$constantsFile = "$LogDir\constants.sh"
		Set-Content -Value "#Generated by Azure Automation." -Path $constantsFile
		foreach ( $param in $currentTestData.TestParameters.param)
		{
			Add-Content -Value "$param" -Path $constantsFile
			LogMsg "$param added to constants.sh"
			if ( $param -imatch "startThread" )
			{
				$startThread = [int]($param.Replace("startThread=",""))
			}
			if ( $param -imatch "maxThread" )
			{
				$maxThread = [int]($param.Replace("maxThread=",""))
			}
		}
		LogMsg "constants.sh created successfully..."
		#endregion

		#region EXECUTE TEST
		$myString = @"
chmod +x perf_fio_nfs.sh
./perf_fio_nfs.sh &> fioConsoleLogs.txt
. azuremodules.sh
collect_VM_properties
"@

		$myString2 = @"
chmod +x *.sh
cp fio_jason_parser.sh gawk JSON.awk /root/FIOLog/jsonLog/
cd /root/FIOLog/jsonLog/
./fio_jason_parser.sh
cp perf_fio.csv /root
chmod 666 /root/perf_fio.csv
"@
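		# StartFioTest.sh runs the benchmark on the client and collects VM
		# properties; ParseFioTestLogs.sh post-processes the per-run JSON
		# results into perf_fio.csv for download.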
Set-Content "$LogDir\StartFioTest.sh" $myString
|
||||
Set-Content "$LogDir\ParseFioTestLogs.sh" $myString2
|
||||
RemoteCopy -uploadTo $testVMData.PublicIP -port $testVMData.SSHPort -files $currentTestData.files -username "root" -password $password -upload
|
||||
|
||||
RemoteCopy -uploadTo $testVMData.PublicIP -port $testVMData.SSHPort -files ".\$constantsFile,.\$LogDir\StartFioTest.sh,.\$LogDir\ParseFioTestLogs.sh" -username "root" -password $password -upload
|
||||
|
||||
$out = RunLinuxCmd -ip $testVMData.PublicIP -port $testVMData.SSHPort -username "root" -password $password -command "chmod +x *.sh" -runAsSudo
|
||||
$testJob = RunLinuxCmd -ip $testVMData.PublicIP -port $testVMData.SSHPort -username "root" -password $password -command "./StartFioTest.sh" -RunInBackground -runAsSudo
|
||||
|
||||
#endregion
|
||||
|
||||
#region MONITOR TEST
|
||||
while ( (Get-Job -Id $testJob).State -eq "Running" )
|
||||
{
|
||||
$currentStatus = RunLinuxCmd -ip $testVMData.PublicIP -port $testVMData.SSHPort -username "root" -password $password -command "tail -1 runlog.txt"-runAsSudo
|
||||
LogMsg "Current Test Staus : $currentStatus"
|
||||
WaitFor -seconds 20
|
||||
}
|
||||
|
||||
$finalStatus = RunLinuxCmd -ip $testVMData.PublicIP -port $testVMData.SSHPort -username "root" -password $password -command "cat state.txt"
|
||||
RemoteCopy -downloadFrom $testVMData.PublicIP -port $testVMData.SSHPort -username "root" -password $password -download -downloadTo $LogDir -files "FIOTest-*.tar.gz"
|
||||
RemoteCopy -downloadFrom $testVMData.PublicIP -port $testVMData.SSHPort -username "root" -password $password -download -downloadTo $LogDir -files "VM_properties.csv"
|
||||
|
||||
$testSummary = $null
|
||||
|
||||
#endregion
|
||||
#>
|
||||
$finalStatus = "TestCompleted"
|
||||
if ( $finalStatus -imatch "TestFailed")
|
||||
{
|
||||
LogErr "Test failed. Last known status : $currentStatus."
|
||||
$testResult = "FAIL"
|
||||
}
|
||||
elseif ( $finalStatus -imatch "TestAborted")
|
||||
{
|
||||
LogErr "Test Aborted. Last known status : $currentStatus."
|
||||
$testResult = "ABORTED"
|
||||
}
|
||||
elseif ( $finalStatus -imatch "TestCompleted")
|
||||
{
|
||||
$out = RunLinuxCmd -ip $testVMData.PublicIP -port $testVMData.SSHPort -username "root" -password $password -command "/root/ParseFioTestLogs.sh"
|
||||
RemoteCopy -downloadFrom $testVMData.PublicIP -port $testVMData.SSHPort -username "root" -password $password -download -downloadTo $LogDir -files "perf_fio.csv"
|
||||
LogMsg "Test Completed."
|
||||
$testResult = "PASS"
|
||||
}
|
||||
elseif ( $finalStatus -imatch "TestRunning")
|
||||
{
|
||||
LogMsg "Powershell backgroud job for test is completed but VM is reporting that test is still running. Please check $LogDir\zkConsoleLogs.txt"
|
||||
LogMsg "Contests of summary.log : $testSummary"
|
||||
$testResult = "PASS"
|
||||
}
|
||||
LogMsg "Test result : $testResult"
|
||||
LogMsg "Test Completed"
|
||||
$resultSummary += CreateResultSummary -testResult $testResult -metaData "" -checkValues "PASS,FAIL,ABORTED" -testName $currentTestData.testName
|
||||
|
||||
try
|
||||
{
|
||||
foreach($line in (Get-Content "$LogDir\perf_fio.csv"))
|
||||
{
|
||||
if ( $line -imatch "Max IOPS of each mode" )
|
||||
{
|
||||
$maxIOPSforMode = $true
|
||||
$maxIOPSforBlockSize = $false
|
||||
$fioData = $false
|
||||
}
|
||||
if ( $line -imatch "Max IOPS of each BlockSize" )
|
||||
{
|
||||
$maxIOPSforMode = $false
|
||||
$maxIOPSforBlockSize = $true
|
||||
$fioData = $false
|
||||
}
|
||||
if ( $line -imatch "Iteration,TestType,BlockSize" )
|
||||
{
|
||||
$maxIOPSforMode = $false
|
||||
$maxIOPSforBlockSize = $false
|
||||
$fioData = $true
|
||||
}
|
||||
if ( $maxIOPSforMode )
|
||||
{
|
||||
Add-Content -Value $line -Path $LogDir\maxIOPSforMode.csv
|
||||
}
|
||||
if ( $maxIOPSforBlockSize )
|
||||
{
|
||||
Add-Content -Value $line -Path $LogDir\maxIOPSforBlockSize.csv
|
||||
}
|
||||
if ( $fioData )
|
||||
{
|
||||
Add-Content -Value $line -Path $LogDir\fioData.csv
|
||||
}
|
||||
}
|
||||
$maxIOPSforModeCsv = Import-Csv -Path $LogDir\maxIOPSforMode.csv
|
||||
$maxIOPSforBlockSizeCsv = Import-Csv -Path $LogDir\maxIOPSforBlockSize.csv
|
||||
$fioDataCsv = Import-Csv -Path $LogDir\fioData.csv
|
||||
|
||||
|
||||
LogMsg "Uploading the test results.."
|
||||
$dataSource = $xmlConfig.config.Azure.database.server
|
||||
$DBuser = $xmlConfig.config.Azure.database.user
|
||||
$DBpassword = $xmlConfig.config.Azure.database.password
|
||||
$database = $xmlConfig.config.Azure.database.dbname
|
||||
$dataTableName = $xmlConfig.config.Azure.database.dbtable
|
||||
$TestCaseName = $xmlConfig.config.Azure.database.testTag
|
||||
if ($dataSource -And $DBuser -And $DBpassword -And $database -And $dataTableName)
|
||||
{
|
||||
$GuestDistro = cat "$LogDir\VM_properties.csv" | Select-String "OS type"| %{$_ -replace ",OS type,",""}
|
||||
if ( $UseAzureResourceManager )
|
||||
{
|
||||
$HostType = "Azure-ARM"
|
||||
}
|
||||
else
|
||||
{
|
||||
$HostType = "Azure"
|
||||
}
|
||||
|
||||
$HostBy = ($xmlConfig.config.Azure.General.Location).Replace('"','')
|
||||
$HostOS = cat "$LogDir\VM_properties.csv" | Select-String "Host Version"| %{$_ -replace ",Host Version,",""}
|
||||
$GuestOSType = "Linux"
|
||||
$GuestDistro = cat "$LogDir\VM_properties.csv" | Select-String "OS type"| %{$_ -replace ",OS type,",""}
|
||||
$GuestSize = $testVMData.InstanceSize
|
||||
$KernelVersion = cat "$LogDir\VM_properties.csv" | Select-String "Kernel version"| %{$_ -replace ",Kernel version,",""}
|
||||
|
||||
$connectionString = "Server=$dataSource;uid=$DBuser; pwd=$DBpassword;Database=$database;Encrypt=yes;TrustServerCertificate=no;Connection Timeout=30;"
|
||||
|
||||
$SQLQuery = "INSERT INTO $dataTableName (TestCaseName,TestDate,HostType,HostBy,HostOS,GuestOSType,GuestDistro,GuestSize,KernelVersion,DiskSetup,BlockSize_KB,QDepth,seq_read_iops,seq_read_lat_usec,rand_read_iops,rand_read_lat_usec,seq_write_iops,seq_write_lat_usec,rand_write_iops,rand_write_lat_usec) VALUES "
|
||||
|
||||
for ( $QDepth = $startThread; $QDepth -le $maxThread; $QDepth *= 2 )
|
||||
{
|
||||
$seq_read_iops = ($fioDataCsv | where { $_.TestType -eq "read" -and $_.Threads -eq "$QDepth"} | Select ReadIOPS).ReadIOPS
|
||||
$seq_read_lat_usec = ($fioDataCsv | where { $_.TestType -eq "read" -and $_.Threads -eq "$QDepth"} | Select MaxOfReadMeanLatency).MaxOfReadMeanLatency
|
||||
|
||||
$rand_read_iops = ($fioDataCsv | where { $_.TestType -eq "randread" -and $_.Threads -eq "$QDepth"} | Select ReadIOPS).ReadIOPS
|
||||
$rand_read_lat_usec = ($fioDataCsv | where { $_.TestType -eq "randread" -and $_.Threads -eq "$QDepth"} | Select MaxOfReadMeanLatency).MaxOfReadMeanLatency
|
||||
|
||||
$seq_write_iops = ($fioDataCsv | where { $_.TestType -eq "write" -and $_.Threads -eq "$QDepth"} | Select WriteIOPS).WriteIOPS
|
||||
$seq_write_lat_usec = ($fioDataCsv | where { $_.TestType -eq "write" -and $_.Threads -eq "$QDepth"} | Select MaxOfWriteMeanLatency).MaxOfWriteMeanLatency
|
||||
|
||||
$rand_write_iops = ($fioDataCsv | where { $_.TestType -eq "randwrite" -and $_.Threads -eq "$QDepth"} | Select WriteIOPS).WriteIOPS
|
||||
$rand_write_lat_usec= ($fioDataCsv | where { $_.TestType -eq "randwrite" -and $_.Threads -eq "$QDepth"} | Select MaxOfWriteMeanLatency).MaxOfWriteMeanLatency
|
||||
|
||||
$BlockSize_KB= (($fioDataCsv | where { $_.Threads -eq "$QDepth"} | Select BlockSize)[0].BlockSize).Replace("K","")
|
||||
|
||||
$SQLQuery += "('$TestCaseName','$(Get-Date -Format yyyy-MM-dd)','$HostType','$HostBy','$HostOS','$GuestOSType','$GuestDistro','$GuestSize','$KernelVersion','RAID0:12xP30','$BlockSize_KB','$QDepth','$seq_read_iops','$seq_read_lat_usec','$rand_read_iops','$rand_read_lat_usec','$seq_write_iops','$seq_write_lat_usec','$rand_write_iops','$rand_write_lat_usec'),"
|
||||
LogMsg "Collected performace data for $QDepth QDepth."
|
||||
}
|
||||
|
||||
$SQLQuery = $SQLQuery.TrimEnd(',')
|
||||
Write-Host $SQLQuery
|
||||
$connection = New-Object System.Data.SqlClient.SqlConnection
|
||||
$connection.ConnectionString = $connectionString
|
||||
$connection.Open()
|
||||
|
||||
$command = $connection.CreateCommand()
|
||||
$command.CommandText = $SQLQuery
|
||||
|
||||
$result = $command.executenonquery()
|
||||
$connection.Close()
|
||||
LogMsg "Uploading the test results done!!"
|
||||
}
|
||||
else
|
||||
{
|
||||
LogMsg "Invalid database details. Failed to upload result to database!"
|
||||
}
|
||||
|
||||
}
|
||||
catch
|
||||
{
|
||||
$ErrorMessage = $_.Exception.Message
|
||||
LogErr "EXCEPTION : $ErrorMessage"
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
catch
|
||||
{
|
||||
$ErrorMessage = $_.Exception.Message
|
||||
LogMsg "EXCEPTION : $ErrorMessage"
|
||||
}
|
||||
Finally
|
||||
{
|
||||
$metaData = "NTTTCP RESULT"
|
||||
if (!$testResult)
|
||||
{
|
||||
$testResult = "Aborted"
|
||||
}
|
||||
$resultArr += $testResult
|
||||
}
|
||||
}
|
||||
|
||||
else
|
||||
{
|
||||
$testResult = "Aborted"
|
||||
$resultArr += $testResult
|
||||
}
|
||||
|
||||
$result = GetFinalResultHeader -resultarr $resultArr
|
||||
|
||||
#Clean up the setup
|
||||
DoTestCleanUp -result $result -testName $currentTestData.testName -deployedServices $isDeployed -ResourceGroups $isDeployed
|
||||
|
||||
#Return the result and summery to the test suite script..
|
||||
return $result, $resultSummary
|
Двоичный файл не отображается.
|
@@ -2,7 +2,7 @@
	<test>
		<TestName>FILE-SYSTEM-VERIFICATION-TESTS-CIFS</TestName>
		<PowershellScript>FILE-SYSTEM-VERIFICATION-TESTS.ps1</PowershellScript>
		<files>.\Testscripts\Linux\perf_xfstesting.sh,.\Testscripts\Linux\xfstests-config.config</files>
		<files>.\Testscripts\Linux\perf_xfstesting.sh</files>
		<setupType>M1Disk1</setupType>
		<TestParameters>
			<param>TestFileSystem=cifs</param>
@@ -16,10 +16,13 @@
	<test>
		<TestName>FILE-SYSTEM-VERIFICATION-TESTS-XFS</TestName>
		<PowershellScript>FILE-SYSTEM-VERIFICATION-TESTS.ps1</PowershellScript>
		<files>.\Testscripts\Linux\perf_xfstesting.sh,.\Testscripts\Linux\xfstests-config.config</files>
		<files>.\Testscripts\Linux\perf_xfstesting.sh</files>
		<setupType>M1Disk1</setupType>
		<TestParameters>
			<param>TestFileSystem=xfs</param>
			<param>FSTYP=xfs</param>
			<param>TEST_DEV=/dev/sdc</param>
			<param>TEST_DIR=/root/xfs</param>
			<param>TEST_FS_MOUNT_OPTS='-o nobarrier'</param>
		</TestParameters>
		<Platform>Azure</Platform>
		<Category>Community</Category>
@@ -30,10 +33,13 @@
	<test>
		<TestName>FILE-SYSTEM-VERIFICATION-TESTS-EXT4</TestName>
		<PowershellScript>FILE-SYSTEM-VERIFICATION-TESTS.ps1</PowershellScript>
		<files>.\Testscripts\Linux\perf_xfstesting.sh,.\Testscripts\Linux\xfstests-config.config</files>
		<files>.\Testscripts\Linux\perf_xfstesting.sh</files>
		<setupType>M1Disk1</setupType>
		<TestParameters>
			<param>TestFileSystem=ext4</param>
			<param>FSTYP=ext4</param>
			<param>TEST_DEV=/dev/sdc</param>
			<param>TEST_DIR=/root/ext4</param>
			<param>TEST_FS_MOUNT_OPTS='-o nobarrier'</param>
		</TestParameters>
		<Platform>Azure</Platform>
		<Category>Community</Category>
@@ -153,7 +153,7 @@
			<param>modes='randread randwrite read write'</param>
			<param>startThread=1</param>
			<param>maxThread=1024</param>
			<param>startIO=4</param>
			<param>startIO=1024</param>
			<param>numjobs=1</param>
			<param>ioruntime=300</param>
			<param>maxIO=1024</param>
@@ -164,5 +164,107 @@
		<Area>Storage</Area>
		<Tags>hv_storvsc,storage</Tags>
		<TestID>Perf_Storage_002</TestID>
	</test>
	</test>
	<test>
		<testName>PERF-STORAGE-OVER-NFS-Synthetic-TCP-4K</testName>
		<testScript></testScript>
		<PowershellScript>PERF-STORAGE-OVER-NFS.ps1</PowershellScript>
		<setupType>DS14DISK12NFS</setupType>
		<SubtestValues>This-tag-will-be-removed</SubtestValues>
		<files>.\Testscripts\Linux\azuremodules.sh,.\Testscripts\Linux\fio_jason_parser.sh,.\Testscripts\Linux\JSON.awk,.\Testscripts\Linux\perf_fio_nfs.sh,.\Testscripts\Linux\CreateRaid.sh</files>
		<TestParameters>
			<param>modes='randread randwrite read write'</param>
			<param>startThread=1</param>
			<param>maxThread=1024</param>
			<param>startIO=4</param>
			<param>numjobs=1</param>
			<param>ioruntime=300</param>
			<param>maxIO=4</param>
			<param>fileSize=1023G</param>
			<param>nfsprotocol=tcp</param>
		</TestParameters>
		<Platform>Azure</Platform>
		<Category>Performance</Category>
		<Area>Storage</Area>
		<Tags>hv_storvsc,storage</Tags>
		<TestID>Perf_Storage_003</TestID>
	</test>
	<test>
		<testName>PERF-STORAGE-OVER-NFS-Synthetic-UDP-4K</testName>
		<testScript></testScript>
		<PowershellScript>PERF-STORAGE-OVER-NFS.ps1</PowershellScript>
		<setupType>DS14DISK12NFS</setupType>
		<SubtestValues>This-tag-will-be-removed</SubtestValues>
		<files>.\Testscripts\Linux\azuremodules.sh,.\Testscripts\Linux\fio_jason_parser.sh,.\Testscripts\Linux\JSON.awk,.\Testscripts\Linux\perf_fio_nfs.sh,.\Testscripts\Linux\CreateRaid.sh</files>
		<TestParameters>
			<param>modes='randread randwrite read write'</param>
			<param>startThread=1</param>
			<param>maxThread=1024</param>
			<param>startIO=4</param>
			<param>numjobs=1</param>
			<param>ioruntime=300</param>
			<param>maxIO=4</param>
			<param>fileSize=1023G</param>
			<param>nfsprotocol=udp</param>
		</TestParameters>
		<Platform>Azure</Platform>
		<Category>Performance</Category>
		<Area>Storage</Area>
		<Tags>hv_storvsc,storage</Tags>
		<TestID>Perf_Storage_003</TestID>
	</test>
	<test>
		<testName>PERF-STORAGE-OVER-NFS-SRIOV-TCP-4K</testName>
		<testScript></testScript>
		<PowershellScript>PERF-STORAGE-OVER-NFS.ps1</PowershellScript>
		<setupType>DS14DISK12NFS</setupType>
		<AdditionalHWConfig>
			<Networking>SRIOV</Networking>
		</AdditionalHWConfig>
		<SubtestValues>This-tag-will-be-removed</SubtestValues>
		<files>.\Testscripts\Linux\azuremodules.sh,.\Testscripts\Linux\fio_jason_parser.sh,.\Testscripts\Linux\JSON.awk,.\Testscripts\Linux\perf_fio_nfs.sh,.\Testscripts\Linux\CreateRaid.sh</files>
		<TestParameters>
			<param>modes='randread randwrite read write'</param>
			<param>startThread=1</param>
			<param>maxThread=1024</param>
			<param>startIO=4</param>
			<param>numjobs=1</param>
			<param>ioruntime=300</param>
			<param>maxIO=4</param>
			<param>fileSize=1023G</param>
			<param>nfsprotocol=tcp</param>
		</TestParameters>
		<Platform>Azure</Platform>
		<Category>Performance</Category>
		<Area>Storage</Area>
		<Tags>hv_storvsc,storage</Tags>
		<TestID>Perf_Storage_003</TestID>
	</test>
	<test>
		<testName>PERF-STORAGE-OVER-NFS-SRIOV-UDP-4K</testName>
		<testScript></testScript>
		<PowershellScript>PERF-STORAGE-OVER-NFS.ps1</PowershellScript>
		<setupType>DS14DISK12NFS</setupType>
		<AdditionalHWConfig>
			<Networking>SRIOV</Networking>
		</AdditionalHWConfig>
		<SubtestValues>This-tag-will-be-removed</SubtestValues>
		<files>.\Testscripts\Linux\azuremodules.sh,.\Testscripts\Linux\fio_jason_parser.sh,.\Testscripts\Linux\JSON.awk,.\Testscripts\Linux\perf_fio_nfs.sh,.\Testscripts\Linux\CreateRaid.sh</files>
		<TestParameters>
			<param>modes='randread randwrite read write'</param>
			<param>startThread=1</param>
			<param>maxThread=1024</param>
			<param>startIO=4</param>
			<param>numjobs=1</param>
			<param>ioruntime=300</param>
			<param>maxIO=4</param>
			<param>fileSize=1023G</param>
			<param>nfsprotocol=udp</param>
		</TestParameters>
		<Platform>Azure</Platform>
		<Category>Performance</Category>
		<Area>Storage</Area>
		<Tags>hv_storvsc,storage</Tags>
		<TestID>Perf_Storage_003</TestID>
	</test>
</TestCases>
@@ -314,5 +314,95 @@
			</DataDisk>
		</VirtualMachine>
	</ResourceGroup>
</DS14DISK12>
</DS14DISK12>
<DS14DISK12NFS>
	<isDeployed>NO</isDeployed>
	<ResourceGroup>
		<VirtualMachine>
			<state></state>
			<InstanceSize>Standard_DS14_v2</InstanceSize>
			<ARMInstanceSize>Standard_DS14_v2</ARMInstanceSize>
			<RoleName>nfs-server-vm</RoleName>
			<EndPoints>
				<Name>SSH</Name>
				<Protocol>tcp</Protocol>
				<LocalPort>22</LocalPort>
				<PublicPort>1110</PublicPort>
			</EndPoints>
			<DataDisk>
				<LUN>0</LUN>
				<DiskSizeInGB>1023</DiskSizeInGB>
				<HostCaching>None</HostCaching>
			</DataDisk>
			<DataDisk>
				<LUN>1</LUN>
				<DiskSizeInGB>1023</DiskSizeInGB>
				<HostCaching>None</HostCaching>
			</DataDisk>
			<DataDisk>
				<LUN>2</LUN>
				<DiskSizeInGB>1023</DiskSizeInGB>
				<HostCaching>None</HostCaching>
			</DataDisk>
			<DataDisk>
				<LUN>3</LUN>
				<DiskSizeInGB>1023</DiskSizeInGB>
				<HostCaching>None</HostCaching>
			</DataDisk>
			<DataDisk>
				<LUN>4</LUN>
				<DiskSizeInGB>1023</DiskSizeInGB>
				<HostCaching>None</HostCaching>
			</DataDisk>
			<DataDisk>
				<LUN>5</LUN>
				<DiskSizeInGB>1023</DiskSizeInGB>
				<HostCaching>None</HostCaching>
			</DataDisk>
			<DataDisk>
				<LUN>6</LUN>
				<DiskSizeInGB>1023</DiskSizeInGB>
				<HostCaching>None</HostCaching>
			</DataDisk>
			<DataDisk>
				<LUN>7</LUN>
				<DiskSizeInGB>1023</DiskSizeInGB>
				<HostCaching>None</HostCaching>
			</DataDisk>
			<DataDisk>
				<LUN>8</LUN>
				<DiskSizeInGB>1023</DiskSizeInGB>
				<HostCaching>None</HostCaching>
			</DataDisk>
			<DataDisk>
				<LUN>9</LUN>
				<DiskSizeInGB>1023</DiskSizeInGB>
				<HostCaching>None</HostCaching>
			</DataDisk>
			<DataDisk>
				<LUN>10</LUN>
				<DiskSizeInGB>1023</DiskSizeInGB>
				<HostCaching>None</HostCaching>
			</DataDisk>
			<DataDisk>
				<LUN>11</LUN>
				<DiskSizeInGB>1023</DiskSizeInGB>
				<HostCaching>None</HostCaching>
			</DataDisk>
		</VirtualMachine>
		<VirtualMachine>
			<state></state>
			<InstanceSize>Standard_DS14_v2</InstanceSize>
			<ARMInstanceSize>Standard_DS14_v2</ARMInstanceSize>
			<RoleName>nfs-client-vm</RoleName>
			<EndPoints>
				<Name>SSH</Name>
				<Protocol>tcp</Protocol>
				<LocalPort>22</LocalPort>
				<PublicPort>1112</PublicPort>
			</EndPoints>
			<DataDisk></DataDisk>
		</VirtualMachine>
	</ResourceGroup>
</DS14DISK12NFS>
</TestSetup>