diff --git a/test/lib/helpers.py b/test/lib/helpers.py
index 5be06ca2..01cd316d 100644
--- a/test/lib/helpers.py
+++ b/test/lib/helpers.py
@@ -12,16 +12,16 @@ def wait_for_op(op, timeout_sec=60):
 
     op is an AzureOperationPoller object.
     """
-    log = logging.getLogger('wait_for_op')
+    log = logging.getLogger("wait_for_op")
     time_start = time()
     while not op.done():
         op.wait(timeout=timeout_sec)
-        log.info('>> operation status: {0} ({1} sec)'.format(
+        log.info(">> operation status: {0} ({1} sec)".format(
             op.status(), int(time() - time_start)))
     result = op.result()
     if result:
-        log.info('>> operation result: {}'.format(result))
-        log.info('>> result.properties: {}'.format(result.properties))
+        log.info(">> operation result: {}".format(result))
+        log.info(">> result.properties: {}".format(result.properties))
     return result
 
 
@@ -30,8 +30,9 @@ def create_ssh_client(username, hostname, port=22, password=None):
     ssh_client = paramiko.SSHClient()
     ssh_client.load_system_host_keys()
     ssh_client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
-    ssh_client.connect(username=username, hostname=hostname, port=port,
-                       password=password)
+    ssh_client.connect(
+        username=username, hostname=hostname, port=port, password=password
+    )
     return ssh_client
 
 
@@ -43,28 +44,29 @@ def run_ssh_commands(ssh_client, commands):
 
     Raises an Exception if any command fails (i.e., non-zero exit code).
     """
-    log = logging.getLogger('run_ssh_commands')
+    log = logging.getLogger("run_ssh_commands")
     for cmd in commands:
         cmd = cmd.strip()
         if not cmd:  # do not run empty "commands"
             continue
 
-        log.debug('command to run: {}'.format(cmd))
+        log.debug("command to run: {}".format(cmd))
         cmd_stdin, cmd_stdout, cmd_stderr = ssh_client.exec_command(cmd)
 
         cmd_rc = cmd_stdout.channel.recv_exit_status()
-        log.debug('command exit code: {}'.format(cmd_rc))
+        log.debug("command exit code: {}".format(cmd_rc))
 
-        cmd_stdout = ''.join(cmd_stdout.readlines())
-        log.debug('command output (stdout): {}'.format(cmd_stdout))
+        cmd_stdout = "".join(cmd_stdout.readlines())
+        log.debug("command output (stdout): {}".format(cmd_stdout))
 
-        cmd_stderr = ''.join(cmd_stderr.readlines())
-        log.debug('command output (stderr): {}'.format(cmd_stderr))
+        cmd_stderr = "".join(cmd_stderr.readlines())
+        log.debug("command output (stderr): {}".format(cmd_stderr))
 
         if cmd_rc:
             raise Exception(
-                '"{}" failed with exit code {}.\n\tSTDOUT: {}\n\tSTDERR: {}'
-                .format(cmd, cmd_rc, cmd_stdout, cmd_stderr)
+                '"{}" failed with exit code {}.\n\tSTDOUT: {}\n\tSTDERR: {}'.format(
+                    cmd, cmd_rc, cmd_stdout, cmd_stderr
+                )
             )
 
 
@@ -83,9 +85,9 @@ def split_ip_range(vserver_ips):
     ip_hi = ip2.split(".")[-1]
 
     ip_prefix = ".".join(ip1_split[:-1]) + "."
-    vserver_list = [ip_prefix + str(n) for n in range(int(ip_low), int(ip_hi)+1)]
+    vserver_list = [ip_prefix + str(n) for n in range(int(ip_low), int(ip_hi) + 1)]
     return vserver_list
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     pass
diff --git a/test/lib/pytest_fixtures.py b/test/lib/pytest_fixtures.py
index 5e4a8d16..b2cc784e 100644
--- a/test/lib/pytest_fixtures.py
+++ b/test/lib/pytest_fixtures.py
@@ -12,43 +12,42 @@ from lib import helpers
 
 
 # FIXTURES ####################################################################
-@pytest.fixture(scope='module')
+@pytest.fixture(scope="module")
 def group_vars():
     """
     Instantiates an ArmTemplateDeploy object, creates the resource group as
     test-group setup, and deletes the resource group as test-group teardown.
""" - log = logging.getLogger('group_vars') + log = logging.getLogger("group_vars") vars = {} - if 'VFXT_TEST_VARS_FILE' in os.environ and \ - os.path.isfile(os.environ['VFXT_TEST_VARS_FILE']): - log.debug('Loading into vars from {} (VFXT_TEST_VARS_FILE)'.format( - os.environ['VFXT_TEST_VARS_FILE'])) - with open(os.environ['VFXT_TEST_VARS_FILE'], 'r') as vtvf: + if "VFXT_TEST_VARS_FILE" in os.environ and \ + os.path.isfile(os.environ["VFXT_TEST_VARS_FILE"]): + log.debug("Loading into vars from {} (VFXT_TEST_VARS_FILE)".format( + os.environ["VFXT_TEST_VARS_FILE"])) + with open(os.environ["VFXT_TEST_VARS_FILE"], "r") as vtvf: vars = {**vars, **json.load(vtvf)} - log.debug('Loaded the following JSON into vars: {}'.format( - json.dumps(vars, sort_keys=True, indent=4))) + log.debug("Loaded the following JSON into vars: {}".format( + json.dumps(vars, sort_keys=True, indent=4))) - vars['atd_obj'] = ArmTemplateDeploy(_fields=vars.pop('atd_obj', {})) - rg = vars['atd_obj'].create_resource_group() - log.info('Created Resource Group: {}'.format(rg)) + vars["atd_obj"] = ArmTemplateDeploy(_fields=vars.pop("atd_obj", {})) + rg = vars["atd_obj"].create_resource_group() + log.info("Created Resource Group: {}".format(rg)) yield vars - vars['atd_obj'] = json.loads(vars['atd_obj'].serialize()) - if 'VFXT_TEST_VARS_FILE' in os.environ: - log.debug('vars: {}'.format( - json.dumps(vars, sort_keys=True, indent=4))) - log.debug('Saving vars to {} (VFXT_TEST_VARS_FILE)'.format( - os.environ['VFXT_TEST_VARS_FILE'])) - with open(os.environ['VFXT_TEST_VARS_FILE'], 'w') as vtvf: + vars["atd_obj"] = json.loads(vars["atd_obj"].serialize()) + if "VFXT_TEST_VARS_FILE" in os.environ: + log.debug("vars: {}".format(json.dumps(vars, sort_keys=True, indent=4))) + log.debug("Saving vars to {} (VFXT_TEST_VARS_FILE)".format( + os.environ["VFXT_TEST_VARS_FILE"])) + with open(os.environ["VFXT_TEST_VARS_FILE"], "w") as vtvf: json.dump(vars, vtvf) @pytest.fixture() def ssh_client(group_vars): - client = helpers.create_ssh_client(group_vars['controller_user'], - group_vars['controller_ip']) + client = helpers.create_ssh_client(group_vars["controller_user"], + group_vars["controller_ip"]) yield client client.close() @@ -62,11 +61,11 @@ def scp_client(ssh_client): @pytest.fixture() def vserver_ip_list(group_vars): - if 'vserver_ip_list' not in group_vars: - vserver_ips = group_vars['deploy_outputs']["vserveR_IPS"]["value"] - group_vars['vserver_ip_list'] = helpers.split_ip_range(vserver_ips) - return group_vars['vserver_ip_list'] + if "vserver_ip_list" not in group_vars: + vserver_ips = group_vars["deploy_outputs"]["vserveR_IPS"]["value"] + group_vars["vserver_ip_list"] = helpers.split_ip_range(vserver_ips) + return group_vars["vserver_ip_list"] -if __name__ == '__main__': +if __name__ == "__main__": pytest.main() diff --git a/test/test_avere_template_deploy.py b/test/test_avere_template_deploy.py index e5e66183..2e13e8e2 100755 --- a/test/test_avere_template_deploy.py +++ b/test/test_avere_template_deploy.py @@ -17,82 +17,81 @@ from lib.pytest_fixtures import (group_vars, scp_client, ssh_client, # TEST CASES ################################################################## class TestDeployment: - def test_deploy_template(self, group_vars): - log = logging.getLogger('test_deploy_template') - td = group_vars['atd_obj'] - with open('{}/src/vfxt/azuredeploy-auto.json'.format( - os.environ['BUILD_SOURCESDIRECTORY'])) as tfile: + log = logging.getLogger("test_deploy_template") + td = group_vars["atd_obj"] + with 
open("{}/src/vfxt/azuredeploy-auto.json".format( + os.environ["BUILD_SOURCESDIRECTORY"])) as tfile: td.template = json.load(tfile) - with open(os.path.expanduser(r'~/.ssh/id_rsa.pub'), 'r') as ssh_pub_f: + with open(os.path.expanduser(r"~/.ssh/id_rsa.pub"), "r") as ssh_pub_f: ssh_pub_key = ssh_pub_f.read() td.deploy_params = { - 'virtualNetworkResourceGroup': td.resource_group, - 'virtualNetworkName': td.deploy_id + '-vnet', - 'virtualNetworkSubnetName': td.deploy_id + '-subnet', - 'avereBackedStorageAccountName': td.deploy_id + 'sa', - 'controllerName': td.deploy_id + '-con', - 'controllerAdminUsername': 'azureuser', - 'controllerAuthenticationType': 'sshPublicKey', - 'controllerSSHKeyData': ssh_pub_key, - 'adminPassword': os.environ['AVERE_ADMIN_PW'], - 'controllerPassword': os.environ['AVERE_CONTROLLER_PW'] + "virtualNetworkResourceGroup": td.resource_group, + "virtualNetworkName": td.deploy_id + "-vnet", + "virtualNetworkSubnetName": td.deploy_id + "-subnet", + "avereBackedStorageAccountName": td.deploy_id + "sa", + "controllerName": td.deploy_id + "-con", + "controllerAdminUsername": "azureuser", + "controllerAuthenticationType": "sshPublicKey", + "controllerSSHKeyData": ssh_pub_key, + "adminPassword": os.environ["AVERE_ADMIN_PW"], + "controllerPassword": os.environ["AVERE_CONTROLLER_PW"], } - group_vars['controller_name'] = td.deploy_params['controllerName'] - group_vars['controller_user'] = td.deploy_params['controllerAdminUsername'] + group_vars["controller_name"] = td.deploy_params["controllerName"] + group_vars["controller_user"] = td.deploy_params["controllerAdminUsername"] - log.debug('Generated deploy parameters: \n{}'.format( - json.dumps(td.deploy_params, indent=4))) - td.deploy_name = 'test_deploy_template' + log.debug("Generated deploy parameters: \n{}".format( + json.dumps(td.deploy_params, indent=4))) + td.deploy_name = "test_deploy_template" try: deploy_result = helpers.wait_for_op(td.deploy()) - group_vars['deploy_outputs'] = deploy_result.properties.outputs + group_vars["deploy_outputs"] = deploy_result.properties.outputs finally: - group_vars['controller_ip'] = td.nm_client.public_ip_addresses.get( - td.resource_group, - 'publicip-' + group_vars['controller_name']).ip_address + group_vars["controller_ip"] = td.nm_client.public_ip_addresses.get( + td.resource_group, "publicip-" + group_vars["controller_name"] + ).ip_address def test_get_vfxt_log(self, group_vars, scp_client): - log = logging.getLogger('test_get_vfxt_log') - log.info('Getting vfxt.log from controller: {}'.format( - group_vars['controller_name'])) - scp_client.get(r'~/vfxt.log', - r'./vfxt.' + group_vars['controller_name'] + '.log') + log = logging.getLogger("test_get_vfxt_log") + log.info("Getting vfxt.log from controller: {}".format( + group_vars["controller_name"])) + scp_client.get(r"~/vfxt.log", + r"./vfxt." 
+ group_vars["controller_name"] + ".log") def test_mount_nodes_on_controller(self, vserver_ip_list, ssh_client): commands = """ sudo apt-get update sudo apt-get install nfs-common - """.split('\n') + """.split("\n") for i, vs_ip in enumerate(vserver_ip_list): - commands.append('sudo mkdir -p /nfs/node{}'.format(i)) - commands.append('sudo chown nobody:nogroup /nfs/node{}'.format(i)) + commands.append("sudo mkdir -p /nfs/node{}".format(i)) + commands.append("sudo chown nobody:nogroup /nfs/node{}".format(i)) fstab_line = "{}:/msazure /nfs/node{} nfs ".format(vs_ip, i) + \ - "hard,nointr,proto=tcp,mountproto=tcp,retry=30 0 0" - commands.append('sudo sh -c \'echo "{}" >> /etc/fstab\''.format( + "hard,nointr,proto=tcp,mountproto=tcp,retry=30 0 0" + commands.append("sudo sh -c 'echo \"{}\" >> /etc/fstab'".format( fstab_line)) - commands.append('sudo mount -a') + commands.append("sudo mount -a") helpers.run_ssh_commands(ssh_client, commands) def test_ping_nodes(self, vserver_ip_list, ssh_client): commands = [] for vs_ip in vserver_ip_list: - commands.append('ping -c 3 {}'.format(vs_ip)) + commands.append("ping -c 3 {}".format(vs_ip)) helpers.run_ssh_commands(ssh_client, commands) def test_node_basic_fileops(self, group_vars, ssh_client, scp_client): - script_name = 'check_node_basic_fileops.sh' - scp_client.put('{0}/test/{1}'.format( - os.environ['BUILD_SOURCESDIRECTORY'], script_name), - r'~/.') + script_name = "check_node_basic_fileops.sh" + scp_client.put("{0}/test/{1}".format( + os.environ["BUILD_SOURCESDIRECTORY"], script_name), + r"~/.") commands = """ chmod +x {0} ./{0} - """.format(script_name).split('\n') + """.format(script_name).split("\n") helpers.run_ssh_commands(ssh_client, commands) -if __name__ == '__main__': +if __name__ == "__main__": pytest.main() diff --git a/test/test_vdbench.py b/test/test_vdbench.py index 0bd4ef69..9ed46601 100644 --- a/test/test_vdbench.py +++ b/test/test_vdbench.py @@ -18,7 +18,6 @@ from sshtunnel import SSHTunnelForwarder class VDBench: - def test_vdbench_setup(self, group_vars, ssh_client): # TODO: Ensure nodes are mounted on controller. (fixture?) 
commands = """ @@ -29,60 +28,62 @@ class VDBench: sudo curl --retry 5 --retry-delay 5 -o /nfs/node0/bootstrap/vdbenchVerify.sh https://raw.githubusercontent.com/Azure/Avere/master/src/clientapps/vdbench/vdbenchVerify.sh sudo chmod +x /nfs/node0/bootstrap/vdbenchVerify.sh /nfs/node0/bootstrap/vdbenchVerify.sh - """.split('\n') + """.split("\n") helpers.run_ssh_commands(ssh_client, commands) def test_vdbench_deploy(self, group_vars, vserver_ip_list): - td = group_vars['atd_obj'] - with open(os.path.expanduser(r'~/.ssh/id_rsa.pub'), 'r') as ssh_pub_f: + td = group_vars["atd_obj"] + with open(os.path.expanduser(r"~/.ssh/id_rsa.pub"), "r") as ssh_pub_f: ssh_pub_key = ssh_pub_f.read() - with open('{}/src/client/vmas/azuredeploy.json'.format( - os.environ['BUILD_SOURCESDIRECTORY'])) as tfile: + with open("{}/src/client/vmas/azuredeploy.json".format( + os.environ["BUILD_SOURCESDIRECTORY"])) as tfile: td.template = json.load(tfile) orig_params = td.deploy_params.copy() td.deploy_params = { - 'uniquename': td.deploy_id, - 'sshKeyData': ssh_pub_key, - 'virtualNetworkResourceGroup': orig_params['virtualNetworkResourceGroup'], - 'virtualNetworkName': orig_params['virtualNetworkName'], - 'virtualNetworkSubnetName': orig_params['virtualNetworkSubnetName'], - 'nfsCommaSeparatedAddresses': ','.join(vserver_ip_list), - 'vmCount': 12, - 'nfsExportPath': '/msazure', - 'bootstrapScriptPath': '/bootstrap/bootstrap.vdbench.sh' + "uniquename": td.deploy_id, + "sshKeyData": ssh_pub_key, + "virtualNetworkResourceGroup": orig_params["virtualNetworkResourceGroup"], + "virtualNetworkName": orig_params["virtualNetworkName"], + "virtualNetworkSubnetName": orig_params["virtualNetworkSubnetName"], + "nfsCommaSeparatedAddresses": ",".join(vserver_ip_list), + "vmCount": 12, + "nfsExportPath": "/msazure", + "bootstrapScriptPath": "/bootstrap/bootstrap.vdbench.sh", } - td.deploy_name = 'test_vdbench' + td.deploy_name = "test_vdbench" deploy_result = helpers.wait_for_op(td.deploy()) - group_vars['deploy_vd_outputs'] = deploy_result.properties.outputs + group_vars["deploy_vd_outputs"] = deploy_result.properties.outputs def test_vdbench_template_run(self, group_vars): - node_ip = group_vars['deploy_vd_outputs']["nodE_0_IP_ADDRESS"]["value"] + node_ip = group_vars["deploy_vd_outputs"]["nodE_0_IP_ADDRESS"]["value"] with SSHTunnelForwarder( - group_vars['controller_ip'], - ssh_username=group_vars['controller_user'], - ssh_pkey=os.path.expanduser(r'~/.ssh/id_rsa'), - remote_bind_address=(node_ip, 22) + group_vars["controller_ip"], + ssh_username=group_vars["controller_user"], + ssh_pkey=os.path.expanduser(r"~/.ssh/id_rsa"), + remote_bind_address=(node_ip, 22), ) as ssh_tunnel: sleep(1) try: ssh_client = helpers.create_ssh_client( - group_vars['controller_user'], - '127.0.0.1', - ssh_tunnel.local_bind_port) + group_vars["controller_user"], + "127.0.0.1", + ssh_tunnel.local_bind_port, + ) scp_client = SCPClient(ssh_client.get_transport()) try: - scp_client.put(os.path.expanduser(r'~/.ssh/id_rsa'), - r'~/.ssh/id_rsa') + scp_client.put(os.path.expanduser(r"~/.ssh/id_rsa"), + r"~/.ssh/id_rsa") finally: scp_client.close() - commands = """~/copy_idrsa.sh + commands = """ + ~/copy_idrsa.sh cd - """.split('\n') + """.split("\n") # ./run_vdbench.sh inmem.conf uniquestring1 # TODO: reenable helpers.run_ssh_commands(ssh_client, commands) finally: ssh_client.close() -if __name__ == '__main__': +if __name__ == "__main__": pytest.main()