Update command_test_util and start on reworking network tests.

Travis Prescott 2016-04-07 10:35:25 -07:00
Parent a944c52784
Commit d18b646d6a
6 changed files with 108 additions and 224 deletions
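For orientation, the reworked pieces are meant to fit together roughly as in the sketch below. The loop, the setattr call, and the cassette directory are assumptions pieced together from the fragments in this diff, not verbatim commit content; only CommandTestGenerator(vcr_cassette_dir, test_specs) and generate_tests() appear in the changed files.

# Rough sketch (assumed wiring) of a command module's test_commands.py
import os
import unittest
from azure.cli.utils.command_test_util import CommandTestGenerator
from command_specs import TEST_DEF, ENV_VAR  # ENV_VAR handling is still a TODO in the generator

class TestCommands(unittest.TestCase):
    pass

VCR_CASSETTE_DIR = os.path.join(os.path.dirname(__file__), 'recordings')  # assumed location
generator = CommandTestGenerator(VCR_CASSETTE_DIR, TEST_DEF)
tests = generator.generate_tests()
for test_name in tests:
    setattr(TestCommands, test_name, tests[test_name])

if __name__ == '__main__':
    unittest.main()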

View file

@@ -36,12 +36,10 @@
<Compile Include="azure\cli\extensions\transform.py" />
<Compile Include="azure\cli\extensions\__init__.py" />
<Compile Include="azure\cli\main.py" />
<Compile Include="azure\cli\tests\command_specs\__init__.py" />
<Compile Include="azure\cli\parser.py" />
<Compile Include="azure\cli\tests\test_add_resourcegroup_transform.py" />
<Compile Include="azure\cli\tests\test_parser.py" />
<Compile Include="azure\cli\tests\test_autocommand.py" />
<Compile Include="azure\cli\tests\test_commands.py" />
<Compile Include="azure\cli\tests\test_connection_verify.py" />
<Compile Include="azure\cli\tests\test_application.py" />
<Compile Include="azure\cli\tests\test_output.py" />
@@ -112,7 +110,6 @@
<Folder Include="azure\cli\command_modules\" />
<Folder Include="azure\cli\extensions\" />
<Folder Include="azure\cli\tests\" />
<Folder Include="azure\cli\tests\command_specs\" />
<Folder Include="azure\cli\utils\" />
<Folder Include="command_modules\" />
<Folder Include="command_modules\azure-cli-component\" />

View file

@@ -1,10 +0,0 @@
# Declare test definitions in the definition portion of each test file
TEST_DEF = []
ENV_VARIABLES = {}

def load_test_definitions(package_name, definition, env_variables=None):
    for i in definition:
        d = dict((k, i[k]) for k in i.keys() if k in ['test_name', 'command'])
        test_key = '{}.{}'.format(package_name, d['test_name'])
        TEST_DEF.append((test_key, d))
    ENV_VARIABLES.update(env_variables or {})
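For reference, the removed helper registered each spec under a key of the form '<module __name__>.<test_name>'. An illustrative entry, built from one of the network specs changed later in this commit (the exact key depends on how command_specs is imported, so treat it as hypothetical):

# Illustration only - not part of the commit
TEST_DEF = [
    ('command_specs.network_usage_list',
     {'test_name': 'network_usage_list',
      'command': 'network usage list --location westus --output json'})
]
ENV_VARIABLES = {}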

View file

@@ -1,174 +0,0 @@
from __future__ import print_function
import os
import json
import sys
import unittest
import re
import vcr
import logging
from six import add_metaclass, StringIO
try:
    import unittest.mock as mock
except ImportError:
    import mock
from azure.cli.main import main as cli
from command_specs import TEST_DEF, ENV_VARIABLES

logging.basicConfig()
vcr_log = logging.getLogger('vcr')
vcr_log.setLevel(logging.ERROR)

VCR_CASSETTE_DIR = os.path.join(os.path.dirname(__file__), 'recordings')
EXPECTED_RESULTS_PATH = os.path.join(VCR_CASSETTE_DIR, 'expected_results.res')

FILTER_HEADERS = [
    'authorization',
    'client-request-id',
    'x-ms-client-request-id',
    'x-ms-correlation-request-id',
    'x-ms-ratelimit-remaining-subscription-reads',
    'x-ms-request-id',
    'x-ms-routing-request-id',
    'x-ms-gateway-service-instanceid',
    'x-ms-ratelimit-remaining-tenant-reads',
    'x-ms-served-by',
]

def before_record_request(request):
    request.uri = re.sub('/subscriptions/([^/]+)/', '/subscriptions/00000000-0000-0000-0000-000000000000/', request.uri)
    return request

def before_record_response(response):
    def remove_entries(the_dict, entries):
        for key in entries:
            if key in the_dict:
                del the_dict[key]
    remove_entries(response['headers'], FILTER_HEADERS)
    return response

my_vcr = vcr.VCR(
    cassette_library_dir=VCR_CASSETTE_DIR,
    before_record_request=before_record_request,
    before_record_response=before_record_response
)

class TestSequenceMeta(type):

    def __new__(mcs, name, bases, dict):

        def gen_test(test_name, command, expected_result):

            def load_subscriptions_mock(self):
                return [{
                    "id": "00000000-0000-0000-0000-000000000000",
                    "user": {
                        "name": "example@example.com",
                        "type": "user"
                    },
                    "state": "Enabled",
                    "name": "Example",
                    "tenantId": "123",
                    "isDefault": True}]

            def get_user_access_token_mock(_, _1, _2):
                return 'top-secret-token-for-you'

            def _test_impl(self, expected_result):
                """ Test implementation, augmented with prompted recording of expected result
                if not provided. """
                io = StringIO()
                cli(command.split(), file=io)
                actual_result = io.getvalue()
                if expected_result == None:
                    header = '| RECORDED RESULT FOR {} |'.format(test_name)
                    print('-' * len(header), file=sys.stderr)
                    print(header, file=sys.stderr)
                    print('-' * len(header) + '\n', file=sys.stderr)
                    print(actual_result, file=sys.stderr)
                    ans = input('Save result for command: \'{}\'? [Y/n]: '.format(command))
                    result = None
                    if ans and ans.lower()[0] == 'y':
                        # update and save the expected_results.res file
                        TEST_EXPECTED[test_name] = actual_result
                        with open(EXPECTED_RESULTS_PATH, 'w') as file:
                            json.dump(TEST_EXPECTED, file, indent=4, sort_keys=True)
                        expected_result = actual_result
                    else:
                        # recorded result was wrong. Discard the result and the .yaml cassette
                        expected_result = None
                io.close()
                self.assertEqual(actual_result, expected_result)

            cassette_path = os.path.join(VCR_CASSETTE_DIR, '{}.yaml'.format(test_name))
            cassette_found = os.path.isfile(cassette_path)

            # if no yaml, any expected result is invalid and must be rerecorded
            expected_result = None if not cassette_found else expected_result

            # if no expected result, yaml file should be discarded and rerecorded
            if cassette_found and expected_result == None:
                os.remove(cassette_path)
                cassette_found = os.path.isfile(cassette_path)

            if cassette_found and expected_result != None:
                # playback mode - can be fully automated
                @mock.patch('azure.cli._profile.Profile.load_cached_subscriptions', load_subscriptions_mock)
                @mock.patch('azure.cli._profile.CredsCache.retrieve_token_for_user', get_user_access_token_mock)
                @my_vcr.use_cassette(cassette_path, filter_headers=FILTER_HEADERS)
                def test(self):
                    _test_impl(self, expected_result)
                return test
            elif not cassette_found and expected_result == None:
                # recording needed
                # if buffer specified and recording needed, automatically fail
                is_buffered = list(set(['--buffer']) & set(sys.argv))
                if is_buffered:
                    def null_test(self):
                        self.fail('No recorded result provided for {}.'.format(test_name))
                    return null_test
                @my_vcr.use_cassette(cassette_path, filter_headers=FILTER_HEADERS)
                def test(self):
                    _test_impl(self, expected_result)
            else:
                # yaml file failed to delete or bug exists
                raise RuntimeError('Unable to generate test for {} due to inconsistent data. ' \
                    + 'Please manually remove the associated .yaml cassette and/or the test\'s ' \
                    + 'entry in expected_results.res and try again.')
            return test

        try:
            with open(EXPECTED_RESULTS_PATH, 'r') as file:
                TEST_EXPECTED = json.loads(file.read())
        except EnvironmentError:
            TEST_EXPECTED = {}

        for test_path, test_def in TEST_DEF:
            test_name = 'test_{}'.format(test_def['test_name'])
            command = test_def['command']
            expected_result = TEST_EXPECTED.get(test_path, None)
            dict[test_name] = gen_test(test_path, command, expected_result)
        return type.__new__(mcs, name, bases, dict)


@add_metaclass(TestSequenceMeta)
class TestCommands(unittest.TestCase):

    @classmethod
    def setUpClass(cls):
        # use default environment variables if not currently set in the system
        vars = ENV_VARIABLES.keys() if ENV_VARIABLES else []
        for var in vars:
            if not os.environ.get(var, None):
                os.environ[var] = str(ENV_VARIABLES[var])

    @classmethod
    def tearDownClass(cls):
        pass


if __name__ == '__main__':
    unittest.main()
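Both the old driver above and the reworked CommandTestGenerator read and write recordings/expected_results.res as a JSON object mapping a test key (the value handed to gen_test, package-qualified in the old driver) to the CLI output captured for that command, written with indent=4 and sort_keys=True. A hypothetical file for the network specs in this commit might look like the following; the keys and values are illustrative, not recorded data:

{
    "network_nic_list": "[]\n",
    "network_usage_list": "... JSON printed by 'network usage list' ...\n"
}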

View file

@@ -1,3 +1,10 @@
from __future__ import print_function
import os
import json
import sys
import unittest
import re
import logging
import re
@@ -26,6 +33,10 @@ class CommandTestGenerator(object):
        'x-ms-served-by',
    ]

    #from command_specs import TEST_DEF, ENV_VAR
    #VCR_CASSETTE_DIR = os.path.join(os.path.dirname(__file__), 'recordings')
    #EXPECTED_RESULTS_PATH = os.path.join(VCR_CASSETTE_DIR, 'expected_results.res')
    # TODO: Add env_var and expected_results path
    def __init__(self, vcr_cassette_dir, test_specs):
        self.test_specs = test_specs
        logging.basicConfig()
@@ -35,9 +46,16 @@ class CommandTestGenerator(object):
            before_record_request=CommandTestGenerator.before_record_request,
            before_record_response=CommandTestGenerator.before_record_response
        )
        # use default environment variables if not currently set in the system
        vars = ENV_VARIABLES.keys() if ENV_VARIABLES else []
        for var in vars:
            if not os.environ.get(var, None):
                os.environ[var] = str(ENV_VARIABLES[var])

    def generate_tests(self):
        test_functions = {}

        def gen_test(test_name, command, expected_result):

            def load_subscriptions_mock(self): #pylint: disable=unused-argument
@@ -55,24 +73,88 @@
            def get_user_access_token_mock(_, _1, _2): #pylint: disable=unused-argument
                return 'top-secret-token-for-you'

            @mock.patch('azure.cli._profile.Profile.load_cached_subscriptions',
                        load_subscriptions_mock)
            @mock.patch('azure.cli._profile.CredsCache.retrieve_token_for_user',
                        get_user_access_token_mock)
            @self.my_vcr.use_cassette(test_name + '.yaml',
                                      filter_headers=CommandTestGenerator.FILTER_HEADERS)
            def test(self):
            def _test_impl(self, expected_result):
                """ Test implementation, augmented with prompted recording of expected result
                if not provided. """
                io = StringIO()
                cli(command.split(), file=io)
                actual_result = io.getvalue()
                if expected_result == None:
                    header = '| RECORDED RESULT FOR {} |'.format(test_name)
                    print('-' * len(header), file=sys.stderr)
                    print(header, file=sys.stderr)
                    print('-' * len(header) + '\n', file=sys.stderr)
                    print(actual_result, file=sys.stderr)
                    ans = input('Save result for command: \'{}\'? [Y/n]: '.format(command))
                    result = None
                    if ans and ans.lower()[0] == 'y':
                        # update and save the expected_results.res file
                        TEST_EXPECTED[test_name] = actual_result
                        with open(EXPECTED_RESULTS_PATH, 'w') as file:
                            json.dump(TEST_EXPECTED, file, indent=4, sort_keys=True)
                        expected_result = actual_result
                    else:
                        # recorded result was wrong. Discard the result and the .yaml cassette
                        expected_result = None
                io.close()
                self.assertEqual(actual_result, expected_result)

            cassette_path = os.path.join(VCR_CASSETTE_DIR, '{}.yaml'.format(test_name))
            cassette_found = os.path.isfile(cassette_path)

            # if no yaml, any expected result is invalid and must be rerecorded
            expected_result = None if not cassette_found else expected_result

            # if no expected result, yaml file should be discarded and rerecorded
            if cassette_found and expected_result == None:
                os.remove(cassette_path)
                cassette_found = os.path.isfile(cassette_path)

            if cassette_found and expected_result != None:
                # playback mode - can be fully automated
                @mock.patch('azure.cli._profile.Profile.load_cached_subscriptions',
                            load_subscriptions_mock)
                @mock.patch('azure.cli._profile.CredsCache.retrieve_token_for_user',
                            get_user_access_token_mock)
                @my_vcr.use_cassette(cassette_path,
                                     filter_headers=CommandTestGenerator.FILTER_HEADERS)
                def test(self):
                    _test_impl(self, expected_result)
                return test
            elif not cassette_found and expected_result == None:
                # recording needed
                # if buffer specified and recording needed, automatically fail
                is_buffered = list(set(['--buffer']) & set(sys.argv))
                if is_buffered:
                    def null_test(self):
                        self.fail('No recorded result provided for {}.'.format(test_name))
                    return null_test
                @my_vcr.use_cassette(cassette_path,
                                     filter_headers=CommandTestGenerator.FILTER_HEADERS)
                def test(self):
                    _test_impl(self, expected_result)
            else:
                # yaml file failed to delete or bug exists
                raise RuntimeError('Unable to generate test for {} due to inconsistent data. ' \
                    + 'Please manually remove the associated .yaml cassette and/or the test\'s ' \
                    + 'entry in expected_results.res and try again.')
            return test

        for test_spec_item in self.test_specs:
            test_name = 'test_' + test_spec_item['test_name']
            test_functions[test_name] = gen_test(test_name, test_spec_item['command'],
                                                 test_spec_item['expected_result'])

        try:
            with open(EXPECTED_RESULTS_PATH, 'r') as file:
                TEST_EXPECTED = json.loads(file.read())
        except EnvironmentError:
            TEST_EXPECTED = {}

        for test_path, test_def in TEST_DEF:
            test_name = 'test_{}'.format(test_def['test_name'])
            command = test_def['command']
            expected_result = TEST_EXPECTED.get(test_path, None)
            test_functions[test_name] = gen_test(test_path, command, expected_result)
        return test_functions

    @staticmethod

View file

@@ -1,15 +1,15 @@
from . import TEST_DEF, load_test_definitions

# AZURE CLI NETWORK TEST DEFINITIONS
ENV_VAR = {}

TEST_DEF = [
    {
        'test_name': 'network_usage_list',
        'command': 'network usage list --location westus --output json'
    },
    {
        'test_name': 'network_nic_list',
        'command': 'network nic list -g travistestresourcegroup'
    }
]

load_test_definitions(
    package_name=locals()['__name__'],
    definition = [
        {
            'test_name': 'network_usage_list',
            'command': 'network usage list --location westus --output json'
        },
        {
            'test_name': 'network_nic_list',
            'command': 'network nic list -g travistestresourcegroup'
        }
    ]
)

View file

@@ -1,7 +1,7 @@
import os
import unittest
from azure.cli.utils.command_test_util import CommandTestGenerator
from command_specs import TEST_DEF
from command_specs import TEST_DEF, ENV_VAR
class TestCommands(unittest.TestCase):
    pass
@ -14,14 +14,3 @@ for test_name in tests:
if __name__ == '__main__':
    unittest.main()

# Declare test definitions in the definition portion of each test file
#TEST_DEF = []
#ENV_VARIABLES = {}
#def load_test_definitions(package_name, definition, env_variables=None):
#    for i in definition:
#        d = dict((k, i[k]) for k in i.keys() if k in ['test_name', 'command'])
#        test_key = '{}.{}'.format(package_name, d['test_name'])
#        TEST_DEF.append((test_key, d))
#    ENV_VARIABLES.update(env_variables or {})