Changes to improve the output format for RESTler bugs. (#713)
* Changes to improve the output format for RESTler bugs:
  1. Added functionality to publish a JSON-formatted bug details file for each reported bug, in addition to the txt file.
  2. Added functionality to publish a Bugs.json file containing the information on bugs found in a RESTler run. This file is an index to the individual bug details files.
* Resolved the PR comments:
  1. Fixed the naming convention issues.
  2. Resolved comments on code reusability.
* Changes to fix the PR comments:
  1. Added tests to validate sequences that contain multiple requests.
  2. Removed request order from the bug details.
  3. Refactored code to pass error_code as part of the BugBucket class.
* Fixed the build error in the last PR: glob was returning the files in a different order, and the test was trying to replay from the json files.

Co-authored-by: Anand Nooli <annooli@microsoft.com>
This commit is contained in:
Parent: 7a6fb8ccee
Commit: 3200bada04
@@ -68,7 +68,7 @@ def replay_from_dir(ip, port, host, use_ssl, restler_dll_path, replay_dir):
     import glob
     from pathlib import Path
     # get all the 500 replay files in the bug buckets directory
-    bug_buckets = glob.glob(os.path.join(replay_dir, 'RestlerResults', '**/bug_buckets/*500*'))
+    bug_buckets = glob.glob(os.path.join(replay_dir, 'RestlerResults', '**/bug_buckets/*500*txt'))
     print(f"buckets: {bug_buckets}")
     for file_path in bug_buckets:
         if "bug_buckets" in os.path.basename(file_path):
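Note on the hunk above: the bug_buckets directory now also contains JSON-formatted bug files, so the old `*500*` pattern matched both `main_driver_500_1.txt` and `main_driver_500_1.json`, and since glob returns files in no guaranteed order the test could attempt a replay from a json file (the build break mentioned in the commit message). A minimal sketch of the difference, using a hypothetical directory layout:

```python
import glob
import os

# Hypothetical layout: bug_buckets now holds both the txt replay logs
# and the new json bug-details files.
replay_dir = "restler_demo"
bucket_dir = os.path.join(replay_dir, "RestlerResults", "experiment1", "bug_buckets")
os.makedirs(bucket_dir, exist_ok=True)
for name in ("main_driver_500_1.txt", "main_driver_500_1.json"):
    open(os.path.join(bucket_dir, name), "w").close()

# Old pattern: matches both files, in no guaranteed order.
print(glob.glob(os.path.join(replay_dir, "RestlerResults", "**/bug_buckets/*500*")))
# New pattern: matches only the txt replay logs.
print(glob.glob(os.path.join(replay_dir, "RestlerResults", "**/bug_buckets/*500*txt")))
```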
@@ -15,11 +15,13 @@ class UninitializedError(Exception):
     pass

 class BugBucket(object):
-    def __init__(self, sequence, reproducible, reproduce_attempts, reproduce_successes):
+    def __init__(self, sequence, reproducible, reproduce_attempts, reproduce_successes, bucket_origin, error_code):
         self.sequence = sequence
         self.reproducible = reproducible
         self.reproduce_attempts = reproduce_attempts
         self.reproduce_successes = reproduce_successes
+        self.origin = bucket_origin
+        self.error_code = error_code

 class BugBuckets(object):
     __instance = None
@@ -88,8 +90,29 @@ class BugBuckets(object):
             create_once_requests.append(c_req)
         return create_once_requests

+    def _get_bug_code_description(self, bug_code):
+        """ Helper to get a bug code description.
+
+        @param bug_code: The status code that triggered the bug
+        @type  bug_code: Str
+        @return: The bug code description
+        @rtype : Str
+
+        """
+        if bug_code == TIMEOUT_CODE:
+            bug_code_string = 'timeout'
+        elif bug_code == CONNECTION_CLOSED_CODE:
+            bug_code_string = 'connection_closed'
+        elif bug_code.startswith('20'):
+            bug_code_string = '20x'
+        else:
+            bug_code_string = f'{bug_code}'
+
+        return bug_code_string
+
     def _get_bucket_origin(self, origin, bug_code):
-        """ Helper to get the bug bucket origin string from a bug code
+        """ Helper to get a full bug bucket origin string that includes the
+        bug origin and bug code.

         @param origin: The origin of the bug (checker name, main driver, etc)
         @type  origin: Str
@@ -99,14 +122,9 @@ class BugBuckets(object):
         @rtype : Str

         """
-        if bug_code == TIMEOUT_CODE:
-            return f'{origin}_timeout'
-        elif bug_code == CONNECTION_CLOSED_CODE:
-            return f'{origin}_connection_closed'
-        elif bug_code.startswith('20'):
-            return f'{origin}_20x'
-        else:
-            return f'{origin}_{bug_code}'
+        return f'{origin}_{self._get_bug_code_description(bug_code)}'


     def _get_bug_hash(self, origin, sequence, hash_full_request, additional_str):
         """ Helper that creates and returns the unique bug hash
@@ -196,6 +214,7 @@ class BugBuckets(object):

         try:
             bucket_origin = self._get_bucket_origin(origin, bug_code)
+            bucket_bugcode_description = self._get_bug_code_description(bug_code)
             if bucket_origin not in self._bug_buckets:
                 self._bug_buckets[bucket_origin] = OrderedDict()
             bucket = self._bug_buckets[bucket_origin]
@@ -207,7 +226,7 @@ class BugBuckets(object):
                 (reproducible, reproduce_attempts, reproduce_successes) = self._test_bug_reproducibility(sequence, bug_code, bucket)
             else:
                 (reproducible, reproduce_attempts, reproduce_successes) = (False, 0, 0)
-            bucket[seq_hex] = BugBucket(sequence, reproducible, reproduce_attempts, reproduce_successes)
+            bucket[seq_hex] = BugBucket(sequence, reproducible, reproduce_attempts, reproduce_successes, origin, bucket_bugcode_description)

             sent_request_data_list = sequence.sent_request_data_list
             create_once_requests = self._get_create_once_requests(sequence)
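With this refactoring, the bucket name is always `{origin}_{description}`: a 500 from the main driver buckets under `main_driver_500`, while a timeout from the same origin buckets under `main_driver_timeout`. A standalone sketch of the naming logic (the sentinel values chosen for TIMEOUT_CODE and CONNECTION_CLOSED_CODE below are illustrative assumptions, not necessarily RESTler's actual constants):

```python
# Illustrative sentinels; RESTler defines its own constants for these.
TIMEOUT_CODE = '599'
CONNECTION_CLOSED_CODE = '598'

def get_bug_code_description(bug_code: str) -> str:
    # Same mapping as BugBuckets._get_bug_code_description above.
    if bug_code == TIMEOUT_CODE:
        return 'timeout'
    elif bug_code == CONNECTION_CLOSED_CODE:
        return 'connection_closed'
    elif bug_code.startswith('20'):
        return '20x'
    return bug_code

for origin, code in [('main_driver', '500'),
                     ('UseAfterFreeChecker', '200'),
                     ('main_driver', TIMEOUT_CODE)]:
    print(f'{origin}_{get_bug_code_description(code)}')
# main_driver_500
# UseAfterFreeChecker_20x
# main_driver_timeout
```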
@@ -5,6 +5,7 @@
 from test_servers.parsed_requests import *

 import copy
+import json

 SENDING = ': Sending: '
 GENERATION = 'Generation-'
@@ -386,3 +387,43 @@ class BugLogParser(LogParser):
             print("Failed to read bug log. Log was not a complete test log.\n"
                   f"{err!s}")
             raise TestFailedException

+class JsonFormattedBugsLogParser(LogParser):
+    class FileType(enumerate):
+        Bugs = 'Bugs',
+        BugDetails = 'BugDetails',
+
+
+    def __init__(self, path, fileType):
+        """ JsonFormattedBugsLogParser constructor
+
+        @param path: The path to the bug log file
+        @type  path: Str
+
+        """
+        super().__init__(path)
+        # Parsed contents of the Bugs.json index or a per-bug details file
+        self._bug_list = []
+        self._bug_detail = None
+        self._bug_buckets_bychecker = []
+        self._fileType = fileType
+        self._parse()
+
+    def _parse(self):
+        """ Parses the bug log to populate the bug list
+
+        @return: None
+        @rtype : None
+
+        """
+        try:
+            with open(self._path, 'r') as bugs:
+                bugs_json = json.load(bugs)
+                if self._fileType == JsonFormattedBugsLogParser.FileType.Bugs:
+                    self._bug_list = bugs_json['bugs']
+                elif self._fileType == JsonFormattedBugsLogParser.FileType.BugDetails:
+                    self._bug_detail = bugs_json
+        except Exception as err:
+            print(f"Failed to read bug buckets file type {self._fileType} in bug buckets directory.\n"
+                  f"{err!s}")
+            raise TestFailedException
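A sketch of how the new parser might be driven from a test (the paths are hypothetical; LogParser and TestFailedException come from the surrounding test framework):

```python
# Parse the Bugs.json index and print one line per bug.
bugs_index = JsonFormattedBugsLogParser('bug_buckets/Bugs.json',
                                        JsonFormattedBugsLogParser.FileType.Bugs)
for bug in bugs_index._bug_list:
    print(bug['checker_name'], bug['error_code'], bug['filepath'])

# Parse one of the per-bug details files the index points at.
detail = JsonFormattedBugsLogParser('bug_buckets/main_driver_500_1.json',
                                    JsonFormattedBugsLogParser.FileType.BugDetails)
print(detail._bug_detail['verb'], detail._bug_detail['endpoint'])
```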
@@ -0,0 +1,118 @@
+{
+    "bugs": [
+        {
+            "filepath": "LeakageRuleChecker_20x_1.json",
+            "reproducible": true,
+            "checker_name": "LeakageRuleChecker",
+            "error_code": "20x"
+        },
+        {
+            "filepath": "InvalidDynamicObjectChecker_20x_1.json",
+            "reproducible": true,
+            "checker_name": "InvalidDynamicObjectChecker",
+            "error_code": "20x"
+        },
+        {
+            "filepath": "NameSpaceRuleChecker_20x_1.json",
+            "reproducible": false,
+            "checker_name": "NameSpaceRuleChecker",
+            "error_code": "20x"
+        },
+        {
+            "filepath": "InvalidDynamicObjectChecker_20x_2.json",
+            "reproducible": true,
+            "checker_name": "InvalidDynamicObjectChecker",
+            "error_code": "20x"
+        },
+        {
+            "filepath": "UseAfterFreeChecker_20x_1.json",
+            "reproducible": true,
+            "checker_name": "UseAfterFreeChecker",
+            "error_code": "20x"
+        },
+        {
+            "filepath": "InvalidDynamicObjectChecker_20x_3.json",
+            "reproducible": true,
+            "checker_name": "InvalidDynamicObjectChecker",
+            "error_code": "20x"
+        },
+        {
+            "filepath": "NameSpaceRuleChecker_20x_2.json",
+            "reproducible": false,
+            "checker_name": "NameSpaceRuleChecker",
+            "error_code": "20x"
+        },
+        {
+            "filepath": "InvalidDynamicObjectChecker_20x_4.json",
+            "reproducible": true,
+            "checker_name": "InvalidDynamicObjectChecker",
+            "error_code": "20x"
+        },
+        {
+            "filepath": "InvalidDynamicObjectChecker_20x_5.json",
+            "reproducible": true,
+            "checker_name": "InvalidDynamicObjectChecker",
+            "error_code": "20x"
+        },
+        {
+            "filepath": "InvalidDynamicObjectChecker_20x_6.json",
+            "reproducible": true,
+            "checker_name": "InvalidDynamicObjectChecker",
+            "error_code": "20x"
+        },
+        {
+            "filepath": "InvalidDynamicObjectChecker_20x_7.json",
+            "reproducible": true,
+            "checker_name": "InvalidDynamicObjectChecker",
+            "error_code": "20x"
+        },
+        {
+            "filepath": "InvalidDynamicObjectChecker_20x_8.json",
+            "reproducible": true,
+            "checker_name": "InvalidDynamicObjectChecker",
+            "error_code": "20x"
+        },
+        {
+            "filepath": "InvalidDynamicObjectChecker_20x_9.json",
+            "reproducible": true,
+            "checker_name": "InvalidDynamicObjectChecker",
+            "error_code": "20x"
+        },
+        {
+            "filepath": "InvalidDynamicObjectChecker_20x_10.json",
+            "reproducible": true,
+            "checker_name": "InvalidDynamicObjectChecker",
+            "error_code": "20x"
+        },
+        {
+            "filepath": "main_driver_500_1.json",
+            "reproducible": true,
+            "checker_name": "main_driver",
+            "error_code": "500"
+        },
+        {
+            "filepath": "InvalidDynamicObjectChecker_20x_11.json",
+            "reproducible": true,
+            "checker_name": "InvalidDynamicObjectChecker",
+            "error_code": "20x"
+        },
+        {
+            "filepath": "InvalidDynamicObjectChecker_20x_12.json",
+            "reproducible": true,
+            "checker_name": "InvalidDynamicObjectChecker",
+            "error_code": "20x"
+        },
+        {
+            "filepath": "ResourceHierarchyChecker_20x_1.json",
+            "reproducible": true,
+            "checker_name": "ResourceHierarchyChecker",
+            "error_code": "20x"
+        },
+        {
+            "filepath": "InvalidDynamicObjectChecker_20x_13.json",
+            "reproducible": true,
+            "checker_name": "InvalidDynamicObjectChecker",
+            "error_code": "20x"
+        }
+    ]
+}
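Each `filepath` in the index above names a sibling bug-details file in the same bug_buckets directory. A self-contained sketch of consuming the index, assuming only the schema shown here:

```python
import json
import os

def load_bugs(bug_buckets_dir):
    """Yield (summary, detail) pairs from a bug_buckets directory,
    assuming the Bugs.json schema shown above."""
    with open(os.path.join(bug_buckets_dir, 'Bugs.json'), 'r') as f:
        index = json.load(f)
    for bug in index['bugs']:
        detail_path = os.path.join(bug_buckets_dir, bug['filepath'])
        with open(detail_path, 'r') as f:
            yield bug, json.load(f)

# Example: count reproducible bugs per checker.
# from collections import Counter
# counts = Counter(b['checker_name'] for b, _ in load_bugs('bug_buckets')
#                  if b['reproducible'])
```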
@@ -0,0 +1,22 @@
+{
+    "status_code": "200",
+    "checker_name": "InvalidDynamicObjectChecker",
+    "reproducible": true,
+    "verb": "GET",
+    "endpoint": "/city/{cityName}",
+    "status_text": "OK",
+    "request_sequence": [
+        {
+            "producer_timing_delay": 0,
+            "max_async_wait_time": 20,
+            "replay_request": "PUT /city/cityName<test!>-3978db8792 HTTP/1.1\r\nHost: unittest\r\nAUTHORIZATION TOKEN\r\n\r\n{}\r\n",
+            "response": "HTTP/1.1 201 Created\r\nRestler Test\r\n\r\n{\"name\": \"cityName<test!>-3978db8792\", \"properties\": {}}"
+        },
+        {
+            "producer_timing_delay": 0,
+            "max_async_wait_time": 0,
+            "replay_request": "GET /city/cityName<test!>-3978db8792?injected_query_string=123 HTTP/1.1\r\nAccept: application/json\r\nHost: unittest\r\nContent-Type: application/json\r\nAUTHORIZATION TOKEN\r\n\r\n",
+            "response": "HTTP/1.1 200 OK\r\nRestler Test\r\n\r\n{\"name\": \"cityName<test!>-3978db8792\", \"properties\": {}}"
+        }
+    ]
+}
@@ -0,0 +1,28 @@
+{
+    "status_code": "200",
+    "checker_name": "UseAfterFreeChecker",
+    "reproducible": true,
+    "verb": "GET",
+    "endpoint": "/useafterfreetest/{useafterfreeTest}",
+    "status_text": "OK",
+    "request_sequence": [
+        {
+            "producer_timing_delay": 0,
+            "max_async_wait_time": 0,
+            "replay_request": "PUT /useafterfreetest/useafterfreeTest<test!>-6ae6874f61 HTTP/1.1\r\nAccept: application/json\r\nHost: unittest\r\nContent-Type: application/json\r\nAUTHORIZATION TOKEN\r\n\r\n{}\r\n",
+            "response": "HTTP/1.1 201 Created\r\nRestler Test\r\n\r\n{\"name\": \"useafterfreeTest<test!>-6ae6874f61\"}"
+        },
+        {
+            "producer_timing_delay": 0,
+            "max_async_wait_time": 20,
+            "replay_request": "DELETE /useafterfreetest/useafterfreeTest<test!>-6ae6874f61 HTTP/1.1\r\nAccept: application/json\r\nHost: unittest\r\nContent-Type: application/json\r\nAUTHORIZATION TOKEN\r\n\r\n",
+            "response": "HTTP/1.1 202 Accepted\r\nRestler Test\r\n\r\n\"useafterfreeTest<test!>-6ae6874f61\""
+        },
+        {
+            "producer_timing_delay": 0,
+            "max_async_wait_time": 0,
+            "replay_request": "GET /useafterfreetest/useafterfreeTest<test!>-6ae6874f61 HTTP/1.1\r\nAccept: application/json\r\nHost: unittest\r\nContent-Type: application/json\r\nAUTHORIZATION TOKEN\r\n\r\n",
+            "response": "HTTP/1.1 200 OK\r\nRestler Test\r\n\r\n{\"name\": \"useafterfreeTest<test!>-6ae6874f61\"}"
+        }
+    ]
+}
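Each entry in `request_sequence` carries the raw request bytes plus timing hints, which is enough to resend the sequence. A rough illustrative replay loop over such a file (raw-socket send; the host/port defaults and the handling of the AUTHORIZATION TOKEN placeholder are assumptions a real harness would need to address):

```python
import json
import socket
import time

def replay(detail_path, host='127.0.0.1', port=8888):
    """Minimal illustrative replay of a bug-details file. A real harness
    would also honor max_async_wait_time by polling for completion and
    would substitute a fresh value for the AUTHORIZATION TOKEN line."""
    with open(detail_path, 'r') as f:
        detail = json.load(f)
    for step in detail['request_sequence']:
        # Wait the recorded producer delay before sending the request.
        time.sleep(step['producer_timing_delay'])
        with socket.create_connection((host, port)) as s:
            s.sendall(step['replay_request'].encode('utf-8'))
            print(s.recv(4096).decode('utf-8', errors='replace'))
```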
@@ -1058,6 +1058,62 @@ class FunctionalityTests(unittest.TestCase):
         except TestFailedException:
             self.fail("Smoke test failed: Fuzzing")

+    def test_logger_jsonformatted_bugbuckets(self):
+
+        def verify_bug_details(baseline_bugdetail_filename, actual_bugdetail_filename):
+            try:
+                # Verify the generated bug details in json format.
+                default_parser = JsonFormattedBugsLogParser(baseline_bugdetail_filename, JsonFormattedBugsLogParser.FileType.BugDetails)
+                test_parser = JsonFormattedBugsLogParser(actual_bugdetail_filename, JsonFormattedBugsLogParser.FileType.BugDetails)
+                self.assertTrue(default_parser._bug_detail['status_code'] == test_parser._bug_detail['status_code'])
+                self.assertTrue(default_parser._bug_detail['checker_name'] == test_parser._bug_detail['checker_name'])
+                self.assertTrue(default_parser._bug_detail['reproducible'] == test_parser._bug_detail['reproducible'])
+                self.assertTrue(default_parser._bug_detail['verb'] == test_parser._bug_detail['verb'])
+                self.assertTrue(default_parser._bug_detail['endpoint'] == test_parser._bug_detail['endpoint'])
+                self.assertTrue(default_parser._bug_detail['status_text'] == test_parser._bug_detail['status_text'])
+                self.assertTrue(len(default_parser._bug_detail['request_sequence']) == len(test_parser._bug_detail['request_sequence']))
+            except TestFailedException:
+                self.fail("Verification of the bug details file failed")
+
+        settings_file_path = os.path.join(Test_File_Directory, "test_one_schema_settings.json")
+        args = Common_Settings + [
+            '--fuzzing_mode', 'directed-smoke-test',
+            '--restler_grammar', f'{os.path.join(Test_File_Directory, "test_grammar_bugs.py")}',
+            '--enable_checkers', '*',
+            '--disable_checkers', 'invalidvalue',
+            '--settings', f'{settings_file_path}'
+        ]
+
+        result = subprocess.run(args, capture_output=True)
+        if result.stderr:
+            self.fail(result.stderr)
+        try:
+            result.check_returncode()
+        except subprocess.CalledProcessError:
+            self.fail(f"Restler returned non-zero exit code: {result.returncode}")
+
+        experiments_dir = self.get_experiments_dir()
+        try:
+            # Verify the generated Bugs.json file.
+            default_parser = JsonFormattedBugsLogParser(os.path.join(Test_File_Directory, "Bug_Buckets_Json", "Bugs_Bucket_AsJson.json"), JsonFormattedBugsLogParser.FileType.Bugs)
+            test_parser = JsonFormattedBugsLogParser(os.path.join(experiments_dir, 'bug_buckets', 'Bugs.json'), JsonFormattedBugsLogParser.FileType.Bugs)
+            self.assertTrue(len(default_parser._bug_list) == len(test_parser._bug_list), "Expected count of bugs is not the same.")
+            counter = 0
+            for expected_bug in default_parser._bug_list:
+                actual_bug = test_parser._bug_list[counter]
+                self.assertTrue(expected_bug == actual_bug, f"Expected bug: {expected_bug} and actual bug: {actual_bug} are different")
+                counter = counter + 1
+        except TestFailedException:
+            self.fail("Verification of the Bugs.json file failed")
+
+        verify_bug_details(os.path.join(Test_File_Directory, "Bug_Buckets_Json", "InvalidDynamicObjectChecker_20x_1.json"),
+                           os.path.join(experiments_dir, 'bug_buckets', 'InvalidDynamicObjectChecker_20x_1.json'))
+
+        verify_bug_details(os.path.join(Test_File_Directory, "Bug_Buckets_Json", "UseAfterFreeChecker_20x_1.json"),
+                           os.path.join(experiments_dir, 'bug_buckets', 'UseAfterFreeChecker_20x_1.json'))
+
     def test_gc_limits(self):
         """ This test checks that RESTler exits after N objects cannot be deleted according
         to the settings. It also tests that async resource deletion is being performed.
@@ -13,6 +13,7 @@ import json
 import types
 import copy
 import itertools
+import datetime
 from collections import OrderedDict
 from shutil import copyfile
 from collections import namedtuple
@@ -51,6 +52,7 @@ GARBAGE_COLLECTOR_LOGS = None
 LOGS_DIR = None
 # Directory for bug bucket logs
 BUG_BUCKETS_DIR = None
+DELIM = "\r\n\r\n"

 # This is the symbol that will appear before any request in a bug bucket
 # log that should be sent as part of the replay.
@@ -69,6 +71,40 @@ LOG_TYPE_PREPROCESSING = 'preprocessing'
 LOG_TYPE_REPLAY = 'replay'
 LOG_TYPE_AUTH = 'auth'

+class Bug():
+    def __init__(self):
+
+        self.filepath = None
+        self.reproducible = False
+        self.checker_name = None
+        self.error_code = None
+
+    def toJson(self):
+        return json.dumps(self, default=lambda o: o.__dict__, indent=4)
+
+class BugDetail():
+    def __init__(self):
+
+        self.status_code = 0
+        self.checker_name = None
+        self.reproducible = False
+        self.verb = None
+        self.endpoint = None
+        self.status_text = None
+        self.request_sequence = []
+
+    def toJson(self):
+        return json.dumps(self, default=lambda o: o.__dict__, indent=4)
+
+class BugRequest():
+    def __init__(self):
+
+        self.producer_timing_delay = 0
+        self.max_async_wait_time = 0
+        self.replay_request = None
+        self.response = None
+
+
 Network_Auth_Log = None

 class NetworkLog(object):
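These classes serialize via `json.dumps(obj, default=lambda o: o.__dict__, ...)`: whenever the encoder meets a non-JSON type it substitutes the object's attribute dict, which recurses through `request_sequence` and its `BugRequest` entries. A quick standalone demonstration of the same pattern:

```python
import json

class Inner:
    def __init__(self):
        self.value = 42

class Outer:
    def __init__(self):
        self.name = "demo"
        self.items = [Inner()]

# default= is called for any object json.dumps cannot encode natively;
# returning o.__dict__ makes nested plain objects serialize recursively.
print(json.dumps(Outer(), default=lambda o: o.__dict__, indent=4))
# {
#     "name": "demo",
#     "items": [
#         {
#             "value": 42
#         }
#     ]
# }
```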
@@ -588,6 +624,7 @@ Bugs_Logged = dict()
 # Dict of bug hashes to be printed to bug_buckets.json
 # {bug_hash: {"file_path": replay_log_relative_path}}
 Bug_Hashes = dict()

 def update_bug_buckets(bug_buckets, bug_request_data, bug_hash, additional_log_str=None):
     """
     @param bug_buckets: Dictionary containing bug bucket information
@@ -604,9 +641,12 @@ def update_bug_buckets(bug_buckets, bug_request_data, bug_hash, additional_log_str=None):

     """
     Header_Len = 80
+    def get_bug_filename(file_extension):
+        return f"{bucket_class}_{len(bug_buckets[bucket_class].keys())}.{file_extension}"
+
     def log_new_bug():
         # Create the new bug log
-        filename = f"{bucket_class}_{len(bug_buckets[bucket_class].keys())}.txt"
+        filename = get_bug_filename("txt")
         filepath = os.path.join(BUG_BUCKETS_DIR, filename)

         with open(filepath, "w+", encoding='utf-8') as bug_file:
@@ -639,6 +679,70 @@ def update_bug_buckets(bug_buckets, bug_request_data, bug_hash, additional_log_str=None):

         return filename

+    def log_new_bug_as_json():
+        # Create the new bug log in json format
+        filename = get_bug_filename("json")
+        filepath = os.path.join(BUG_BUCKETS_DIR, filename)
+        currentsequence = bug_bucket.sequence
+
+        bugDetail = BugDetail()
+        bugDetail.checker_name = bug_bucket.origin
+        bugDetail.reproducible = bug_bucket.reproducible
+        bugDetail.endpoint = currentsequence.last_request._endpoint_no_dynamic_objects
+        bugDetail.verb = currentsequence.last_request.method
+        sequence_request_counter = 0
+        for req in bug_request_data:
+            try:
+                bugRequest = BugRequest()
+                bugRequest.replay_request = req.rendered_data
+                bugRequest.response = req.response
+                bugRequest.producer_timing_delay = req.producer_timing_delay
+                bugRequest.max_async_wait_time = req.max_async_wait_time
+                bugDetail.request_sequence.append(bugRequest)
+                # The last response in the sequence determines the bug's
+                # status code and status text.
+                if sequence_request_counter == len(bug_request_data) - 1:
+                    if len(req.response.split(DELIM)) > 1:
+                        split_responsebody = req.response.split(DELIM)
+                        response_headers = split_responsebody[0].split("\r\n")
+                        response_statusCode_and_statusMessage = response_headers[0].split(" ")
+                        bugDetail.status_code = response_statusCode_and_statusMessage[1]
+                        bugDetail.status_text = ' '.join(response_statusCode_and_statusMessage[2:])
+                    else:
+                        bugDetail.status_code = ''
+                        bugDetail.status_text = ''
+
+                sequence_request_counter = sequence_request_counter + 1
+            except Exception as error:
+                write_to_main(f"Failed to write bug bucket as json log: {error!s}")
+                filename = 'Failed to create bug bucket as json log.'
+
+        jsonString = bugDetail.toJson()
+        with open(filepath, "w+", encoding='utf-8') as bug_file:
+            print(f'{jsonString}', file=bug_file)
+            bug_file.flush()
+            os.fsync(bug_file.fileno())
+
+        return filename
+
+    def write_incremental_bugs(file_path, req_bug):
+        # Create the file with an empty bugs array on first use.
+        if not os.path.exists(file_path):
+            with open(file_path, 'w', encoding='utf-8') as file:
+                file.write("{\"bugs\":[]}")
+
+        req_bug_as_json = req_bug.toJson()
+        # Strip the trailing newline and closing brace; the closing
+        # brackets are re-written below.
+        req_bug_as_json = req_bug_as_json[0:len(req_bug_as_json) - 2]
+
+        with open(file_path, 'r+', encoding='utf-8') as file:
+            file.seek(0, os.SEEK_END)
+            file_size = file.tell()
+            # Position just before the closing ']}'.
+            file.seek(file_size - 2, 0)
+
+            # If entries already exist (the empty skeleton is 11 bytes),
+            # separate the new entry from the previous one with a comma.
+            if file_size > 11:
+                file.write(",")
+            file.write(req_bug_as_json)
+            file.write("}]}")
+
     def add_hash(replay_filename):
         """ Helper that adds bug hash to the bug buckets json file """
         global Bug_Hashes
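`write_incremental_bugs` appends to the `bugs` array without re-reading or re-serializing the whole file: it seeks two bytes back from the end (just before the closing `]}`), writes a comma if entries already exist (file size greater than 11 bytes, the length of the empty `{"bugs":[]}` skeleton), then writes the new entry and re-closes the JSON. A self-contained sketch of the same technique:

```python
import json
import os

EMPTY = '{"bugs":[]}'  # 11 bytes; this is why the code checks file_size > 11

def append_bug(path, bug_dict):
    # Same in-place append technique as write_incremental_bugs above.
    if not os.path.exists(path):
        with open(path, 'w', encoding='utf-8') as f:
            f.write(EMPTY)
    entry = json.dumps(bug_dict, indent=4)[:-2]  # strip the trailing "\n}"
    with open(path, 'r+', encoding='utf-8') as f:
        size = f.seek(0, os.SEEK_END)
        f.seek(size - 2)             # position just before the closing "]}"
        if size > len(EMPTY):
            f.write(",")             # separate from the previous entry
        f.write(entry)
        f.write("}]}")               # re-close the entry, the array, the root

append_bug("Bugs_demo.json", {"filepath": "x_1.json", "reproducible": True})
append_bug("Bugs_demo.json", {"filepath": "x_2.json", "reproducible": False})
with open("Bugs_demo.json") as f:
    print(json.load(f))  # {'bugs': [{...}, {...}]}
```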
@@ -674,6 +778,14 @@ def update_bug_buckets(bug_buckets, bug_request_data, bug_hash, additional_log_str=None):
             if bucket_hash not in Bugs_Logged:
                 try:
                     filename = log_new_bug()
+                    filenameJson = log_new_bug_as_json()
+                    requestBug = Bug()
+                    requestBug.filepath = filenameJson
+                    requestBug.reproducible = bug_bucket.reproducible
+                    requestBug.checker_name = bug_bucket.origin
+                    requestBug.error_code = bug_bucket.error_code
+
+                    write_incremental_bugs(os.path.join(BUG_BUCKETS_DIR, "Bugs.json"), requestBug)
                     Bugs_Logged[bucket_hash] = BugTuple(filename, bug_hash, bug_bucket.reproduce_attempts, bug_bucket.reproduce_successes)
                     add_hash(filename)
                 except Exception as error:
@@ -1136,3 +1248,4 @@ def generate_summary_speccov():
     @rtype : None
     """
     SpecCoverageLog.Instance().generate_summary_speccov()
+