Jiahang Xu 2023-02-07 20:13:51 +08:00 committed by GitHub
Parent ffd51e32c3
Commit bd6bcb268e
No known key found for this signature
GPG key ID: 4AEE18F83AFDEB23
7 changed files with 61 additions and 48 deletions

.github/workflows/integration-test.yml vendored
View File

@ -14,10 +14,10 @@ jobs:
steps:
- uses: actions/checkout@v2
- name: Set up Python 3.6
- name: Set up Python 3.7
uses: actions/setup-python@v2
with:
python-version: 3.6.10
python-version: 3.7
- name: Cache
uses: actions/cache@v2
@ -32,14 +32,16 @@ jobs:
- name: Install dependencies
run: |
pip install tensorflow==1.15.0
pip install tensorflow==2.6.0
pip install onnx==1.9.0
pip install torch==1.9.0
pip install torchvision==0.10.0
pip install onnx-simplifier
- name: Install nn-Meter
run: pip install -U .
run: |
pip install -U .
pip install protobuf==3.20.3
- name: Integration test
run: python tests/integration_test/test_latency_predictor.py
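
Note: this hunk moves the integration test from Python 3.6 / TensorFlow 1.15 to Python 3.7 / TensorFlow 2.6 and pins protobuf to 3.20.3 after installing nn-Meter. Protobuf 4.x changed the generated-code API, which is a common reason for pinning to the 3.20 line. A minimal local sanity check for the pinned combination (hypothetical, not part of this commit) could look like:

# Hypothetical sanity check for the pinned environment (not part of the commit).
import google.protobuf
import tensorflow as tf

# protobuf 4.x breaks packages built against 3.x-generated code, hence the 3.20.3 pin.
assert google.protobuf.__version__.startswith("3.20"), google.protobuf.__version__
assert tf.__version__.startswith("2.6"), tf.__version__
print("TensorFlow", tf.__version__, "with protobuf", google.protobuf.__version__)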

View File

@ -14,10 +14,10 @@ jobs:
steps:
- uses: actions/checkout@v2
- name: Set up Python 3.6
- name: Set up Python 3.7
uses: actions/setup-python@v2
with:
python-version: 3.6.10
python-version: 3.7
- name: Cache
uses: actions/cache@v2

View File

@ -14,10 +14,10 @@ jobs:
steps:
- uses: actions/checkout@v2
- name: Set up Python 3.6
- name: Set up Python 3.7
uses: actions/setup-python@v2
with:
python-version: 3.6.10
python-version: 3.7
- name: Cache
uses: actions/cache@v2
@ -38,7 +38,9 @@ jobs:
pip install onnx-simplifier==0.3.6
- name: Install nn-Meter
run: pip install -U .
run: |
pip install -U .
pip install protobuf==3.20.3
- name: Integration test
run: python tests/integration_test/test_latency_predictor_torch.py --apply-onnx

View File

@ -117,7 +117,8 @@ def dump_profiled_results(results, detail = False, metrics = ["latency"]):
def read_profiled_results(results):
results_copy = copy.deepcopy(results)
for item in results_copy.values():
for model in item.values():
if 'latency' in model:
model['latency'] = Latency(model['latency'])
if isinstance(item, dict):
for model in item.values():
if 'latency' in model:
model['latency'] = Latency(model['latency'])
return results_copy
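
For context, a self-contained sketch of read_profiled_results as it reads after this change (Latency is stubbed here for illustration): entries whose value is not a dict are now skipped instead of failing on .values().

# Sketch of the patched function; `Latency` is a stand-in for nn-Meter's class.
import copy

class Latency:
    def __init__(self, value):
        self.value = value
    def __repr__(self):
        return f"Latency({self.value})"

def read_profiled_results(results):
    results_copy = copy.deepcopy(results)
    for item in results_copy.values():
        if isinstance(item, dict):  # guard added by this commit
            for model in item.values():
                if 'latency' in model:
                    model['latency'] = Latency(model['latency'])
    return results_copy

# A non-dict entry (the "note" string below is purely illustrative) no longer
# raises AttributeError:
profiled = {"conv-bn-relu": {"id_0": {"latency": 12.3}}, "note": "example"}
print(read_profiled_results(profiled))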

View File

@ -180,6 +180,10 @@ def get_data_by_profiled_results(kernel_type, feature_parser, cfgs_path, labs_pa
except:
pass
if len(features) == 0:
raise ValueError(f"Didn't find any data of {kernel_type} for predictor training. There maybe some error about model profiling. Please check the profiling error in " \
"`<workspace>/predictor_build/results/profile_error.log`")
# save features and latency information to `save_path`
if save_path:
import pandas as pd
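
The added check turns a silently empty training set into an explicit error pointing at the profiling log. A hypothetical illustration of how it surfaces to a caller (names and values below are illustrative, not from the repo):

# Hypothetical illustration of how the new ValueError surfaces to a caller.
def build_predictor(kernel_type, features):
    if len(features) == 0:  # the guard added in this hunk
        raise ValueError(f"Didn't find any data of {kernel_type} for predictor training. "
                         "Please check `<workspace>/predictor_build/results/profile_error.log`")
    ...

try:
    build_predictor("conv-bn-relu", features=[])
except ValueError as err:
    print("profiling produced no data:", err)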

View File

@ -52,7 +52,7 @@ class ShapeInference:
"Relu6",
"Selu",
"LeakyReLU",
"Elu"
"Elu",
"Softmax",
"NoOp"
@ -68,7 +68,7 @@ class ShapeInference:
----------
graph : dict
The Graph IR in dict format.
node : dict
node : dict
The node in Graph IR in dict format.
"""
input_nodes = node["inbounds"]
@ -170,7 +170,7 @@ class ShapeInference:
----------
graph : dict
The Graph IR in dict format.
node : dict
node : dict
The node in Graph IR in dict format.
"""
return [], [node["attr"]["attr"]["tensor_shape"]]
@ -185,7 +185,7 @@ class ShapeInference:
----------
graph : dict
The Graph IR in dict format.
node : dict
node : dict
The node in Graph IR in dict format.
"""
return [], [graph[node["inbounds"][0]]["attr"]["output_shape"][0]]
@ -200,7 +200,7 @@ class ShapeInference:
----------
graph : dict
The Graph IR in dict format.
node : dict
node : dict
The node in Graph IR in dict format.
"""
in_shape = [graph[node["inbounds"][0]]["attr"]["output_shape"][0]]
@ -220,7 +220,7 @@ class ShapeInference:
----------
graph : dict
The Graph IR in dict format.
node : dict
node : dict
The node in Graph IR in dict format.
"""
return ShapeInference.Pad_get_shape(graph, node)
@ -235,7 +235,7 @@ class ShapeInference:
----------
graph : dict
The Graph IR in dict format.
node : dict
node : dict
The node in Graph IR in dict format.
"""
logging.info("Propagate through op %s.", node["attr"]["name"])
@ -251,7 +251,7 @@ class ShapeInference:
----------
graph : dict
The Graph IR in dict format.
node : dict
node : dict
The node in Graph IR in dict format.
"""
if len(node["inbounds"]) != 1:
@ -307,7 +307,7 @@ class ShapeInference:
----------
graph : dict
The Graph IR in dict format.
node : dict
node : dict
The node in Graph IR in dict format.
"""
return ShapeInference.Pool_get_shape(graph, node)
@ -321,7 +321,7 @@ class ShapeInference:
----------
graph : dict
The Graph IR in dict format.
node : dict
node : dict
The node in Graph IR in dict format.
"""
return ShapeInference.Pool_get_shape(graph, node)
@ -335,7 +335,7 @@ class ShapeInference:
----------
graph : dict
The Graph IR in dict format.
node : dict
node : dict
The node in Graph IR in dict format.
"""
return ShapeInference.Pool_get_shape(graph, node)
@ -349,7 +349,7 @@ class ShapeInference:
----------
graph : dict
The Graph IR in dict format.
node : dict
node : dict
The node in Graph IR in dict format.
"""
return ShapeInference.Pool_get_shape(graph, node)
@ -363,7 +363,7 @@ class ShapeInference:
----------
graph : dict
The Graph IR in dict format.
node : dict
node : dict
The node in Graph IR in dict format.
"""
return ShapeInference.Pool_get_shape(graph, node)
@ -378,7 +378,7 @@ class ShapeInference:
----------
graph : dict
The Graph IR in dict format.
node : dict
node : dict
The node in Graph IR in dict format.
"""
return [], [node["attr"]["attr"]["shape"]]
@ -392,7 +392,7 @@ class ShapeInference:
----------
graph : dict
The Graph IR in dict format.
node : dict
node : dict
The node in Graph IR in dict format.
"""
weight_node = ph.find_weights_root(graph, node)
@ -475,7 +475,7 @@ class ShapeInference:
----------
graph : dict
The Graph IR in dict format.
node : dict
node : dict
The node in Graph IR in dict format.
"""
weight_node = ph.find_weights_root(graph, node)
@ -559,7 +559,7 @@ class ShapeInference:
----------
graph : dict
The Graph IR in dict format.
node : dict
node : dict
The node in Graph IR in dict format.
"""
input_shape = graph[node["inbounds"][0]]["attr"]["output_shape"][0]
@ -591,7 +591,7 @@ class ShapeInference:
----------
graph : dict
The Graph IR in dict format.
node : dict
node : dict
The node in Graph IR in dict format.
"""
return ShapeInference.Reduce_get_shape(graph, node)
@ -605,7 +605,7 @@ class ShapeInference:
----------
graph : dict
The Graph IR in dict format.
node : dict
node : dict
The node in Graph IR in dict format.
"""
return ShapeInference.Reduce_get_shape(graph, node)
@ -619,7 +619,7 @@ class ShapeInference:
----------
graph : dict
The Graph IR in dict format.
node : dict
node : dict
The node in Graph IR in dict format.
"""
return ShapeInference.Reduce_get_shape(graph, node)
@ -633,7 +633,7 @@ class ShapeInference:
----------
graph : dict
The Graph IR in dict format.
node : dict
node : dict
The node in Graph IR in dict format.
"""
weight_node = ph.find_weights_root(graph, node)
@ -694,7 +694,7 @@ class ShapeInference:
----------
graph : dict
The Graph IR in dict format.
node : dict
node : dict
The node in Graph IR in dict format.
"""
if "shape" in node["attr"]["attr"].keys():
@ -751,7 +751,7 @@ class ShapeInference:
----------
graph : dict
The Graph IR in dict format.
node : dict
node : dict
The node in Graph IR in dict format.
"""
input_shape = []
@ -780,7 +780,7 @@ class ShapeInference:
----------
graph : dict
The Graph IR in dict format.
node : dict
node : dict
The node in Graph IR in dict format.
"""
return ShapeInference.Concat_get_shape(graph, node)
@ -794,7 +794,7 @@ class ShapeInference:
----------
graph : dict
The Graph IR in dict format.
node : dict
node : dict
The node in Graph IR in dict format.
"""
return ShapeInference.Concat_get_shape(graph, node)
@ -809,7 +809,7 @@ class ShapeInference:
----------
graph : dict
The Graph IR in dict format.
node : dict
node : dict
The node in Graph IR in dict format.
"""
for in_node in node["inbounds"]:
@ -839,7 +839,7 @@ class ShapeInference:
----------
graph : dict
The Graph IR in dict format.
node : dict
node : dict
The node in Graph IR in dict format.
"""
for in_node in node["inbounds"]:
@ -875,7 +875,7 @@ class ShapeInference:
----------
graph : dict
The Graph IR in dict format.
node : dict
node : dict
The node in Graph IR in dict format.
"""
seq = ph.get_graph_seq(graph, [node["attr"]["name"]])[:5]
@ -898,7 +898,7 @@ class ShapeInference:
----------
graph : dict
The Graph IR in dict format.
node : dict
node : dict
The node in Graph IR in dict format.
"""
seq = ph.get_graph_seq(graph, [node["attr"]["name"]])[:5]

View File

@ -79,15 +79,19 @@ def integration_test(model_type, url, ppath, output_name = "tests/integration_te
# start testing
for pred_name, pred_version in get_predictors():
try:
since = time.time()
# print(f'nn-meter --{model_type} {ppath} --predictor {pred_name} --predictor-version {pred_version}')
result = subprocess.check_output(['nn-meter', 'predict', f'--{model_type}', f'{ppath}', '--predictor', f'{pred_name}', '--predictor-version', f'{pred_version}'])
runtime = time.time() - since
except NotImplementedError:
logging.error(f"Meets ERROR when checking --{model_type} {ppath} --predictor {pred_name} --predictor-version {pred_version}")
# try:
since = time.time()
# print(f'nn-meter --{model_type} {ppath} --predictor {pred_name} --predictor-version {pred_version}')
result = subprocess.check_output(['nn-meter', 'predict', f'--{model_type}', f'{ppath}', '--predictor', f'{pred_name}', '--predictor-version', f'{pred_version}'])
runtime = time.time() - since
# except NotImplementedError:
# logging.error(f"Meets ERROR when checking --{model_type} {ppath} --predictor {pred_name} --predictor-version {pred_version}")
latency_list = parse_latency_info(result.decode('utf-8'))
print(model_type)
print(latency_list)
print("-----")
os.system("cat tests/integration_test/test_result.txt")
for model, latency in latency_list:
item = f'{model}, {model_type}, {pred_name}, {pred_version}, {round(float(latency), 4)}\n'
# print(item)
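
With the try/except commented out, the NotImplementedError handler no longer swallows failures, and any error from the predict call (for example subprocess.CalledProcessError on a non-zero exit) now propagates and fails the integration test outright. A minimal, generic illustration of that behaviour (not nn-Meter specific, assumes a Unix-like runner where the `false` command exists):

# check_output raises CalledProcessError on a non-zero exit; without a
# surrounding try/except, that exception ends the test script immediately.
import subprocess

try:
    subprocess.check_output(['false'])   # any command that exits non-zero
except subprocess.CalledProcessError as err:
    print("non-zero exit propagates:", err.returncode)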