This commit is contained in:
PatrickBue 2019-06-19 20:42:22 +00:00 committed by GitHub
Parent d2b9a1eb02
Commit 38821f4437
2 changed files: 1 addition and 56 deletions

View file

@@ -1,20 +1,7 @@
 # Copyright (c) Microsoft Corporation. All rights reserved.
 # Licensed under the MIT License.
-import torch.cuda as cuda
-from utils_cv.common.gpu import gpu_info, which_processor
-def test_gpu_info():
-    gpus = gpu_info()
-    # Check if torch.cuda returns the same number of gpus
-    assert cuda.device_count() == len(gpus)
-    for i in range(len(gpus)):
-        # Check if torch.cuda returns the same device name
-        assert gpus[i]["device_name"] == cuda.get_device_name(i)
-        # Total memory should be greater than used memory
-        assert int(gpus[i]["total_memory"]) > int(gpus[i]["used_memory"])
+from utils_cv.common.gpu import which_processor
 def test_which_processor():
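The hunk ends at the header of the surviving test, so its body is not shown in the diff. A minimal sketch of what such a smoke test could look like (the body below is an assumption for illustration, not the repository's actual test):

    from utils_cv.common.gpu import which_processor

    def test_which_processor():
        # which_processor() only prints which device fastai/torch uses,
        # so a smoke test can simply assert the call returns without raising.
        which_processor()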

View file

@@ -1,55 +1,13 @@
 # Copyright (c) Microsoft Corporation. All rights reserved.
 # Licensed under the MIT License.
-import subprocess
-import warnings
 from torch.cuda import current_device, get_device_name, is_available
-def gpu_info():
-    """Get information about GPUs.
-
-    Returns:
-        list: List of gpu information dictionaries {device_name, total_memory, used_memory}.
-            Returns an empty list if there is no cuda device available.
-    """
-    gpus = []
-    try:
-        output = subprocess.check_output(
-            [
-                "nvidia-smi",
-                "--query-gpu=name,memory.total,memory.used",
-                "--format=csv,nounits,noheader",
-            ],
-            encoding="utf-8",
-        )
-        for o in output.split("\n"):
-            info = o.split(",")
-            if len(info) == 3:
-                gpu = dict()
-                gpu["device_name"] = info[0].strip()
-                gpu["total_memory"] = info[1].strip()
-                gpu["used_memory"] = info[2].strip()
-                gpus.append(gpu)
-    except:
-        pass
-    return gpus
 def which_processor():
     """Check if fastai/torch is using GPU or CPU"""
     if is_available():
         device_nr = current_device()
         print(f"Fast.ai (Torch) is using GPU: {get_device_name(device_nr)}")
-        info = gpu_info()
-        if len(info) > device_nr:
-            gpu = info[device_nr]
-            free = int(gpu["total_memory"]) - int(gpu["used_memory"])
-            print(
-                f"Available / Total memory = {free} / {gpu['total_memory']} (MiB)"
-            )
     else:
         print("Cuda is not available. Fast.ai/Torch is using CPU")