better printing for quick_tests.sh

This commit is contained in:
hobogalaxy 2021-03-05 16:28:39 +01:00
Родитель 0d9b7383db
Коммит 1008463fc0
2 изменённых файлов: 24 добавлений и 16 удалений

Просмотреть файл

@@ -125,8 +125,7 @@ def log_hparams_to_all_loggers(
)
# send hparams to all loggers
for lg in logger:
lg.log_hyperparams(hparams)
trainer.logger.log_hyperparams(hparams)
def finish(

Просмотреть файл

@@ -4,32 +4,41 @@
# conda activate testenv
export PYTHONWARNINGS="ignore"
# Print a centered, yellow banner spanning the terminal width, e.g.
#   ================ TEST 1 =================
# Arguments: $1 - test name to display in the banner
# Outputs:   one ANSI-colored banner line to stdout
print_test_name() {
  local termwidth padding
  # Fall back to 80 columns when stdout is not a terminal (CI logs, pipes),
  # where `tput cols` fails and would leave the width empty.
  termwidth="$(tput cols 2>/dev/null || echo 80)"
  # '={1..500}' expands to '=1 =2 ... =500'; '%0.1s' prints the first char
  # of each argument, producing a 500-char run of '=' to trim from.
  padding="$(printf '%0.1s' ={1..500})"
  # Left pad is one char shorter than right pad so odd widths still fill the line.
  printf '\e[33m%*.*s %s %*.*s\n\e[0m' 0 "$(((termwidth-2-${#1})/2))" "$padding" "$1" 0 "$(((termwidth-1-${#1})/2))" "$padding"
}
# Test for CPU
echo TEST 1
python train.py trainer.gpus=0 trainer.max_epochs=1
print_test_name "TEST 1"
python train.py trainer.gpus=0 trainer.max_epochs=1 print_config=false
# Test for GPU
echo TEST 2
python train.py trainer.gpus=1 trainer.max_epochs=1
print_test_name "TEST 2"
python train.py trainer.gpus=1 trainer.max_epochs=1 print_config=false
# Test multiple workers and cuda pinned memory
echo TEST 3
python train.py trainer.gpus=1 trainer.max_epochs=2 \
print_test_name "TEST 3"
python train.py trainer.gpus=1 trainer.max_epochs=2 print_config=false \
datamodule.num_workers=4 datamodule.pin_memory=True
# Test all experiment configs
echo TEST 4
python train.py -m '+experiment=glob(*)' trainer.gpus=1 trainer.max_epochs=3
print_test_name "TEST 4"
python train.py -m '+experiment=glob(*)' trainer.gpus=1 trainer.max_epochs=3 print_config=false
# Test with debug trainer
echo TEST 5
python train.py trainer=debug_trainer
print_test_name "TEST 5"
python train.py trainer=debug_trainer print_config=false
# Overfit to 10 batches
echo TEST 6
python train.py trainer.min_epochs=20 trainer.max_epochs=20 +trainer.overfit_batches=10
print_test_name "TEST 6"
python train.py trainer.min_epochs=20 trainer.max_epochs=20 +trainer.overfit_batches=10 print_config=false
# Test default hydra sweep over hyperparameters (runs 4 different combinations for 1 epoch)
echo TEST 7
python train.py -m datamodule.batch_size=32,64 model.lr=0.001,0.003 \
print_test_name "TEST 7"
python train.py -m datamodule.batch_size=32,64 model.lr=0.001,0.003 print_config=false \
trainer.gpus=1 trainer.max_epochs=1