Added no freezing in stage 2 experiments.

This commit is contained in:
Debadeepta Dey 2021-08-02 09:42:21 -07:00 committed by Gustavo Rosa
Parent c3bdb2125a
Commit 75a0aec42f
4 changed files with 6 additions and 5 deletions

.vscode/launch.json (vendored)
View file

@@ -236,7 +236,7 @@
"request": "launch",
"program": "${cwd}/scripts/main.py",
"console": "integratedTerminal",
"args": ["--full", "--algos", "proxynas_natsbench_space", "--datasets", "cifar10"],
"args": ["--full", "--algos", "proxynas_natsbench_space", "--datasets", "cifar100"],
"justMyCode": true
},
{
@@ -718,7 +718,7 @@
"request": "launch",
"program": "${cwd}/scripts/reports/fear_analysis/analysis_freeze_natsbench_space_new.py",
"console": "integratedTerminal",
"args": ["--results-dir", "F:\\archaiphilly\\phillytools\\ft_fb1024_ftlr0.1_fte10_ct256_ftt0.6_scu",
"args": ["--results-dir", "F:\\archaiphilly\\phillytools\\ft_fb1024_ftlr0.1_fte5_ct256_ftt0.6_nofreeze",
"--out-dir", "F:\\archai_experiment_reports"]
},
{
@@ -772,7 +772,7 @@
"request": "launch",
"program": "${cwd}/scripts/reports/fear_plots/cross_exp_plots.py",
"console": "integratedTerminal",
"args": ["--dataset", "natsbench_sss_cifar10",
"args": ["--dataset", "natsbench_cifar10",
"--conf-location", "scripts/reports/fear_plots/cross_exp_conf.yaml"]
},
{

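The launch configurations above simply forward command-line flags to the launched scripts. As a rough illustration only (this is not the repository's actual parser; the help strings and the assumption that --algos and --datasets each take a single string are mine), the flags passed by the first configuration could be consumed like this:

import argparse

# Illustrative sketch of flag handling; scripts/main.py's real parser may differ.
parser = argparse.ArgumentParser(description='Launch a NAS experiment')
parser.add_argument('--full', action='store_true', help='run the full (non-toy) experiment')
parser.add_argument('--algos', type=str, help='algorithm name, e.g. proxynas_natsbench_space')
parser.add_argument('--datasets', type=str, help='dataset name, e.g. cifar100')
args = parser.parse_args()
print(args.full, args.algos, args.datasets)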
View file

@@ -19,7 +19,7 @@ nas:
naswotrain:
train_batch: 256 # batch size for computing trainingless score
freeze_loader:
- train_batch: 1024 # batch size for freeze training. 2048 works reliably on V100 with cell13 onwards unfrozen
+ train_batch: 512 # batch size for freeze training. 2048 works reliably on V100 with cell13 onwards unfrozen
trainer:
plotsdir: ''
use_val: False

View file

@@ -80,7 +80,7 @@ def main():
# a = parse_a_job(job_dir)
# parallel parsing of yaml logs
- num_workers = 12
+ num_workers = 8
with Pool(num_workers) as p:
a = p.map(parse_a_job, job_dirs)

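The change above shrinks the worker pool used to parse per-job yaml logs from 12 to 8 processes. A minimal, self-contained sketch of that pattern, where parse_a_job and job_dirs are stand-ins rather than the repository's implementation:

from multiprocessing import Pool

def parse_a_job(job_dir: str):
    # Stand-in for the real parser, which reads the yaml log found in job_dir.
    return job_dir, job_dir.count('_')

if __name__ == '__main__':
    job_dirs = [f'job_{i}' for i in range(20)]  # illustrative job directories
    num_workers = 8  # fewer workers lowers peak memory at the cost of wall-clock time
    with Pool(num_workers) as p:
        results = p.map(parse_a_job, job_dirs)
    print(len(results))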
View file

@@ -59,6 +59,7 @@ natsbench_cifar10:
# ft_fb512_ftlr0.1_fte10_ct256_ftt0.6_c9: 'fear stage 2: batch 512, lr 0.1, <br> epochs 10, stage 1: batch 256, thresh 0.6, frozen till: cell 9'
# ft_fb256_ftlr0.1_fte5_ct256_ftt0.6_c9: 'fear stage 2: batch 256, lr 0.1, <br> epochs 5, stage 1: batch 256, thresh 0.6, frozen till: cell 9'
# ft_fb256_ftlr0.1_fte10_ct256_ftt0.6_c9: 'fear stage 2: batch 256, lr 0.1, <br> epochs 10, stage 1: batch 256, thresh 0.6, frozen till: cell 9'
+ ft_fb1024_ftlr0.1_fte5_ct256_ftt0.6_nofreeze: 'ft_fb1024_ftlr0.1_fte5_ct256_ftt0.6_nofreeze'
zero_cost:
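The added entry maps the experiment's results-folder name (ft_fb1024_ftlr0.1_fte5_ct256_ftt0.6_nofreeze) to the label used for it in the cross-experiment plots. A sketch of how such a mapping could be read, assuming plain yaml with a top-level dataset key whose children map experiment folder names to labels (the actual schema of cross_exp_conf.yaml may nest things differently):

import yaml  # PyYAML

with open('scripts/reports/fear_plots/cross_exp_conf.yaml', 'r') as f:
    conf = yaml.safe_load(f)

# Assumed structure: dataset key -> {experiment folder name: plot label, ...}
labels = conf['natsbench_cifar10']
print(labels.get('ft_fb1024_ftlr0.1_fte5_ct256_ftt0.6_nofreeze'))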