From 75a0aec42f8b3295070c94f323cfced0f78b38d2 Mon Sep 17 00:00:00 2001
From: Debadeepta Dey
Date: Mon, 2 Aug 2021 09:42:21 -0700
Subject: [PATCH] Added no freezing in stage 2 experiments.

---
 .vscode/launch.json                                        | 6 +++---
 confs/algos/proxynas_natsbench_space.yaml                  | 2 +-
 .../fear_analysis/analysis_freeze_natsbench_space_new.py   | 2 +-
 scripts/reports/fear_plots/cross_exp_conf.yaml             | 1 +
 4 files changed, 6 insertions(+), 5 deletions(-)

diff --git a/.vscode/launch.json b/.vscode/launch.json
index fb6e5334..c3b6d114 100644
--- a/.vscode/launch.json
+++ b/.vscode/launch.json
@@ -236,7 +236,7 @@
             "request": "launch",
             "program": "${cwd}/scripts/main.py",
             "console": "integratedTerminal",
-            "args": ["--full", "--algos", "proxynas_natsbench_space", "--datasets", "cifar10"],
+            "args": ["--full", "--algos", "proxynas_natsbench_space", "--datasets", "cifar100"],
             "justMyCode": true
         },
         {
@@ -718,7 +718,7 @@
             "request": "launch",
             "program": "${cwd}/scripts/reports/fear_analysis/analysis_freeze_natsbench_space_new.py",
             "console": "integratedTerminal",
-            "args": ["--results-dir", "F:\\archaiphilly\\phillytools\\ft_fb1024_ftlr0.1_fte10_ct256_ftt0.6_scu",
+            "args": ["--results-dir", "F:\\archaiphilly\\phillytools\\ft_fb1024_ftlr0.1_fte5_ct256_ftt0.6_nofreeze",
                      "--out-dir", "F:\\archai_experiment_reports"]
         },
         {
@@ -772,7 +772,7 @@
             "request": "launch",
             "program": "${cwd}/scripts/reports/fear_plots/cross_exp_plots.py",
             "console": "integratedTerminal",
-            "args": ["--dataset", "natsbench_sss_cifar10",
+            "args": ["--dataset", "natsbench_cifar10",
                      "--conf-location", "scripts/reports/fear_plots/cross_exp_conf.yaml"]
         },
         {
diff --git a/confs/algos/proxynas_natsbench_space.yaml b/confs/algos/proxynas_natsbench_space.yaml
index feb69bc6..5a6751f5 100644
--- a/confs/algos/proxynas_natsbench_space.yaml
+++ b/confs/algos/proxynas_natsbench_space.yaml
@@ -19,7 +19,7 @@ nas:
     naswotrain:
       train_batch: 256 # batch size for computing trainingless score
     freeze_loader:
-      train_batch: 1024 # batch size for freeze training. 2048 works reliably on V100 with cell13 onwards unfrozen
+      train_batch: 512 # batch size for freeze training. 2048 works reliably on V100 with cell13 onwards unfrozen
     trainer:
       plotsdir: ''
       use_val: False
diff --git a/scripts/reports/fear_analysis/analysis_freeze_natsbench_space_new.py b/scripts/reports/fear_analysis/analysis_freeze_natsbench_space_new.py
index 301ee632..cbf08123 100644
--- a/scripts/reports/fear_analysis/analysis_freeze_natsbench_space_new.py
+++ b/scripts/reports/fear_analysis/analysis_freeze_natsbench_space_new.py
@@ -80,7 +80,7 @@ def main():
     #     a = parse_a_job(job_dir)
 
     # parallel parsing of yaml logs
-    num_workers = 12
+    num_workers = 8
     with Pool(num_workers) as p:
         a = p.map(parse_a_job, job_dirs)
 
diff --git a/scripts/reports/fear_plots/cross_exp_conf.yaml b/scripts/reports/fear_plots/cross_exp_conf.yaml
index 42a16979..0bec4705 100644
--- a/scripts/reports/fear_plots/cross_exp_conf.yaml
+++ b/scripts/reports/fear_plots/cross_exp_conf.yaml
@@ -59,6 +59,7 @@ natsbench_cifar10:
   # ft_fb512_ftlr0.1_fte10_ct256_ftt0.6_c9: 'fear stage 2: batch 512, lr 0.1, epochs 10, stage 1: batch 256, thresh 0.6, frozen till: cell 9'
   # ft_fb256_ftlr0.1_fte5_ct256_ftt0.6_c9: 'fear stage 2: batch 256, lr 0.1, epochs 5, stage 1: batch 256, thresh 0.6, frozen till: cell 9'
   # ft_fb256_ftlr0.1_fte10_ct256_ftt0.6_c9: 'fear stage 2: batch 256, lr 0.1, epochs 10, stage 1: batch 256, thresh 0.6, frozen till: cell 9'
+  ft_fb1024_ftlr0.1_fte5_ct256_ftt0.6_nofreeze: 'ft_fb1024_ftlr0.1_fte5_ct256_ftt0.6_nofreeze'
   zero_cost: