From 9720b4e626209d8c212e6af0c2182d8f97016739 Mon Sep 17 00:00:00 2001
From: tibuch
Date: Tue, 6 Apr 2021 13:55:15 +0200
Subject: [PATCH] Remove outdated scripts.

---
 scripts/LoDoPaB_config.json |  13 -----
 scripts/SRes_Celeb.py       |  74 -------------------------
 scripts/SRes_MNIST.py       |  74 -------------------------
 scripts/TRec_LoDoPaB.py     | 107 ------------------------------------
 scripts/TRec_MNIST.py       | 106 -----------------------------------
 scripts/sres_config.json    |   9 ---
 scripts/trec_config.json    |  15 -----
 7 files changed, 398 deletions(-)
 delete mode 100644 scripts/LoDoPaB_config.json
 delete mode 100755 scripts/SRes_Celeb.py
 delete mode 100755 scripts/SRes_MNIST.py
 delete mode 100755 scripts/TRec_LoDoPaB.py
 delete mode 100755 scripts/TRec_MNIST.py
 delete mode 100644 scripts/sres_config.json
 delete mode 100644 scripts/trec_config.json

diff --git a/scripts/LoDoPaB_config.json b/scripts/LoDoPaB_config.json
deleted file mode 100644
index e684f12..0000000
--- a/scripts/LoDoPaB_config.json
+++ /dev/null
@@ -1,13 +0,0 @@
-{
-  "batch_size": 2,
-  "num_angles": 40,
-  "n_heads": 8,
-  "d_query": 32,
-  "init_bin_factor": 8,
-  "bin_factor_cd": 5,
-  "alpha": 1.5,
-  "lr": 0.0001,
-  "attention_type": "linear",
-  "n_layers": 4,
-  "max_epochs": 1000
-}
\ No newline at end of file
diff --git a/scripts/SRes_Celeb.py b/scripts/SRes_Celeb.py
deleted file mode 100755
index 2848fed..0000000
--- a/scripts/SRes_Celeb.py
+++ /dev/null
@@ -1,74 +0,0 @@
-import argparse
-import glob
-import json
-from os.path import exists
-
-from pytorch_lightning import Trainer, seed_everything
-from pytorch_lightning.callbacks import ModelCheckpoint
-
-from fit.datamodules.super_res.SResDataModule import CelebASResFourierTargetDataModule
-from fit.modules import SResTransformerModule
-
-
-def main():
-    seed_everything(28122020)
-
-    parser = argparse.ArgumentParser(description="")
-    parser.add_argument("--exp_config")
-
-    args = parser.parse_args()
-
-    with open(args.exp_config) as f:
-        conf = json.load(f)
-
-    dm = CelebASResFourierTargetDataModule(root_dir=conf['root_dir'], batch_size=conf['batch_size'])
-    dm.setup()
-
-    model = SResTransformerModule(d_model=conf['n_heads'] * conf['d_query'],
-                                  img_shape=dm.IMG_SHAPE,
-                                  lr=conf['lr'], weight_decay=0.01, n_layers=conf['n_layers'],
-                                  n_heads=conf['n_heads'], d_query=conf['d_query'], dropout=0.1, attention_dropout=0.1)
-
-    if exists('lightning_logs'):
-        print('Some experiments already exist. 😱 Abort.')
-        return 0
-
-    trainer = Trainer(max_epochs=conf['max_epochs'],
-                      gpus=1,
-                      checkpoint_callback=ModelCheckpoint(
-                          filepath=None,
-                          save_top_k=1,
-                          verbose=False,
-                          save_last=True,
-                          monitor='Train/avg_val_loss',
-                          mode='min',
-                          prefix='best_val_loss_'
-                      ),
-                      deterministic=True)
-
-    trainer.fit(model, datamodule=dm);
-
-    model = SResTransformerModule.load_from_checkpoint('lightning_logs/version_0/checkpoints/best_val_loss_-last.ckpt')
-
-    test_res = trainer.test(model, datamodule=dm)[0]
-    out_res = {
-        "Mean PSNR": test_res["Mean PSNR"].item(),
-        "SEM PSNR": test_res["SEM PSNR"].item()
-    }
-    with open('last_ckpt_results.json', 'w') as f:
-        json.dump(out_res, f)
-
-    best_path = glob.glob('lightning_logs/version_0/checkpoints/best_val_loss_-epoch*')[0]
-    model = SResTransformerModule.load_from_checkpoint(best_path)
-
-    test_res = trainer.test(model, datamodule=dm)[0]
-    out_res = {
-        "Mean PSNR": test_res["Mean PSNR"].item(),
-        "SEM PSNR": test_res["SEM PSNR"].item()
-    }
-    with open('best_ckpt_results.json', 'w') as f:
-        json.dump(out_res, f)
-
-
-if __name__ == "__main__":
-    main()
diff --git a/scripts/SRes_MNIST.py b/scripts/SRes_MNIST.py
deleted file mode 100755
index 11fbec0..0000000
--- a/scripts/SRes_MNIST.py
+++ /dev/null
@@ -1,74 +0,0 @@
-import argparse
-import glob
-import json
-from os.path import exists
-
-from pytorch_lightning import Trainer, seed_everything
-from pytorch_lightning.callbacks import ModelCheckpoint
-
-from fit.datamodules.super_res import MNISTSResFourierTargetDataModule
-from fit.modules import SResTransformerModule
-
-
-def main():
-    seed_everything(28122020)
-
-    parser = argparse.ArgumentParser(description="")
-    parser.add_argument("--exp_config")
-
-    args = parser.parse_args()
-
-    with open(args.exp_config) as f:
-        conf = json.load(f)
-
-    dm = MNISTSResFourierTargetDataModule(root_dir=conf['root_dir'], batch_size=conf['batch_size'])
-    dm.setup()
-
-    model = SResTransformerModule(d_model=conf['n_heads'] * conf['d_query'],
-                                  img_shape=dm.IMG_SHAPE,
-                                  lr=conf['lr'], weight_decay=0.01, n_layers=conf['n_layers'],
-                                  n_heads=conf['n_heads'], d_query=conf['d_query'], dropout=0.1, attention_dropout=0.1)
-
-    if exists('lightning_logs'):
-        print('Some experiments already exist. 😱 Abort.')
-        return 0
-
-    trainer = Trainer(max_epochs=conf['max_epochs'],
-                      gpus=1,
-                      checkpoint_callback=ModelCheckpoint(
-                          filepath=None,
-                          save_top_k=1,
-                          verbose=False,
-                          save_last=True,
-                          monitor='Train/avg_val_loss',
-                          mode='min',
-                          prefix='best_val_loss_'
-                      ),
-                      deterministic=True)
-
-    trainer.fit(model, datamodule=dm);
-
-    model = SResTransformerModule.load_from_checkpoint('lightning_logs/version_0/checkpoints/best_val_loss_-last.ckpt')
-
-    test_res = trainer.test(model, datamodule=dm)[0]
-    out_res = {
-        "Mean PSNR": test_res["Mean PSNR"].item(),
-        "SEM PSNR": test_res["SEM PSNR"].item()
-    }
-    with open('last_ckpt_results.json', 'w') as f:
-        json.dump(out_res, f)
-
-    best_path = glob.glob('lightning_logs/version_0/checkpoints/best_val_loss_-epoch*')[0]
-    model = SResTransformerModule.load_from_checkpoint(best_path)
-
-    test_res = trainer.test(model, datamodule=dm)[0]
-    out_res = {
-        "Mean PSNR": test_res["Mean PSNR"].item(),
-        "SEM PSNR": test_res["SEM PSNR"].item()
-    }
-    with open('best_ckpt_results.json', 'w') as f:
-        json.dump(out_res, f)
-
-
-if __name__ == "__main__":
-    main()
diff --git a/scripts/TRec_LoDoPaB.py b/scripts/TRec_LoDoPaB.py
deleted file mode 100755
index cdd9fa3..0000000
--- a/scripts/TRec_LoDoPaB.py
+++ /dev/null
@@ -1,107 +0,0 @@
-import argparse
-import glob
-import json
-from os.path import exists
-
-from pytorch_lightning import Trainer, seed_everything
-from pytorch_lightning.callbacks import ModelCheckpoint
-
-from fit.datamodules.tomo_rec import MNISTTomoFourierTargetDataModule
-from fit.datamodules.tomo_rec.TRecDataModule import LoDoPaBFourierTargetDataModule
-from fit.modules import TRecTransformerModule
-from fit.utils.tomo_utils import get_proj_coords, get_img_coords
-
-
-def main():
-    seed_everything(22122020)
-
-    parser = argparse.ArgumentParser(description="")
-    parser.add_argument("--exp_config")
-
-    args = parser.parse_args()
-
-    with open(args.exp_config) as f:
-        conf = json.load(f)
-
-    dm = LoDoPaBFourierTargetDataModule(batch_size=conf['batch_size'],
-                                        num_angles=conf['num_angles'])
-    dm.setup()
-
-    det_len = dm.gt_ds.get_ray_trafo().geometry.detector.shape[0]
-
-    proj_xcoords, proj_ycoords, src_flatten = get_proj_coords(angles=dm.gt_ds.get_ray_trafo().geometry.angles,
-                                                              det_len=det_len)
-    target_xcoords, target_ycoords, dst_flatten, order = get_img_coords(img_shape=dm.IMG_SHAPE, det_len=det_len)
-
-    model = TRecTransformerModule(d_model=conf['n_heads'] * conf['d_query'],
-                                  y_coords_proj=proj_ycoords, x_coords_proj=proj_xcoords,
-                                  y_coords_img=target_ycoords, x_coords_img=target_xcoords,
-                                  src_flatten_coords=src_flatten, dst_flatten_coords=dst_flatten,
-                                  dst_order=order,
-                                  angles=dm.gt_ds.get_ray_trafo().geometry.angles, img_shape=dm.IMG_SHAPE,
-                                  detector_len=det_len,
-                                  init_bin_factor=conf['init_bin_factor'], bin_factor_cd=conf['bin_factor_cd'],
-                                  alpha=conf['alpha'],
-                                  lr=conf['lr'], weight_decay=0.01,
-                                  attention_type=conf['attention_type'], n_layers=conf['n_layers'],
-                                  n_heads=conf['n_heads'], d_query=conf['d_query'], dropout=0.1, attention_dropout=0.1)
-
-    if exists('lightning_logs'):
-        print('Some experiments already exist. 😱 Abort.')
-        return 0
-
-    trainer = Trainer(max_epochs=conf['max_epochs'],
-                      gpus=1,
-                      checkpoint_callback=ModelCheckpoint(
-                          filepath=None,
-                          save_top_k=1,
-                          verbose=False,
-                          save_last=True,
-                          monitor='Train/avg_val_mse',
-                          mode='min',
-                          prefix='best_val_loss_'
-                      ),
-                      deterministic=True)
-
-    trainer.fit(model, datamodule=dm);
-
-    model = TRecTransformerModule.load_from_checkpoint('lightning_logs/version_0/checkpoints/best_val_loss_-last.ckpt',
-                                                       y_coords_proj=model.y_coords_proj,
-                                                       x_coords_proj=model.x_coords_proj,
-                                                       y_coords_img=model.y_coords_img,
-                                                       x_coords_img=model.x_coords_img,
-                                                       angles=model.angles,
-                                                       src_flatten_coords=model.src_flatten_coords,
-                                                       dst_flatten_coords=model.dst_flatten_order,
-                                                       dst_order=model.dst_order)
-
-    test_res = trainer.test(model, datamodule=dm)[0]
-    out_res = {
-        "Mean PSNR": test_res["Mean PSNR"].item(),
-        "SEM PSNR": test_res["SEM PSNR"].item()
-    }
-    with open('last_ckpt_results.json', 'w') as f:
-        json.dump(out_res, f)
-
-    best_path = glob.glob('lightning_logs/version_0/checkpoints/best_val_loss_-epoch*')[0]
-    model = TRecTransformerModule.load_from_checkpoint(best_path,
-                                                       y_coords_proj=model.y_coords_proj,
-                                                       x_coords_proj=model.x_coords_proj,
-                                                       y_coords_img=model.y_coords_img,
-                                                       x_coords_img=model.x_coords_img,
-                                                       angles=model.angles,
-                                                       src_flatten_coords=model.src_flatten_coords,
-                                                       dst_flatten_coords=model.dst_flatten_order,
-                                                       dst_order=model.dst_order)
-
-    test_res = trainer.test(model, datamodule=dm)[0]
-    out_res = {
-        "Mean PSNR": test_res["Mean PSNR"].item(),
-        "SEM PSNR": test_res["SEM PSNR"].item()
-    }
-    with open('best_ckpt_results.json', 'w') as f:
-        json.dump(out_res, f)
-
-
-if __name__ == "__main__":
-    main()
diff --git a/scripts/TRec_MNIST.py b/scripts/TRec_MNIST.py
deleted file mode 100755
index 55d0a1a..0000000
--- a/scripts/TRec_MNIST.py
+++ /dev/null
@@ -1,106 +0,0 @@
-import argparse
-import glob
-import json
-from os.path import exists
-
-from pytorch_lightning import Trainer, seed_everything
-from pytorch_lightning.callbacks import ModelCheckpoint
-
-from fit.datamodules.tomo_rec import MNISTTomoFourierTargetDataModule
-from fit.modules import TRecTransformerModule
-from fit.utils.tomo_utils import get_proj_coords, get_img_coords
-
-
-def main():
-    seed_everything(28122020)
-
-    parser = argparse.ArgumentParser(description="")
-    parser.add_argument("--exp_config")
-
-    args = parser.parse_args()
-
-    with open(args.exp_config) as f:
-        conf = json.load(f)
-
-    dm = MNISTTomoFourierTargetDataModule(root_dir=conf['root_dir'], batch_size=conf['batch_size'],
-                                          num_angles=conf['num_angles'], inner_circle=conf['inner_circle'])
-    dm.setup()
-
-    det_len = dm.gt_ds.get_ray_trafo().geometry.detector.shape[0]
-
-    proj_xcoords, proj_ycoords, src_flatten = get_proj_coords(angles=dm.gt_ds.get_ray_trafo().geometry.angles,
-                                                              det_len=det_len)
-    target_xcoords, target_ycoords, dst_flatten, order = get_img_coords(img_shape=dm.IMG_SHAPE, det_len=det_len)
-
-    model = TRecTransformerModule(d_model=conf['n_heads'] * conf['d_query'],
-                                  y_coords_proj=proj_ycoords, x_coords_proj=proj_xcoords,
-                                  y_coords_img=target_ycoords, x_coords_img=target_xcoords,
-                                  src_flatten_coords=src_flatten, dst_flatten_coords=dst_flatten,
-                                  dst_order=order,
-                                  angles=dm.gt_ds.get_ray_trafo().geometry.angles, img_shape=dm.IMG_SHAPE,
-                                  detector_len=det_len,
-                                  init_bin_factor=conf['init_bin_factor'], bin_factor_cd=conf['bin_factor_cd'],
-                                  alpha=conf['alpha'],
-                                  lr=conf['lr'], weight_decay=0.01,
-                                  attention_type=conf['attention_type'], n_layers=conf['n_layers'],
-                                  n_heads=conf['n_heads'], d_query=conf['d_query'], dropout=0.1, attention_dropout=0.1)
-
-    if exists('lightning_logs'):
-        print('Some experiments already exist. Abort.')
-        return 0
-
-    trainer = Trainer(max_epochs=conf['max_epochs'],
-                      gpus=1,
-                      checkpoint_callback=ModelCheckpoint(
-                          filepath=None,
-                          save_top_k=1,
-                          verbose=False,
-                          save_last=True,
-                          monitor='Train/avg_val_mse',
-                          mode='min',
-                          prefix='best_val_loss_'
-                      ),
-                      deterministic=True)
-
-    trainer.fit(model, datamodule=dm);
-
-    model = TRecTransformerModule.load_from_checkpoint('lightning_logs/version_0/checkpoints/best_val_loss_-last.ckpt',
-                                                       y_coords_proj=model.y_coords_proj,
-                                                       x_coords_proj=model.x_coords_proj,
-                                                       y_coords_img=model.y_coords_img,
-                                                       x_coords_img=model.x_coords_img,
-                                                       angles=model.angles,
-                                                       src_flatten_coords=model.src_flatten_coords,
-                                                       dst_flatten_coords=model.dst_flatten_order,
-                                                       dst_order=model.dst_order)
-
-    test_res = trainer.test(model, datamodule=dm)[0]
-    out_res = {
-        "Mean PSNR": test_res["Mean PSNR"].item(),
-        "SEM PSNR": test_res["SEM PSNR"].item()
-    }
-    with open('last_ckpt_results.json', 'w') as f:
-        json.dump(out_res, f)
-
-    best_path = glob.glob('lightning_logs/version_0/checkpoints/best_val_loss_-epoch*')[0]
-    model = TRecTransformerModule.load_from_checkpoint(best_path,
-                                                       y_coords_proj=model.y_coords_proj,
-                                                       x_coords_proj=model.x_coords_proj,
-                                                       y_coords_img=model.y_coords_img,
-                                                       x_coords_img=model.x_coords_img,
-                                                       angles=model.angles,
-                                                       src_flatten_coords=model.src_flatten_coords,
-                                                       dst_flatten_coords=model.dst_flatten_order,
-                                                       dst_order=model.dst_order)
-
-    test_res = trainer.test(model, datamodule=dm)[0]
-    out_res = {
-        "Mean PSNR": test_res["Mean PSNR"].item(),
-        "SEM PSNR": test_res["SEM PSNR"].item()
-    }
-    with open('best_ckpt_results.json', 'w') as f:
-        json.dump(out_res, f)
-
-
-if __name__ == "__main__":
-    main()
diff --git a/scripts/sres_config.json b/scripts/sres_config.json
deleted file mode 100644
index fdb1c40..0000000
--- a/scripts/sres_config.json
+++ /dev/null
@@ -1,9 +0,0 @@
-{
-  "root_dir": "/home/tibuch/Data/mnist",
-  "batch_size": 4,
-  "n_heads": 8,
-  "d_query": 16,
-  "lr": 0.0001,
-  "n_layers": 8,
-  "max_epochs": 100
-}
\ No newline at end of file
diff --git a/scripts/trec_config.json b/scripts/trec_config.json
deleted file mode 100644
index b92276f..0000000
--- a/scripts/trec_config.json
+++ /dev/null
@@ -1,15 +0,0 @@
-{
-  "root_dir": "/home/tibuch/Data/mnist",
-  "batch_size": 4,
-  "num_angles": 15,
-  "inner_circle": true,
-  "n_heads": 8,
-  "d_query": 16,
-  "init_bin_factor": 3,
-  "bin_factor_cd": 5,
-  "alpha": 1.5,
-  "lr": 0.0001,
-  "attention_type": "linear",
-  "n_layers": 8,
-  "max_epochs": 100
-}
\ No newline at end of file