-
Notifications
You must be signed in to change notification settings - Fork 6
/
Copy patheval.py
58 lines (47 loc) · 2.03 KB
/
eval.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
import os
import torch
import parser
import logging
from os.path import join
from datetime import datetime
import test
import util
import commons
import datasets_ws
import network
import warnings
# Evaluation script: builds a CricaVPRNet, optionally resumes it from a
# checkpoint and fits PCA, then reports recall metrics on one test dataset.
warnings.filterwarnings("ignore")
# Pin the process to GPU 0; must be set before any CUDA context is created.
os.environ["CUDA_VISIBLE_DEVICES"] = "0"
######################################### SETUP #########################################
# NOTE(review): `parser` here is presumably the project-local parser.py
# (it shadows the old stdlib `parser` module) — verify it is on sys.path.
args = parser.parse_arguments()
start_time = datetime.now()
# Each run writes into its own timestamped directory under test/<save_dir>/.
args.save_dir = join("test", args.save_dir, start_time.strftime('%Y-%m-%d_%H-%M-%S'))
commons.setup_logging(args.save_dir)
commons.make_deterministic(args.seed)
# Output descriptor dimensionality, 14*768 = 10752.
# Assumes 14 tokens of 768-dim ViT features — TODO confirm against network.CricaVPRNet.
args.features_dim = 14*768
if args.eval_dataset_name.startswith("pitts"): # halve infer_batch_size for pitts30k/pitts250k (e.g. 16 -> 8)
    args.infer_batch_size = args.infer_batch_size // 2
logging.info(f"Arguments: {args}")
logging.info(f"The outputs are being saved in {args.save_dir}")
######################################### MODEL #########################################
model = network.CricaVPRNet()
model = model.to(args.device)
# Optionally restore weights; skipped when --resume is not given.
if args.resume is not None:
    logging.info(f"Resuming model from {args.resume}")
    model = util.resume_model(args, model)
# Enable DataParallel after loading checkpoint, otherwise doing it before
# would append "module." in front of the keys of the state dict triggering errors
model = torch.nn.DataParallel(model)
# Optionally reduce descriptor dimensionality with PCA fitted on a separate
# dataset folder; `pca` stays None when --pca_dim is not given.
if args.pca_dim is None:
    pca = None
else:
    full_features_dim = args.features_dim
    args.features_dim = args.pca_dim
    pca = util.compute_pca(args, model, args.pca_dataset_folder, full_features_dim)
######################################### DATASETS #########################################
test_ds = datasets_ws.BaseDataset(args, args.eval_datasets_folder, args.eval_dataset_name, "test")
logging.info(f"Test set: {test_ds}")
######################################### TEST on TEST SET #########################################
recalls, recalls_str = test.test(args, test_ds, model, args.test_method, pca)
logging.info(f"Recalls on {test_ds}: {recalls_str}")
# [:-7] strips the microseconds suffix from the timedelta string.
logging.info(f"Finished in {str(datetime.now() - start_time)[:-7]}")