forked from karpathy/nanoGPT
-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathcallback.py
54 lines (43 loc) · 1.52 KB
/
callback.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
import keras as K
# wandb is an optional dependency: training runs without it as long as
# config.do_wandb is False (only WandbCallback touches the module).
try:
    import wandb
except ImportError:
    # Narrow except: a bare `except:` would also hide KeyboardInterrupt or a
    # genuinely broken wandb install. Leave a sentinel so later references
    # fail loudly only when wandb is actually used.
    wandb = None
class AddLRCallback(K.callbacks.Callback):
    """Inject the optimizer's current learning rate into the batch logs.

    Downstream consumers (e.g. a wandb logger) can then pick up ``lr``
    alongside the regular training metrics.
    """

    def __init__(self, optimizer):
        # Fix: call the base initializer so Keras-managed attributes
        # (self.model, self.params, ...) are set up.
        super().__init__()
        self.optimizer = optimizer

    def on_batch_end(self, batch, logs=None):
        # Fix: Keras may invoke hooks with logs=None; the original
        # unconditionally indexed into it and would raise TypeError.
        if logs is not None:
            logs['lr'] = self.optimizer.learning_rate
class EvaluateCallback(K.callbacks.Callback):
    """Run a validation pass every ``config.do_eval_every`` training batches.

    Writes ``val_loss`` / ``val_acc`` into the batch logs so other callbacks
    (progress bars, wandb) can report them mid-epoch.
    """

    def __init__(self, config, val_dataset, n_step_val):
        # Fix: call the base initializer so self.model is wired up by Keras.
        super().__init__()
        self.config = config          # must expose do_eval_every and batch_size
        self.val_dataset = val_dataset
        self.n_step_val = n_step_val  # number of evaluation steps per pass

    def on_batch_end(self, batch, logs=None):
        # Guard clause: only evaluate on every do_eval_every-th batch
        # (this includes batch 0, matching the original behavior).
        if batch % self.config.do_eval_every != 0:
            return
        # NOTE(review): assumes model.evaluate returns exactly (loss, accuracy)
        # — i.e. accuracy is the model's sole compiled metric; confirm.
        loss, accuracy = self.model.evaluate(
            self.val_dataset,
            batch_size=self.config.batch_size,
            steps=self.n_step_val,
            verbose=0
        )
        # Fix: Keras may pass logs=None; original indexed unconditionally.
        if logs is not None:
            logs["val_loss"] = loss
            logs["val_acc"] = accuracy
class WandbCallback(K.callbacks.Callback):
    """Forward batch/validation metrics to Weights & Biases.

    Keras resets the batch counter each epoch, so a running ``offset``
    (incremented by ``n_step_epoch`` per epoch) converts the per-epoch
    batch index into a monotonically increasing global wandb step.
    """

    def __init__(self, n_step_epoch):
        # Fix: call the base initializer required by Keras callbacks.
        super().__init__()
        self.offset = 0               # global step at the start of the current epoch
        self.batch = 0                # last batch index seen within the epoch
        self.n_step_epoch = n_step_epoch

    def on_batch_end(self, batch, logs=None):
        self.batch = batch
        # Fix: logs may be None; wandb.log requires a dict.
        wandb.log(logs or {}, step=self.offset + batch)

    def on_epoch_end(self, epoch, logs=None):
        # Advance the global-step base for the next epoch.
        self.offset += self.n_step_epoch

    def on_test_end(self, logs=None):
        # Fix: original crashed with TypeError when logs was None.
        if logs is None:
            return
        wandb.log(
            {
                "val_loss": logs['loss'],
                "val_acc": logs['acc'],
            },
            # Pin the eval metrics to the last training step seen so the
            # wandb step axis stays monotonic.
            step=self.offset + self.batch
        )