# kde.py (forked from artemyk/ibsgd)
import keras
import keras.backend as K
import numpy as np


def Kget_dists(X):
    """Keras code to compute the pairwise (squared Euclidean) distance matrix
    for a set of vectors specified by the matrix X.
    """
    x2 = K.expand_dims(K.sum(K.square(X), axis=1), 1)
    dists = x2 + K.transpose(x2) - 2 * K.dot(X, K.transpose(X))
    return dists


def get_shape(x):
    # Return the dimensionality and the number of rows of x as backend floats.
    dims = K.cast(K.shape(x)[1], K.floatx())
    N = K.cast(K.shape(x)[0], K.floatx())
    return dims, N


def entropy_estimator_kl(x, var):
    # KL-based upper bound on the entropy of a mixture of Gaussians with covariance matrix var * I.
    # See Kolchinsky and Tracey, Estimating Mixture Entropy with Pairwise Distances, Entropy, 2017, Section 4,
    # and Kolchinsky and Tracey, Nonlinear Information Bottleneck, 2017, Eq. 10.
    dims, N = get_shape(x)
    dists = Kget_dists(x)
    dists2 = dists / (2 * var)
    normconst = (dims / 2.0) * K.log(2 * np.pi * var)
    lprobs = K.logsumexp(-dists2, axis=1) - K.log(N) - normconst
    h = -K.mean(lprobs)
    return dims / 2 + h


def entropy_estimator_bd(x, var):
    # Bhattacharyya-based lower bound on the entropy of a mixture of Gaussians with covariance matrix var * I.
    # See Kolchinsky and Tracey, Estimating Mixture Entropy with Pairwise Distances, Entropy, 2017, Section 4.
    dims, N = get_shape(x)
    val = entropy_estimator_kl(x, 4 * var)
    return val + np.log(0.25) * dims / 2
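

# A minimal sanity-check sketch, not part of the original module: entropy_estimator_kl
# is an upper bound and entropy_estimator_bd a lower bound on the same mixture entropy
# (Kolchinsky & Tracey, 2017), so the KL estimate should not fall below the BD estimate.
# The helper name, the default noise variance, and the use of K.constant/K.eval here are
# illustrative assumptions, not part of the original code.
def _check_entropy_bounds(activations, noise_var=0.1):
    upper = K.eval(entropy_estimator_kl(K.constant(activations), noise_var))
    lower = K.eval(entropy_estimator_bd(K.constant(activations), noise_var))
    assert lower <= upper + 1e-6, "BD lower bound should not exceed KL upper bound"
    return lower, upper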


def kde_condentropy(output, var):
    # Return the entropy of a multivariate Gaussian with covariance matrix var * I, in nats.
    dims = output.shape[1]
    return (dims / 2.0) * (np.log(2 * np.pi * var) + 1)
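

# A usage sketch under stated assumptions, not part of the original module: in
# information-bottleneck analyses these estimators are typically combined to bound the
# mutual information I(X; T) between the input X and a hidden layer T whose activations
# are treated as a mixture of Gaussians with noise variance noise_var. The variable
# names, the 0.001 noise variance, and the random data below are illustrative only.
if __name__ == '__main__':
    np.random.seed(0)
    layer_activity = np.random.randn(64, 20).astype('float32')  # 64 samples, 20 hidden units
    noise_var = 0.001

    h_upper = K.eval(entropy_estimator_kl(K.constant(layer_activity), noise_var))  # upper bound on H(T)
    h_lower = K.eval(entropy_estimator_bd(K.constant(layer_activity), noise_var))  # lower bound on H(T)
    h_cond = kde_condentropy(layer_activity, noise_var)                            # H(T|X) for Gaussian noise

    # Upper and lower estimates of I(X; T) = H(T) - H(T|X), in nats.
    print('I(X;T) upper bound: %.3f nats' % (h_upper - h_cond))
    print('I(X;T) lower bound: %.3f nats' % (h_lower - h_cond))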