
Commit f711528

refactored for compatibility with new gpflow!
1 parent a0f3bc1 commit f711528

14 files changed: +175 −168 lines

.travis.yml

Lines changed: 1 addition & 1 deletion
@@ -11,7 +11,7 @@ before_install:

 install:
 - pip install numpy scipy pandas pytest pytest-cov codecov
-- pip install https://storage.googleapis.com/tensorflow/linux/cpu/tensorflow-1.5.1-cp36-cp36m-linux_x86_64.whl
+- pip install https://storage.googleapis.com/tensorflow/linux/cpu/tensorflow-1.8.0-cp36-cp36m-linux_x86_64.whl
 - pip install git+https://github.com/GPflow/GPflow
 - python setup.py install

dgplib/__init__.py

Lines changed: 1 addition & 1 deletion
@@ -1,7 +1,7 @@
 from __future__ import absolute_import

 from . import layers
-from . import models
+from . import cascade
 from . import multikernel_layers
 from . import utils
 from . import specialized_kernels
dgplib/models.py → dgplib/cascade.py

File renamed without changes.

dgplib/dsdgp.py

Lines changed: 4 additions & 3 deletions
@@ -1,4 +1,3 @@
-import numpy as np
 import tensorflow as tf

 from gpflow import settings
@@ -31,7 +30,7 @@ class DSDGP(Model):
     """
     @defer_build()
     def __init__(self, X, Y, Z, layers, likelihood,
-                 num_latent_Y=None,
+                 num_latent=None,
                  minibatch_size=None,
                  num_samples=1,
                  mean_function=Zero(),
@@ -58,7 +57,9 @@ def __init__(self, X, Y, Z, layers, likelihood,

         self.num_data, D_X = X.shape
         self.num_samples = num_samples
-        self.D_Y = num_latent_Y or Y.shape[1]
+        self.D_Y = num_latent or Y.shape[1]
+
+        self.mean_function = mean_function

         layers.initialize_params(X, Z)#Maybe add initialization method for model
         if layers._initialized == True:
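
The renamed keyword keeps its old fallback: with num_latent=None, the number of latent outputs defaults to the column count of Y. A tiny standalone sketch of that default logic (not from the repo, values illustrative):

import numpy as np

Y = np.zeros((100, 3))          # 100 data points, 3 output columns

num_latent = None
D_Y = num_latent or Y.shape[1]  # mirrors self.D_Y in DSDGP.__init__
assert D_Y == 3                 # None falls back to Y's column count

num_latent = 5
D_Y = num_latent or Y.shape[1]
assert D_Y == 5                 # an explicit value wins

Note that because the fallback uses "or", a falsy value such as 0 would also fall through to Y.shape[1].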

dgplib/layers.py

Lines changed: 32 additions & 23 deletions
@@ -10,8 +10,8 @@
 from gpflow.kullback_leiblers import gauss_kl
 from gpflow.mean_functions import Linear, Zero
 from gpflow.params import Parameter, Parameterized, ParamList
+from gpflow.features import inducingpoint_wrapper

-from .utils import shape_as_list

 class Layer(Parameterized):
     """
@@ -34,18 +34,18 @@ def __init__(self, input_dim, output_dim, num_inducing, kernel,
         self.output_dim = output_dim
         self.num_inducing = num_inducing
         if multitask:
-            self.Z = Parameter(np.zeros((self.num_inducing, self.input_dim+1)),
-                               fix_shape=True)
+            Z = np.zeros((self.num_inducing, self.input_dim + 1))
         else:
-            self.Z = Parameter(np.zeros((self.num_inducing, self.input_dim)),
-                               fix_shape=True)
+            Z = np.zeros((self.num_inducing, self.input_dim))
+
+        self.feature = inducingpoint_wrapper(None, Z)

         if isinstance(kernel, list):
             self.kernel = ParamList(kernel)
         else:
             self.kernel = kernel

-        self.mean_function = mean_function or Zero()
+        self.mean_function = mean_function or Zero(output_dim=self.output_dim)

         shape = (self.num_inducing, self.output_dim)

@@ -63,10 +63,7 @@ def build_prior_KL(self, K):
     def _build_predict(self, Xnew, full_cov=False, stochastic=True):
         # Credits to Hugh Salimbeni for this (@hughsalimbeni)
         def f_conditional(Xnew, full_cov=False):
-            mean, var = conditional(Xnew=Xnew,
-                                    X=self.Z,
-                                    kern=self.kernel,
-                                    f=self.q_mu,
+            mean, var = conditional(Xnew, self.feature, self.kernel, self.q_mu,
                                     q_sqrt=self.q_sqrt,
                                     full_cov=full_cov,
                                     white=True)
@@ -75,14 +72,17 @@ def f_conditional(Xnew, full_cov=False):

         def multisample_conditional(Xnew, full_cov=False):
             if full_cov:
-                f = lambda a: f_conditional(a, full_cov=full_cov)
+                def f(a):
+                    m, v = f_conditional(a, full_cov=full_cov)
+                    return m, tf.transpose(v)
+                #f = lambda a: f_conditional(a, full_cov=full_cov)
                 mean, var = tf.map_fn(f, Xnew, dtype=(settings.tf_float,
-                                      settings.tf_float))
+                                                      settings.tf_float))
                 return tf.stack(mean), tf.stack(var)
             else:
-                #S, N, D = shape_as_list(Xnew)
+                # S, N, D = shape_as_list(Xnew)
                 s = tf.shape(Xnew)
-                X_flat = tf.reshape(Xnew, [s[0]*s[1], s[2]])
+                X_flat = tf.reshape(Xnew, [s[0] * s[1], s[2]])
                 mean, var = f_conditional(X_flat)
                 return [tf.reshape(m, [s[0], s[1], -1]) for m in [mean, var]]

@@ -93,6 +93,7 @@ def multisample_conditional(Xnew, full_cov=False):

         return mean, var

+
 def find_weights(input_dim, output_dim, X, multitask=False):
     """
     Find the initial weights of the Linear mean function based on
@@ -104,7 +105,7 @@ def find_weights(input_dim, output_dim, X, multitask=False):

     elif input_dim > output_dim:
         if multitask:
-            _, _, V = np.linalg.svd(X[:,:-1], full_matrices=False)
+            _, _, V = np.linalg.svd(X[:, :-1], full_matrices=False)
         else:
             _, _, V = np.linalg.svd(X, full_matrices=False)
         W = V[:output_dim, :].T
@@ -115,15 +116,17 @@ def find_weights(input_dim, output_dim, X, multitask=False):
         W = np.concatenate([I, zeros], 1)

     if multitask:
-        W = np.concatenate([W, np.zeros((1,W.shape[1]))], axis=0)
+        W = np.concatenate([W, np.zeros((1, W.shape[1]))], axis=0)

     return W

+
 class InputMixin(object):
     """
     Mixin class for input layers. Implements a single method to compute the
     value of the inputs and inducing inputs for the next layer.
     """
+
     def compute_inputs(self, X, Z, multitask=False):
         W = find_weights(self.input_dim, self.output_dim, X, multitask)
@@ -132,44 +135,49 @@ def compute_inputs(self, X, Z, multitask=False):

         return X_running, Z_running, W

+
 class HiddenMixin(object):
     """
     Mixin class for hidden layers. Implements a single method to compute the
     value of the inputs and inducing inputs for the next layer.
     """
+
     def compute_inputs(self, X, Z, multitask=False):
         W = find_weights(self.input_dim, self.output_dim, X, multitask)

-        if isinstance(self.Z, ParamList):
-            Z_running = self.Z[0].value.copy().dot(W)
+        if isinstance(self.feature, ParamList):
+            Z_running = self.feature[0].Z.value.copy().dot(W)
         else:
-            Z_running = self.Z.value.copy().dot(W)
+            Z_running = self.feature.Z.value.copy().dot(W)

         X_running = X.copy().dot(W)

         return X_running, Z_running, W

+
 class OutputMixin(object):
     """
     Mixin class for output layers. Does not implement any methods. Only used
     for type checking.
     """
+
     def compute_inputs(self, X, Z, multitask=False):
         W = find_weights(self.input_dim, self.output_dim, X, multitask)

-        Z_running = self.Z.value.copy().dot(W)
+        Z_running = self.feature.Z.value.copy().dot(W)
         X_running = X.copy().dot(W)

         return X_running, Z_running, W

+
 class InputLayer(Layer, InputMixin):
     @defer_build()
     def initialize_forward(self, X, Z, multitask=False):
         """
         Initialize Layer and Propagate values of inputs and inducing inputs
         forward
         """
-        self.Z.assign(Z)
+        self.feature.Z.assign(Z)

         X_running, Z_running, W = self.compute_inputs(X, Z, multitask)

@@ -187,7 +195,7 @@ def initialize_forward(self, X, Z, multitask=False):
         Initialize Layer and Propagate values of inputs and inducing inputs
         forward
         """
-        self.Z.assign(Z)
+        self.feature.Z.assign(Z)

         X_running, Z_running, W = self.compute_inputs(X, Z, multitask)

@@ -197,6 +205,7 @@ def initialize_forward(self, X, Z, multitask=False):

         return X_running, Z_running

+
 class OutputLayer(Layer, OutputMixin):
     @defer_build()
     def initialize_forward(self, X, Z, multitask=False):
@@ -205,5 +214,5 @@ def initialize_forward(self, X, Z, multitask=False):
         forward
         """

-        self.Z.assign(Z)
+        self.feature.Z.assign(Z)
         return (None, None)
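
The core of this file's change is the move from a raw Z Parameter to GPflow's inducing-point features. A minimal standalone sketch of that idiom, assuming GPflow 1.x as targeted by this commit (shapes are illustrative):

import numpy as np
from gpflow.features import inducingpoint_wrapper

# Wrap raw inducing inputs in an InducingPoints feature object, as the
# refactored Layer.__init__ now does, instead of storing a bare Parameter.
Z = np.random.randn(10, 2)                # 10 inducing points, 2 input dims
feature = inducingpoint_wrapper(None, Z)  # -> gpflow.features.InducingPoints

# The wrapped inducing inputs are still a Parameter, reachable via .Z:
print(feature.Z.value.shape)              # (10, 2)

# conditional() then takes the feature positionally, replacing the old
# X=self.Z keyword style:
#   mean, var = conditional(Xnew, feature, kernel, q_mu,
#                           q_sqrt=q_sqrt, white=True)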

dgplib/multikernel_layers.py

Lines changed: 24 additions & 21 deletions
@@ -7,13 +7,13 @@

 from gpflow.conditionals import conditional
 from gpflow.decors import params_as_tensors, autoflow, defer_build
+from gpflow.features import inducingpoint_wrapper
 from gpflow.kullback_leiblers import gauss_kl
 from gpflow.mean_functions import Linear, Zero
 from gpflow.params import Parameter, Parameterized, ParamList

-from .layers import Layer, find_weights
+from .layers import Layer
 from .layers import InputMixin, HiddenMixin, OutputMixin
-from .utils import shape_as_list

 class MultikernelLayer(Layer):
     """
@@ -40,12 +40,13 @@ def __init__(self, input_dim, output_dim, num_inducing, kernel_list,
         self.offset = int(self.output_dim/self.num_kernels)

         if not self._shared_Z:
-            del self.Z
+            del self.feature
             if multitask:
                 Z = np.zeros((self.num_inducing, self.input_dim+1))
             else:
                 Z = np.zeros((self.num_inducing, self.input_dim))
-            self.Z = ParamList([Parameter(Z.copy()) for _ in range(self.num_kernels)])
+
+            self.feature = ParamList([inducingpoint_wrapper(None, Z.copy()) for _ in range(self.num_kernels)])


     @params_as_tensors
@@ -67,19 +68,21 @@ def f_conditional(Xnew, full_cov=False):
             mean = []
             var = []
             if self._shared_Z:
-                Zs = [self.Z for _ in range(self.num_kernels)]
+                feats = [self.feature for _ in range(self.num_kernels)]
             else:
-                Zs = self.Z
-            for i, (k, Z) in enumerate(zip(self.kernel, Zs)):
-                m, v = conditional(Xnew=Xnew,
-                                   X=Z,
-                                   kern=k,
-                                   f=self.q_mu[:,(i*self.offset):((i+1)*self.offset)],
+                feats = [feat for feat in self.feature]
+            for i, (k, feat) in enumerate(zip(self.kernel, feats)):
+                m, v = conditional(Xnew, feat, k, self.q_mu[:,(i*self.offset):((i+1)*self.offset)],
                                    q_sqrt=self.q_sqrt[(i*self.offset):((i+1)*self.offset),:,:,],
                                    full_cov=full_cov,
                                    white=True)
                 mean.append(m)
-                var.append(v)
+
+                #temporary fix
+                if full_cov:
+                    var.append(tf.transpose(v))
+                else:
+                    var.append(v)

             mean = tf.concat(mean, axis=-1) #NxK
             var = tf.concat(var, axis=-1) #NxK or NxNxK
@@ -114,10 +117,10 @@ def initialize_forward(self, X, Z, multitask=False):
         forward
         """
         if self._shared_Z:
-            self.Z.assign(Z)
+            self.feature.Z.assign(Z)
         else:
-            for Z_current in self.Z:
-                Z_current.assign(Z)
+            for feat in self.feature:
+                feat.Z.assign(Z)

         X_running, Z_running, W = self.compute_inputs(X, Z, multitask)

@@ -136,10 +139,10 @@ def initialize_forward(self, X, Z, multitask=False):
         forward
         """
         if self._shared_Z:
-            self.Z.assign(Z)
+            self.feature.Z.assign(Z)
         else:
-            for Z_current in self.Z:
-                Z_current.assign(Z)
+            for feat in self.feature:
+                feat.Z.assign(Z)

         X_running, Z_running, W = self.compute_inputs(X, Z, multitask)

@@ -158,9 +161,9 @@ def initialize_forward(self, X, Z, multitask=False):
         """

         if self._shared_Z:
-            self.Z.assign(Z)
+            self.feature.Z.assign(Z)
         else:
-            for Z_current in self.Z:
-                Z_current.assign(Z)
+            for feat in self.feature:
+                feat.Z.assign(Z)

         return (None, None)
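
The "temporary fix" above (and the matching def f(a) change in layers.py) hinges on tf.transpose's default behaviour of reversing all axes, which reorders the full-covariance output of the refactored conditional; presumably from a per-output (D, N, N) layout back to the (N, N, D) layout downstream code expects. A standalone sketch of just that operation (shapes illustrative):

import tensorflow as tf

# With no perm argument, tf.transpose reverses every axis, so a
# (D, N, N) stack of covariance matrices becomes (N, N, D).
v = tf.zeros((3, 5, 5))   # illustrative (D, N, N) tensor
v_t = tf.transpose(v)     # axes reversed

print(v_t.shape)          # (5, 5, 3); static shape, no session needed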

dgplib/multitask_dsdgp.py

Lines changed: 0 additions & 7 deletions
@@ -1,12 +1,6 @@
-import numpy as np
 import tensorflow as tf

-from gpflow import settings
-
 from gpflow.decors import autoflow, defer_build, params_as_tensors
-from gpflow.mean_functions import Zero
-from gpflow.models import Model
-from gpflow.params import DataHolder, Minibatch

 from .dsdgp import DSDGP
 from .utils import normal_sample, tile_over_samples
@@ -30,4 +24,3 @@ def _propagate(self, Xnew, full_cov=False, num_samples=1):
             Fvars.append(var)

         return Fs[1:], Fmeans, Fvars
-
