# Simple Variational GP Classification with Pyro

[1]:

import math
import torch
import gpytorch
import pyro
from matplotlib import pyplot as plt

%matplotlib inline

[2]:

# Training inputs: 50 evenly spaced points on [0, 1].
train_x = torch.linspace(0, 1, 50)
# Binary labels from a square wave: sign of cos(4*pi*x), mapped {-1, 1} -> {0, 1}.
train_y = (torch.cos(train_x * (4 * math.pi)).sign() + 1.0) / 2.0

[3]:

from gpytorch.variational import CholeskyVariationalDistribution, VariationalStrategy
from gpytorch.models import PyroGP

class PyroGPClassificationModel(PyroGP):
    """Approximate GP classification model trained with Pyro SVI.

    Places a Cholesky-parameterized variational distribution over the
    function values at the inducing points, with a zero mean and a
    scaled Matern-1/2 kernel for the latent GP prior.
    """

    def __init__(self, likelihood, inducing_points):
        # One variational parameter set per inducing point.
        variational_distribution = CholeskyVariationalDistribution(inducing_points.size(0))
        variational_strategy = VariationalStrategy(self, inducing_points, variational_distribution)
        # NOTE(review): num_data reads the module-level train_y defined in an
        # earlier cell — this ties the model to the notebook's training set size.
        super(PyroGPClassificationModel, self).__init__(
            variational_strategy, likelihood, num_data=train_y.numel(), name_prefix="basic_gp_test"
        )
        self.mean_module = gpytorch.means.ZeroMean()
        self.covar_module = gpytorch.kernels.ScaleKernel(gpytorch.kernels.MaternKernel(nu=0.5))

    def forward(self, x):
        # Latent GP prior at inputs x: MultivariateNormal(mean(x), K(x, x)).
        mean_x = self.mean_module(x)
        covar_x = self.covar_module(x)
        latent_pred = gpytorch.distributions.MultivariateNormal(mean_x, covar_x)
        return latent_pred

# Initialize model and likelihood
likelihood = gpytorch.likelihoods.BernoulliLikelihood()
model = PyroGPClassificationModel(likelihood, train_x)

[4]:

from pyro import optim
from pyro import infer

# BUG FIX: the original cell passed `optimizer` to SVI without ever
# defining it, which raises NameError. Define a Pyro Adam optimizer.
optimizer = optim.Adam({"lr": 0.1})
# Vectorized multi-sample ELBO estimate (64 particles per step) for a
# lower-variance gradient.
elbo = infer.Trace_ELBO(num_particles=64, vectorize_particles=True)
svi = infer.SVI(model.model, model.guide, optimizer, elbo)

[5]:

num_epochs = 100

# Ensure train mode before optimizing — important if this cell is re-run
# after the later `model.eval()` call, which would otherwise leave the
# model producing posterior predictions during training.
model.train()
likelihood.train()
for i in range(num_epochs):
    # One SVI step: sample the guide, compute the ELBO, update parameters.
    loss = svi.step(train_x, train_y)
    # Report every 10th iteration.
    if (i + 1) % 10 == 0:
        print('Iteration {}, Loss = {}'.format(i + 1, loss))

Iteration 10, Loss = 69.04036617279053
Iteration 20, Loss = 62.06441354751587
Iteration 30, Loss = 67.1903223991394
Iteration 40, Loss = 60.96347999572754
Iteration 50, Loss = 58.23017120361328
Iteration 60, Loss = 54.84669291973114
Iteration 70, Loss = 59.56512427330017
Iteration 80, Loss = 57.56812238693237
Iteration 90, Loss = 59.487281799316406
Iteration 100, Loss = 58.22996139526367

[6]:

# Switch model AND likelihood to eval (posterior-prediction) mode; the
# original only called model.eval(). Disable autograd for inference —
# no gradients are needed and this avoids building a graph.
model.eval()
likelihood.eval()
test_x = torch.linspace(0, 1, 200)
with torch.no_grad():
    pred_dist = model(test_x)

[7]:

# Push the latent GP posterior through the Bernoulli likelihood to get
# predictive class probabilities (a Bernoulli distribution over labels).
pred_y = likelihood(pred_dist)

[8]:

# Set up a single-panel figure for the classification results.
fig, axis = plt.subplots(1, 1, figsize=(4, 3))
# Training points as black stars.
axis.plot(train_x.numpy(), train_y.numpy(), 'k*')
# Threshold the predictive probabilities at 0.5 to get hard 0/1 labels.
pred_labels = pred_y.mean.ge(0.5).float()
# Predicted labels as a blue curve over the test inputs.
axis.plot(test_x.numpy(), pred_labels.numpy(), 'b')
axis.set_ylim([-1, 2])
axis.legend(['Observed Data', 'Mean', 'Confidence'])

[8]:

<matplotlib.legend.Legend at 0x7f7c540734e0>

[ ]: