How to learn one common Gaussian process for all the features in gpytorch

104 views Asked by At

I am trying to re-implement the example from this website (link missing), where they have the code:

class DKLModel(gpytorch.Module):
    """Deep kernel learning model: a neural feature extractor feeding a GP layer.

    The extractor maps raw inputs to ``num_dim`` features; the GP layer then
    models each feature dimension with its own Gaussian process.
    """

    def __init__(self, feature_extractor, num_dim, grid_bounds=(-10., 10.)):
        super().__init__()
        # Keep references to both sub-modules and the configuration used to
        # build the GP layer (one GP per extracted feature dimension).
        self.feature_extractor = feature_extractor
        self.gp_layer = GaussianProcessLayer(num_dim=num_dim, grid_bounds=grid_bounds)
        self.grid_bounds = grid_bounds
        self.num_dim = num_dim

    def forward(self, x):
        """Extract features, rescale them into the grid bounds, and apply the GP layer."""
        lower, upper = self.grid_bounds
        feats = self.feature_extractor(x)
        feats = gpytorch.utils.grid.scale_to_bounds(feats, lower, upper)
        # Move the feature axis into the batch position so the GP layer learns
        # an independent GP for every feature dimension.
        reshaped = feats.transpose(-1, -2).unsqueeze(-1)
        return self.gp_layer(reshaped)

# Build the DKL model and a softmax likelihood over its feature dimensions
# (one mixing weight per GP output, one logit per class).
model = DKLModel(feature_extractor, num_dim=num_features)
likelihood = gpytorch.likelihoods.SoftmaxLikelihood(
    num_features=model.num_dim, num_classes=num_classes
)

# Move both modules onto the GPU when CUDA is available.
if torch.cuda.is_available():
    model, likelihood = model.cuda(), likelihood.cuda()

They mentioned that they learn one GP for every feature, but I would like to share the same Gaussian process across all the features. Does anyone know how to do that?

0

There are 0 answers