I have a model where y is the observation and f is the latent function, with y | f ~ N(exp(f), 4·exp(f)) — i.e. mean exp(f) and variance 4·exp(f). I wrote this new likelihood to compute it in my model:
class ExpGaussian(likelihoods.ScalarLikelihood):
    """Heteroscedastic Gaussian likelihood with a log-link mean.

    Models the conditional observation distribution
        y | f ~ Normal(mu, sigma),  mu = exp(f),  variance = 4 * mu,
    so both the mean and the noise level grow with the latent function f.
    """

    def Y_given_F(self, F):
        """Return the conditional distribution p(Y | F) as a tfd.Normal.

        Args:
            F: latent function values (tensor).

        Returns:
            tfd.Normal with mean exp(F) and variance 4 * exp(F).
        """
        mu = tf.math.exp(F)
        # tfd.Normal's second argument is the *standard deviation*, so pass
        # sqrt of the intended variance 4*mu. (Passing 4*mu directly would
        # silently treat the variance as a stddev — a common mistake.)
        sigma = tf.math.sqrt(4 * mu)
        return tfd.Normal(mu, sigma)

    @inherit_check_shapes
    def _scalar_log_prob(self, X: TensorType, F: TensorType, Y: TensorType) -> tf.Tensor:
        """Element-wise log density log p(Y | F); X is unused by this likelihood."""
        return self.Y_given_F(F).log_prob(Y)

    @inherit_check_shapes
    def _conditional_mean(self, X: TensorType, F: TensorType) -> tf.Tensor:
        """E[Y | F] = exp(F)."""
        return self.Y_given_F(F).mean()

    @inherit_check_shapes
    def _conditional_variance(self, X: TensorType, F: TensorType) -> tf.Tensor:
        """Var[Y | F] = 4 * exp(F)."""
        return self.Y_given_F(F).variance()
My input X is [[0],[1],[2],...,[250]] and Y is its corresponding observed value, which looks like this. After optimizing, I predicted the mean of Y 20 steps ahead and got this: it grows exponentially! Is that normal, or does my model have a problem? Thanks a lot!
P.S.: Here is a Colab link to my code: https://colab.research.google.com/drive/1r9zpJhGwCV3qIybrJDC5SzZTurvEsfxG?usp=sharing