I am getting incorrect results from this simple model:
import numpy as np
import torch
import torch.nn as nn
import torch.distributions.constraints as constraints
import pyro
import pyro.distributions as dist
from pyro.optim import Adam
from pyro.infer import SVI, Trace_ELBO
from pyro.contrib.autoguide import AutoDiagonalNormal
y= np.array([28.0, 26.0, 33.0, 24.0, 34.0, 44.0, 27.0, 16.0, 40.0, 2.0, 29.0, 22.0, 24.0, 21.0, 25.0, 30.0, 23.0, 29.0, 31.0, 19.0, 24.0, 20.0, 36.0, 32.0, 36.0, 28.0, 25.0, 21.0, 28.0, 29.0, 37.0, 25.0, 28.0, 26.0, 30.0, 32.0, 36.0, 26.0, 30.0, 22.0, 36.0, 23.0, 27.0, 27.0, 28.0, 27.0, 31.0, 27.0, 26.0, 33.0, 26.0, 32.0, 32.0, 24.0, 39.0, 28.0, 24.0, 25.0, 32.0, 25.0, 29.0, 27.0, 28.0, 29.0, 16.0, 23.0], dtype=np.float32).reshape(66,1)
y=torch.Tensor(y)
def model(y):
    # Priors on the mean and the (unconstrained) scale of the observations
    beta = pyro.sample('beta', dist.Normal(torch.tensor(1.0), torch.tensor(1.0)))
    sigma = pyro.sample('sigma', dist.Normal(torch.tensor(1.0), torch.tensor(1.0)))
    # Softplus keeps the likelihood's scale parameter positive
    pyro.sample('obs', dist.Normal(beta, torch.nn.Softplus()(sigma)), obs=y)
optim = Adam({'lr': 0.05})
guide = AutoDiagonalNormal(model)
svi = SVI(model, guide, optim, loss=Trace_ELBO())
for i in range(10000):
    loss = svi.step(y)
    if i % 1000 == 0:
        print(loss)

for name in pyro.get_param_store().get_all_param_names():
    print('{0} : {1}'.format(name, pyro.param(name).data.numpy()))
print(guide.quantiles([0.05, 0.5, 0.95]))
Output:
auto_loc : [12.233615 4.6869206]
auto_scale : [0.6047264 0.4203389]
{'beta': [tensor(11.2389), tensor(12.2336), tensor(13.2283)], 'sigma': [tensor(3.9955), tensor(4.6869), tensor(5.3783)]}
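For what it's worth, the printed quantiles appear to be consistent with the learned auto_loc / auto_scale, i.e. they look like plain Gaussian quantiles of the diagonal-normal guide (both latent sites have real-valued support, so no transform should be involved). A quick check with scipy, using the numbers copied from the output above:

from scipy.stats import norm

auto_loc = [12.233615, 4.6869206]
auto_scale = [0.6047264, 0.4203389]

# 5% / 50% / 95% quantiles of N(loc, scale) for each latent site
for name, loc, scale in zip(['beta', 'sigma'], auto_loc, auto_scale):
    print(name, [loc + scale * norm.ppf(q) for q in [0.05, 0.5, 0.95]])
# beta  -> roughly [11.239, 12.234, 13.228]
# sigma -> roughly [ 3.996,  4.687,  5.378]

So the quantile reporting looks self-consistent with the fitted guide; it is the fitted location for beta (about 12 instead of about 27) that seems off.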
Expected:
beta ~ 27
sigma ~ 6
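Those expected values are roughly the sample mean and sample standard deviation of the data; a quick sanity check in plain PyTorch, using the y tensor defined above:

# Sample statistics of y, which I expect the posteriors for
# beta and softplus(sigma) to roughly recover.
print('sample mean:', y.mean().item())  # roughly 27.6
print('sample std :', y.std().item())   # roughly 6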
Am I doing anything wrong here?