I am inferring some parameters of a neural network via MCMC. I noticed all of the samples returned by MCMC are identical. Note that I am not fixing the random seed for Pyro. Am I doing something wrong?
Here’s how I define my neural network model and use MCMC to get a posterior over `v_q`:
class model(nn.Module):
    """Generative network wrapper exposing a Pyro model over the latent code v_q."""

    def __init__(self):
        super(model, self).__init__()
        # .
        # .  # define model layers
        # .

    def infer_v_q(self, x_q, sigma):
        """Pyro model: sample the latent v_q uniformly, decode it, condition on x_q.

        Args:
            x_q: observed batch; dim 0 is the batch dimension (batch_dim=36).
            sigma: observation-noise scale for the Normal likelihood.

        Returns:
            The observed sample site "x" (conditioned on x_q).
        """
        # BUG FIX: plate size must come from the argument x_q — the original
        # referenced an undefined global `x` (NameError at runtime).
        with pyro.plate("data", x_q.shape[0]):
            # v_q.shape = (batch_dim, 7), batch_dim=36
            # NOTE(review): v_q_min / v_q_max are assumed to be tensors defined
            # at module scope with event shape (7,) — confirm.
            v_q = pyro.sample('v_q', pyro.distributions.Uniform(v_q_min, v_q_max).to_event(1))
            u = self.generation_network(v_q)
            return pyro.sample("x", pyro.distributions.Normal(u, sigma).to_event(3), obs=x_q)
# Here's how I run MCMC inference
# NOTE(review): `model` is the class name, so `model.infer_v_q` is the unbound
# function and mcmc.run(x_q) would bind x_q to `self`. Presumably `model` has
# been rebound to an instance before this point — confirm; also note that
# infer_v_q takes (x_q, sigma) but only one argument is passed to run().
# max_tree_depth=2 caps each NUTS trajectory at 2**2 = 4 leapfrog steps.
nuts_kernel = pyro.infer.NUTS(model.infer_v_q, adapt_step_size=True, step_size=1e-7, target_accept_prob=0.7, max_tree_depth=2)
mcmc = pyro.infer.MCMC(nuts_kernel, num_samples=200, warmup_steps=100, num_chains=1)
mcmc.run(x_q)
# After inference, estimated step_size is around 1e-8 and acceptance probability is around 70%
# NOTE(review): with step_size ~1e-8 and at most 4 leapfrog steps per sample,
# consecutive samples can differ by only ~4e-8 per coordinate — far below the
# atol=1e-5 used in the identity check below. The chain IS moving, just so
# slowly that all samples look identical; raising max_tree_depth (NUTS default
# is 10) and letting step-size adaptation run longer would let it explore.
# get samples
v_q_pred = mcmc.get_samples()["v_q"]
# Check whether all 200 posterior samples of v_q are numerically identical.
# One broadcasted torch.isclose over the (200, 200) pair grid replaces the
# original O(n^2) Python double loop, which also kept iterating after the
# first mismatch (`cond = cond and ...` never broke out of the loops) and
# redundantly compared every ordered pair.
pairwise_close = torch.isclose(v_q_pred.unsqueeze(0), v_q_pred.unsqueeze(1), atol=1e-5)
cond = bool(pairwise_close.all().item())
print(cond)  # prints True