Bayesian latent linear regression


#1

I want to use variational inference to implement the directed graphical model shown below. It is a bit like Bayesian least squares, but it contains a hidden variable. I don't know how to implement it; can someone point me in the right direction?

[image: directed graphical model]
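
Concretely, the generative process I have in mind (and what my code below tries to implement) is, for each row $x_n$ of $X$:

$$z_n \sim \mathcal{N}\!\left(P^\top x_n + a_n,\ I_K\right), \qquad y_n \sim \mathcal{N}\!\left(Q^\top z_n + b_n,\ I_S\right),$$

with entrywise priors $P_{mk} \sim \mathcal{N}(0, \Sigma_{mk})$ and $Q_{ks} \sim \mathcal{N}(0, \Psi_{ks})$, and Gamma priors on the scales $\Sigma$ and $\Psi$.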


#3

import logging

import torch
import pyro
import pyro.distributions as dist
from pyro import optim
from pyro.infer import SVI, Trace_ELBO
from pyro.infer.autoguide import AutoMultivariateNormal

logging.basicConfig(level=logging.INFO)

N = 5    # number of data points
M = 4    # input dimension
S = 1    # output dimension
K = 2    # latent dimension
Nt = 15

# Synthetic data: Y = X W V + Gaussian noise
X_train = torch.randn(N, M)
W = torch.randn(M, K)
V = torch.randn(K, S)
Y = torch.mm(X_train, W).mm(V).add(torch.randn(N, S))

def model(X_train, Y):
    # Per-row offsets for the latent layer and the output layer
    a = pyro.sample("a", dist.MultivariateNormal(8 * torch.ones(N, K), 1000 * torch.eye(K)))
    b = pyro.sample("b", dist.MultivariateNormal(8 * torch.ones(N, S), 1000 * torch.eye(S)))

    # Gamma priors on the scales of the two weight matrices
    Sigma_prior = pyro.sample("Sigma", dist.Gamma(torch.ones(M, K), torch.ones(M, K)).to_event(2))
    # Sigma_prior = dist.Gamma(1.0, 1.0).sample(sample_shape=(M, K))
    Psi_prior = pyro.sample("Psi", dist.Gamma(torch.ones(K, S), torch.ones(K, S)).to_event(2))
    # Psi_prior = dist.Gamma(1.0, 1.0).sample(sample_shape=(K, S))

    # Weight matrices: P maps inputs to the latent space, Q maps latents to outputs
    P_prior = pyro.sample("P", dist.Normal(torch.zeros(M, K), Sigma_prior).to_event(2))
    Q_prior = pyro.sample("Q", dist.Normal(torch.zeros(K, S), Psi_prior).to_event(2))
    # Omega_prior = dist.Gamma(1.0, 1.0).sample(sample_shape=(K, K))

    # Latent variable Z, then the likelihood of the observations
    mean_z = X_train.mm(P_prior) + a
    Z_prior = pyro.sample("Z", dist.MultivariateNormal(mean_z, torch.eye(K)))
    mean_y = Z_prior.mm(Q_prior) + b
    with pyro.plate("X_train", len(X_train)):
        pyro.sample("obs", dist.MultivariateNormal(mean_y, torch.eye(S)), obs=Y)

guide = AutoMultivariateNormal(model)
svi = SVI(model,
          guide,
          optim.Adam({"lr": 0.005}),
          loss=Trace_ELBO(),
          num_samples=1000)

pyro.clear_param_store()
num_iters = 10000
for i in range(num_iters):
    elbo = svi.step(X_train, Y)
    if i % 500 == 0:
        logging.info("Elbo loss: {}".format(elbo))
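
After training, I inspect what the guide has learned by drawing posterior samples of the latent sites. A minimal sketch using pyro.infer.Predictive (the site names are the ones in my model above):

from pyro.infer import Predictive

# Draw samples of the latent sites from the trained guide
predictive = Predictive(model, guide=guide, num_samples=500,
                        return_sites=("P", "Q", "Z"))
posterior = predictive(X_train, Y)
print(posterior["P"].mean(dim=0))  # posterior mean of P, shape (M, K)
print(posterior["Q"].mean(dim=0))  # posterior mean of Q, shape (K, S)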


#4

I have built the model above, but the results have not converged and the ELBO keeps oscillating. Does anyone know the reason?


#5

Would you mind pasting your code in a form that can be read easily?