Thank you so much and I have tried your codes. I want to post a followup question and hope this will not bother you too much. Here I want to bayesian about the parameters and learn a posterior distribution of the parameters. The goal is to get some samples from the posterior probability distribution of the parameters.
import torch
import pyro
import pyro.distributions as dist
import numpy as np
import matplotlib.pyplot as plt
######### generate data###############
data_A = list(np.random.binomial(1,0.5,5000))
data_B = []
data_C= []
for i in data_A:
if i==0:
data_B.append(np.random.binomial(1,0.5,1)[0])
data_C.append(np.random.binomial(1,0.6,1)[0])
else:
data_B.append(np.random.binomial(1,0.75,1)[0])
data_C.append(np.random.binomial(1,0.25,1)[0])
data_A = torch.tensor(data_A)
data_B = torch.tensor(data_B)
data_C = torch.tensor(data_C)
pyro.clear_param_store()
@pyro.infer.config_enumerate
def model(N):
# a1,b1,a2,b2,a3,b3,a4,b4,a5,b5
beta_param = pyro.param('beta_param', torch.randn(1,10).exp())
# parameter
probs_a = pyro.sample("probs_a", dist.Beta(beta_param[0][0],beta_param[0][1]))
probs_a0b = pyro.sample("probs_a0b", dist.Beta(beta_param[0][2],beta_param[0][3]))
probs_a1b = pyro.sample("probs_a1b", dist.Beta(beta_param[0][4],beta_param[0][5]))
probs_a0c = pyro.sample("probs_a0c", dist.Beta(beta_param[0][6],beta_param[0][7]))
probs_a1c = pyro.sample("probs_a1c", dist.Beta(beta_param[0][8],beta_param[0][9]))
probs_b = torch.tensor([[1-probs_a0b, probs_a0b],[1-probs_a1b, probs_a1b]])
probs_c = torch.tensor([[1-probs_a0c, probs_a0c],[1-probs_a1c, probs_a1c]])
with pyro.plate("data", N):
a = pyro.sample("a", dist.Categorical(probs=torch.tensor([1-probs_a,probs_a])))
b = pyro.sample("b", dist.Categorical(probs=probs_b[a.long(),:]))
c = pyro.sample("c", dist.Categorical(probs=probs_c[a.long(),:]))
return a, b, c
def guide(N):
pass
N=5000
conditioned_model = pyro.condition(model, data={"a": data_A, "b": data_B, "c": data_C})
optim = pyro.optim.Adam({"lr": 0.0004, "betas": (0.90, 0.999)})
svi = pyro.infer.SVI(conditioned_model, guide, loss = pyro.infer.TraceEnum_ELBO(), optim= optim)
losses = []
for step in range(500):
print(step)
losses.append(svi.step(N))
plt.plot(losses)
But this code does not provide a good reasonable result comparing to the synthetic data I generated. The losses do not converge. I also have some codes that work well but I am not providing a prior for the parameters. Finally, I can not get a samples of the parameters from the posterior distribution.
@pyro.infer.config_enumerate
def model(N):
probs_a = pyro.param("probs_a", torch.randn(1, 2).exp(), constraint=constraints.simplex)
probs_b = pyro.param("probs_b", torch.randn(2, 2).exp(), constraint=constraints.simplex)
probs_c = pyro.param("probs_c", torch.randn(2, 2).exp(), constraint=constraints.simplex)
with pyro.plate("data", N):
a = pyro.sample("a", dist.Categorical(probs=probs_a))
b = pyro.sample("b", dist.Categorical(probs=probs_b[a.long(),:]))
c = pyro.sample("c", dist.Categorical(probs=probs_c[a.long(),:]))
return a, b, c
def guide(N):
pass
N=5000
conditioned_model = pyro.condition(model, data={"a": data_A, "b": data_B, "c": data_C})
optim = pyro.optim.Adam({"lr": 0.04, "betas": (0.90, 0.999)})
svi = pyro.infer.SVI(conditioned_model, guide, loss = pyro.infer.TraceEnum_ELBO(), optim= optim)
losses = []
for step in range(500):
print(step)
losses.append(svi.step(N))