Hello friends, I haven’t been able to work on my Pyro projects for almost a year but now I’m back. I’m trying to run some old code that I discussed previously (here and here).
The code ran previously but isn’t now. I’m using the newest version of pyro so maybe something broke along the way.
Here’s the relevant code
class BayesianRegression_LogGamma_shape_zeroInf_thetaFunc3(PyroModule):
@name_count
def __init__(self, in_features, mu_l, sh_l, th_l, out_features = 1):
super().__init__()
# parameter names list
self.parameter_names = []
layers = []
for i in range(len(mu_l)-1):
#print(i)
layers.append(('mu_fc' + str(i), nn.Linear(mu_l[i], mu_l[i+1])))
if i != (len(mu_l)-2): layers.append(('mu_ReLU' + str(i), nn.ReLU()))
mu = OrderedDict(layers)
self.mu = nn.Sequential(mu)
for name, param in self.mu.named_parameters():
self.parameter_names.append(name)
pyro.nn.module.to_pyro_module_(self.mu)
for m in self.mu.modules():
if m._pyro_name == 'mu_fc' + str(len(mu_l)-2):
for name, value in list(m.named_parameters(recurse=False)):
setattr(m, name, PyroSample(prior=dist.Normal(0., 1.).expand(value.shape).to_event(value.dim())))
layers = []
for i in range(len(sh_l)-1):
layers.append(('sh_fc' + str(i), nn.Linear(sh_l[i], sh_l[i+1])))
if i != (len(sh_l)-2): layers.append(('sh_ReLU' + str(i), nn.ReLU()))
shape = OrderedDict(layers)
self.shape = nn.Sequential(shape)
for name, param in self.shape.named_parameters():
self.parameter_names.append(name)
pyro.nn.module.to_pyro_module_(self.shape)
for m in self.shape.modules():
if m._pyro_name == 'sh_fc' + str(len(sh_l)-2):
for name, value in list(m.named_parameters(recurse=False)):
setattr(m, name, PyroSample(prior=dist.Laplace(0., 1.).expand(value.shape).to_event(value.dim())))
layers = []
for i in range(len(th_l)-1):
layers.append(('th_fc' + str(i), nn.Linear(th_l[i], th_l[i+1])))
if i != (len(th_l)-2): layers.append(('th_ReLU' + str(i), nn.ReLU()))
layers.append(('theta_Sigmoid', nn.Sigmoid()))
theta = OrderedDict(layers)
self.theta = nn.Sequential(theta)
for name, param in self.theta.named_parameters():
self.parameter_names.append(name)
pyro.nn.module.to_pyro_module_(self.theta)
for m in self.theta.modules():
if m._pyro_name == 'th_fc' + str(len(th_l)-2):
for name, value in list(m.named_parameters(recurse=False)):
setattr(m, name, PyroSample(prior=dist.Laplace(0., 1.).expand(value.shape).to_event(value.dim())))
def forward(self, x, y=None):
x = x.reshape(-1, 2)
mu = self.mu(x).squeeze(-1).exp().clamp(min = .000001)
shape = self.shape(x).squeeze(-1).exp().clamp(min = 0.000001)
theta = self.theta(x).squeeze(-1)
# will need to add GPU device
with pyro.plate("data", x.shape[0]):
obs = pyro.sample("obs", GammaHurdle(concentration = shape, rate = shape / mu, theta = theta), obs=y)
return torch.cat((mu, shape, theta), 0)
Called below.
model = BayesianRegression_LogGamma_shape_zeroInf_thetaFunc3(2, mu_l = [2, 64, 64, 64, 1], sh_l = [2, 64, 64, 1], th_l = [2, 64, 64, 1], out_features = 1)
guide = AutoDiagonalNormal(model)
adam = pyro.optim.ClippedAdam({"lr": 0.0001, 'betas': (.95, .999), 'weight_decay' : .2, 'clip_norm' : 5.})
svi = SVI(model, guide, adam, loss=Trace_ELBO())
pyro.clear_param_store()
pyro.set_rng_seed(123456789)
%time data.train_and_evaluate_SVI(svi, criterion = Trace_ELBO(), model = model, guide = guide, bs = BATCH_SIZE, ne = NUM_EPOCHS)
Omitting some output, here are the errors:
**RuntimeError** : AutoDiagonalNormal found no latent variables; Use an empty guide instead The above exception was the direct cause of the following exception:
**RuntimeError** : AutoDiagonalNormal found no latent variables; Use an empty guide instead Trace Shapes: Param Sites: Sample Sites: