AutoDiagonalNormal found no latent variables; Use an empty guide instead

Hello friends, I haven’t been able to work on my Pyro projects for almost a year, but now I’m back. I’m trying to run some old code that I discussed previously (here and here).

The code ran fine before but fails now. I’m on the newest version of Pyro, so maybe something broke along the way.

Here’s the relevant code:

from collections import OrderedDict

import torch
import torch.nn as nn

import pyro
import pyro.distributions as dist
from pyro.infer import SVI, Trace_ELBO
from pyro.infer.autoguide import AutoDiagonalNormal
from pyro.nn import PyroModule, PyroSample

# GammaHurdle (a custom hurdle distribution) and the name_count decorator
# are defined in the earlier posts linked above.

class BayesianRegression_LogGamma_shape_zeroInf_thetaFunc3(PyroModule):
    @name_count
    def __init__(self, in_features, mu_l, sh_l, th_l, out_features = 1):
        super().__init__()
        
        # parameter names list
        self.parameter_names = []
        
        layers = []
        for i in range(len(mu_l)-1):
            #print(i)
            layers.append(('mu_fc' + str(i), nn.Linear(mu_l[i], mu_l[i+1])))
            if i != (len(mu_l)-2): layers.append(('mu_ReLU' + str(i), nn.ReLU()))
        mu = OrderedDict(layers)
        self.mu = nn.Sequential(mu)
        
        for name, param in self.mu.named_parameters():
            self.parameter_names.append(name)
        
        pyro.nn.module.to_pyro_module_(self.mu)
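        # make only the final mu layer Bayesian; the lower layers stay
        # deterministic parameters, as in a regular neural net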
        for m in self.mu.modules():
            if m._pyro_name == 'mu_fc' + str(len(mu_l)-2):
                for name, value in list(m.named_parameters(recurse=False)):
                    setattr(m, name, PyroSample(prior=dist.Normal(0., 1.).expand(value.shape).to_event(value.dim())))
                
        layers = []
        for i in range(len(sh_l)-1):
            layers.append(('sh_fc' + str(i), nn.Linear(sh_l[i], sh_l[i+1])))
            if i != (len(sh_l)-2): layers.append(('sh_ReLU' + str(i), nn.ReLU()))
        shape = OrderedDict(layers)
        self.shape = nn.Sequential(shape)
        
        for name, param in self.shape.named_parameters():
            self.parameter_names.append(name)
        
        pyro.nn.module.to_pyro_module_(self.shape)
        for m in self.shape.modules():
            if m._pyro_name == 'sh_fc' + str(len(sh_l)-2):
                for name, value in list(m.named_parameters(recurse=False)):
                    setattr(m, name, PyroSample(prior=dist.Laplace(0., 1.).expand(value.shape).to_event(value.dim())))
        
        layers = []
        for i in range(len(th_l)-1):
            layers.append(('th_fc' + str(i), nn.Linear(th_l[i], th_l[i+1])))
            if i != (len(th_l)-2): layers.append(('th_ReLU' + str(i), nn.ReLU()))
        layers.append(('theta_Sigmoid', nn.Sigmoid()))
        theta = OrderedDict(layers)
        self.theta = nn.Sequential(theta)
        
        for name, param in self.theta.named_parameters():
            self.parameter_names.append(name)
        
        pyro.nn.module.to_pyro_module_(self.theta)
        for m in self.theta.modules():
            if m._pyro_name == 'th_fc' + str(len(th_l)-2):
                for name, value in list(m.named_parameters(recurse=False)):
                    setattr(m, name, PyroSample(prior=dist.Laplace(0., 1.).expand(value.shape).to_event(value.dim())))

    def forward(self, x, y=None):
        
        x = x.reshape(-1, 2)
        mu = self.mu(x).squeeze(-1).exp().clamp(min = .000001)
        shape = self.shape(x).squeeze(-1).exp().clamp(min = 0.000001)
        theta = self.theta(x).squeeze(-1)
        
        # will need to add GPU device
        with pyro.plate("data", x.shape[0]):
            obs = pyro.sample("obs", GammaHurdle(concentration = shape, rate = shape / mu, theta = theta), obs=y)
        return  torch.cat((mu, shape, theta), 0)

It’s called like this:

model = BayesianRegression_LogGamma_shape_zeroInf_thetaFunc3(2, mu_l = [2, 64, 64, 64, 1], sh_l = [2, 64, 64, 1], th_l = [2, 64, 64, 1], out_features = 1)
guide = AutoDiagonalNormal(model)
adam = pyro.optim.ClippedAdam({"lr": 0.0001, 'betas': (.95, .999), 'weight_decay' : .2, 'clip_norm' : 5.})
svi = SVI(model, guide, adam, loss=Trace_ELBO())
pyro.clear_param_store()
pyro.set_rng_seed(123456789)
%time data.train_and_evaluate_SVI(svi, criterion = Trace_ELBO(), model = model, guide = guide, bs = BATCH_SIZE, ne = NUM_EPOCHS)

Omitting some output, here are the errors:

**RuntimeError**: AutoDiagonalNormal found no latent variables; Use an empty guide instead

The above exception was the direct cause of the following exception:

**RuntimeError**: AutoDiagonalNormal found no latent variables; Use an empty guide instead
Trace Shapes:
Param Sites:
Sample Sites:

Hi @yoshy, I don’t immediately see an issue. My first guess is that one of the underlying PyTorch modules you’re using changed the names of some of its internal variables and broke your name-munging logic.

To diagnose, you might try finding a previous (Pyro, PyTorch) version pair that worked, then print out named_parameters(recurse=True) in both the failing and working versions and compare the naming. Again, my first guess is that the names will differ.
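Something like this (a minimal sketch, using the constructor call from your post):

model = BayesianRegression_LogGamma_shape_zeroInf_thetaFunc3(
    2, mu_l=[2, 64, 64, 64, 1], sh_l=[2, 64, 64, 1], th_l=[2, 64, 64, 1])
# compare these names between the working and failing environments
for name, _ in model.named_parameters(recurse=True):
    print(name)
# the matching logic keys off _pyro_name, so print those too
for m in model.mu.modules():
    print(getattr(m, "_pyro_name", None))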

Thank you. I’ll look into that. I think you’re right that it’s related to names.

I made a simpler version where only mu gets the programmatic naming scheme:

class br_test_class(PyroModule):
    @name_count
    def __init__(self, in_features, mu_l, sh_l, th_l, out_features = 1):
        super().__init__()
        
        # parameter names list
        self.parameter_names = []
        
        layers = []
        for i in range(len(mu_l)-1):
            #print(i)
            layers.append(('mu_fc' + str(i), nn.Linear(mu_l[i], mu_l[i+1])))
            if i != (len(mu_l)-2): layers.append(('mu_ReLU' + str(i), nn.ReLU()))
        mu = OrderedDict(layers)
        self.mu = nn.Sequential(mu)
        
        for name, param in self.mu.named_parameters():
            self.parameter_names.append(name)
        
        pyro.nn.module.to_pyro_module_(self.mu)
        # following step sets only the final layer to Bayesian
        # the lower layers remain constants as in a regular neural net
        for m in self.mu.modules():
            if m._pyro_name == 'mu_fc' + str(len(mu_l)-2):
                for name, value in list(m.named_parameters(recurse=False)):
                    setattr(m, name, PyroSample(prior=dist.Normal(0., 1.).expand(value.shape).to_event(value.dim())))
                
        self.linear_shape = PyroModule[nn.Linear](in_features, out_features)
        self.linear_shape.weight = PyroSample(dist.Normal(0., 2.).expand([out_features, in_features]).to_event(2))
        self.linear_shape.bias = PyroSample(dist.Normal(0., 2.).expand([out_features]).to_event(1))

    def forward(self, x, y=None):
        
        x = x.reshape(-1, 2)
        mu = self.mu(x).squeeze(-1).exp().clamp(min = .000001)
        shape = self.linear_shape(x).squeeze(-1).exp()
        theta = pyro.sample("theta", dist.Uniform(.01, .99))
        
        # will need to add GPU device
        with pyro.plate("data", x.shape[0]):
            obs = pyro.sample("obs", GammaHurdle(concentration = shape, rate = shape / mu, theta = theta), obs=y)
        return  torch.cat((mu, shape), 0)

If I run that and follow the usual path through Predictive to get the posterior for mu, it shows no uncertainty in mu (i.e., the posterior mean and quantiles are identical for each individual). So basically the PyroSample attributes aren’t being applied to mu at all.
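For reference, this is roughly the path I mean (a sketch; x_test stands in for my held-out inputs):

from pyro.infer import Predictive

predictive = Predictive(model, guide=guide, num_samples=800,
                        return_sites=("_RETURN",))
ret = predictive(x_test)["_RETURN"]    # each draw is cat((mu, shape), 0)
mu_samples = ret[:, :x_test.shape[0]]  # the first block of the return is mu
print(mu_samples.std(0))               # ~0 everywhere, i.e. no uncertainty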

This part isn’t doing what it did previously:

for m in self.mu.modules():
    if m._pyro_name == 'mu_fc' + str(len(mu_l)-2):
        for name, value in list(m.named_parameters(recurse=False)):
            setattr(m, name, PyroSample(prior=dist.Normal(0., 1.).expand(value.shape).to_event(value.dim())))

You could add a print statement to check which PyroSample sites are being set:

  for m in self.mu.modules():
      if m._pyro_name == 'mu_fc' + str(len(mu_l)-2):
          for name, value in list(m.named_parameters(recurse=False)):
+             print(f"Adding sample site {m._pyro_name} {name}")
              setattr(m, name, PyroSample(prior=dist.Normal(0., 1.).expand(value.shape).to_event(value.dim())))

You were correct. For example, the parameters previously named mu_fc0... now come out named mu.mu_fc0....

I think pyro.nn.module.to_pyro_module_(self.mu) slightly changed its behavior since I last worked on this. Easy fix! I just need to prepend the attribute name: 'mu_fc' becomes 'mu.mu_fc'.

for m in self.mu.modules():
    if m._pyro_name == 'mu.mu_fc' + str(len(mu_l)-2):
        for name, value in list(m.named_parameters(recurse=False)):
            print(f"Adding sample site {m._pyro_name} {name}")
            setattr(m, name, PyroSample(prior=dist.Normal(0., 1.).expand(value.shape).to_event(value.dim())))
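A more change-resistant variant (a sketch, untested) would be to match on the names from named_modules(), which are relative to self.mu and so don’t depend on how to_pyro_module_ prefixes _pyro_name:

target = 'mu_fc' + str(len(mu_l) - 2)
for mod_name, m in self.mu.named_modules():
    if mod_name == target:  # name relative to self.mu, no 'mu.' prefix
        for name, value in list(m.named_parameters(recurse=False)):
            setattr(m, name, PyroSample(prior=dist.Normal(0., 1.).expand(value.shape).to_event(value.dim())))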