Hello all
I have written an EasyGuide for a small regression problem, but I could not get the guide's parameters simply by initializing it.
Then I found a way of initializing the parameters by looking at the Example: Sparse Bayesian Linear Regression — Pyro Tutorials 1.8.4 documentation. That did work, but the parameters still did not show up when using the .parameters()
method.
Minimal reproducible snippet:
In [1]: import pyro
...: import torch
...: import pyro.distributions as dist
...: from pyro.contrib.easyguide import easy_guide, EasyGuide
...: from pyro.nn import PyroModule, PyroSample, PyroParam
...: from torch.distributions import constraints
...: import numpy as np
...:
...: torch.manual_seed(42)
...: pyro.set_rng_seed(42)
...: pyro.__version__
Out[1]: '1.6.0'
In [2]: class BayesianRegression(PyroModule):
...: def __init__(self, in_features, out_features):
...: super().__init__()
...: self.linear = PyroModule[torch.nn.Linear](in_features, out_features)
...: self.linear.weight = PyroSample(dist.Normal(0., 1.).expand([out_features, in_features]).to_event(2))
...: self.linear.bias = PyroSample(dist.Normal(0., 10.).expand([out_features]).to_event(1))
...:
...: def forward(self, x, full_size, y=None):
...: sigma = pyro.sample("sigma", dist.Uniform(0., 10.))
...: mean = self.linear(x).squeeze(-1)
...: with pyro.plate("data", size=full_size, subsample_size=x.shape[0]):
...: obs = pyro.sample("obs", dist.Normal(mean, sigma), obs=y)
...: return mean
...:
In [3]: base_regression_model = BayesianRegression(1, 1)
In [4]: @easy_guide(base_regression_model)
...: def regression_guide(self, x, full_size, y=None):
...: group = self.group(match=".*")
...: loc = pyro.param("loc", torch.randn(group.event_shape))
...: scale = pyro.param("scale", torch.ones(group.event_shape)*0.01, constraint=constraints.positive)
...: group.sample("joint", dist.Normal(loc=loc, scale=scale).to_event(1))
...:
In [5]: pyro.clear_param_store()
...: regression_guide(x=torch.ones(10, 1), full_size=100)
...: list(regression_guide.parameters()) # Unable to get parameters
Out[5]: []
In [6]: pyro.clear_param_store()
...: with pyro.poutine.block(), pyro.poutine.trace(param_only=True) as param_capture:
...: regression_guide(x=torch.ones(10, 1), full_size=100)
...: params = list([pyro.param(name).unconstrained() for name in param_capture.trace])
...: params
Out[6]:
[tensor([ 1.8928, 1.3067, -0.0662, -0.4235, -2.3768, 0.0641, -0.3435, 1.2287,
-0.2754, -0.2109, 0.9287, -0.2282, -1.2179], requires_grad=True),
tensor([-4.6052, -4.6052, -4.6052, -4.6052, -4.6052, -4.6052, -4.6052, -4.6052,
-4.6052, -4.6052, -4.6052, -4.6052, -4.6052], requires_grad=True)]
In [7]: list(regression_guide.parameters()) # still cannot get the parameters
Out[7]: []
In [8]: pyro.clear_param_store()
...: regression_guide = pyro.infer.autoguide.AutoNormal(base_regression_model)
...: regression_guide(x=torch.ones(10, 1), full_size=100)
...: list(regression_guide.parameters()) # Getting params from AutoNormal works well
Out[8]:
[Parameter containing:
tensor(0., requires_grad=True),
Parameter containing:
tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,
0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,
0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,
0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,
0., 0., 0., 0.], requires_grad=True),
Parameter containing:
tensor([[0.]], requires_grad=True),
Parameter containing:
tensor([0.], requires_grad=True),
Parameter containing:
tensor(-2.3026, requires_grad=True),
Parameter containing:
tensor([-2.3026, -2.3026, -2.3026, -2.3026, -2.3026, -2.3026, -2.3026, -2.3026,
-2.3026, -2.3026, -2.3026, -2.3026, -2.3026, -2.3026, -2.3026, -2.3026,
-2.3026, -2.3026, -2.3026, -2.3026, -2.3026, -2.3026, -2.3026, -2.3026,
-2.3026, -2.3026, -2.3026, -2.3026, -2.3026, -2.3026, -2.3026, -2.3026,
-2.3026, -2.3026, -2.3026, -2.3026, -2.3026, -2.3026, -2.3026, -2.3026,
-2.3026, -2.3026, -2.3026, -2.3026, -2.3026, -2.3026, -2.3026, -2.3026,
-2.3026, -2.3026, -2.3026, -2.3026, -2.3026, -2.3026, -2.3026, -2.3026,
-2.3026, -2.3026, -2.3026, -2.3026, -2.3026, -2.3026, -2.3026, -2.3026,
-2.3026, -2.3026, -2.3026, -2.3026, -2.3026, -2.3026, -2.3026, -2.3026,
-2.3026, -2.3026, -2.3026, -2.3026, -2.3026, -2.3026, -2.3026, -2.3026,
-2.3026, -2.3026, -2.3026, -2.3026, -2.3026, -2.3026, -2.3026, -2.3026,
-2.3026, -2.3026, -2.3026, -2.3026, -2.3026, -2.3026, -2.3026, -2.3026,
-2.3026, -2.3026, -2.3026, -2.3026], requires_grad=True),
Parameter containing:
tensor([[-2.3026]], requires_grad=True),
Parameter containing:
tensor([-2.3026], requires_grad=True)]
So, how can I get the parameters generated by EasyGuide using the .parameters() method?
Thanks