I am not able to get PSIS to work.
Here is a reproducible example, based on the Pyro regression tutorial:
import os
from functools import partial

import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import seaborn as sns
import torch
import torch.nn as nn

import pyro
import pyro.distributions as dist
from pyro.nn import PyroModule
# Load the rugged-terrain dataset used by the Pyro regression tutorial.
DATA_URL = "https://d2hg8soec8ck9v.cloudfront.net/datasets/rugged_data.csv"
data = pd.read_csv(DATA_URL, encoding="ISO-8859-1")
# Keep only the modeled columns; .copy() makes df an independent frame so
# the log-transform assignment below does not raise SettingWithCopyWarning.
df = data[["cont_africa", "rugged", "rgdppc_2000"]].copy()
# Drop rows with missing/non-finite GDP, then model log-GDP.
df = df[np.isfinite(df.rgdppc_2000)]
df["rgdppc_2000"] = np.log(df["rgdppc_2000"])
from pyro.nn import PyroSample
class BayesianRegression(PyroModule):
    """Bayesian linear regression.

    Places Normal priors on the linear layer's weights and bias and a
    Uniform prior on the observation noise scale ``sigma``.
    """

    def __init__(self, in_features, out_features):
        super().__init__()
        # Lift nn.Linear's weight and bias to latent random variables.
        self.linear = PyroModule[nn.Linear](in_features, out_features)
        self.linear.weight = PyroSample(
            dist.Normal(0., 1.).expand([out_features, in_features]).to_event(2)
        )
        self.linear.bias = PyroSample(
            dist.Normal(0., 10.).expand([out_features]).to_event(1)
        )

    def forward(self, x, y=None):
        """Return the predictive mean; condition on ``y`` when provided."""
        # Observation noise scale.
        sigma = pyro.sample("sigma", dist.Uniform(0., 10.))
        mean = self.linear(x).squeeze(-1)
        # Condition on y during training; sample obs during prediction.
        with pyro.plate("data", x.shape[0]):
            pyro.sample("obs", dist.Normal(mean, sigma), obs=y)
        return mean
from pyro.infer.autoguide import AutoDiagonalNormal
model = BayesianRegression(3, 1)
guide = AutoDiagonalNormal(model)
from pyro.infer import SVI, Trace_ELBO

# The original snippet used x_data/y_data without defining them (NameError).
# Build them from df as the regression tutorial does: the three features are
# cont_africa, rugged, and their interaction; the target is log-GDP.
df["cont_africa_x_rugged"] = df["cont_africa"] * df["rugged"]
train = torch.tensor(
    df[["cont_africa", "rugged", "cont_africa_x_rugged", "rgdppc_2000"]].values,
    dtype=torch.float,
)
x_data, y_data = train[:, :-1], train[:, -1]

adam = pyro.optim.Adam({"lr": 0.03})
svi = SVI(model, guide, adam, loss=Trace_ELBO())
pyro.clear_param_store()

num_iterations = 1500
for j in range(num_iterations):
    # One SVI step: estimate the ELBO loss and update guide parameters.
    loss = svi.step(x_data, y_data)
    if j % 100 == 0:
        print("[iteration %04d] loss: %.4f" % (j + 1, loss / len(data)))
Now apply PSIS:
from pyro.infer import importance
# NOTE(review): psis_diagnostic evaluates the guide/model under vectorized
# particles, which prepends extra batch dimensions to every sample site —
# visible as the `1000 1 1 1 1 1 1 1` shapes in the trace below. The lifted
# nn.Linear then calls weight.t() on a tensor with more than 2 dims, which
# produces the RuntimeError. Presumably the model must broadcast over those
# particle dims (e.g. a batched matmul instead of nn.Linear) for PSIS to
# work here — confirm against the Pyro docs/forum.
k_hat = importance.psis_diagnostic(model,guide,x_data,y_data)
Error Summary below:
RuntimeError: t() expects a tensor with <= 2 dimensions, but self is 10D
Trace Shapes:
Param Sites:
Sample Sites:
num_particles_vectorized dist |
value 1000 |
sigma dist 1000 1 1 1 1 1 1 1 |
value 1000 1 1 1 1 1 1 1 |
linear.weight dist 1000 1 1 1 1 1 1 1 | 1 3
value 1000 1 1 1 1 1 1 1 | 1 3
linear.bias dist 1000 1 1 1 1 1 1 1 | 1
value 1000 1 1 1 1 1 1 1 | 1
Can you please help me with this error?
Thanks!