I am currently trying to write “Bayesian Hackers” using pyro.
import torch
from torch.autograd import Variable
import pyro
import pyro.distributions as dist
from pyro.optim import Adam
from pyro.infer import SVI
%matplotlib inline
from IPython.core.pylabtools import figsize
import numpy as np
from matplotlib import pyplot as plt
pyro.clear_param_store()
figsize(12.5, 3.5)
count_data = np.loadtxt("data/txtdata.csv")
n_count_data = len(count_data)
plt.bar(np.arange(n_count_data), count_data, color="#348ABD")
plt.xlabel("Time (days)")
plt.ylabel("count of text-msgs received")
plt.title("Did the user's texting habits change over time?")
plt.xlim(0, n_count_data);
alpha_f = 1.0/count_data.mean()
def lambda_(tau, lambda_1, lambda_2):
out = Variable(torch.zeros(n_count_data))
tau = int(tau.data.numpy())
for i in range(0, len(out[:tau])):
out[i] = lambda_1
for j in range(len(out[:tau]),n_count_data):
out[j] = lambda_2
return out
def model(count_data):
alpha = Variable(torch.Tensor([alpha_f]))
lambda_1 = pyro.sample("lambda_1", dist.exponential, alpha)
lambda_2 = pyro.sample("lambda_2" ,dist.exponential, alpha)
tau = pyro.sample("tau", dist.uniform, Variable(torch.IntTensor([0])), Variable(torch.IntTensor([n_count_data])))
f = pyro.sample("latent", dist.poisson, lambda_(tau, lambda_1, lambda_2))
for i in range(n_count_data):
pyro.observe("obs_{}".format(i), dist.poisson,
count_data[i], f)
def guide(count_data):
log_lambda1_q_0 = Variable(torch.Tensor([np.log(15.0)]),requires_grad=True)
log_lambda2_q_0 = Variable(torch.Tensor([np.log(15.0)]),requires_grad=True)
log_tau_q_0 = Variable(torch.IntTensor([np.log(36.0)]), requires_grad=True)
log_lambda1_q = pyro.param("log_lambda1_q", log_lambda1_q_0)
log_lambda2_q = pyro.param("log_lambda2_q", log_lambda2_q_0)
log_tau_q = pyro.param("log_tau_q", log_tau_q_0)
lambda1_q = torch.exp(log_lambda1_q)
lambda2_q = torch.exp(log_lambda2_q)
tau_q = torch.exp(log_tau_q)
pyro.sample("latent", dist.poisson, lambda_(tau_q, lambda1_q, lambda2_q))
adam_params = {"lr":0.0005, "betas":(0.90, 0.999)}
optimizer = Adam(adam_params)
n_steps=4000
svi = SVI(model, guide, optimizer, loss="ELBO")
for step in range(n_steps):
svi.step(count_data)
if step % 100 == 0:
print('.', end='')
But this got errors.
---------------------------------------------------------------------------
RuntimeError Traceback (most recent call last)
<ipython-input-102-c0cac78536c6> in <module>()
65
66 for step in range(n_steps):
---> 67 svi.step(count_data)
68 if step % 100 == 0:
69 print('.', end='')
~/.pyenv/versions/3.5.2/lib/python3.5/site-packages/pyro/infer/svi.py in step(self, *args, **kwargs)
96 """
97 # get loss and compute gradients
---> 98 loss = self.loss_and_grads(self.model, self.guide, *args, **kwargs)
99
100 # get active params
~/.pyenv/versions/3.5.2/lib/python3.5/site-packages/pyro/infer/elbo.py in loss_and_grads(self, model, guide, *args, **kwargs)
63 :rtype: float
64 """
---> 65 return self.which_elbo.loss_and_grads(model, guide, *args, **kwargs)
~/.pyenv/versions/3.5.2/lib/python3.5/site-packages/pyro/infer/trace_elbo.py in loss_and_grads(self, model, guide, *args, **kwargs)
133 trainable_params = set()
134 # grab a trace from the generator
--> 135 for weight, model_trace, guide_trace, log_r in self._get_traces(model, guide, *args, **kwargs):
136 elbo_particle = weight * 0
137 surrogate_elbo_particle = weight * 0
~/.pyenv/versions/3.5.2/lib/python3.5/site-packages/pyro/infer/trace_elbo.py in _get_traces(self, model, guide, *args, **kwargs)
78 continue
79
---> 80 guide_trace = poutine.trace(guide).get_trace(*args, **kwargs)
81 model_trace = poutine.trace(poutine.replay(model, guide_trace)).get_trace(*args, **kwargs)
82
~/.pyenv/versions/3.5.2/lib/python3.5/site-packages/pyro/poutine/trace_poutine.py in get_trace(self, *args, **kwargs)
161 Calls this poutine and returns its trace instead of the function's return value.
162 """
--> 163 self(*args, **kwargs)
164 return self.trace.copy()
165
~/.pyenv/versions/3.5.2/lib/python3.5/site-packages/pyro/poutine/trace_poutine.py in __call__(self, *args, **kwargs)
149 name="_INPUT", type="args",
150 args=args, kwargs=kwargs)
--> 151 ret = super(TracePoutine, self).__call__(*args, **kwargs)
152 self.trace.add_node("_RETURN", name="_RETURN", type="return", value=ret)
153 return ret
~/.pyenv/versions/3.5.2/lib/python3.5/site-packages/pyro/poutine/poutine.py in __call__(self, *args, **kwargs)
40 """
41 with self:
---> 42 return self.fn(*args, **kwargs)
43
44 def __enter__(self):
<ipython-input-102-c0cac78536c6> in guide(count_data)
46 log_lambda1_q_0 = Variable(torch.Tensor([np.log(15.0)]),requires_grad=True)
47 log_lambda2_q_0 = Variable(torch.Tensor([np.log(15.0)]),requires_grad=True)
---> 48 log_tau_q_0 = Variable(torch.IntTensor([np.log(36.0)]), requires_grad=True)
49
50 log_lambda1_q = pyro.param("log_lambda1_q", log_lambda1_q_0)
RuntimeError: tried to construct a tensor from a int sequence, but found an item of type numpy.float64 at index (0)
I don’t know what to do, so I’m happy if anyone will fix this program.
And also, is this definition of a deterministic function correct?