Ah no unfortunately. Here is the entire stack trace:
KeyError Traceback (most recent call last)
in ()
----> 1 losses = gp.util.train(gplvm, num_steps=4000)
/opt/conda/lib/python3.6/site-packages/pyro/contrib/gp/util.py in train(gpmodule, optimizer, loss_fn, retain_graph, num_steps)
175 losses = []
176 for i in range(num_steps):
--> 177 loss = optimizer.step(closure)
178 losses.append(torch_item(loss))
179 return losses
/opt/conda/lib/python3.6/site-packages/torch/optim/adam.py in step(self, closure)
56 loss = None
57 if closure is not None:
--> 58 loss = closure()
59
60 for group in self.param_groups:
/opt/conda/lib/python3.6/site-packages/pyro/contrib/gp/util.py in closure()
169 def closure():
170 optimizer.zero_grad()
--> 171 loss = loss_fn(gpmodule.model, gpmodule.guide)
172 torch_backward(loss, retain_graph)
173 return loss
/opt/conda/lib/python3.6/site-packages/pyro/infer/trace_elbo.py in differentiable_loss(self, model, guide, *args, **kwargs)
106 loss = 0.
107 surrogate_loss = 0.
--> 108 for model_trace, guide_trace in self._get_traces(model, guide, *args, **kwargs):
109 loss_particle, surrogate_loss_particle = self._differentiable_loss_particle(model_trace, guide_trace)
110 surrogate_loss += surrogate_loss_particle / self.num_particles
/opt/conda/lib/python3.6/site-packages/pyro/infer/elbo.py in _get_traces(self, model, guide, *args, **kwargs)
166 else:
167 for i in range(self.num_particles):
--> 168 yield self._get_trace(model, guide, *args, **kwargs)
/opt/conda/lib/python3.6/site-packages/pyro/infer/trace_mean_field_elbo.py in _get_trace(self, model, guide, *args, **kwargs)
65 def _get_trace(self, model, guide, *args, **kwargs):
66 model_trace, guide_trace = super(TraceMeanField_ELBO, self)._get_trace(
--> 67 model, guide, *args, **kwargs)
68 if is_validation_enabled():
69 _check_mean_field_requirement(model_trace, guide_trace)
/opt/conda/lib/python3.6/site-packages/pyro/infer/trace_elbo.py in _get_trace(self, model, guide, *args, **kwargs)
50 """
51 model_trace, guide_trace = get_importance_trace(
--> 52 "flat", self.max_plate_nesting, model, guide, *args, **kwargs)
53 if is_validation_enabled():
54 check_if_enumerated(guide_trace)
/opt/conda/lib/python3.6/site-packages/pyro/infer/enum.py in get_importance_trace(graph_type, max_plate_nesting, model, guide, *args, **kwargs)
40 against it.
41 """
--> 42 guide_trace = poutine.trace(guide, graph_type=graph_type).get_trace(*args, **kwargs)
43 model_trace = poutine.trace(poutine.replay(model, trace=guide_trace),
44 graph_type=graph_type).get_trace(*args, **kwargs)
/opt/conda/lib/python3.6/site-packages/pyro/poutine/trace_messenger.py in get_trace(self, *args, **kwargs)
167 Calls this poutine and returns its trace instead of the function's return value.
168 """
--> 169 self(*args, **kwargs)
170 return self.msngr.get_trace()
/opt/conda/lib/python3.6/site-packages/pyro/poutine/trace_messenger.py in __call__(self, *args, **kwargs)
145 args=args, kwargs=kwargs)
146 try:
--> 147 ret = self.fn(*args, **kwargs)
148 except (ValueError, RuntimeError):
149 exc_type, exc_value, traceback = sys.exc_info()
/opt/conda/lib/python3.6/site-packages/pyro/contrib/autoname/scoping.py in _fn(*args, **kwargs)
73 def _fn(*args, **kwargs):
74 with type(self)(prefix=self.prefix, inner=self.inner):
--> 75 return fn(*args, **kwargs)
76 return _fn
77
/opt/conda/lib/python3.6/site-packages/pyro/contrib/gp/models/sgpr.py in guide(self)
160 @autoname.scope(prefix="SGPR")
161 def guide(self):
--> 162 self.set_mode("guide")
163
164 def forward(self, Xnew, full_cov=False, noiseless=True):
/opt/conda/lib/python3.6/site-packages/pyro/contrib/gp/parameterized.py in set_mode(self, mode)
211 for module in self.modules():
212 if isinstance(module, Parameterized):
--> 213 module.mode = mode
214
215 @property
/opt/conda/lib/python3.6/site-packages/torch/nn/modules/module.py in __setattr__(self, name, value)
581 buffers[name] = value
582 else:
--> 583 object.__setattr__(self, name, value)
584
585 def __delattr__(self, name):
/opt/conda/lib/python3.6/site-packages/pyro/contrib/gp/parameterized.py in mode(self, mode)
226 self._register_param(name)
227 for name in self._priors:
--> 228 self._register_param(name)
229
230 def _sample_from_guide(self, name):
/opt/conda/lib/python3.6/site-packages/pyro/contrib/gp/parameterized.py in _register_param(self, name)
270 p_unconstrained = self._parameters["{}_unconstrained".format(name)]
271 p = transform_to(self._constraints[name])(p_unconstrained)
--> 272 self.register_buffer(name, p)
/opt/conda/lib/python3.6/site-packages/torch/nn/modules/module.py in register_buffer(self, name, tensor)
118 raise KeyError("buffer name can't be empty string \"\"")
119 elif hasattr(self, name) and name not in self._buffers:
--> 120 raise KeyError("attribute '{}' already exists".format(name))
121 elif tensor is not None and not isinstance(tensor, torch.Tensor):
122 raise TypeError("cannot assign '{}' object to buffer '{}' "
KeyError: "attribute 'X' already exists"