Hey!
The loss of my optimization algorithm doesn't decrease over iterations.
Do you know why?
def model(data):
    """Pyro generative model: draws latents I0, beta, T from uniform priors
    and scores `data` under a Normal likelihood centered on H(I0, beta, T).

    Args:
        data: 1-D torch.Tensor of observations (iterated over a pyro.plate).

    NOTE(review): `H` (the forward/solution function) and `noise` (the
    observation std) are not defined in this snippet — they must exist in the
    enclosing scope; verify `noise` is a positive tensor/float.
    """
    # Prior support bounds (hard-coded here; consider making them arguments).
    T_min, T_max = torch.tensor(5.0), torch.tensor(100.0)
    I0_min, I0_max = torch.tensor(0.0), torch.tensor(5.0)
    beta_min, beta_max = torch.tensor(0.0), torch.tensor(2.0)

    # Latent variables with uniform priors over the bounds above.
    I0 = pyro.sample("I0", dist.Uniform(I0_min, I0_max))
    beta = pyro.sample("beta", dist.Uniform(beta_min, beta_max))
    T = pyro.sample("T", dist.Uniform(T_min, T_max))

    # Observations are conditionally independent given the latents.
    with pyro.plate("data", data.size(0)):
        pyro.sample("obs", dist.Normal(H(I0, beta, T), noise), obs=data)
# MAP-style guide: AutoDelta fits one point estimate per latent site.
# NOTE(review): with Uniform priors, check that the initial point lies
# strictly inside (min, max) — a boundary init can yield an infinite/NaN
# ELBO; consider passing init_loc_fn to control initialization.
autoguide = pyro.infer.autoguide.AutoDelta(model)
def train(model, guide, lr=0.01, n_steps=201):
    """Fit `guide` to `model` with stochastic variational inference (SVI).

    Args:
        model: Pyro model callable taking the observed data tensor.
        guide: variational guide (e.g. an autoguide) for `model`.
        lr: Adam learning rate.
        n_steps: number of SVI gradient steps.

    BUG FIX: the original default lr=1e-13 was effectively zero, so Adam
    never moved the guide parameters and the ELBO loss stayed flat across
    iterations — that is the reason the loss "doesn't decrease". 0.01 is a
    reasonable starting point for Adam here; tune as needed.

    NOTE(review): `data` is read from the enclosing (global) scope inside
    the loop — consider passing it explicitly as an argument.
    """
    pyro.clear_param_store()  # start from fresh parameters on every call
    optimizer = pyro.optim.Adam({"lr": lr})
    svi = SVI(model, guide, optimizer, loss=Trace_ELBO())
    for step in range(n_steps):
        loss = svi.step(data)  # one gradient step; returns the ELBO loss
        if step % 50 == 0:
            print("[iter {}] loss: {:.4f}".format(step, loss))
# Run SVI with the default hyperparameters defined on `train`.
train(model, autoguide)