Hi, I’m trying a simple example of SVI and would like to understand how I can affect its loss function.
I’m trying to make change_loss
affect the loss function in some way — e.g. if it equals 0.3, make the loss 30% larger; if it’s -0.2, make the loss 20% smaller.
Is there a way I can do so? I might be doing something totally wrong here, but here is my example:
import numpy as np
import torch
from torch.distributions import constraints
import pyro
import pyro.infer
import pyro.optim as optim
import pyro.distributions as dist
# Reproducibility: fix every RNG that the script draws from.
dtype = torch.float32
torch.manual_seed(101)
pyro.set_rng_seed(101)
np.random.seed(12)

# Synthetic data set: N observations drawn from Normal(mu_, sigma_).
N = 10
mu_ = 10.
sigma_ = 2.
X = np.random.normal(mu_, sigma_, N).astype(np.float32)
def model(X):
    """Generative model: X[i] ~ Normal(mu, 1/sqrt(tau)) with a Uniform prior on mu.

    Fix for the original defect: ``change_loss`` was drawn but never used.
    ``pyro.poutine.scale`` multiplies every log-probability term inside its
    context by the given factor, so wrapping the likelihood in
    ``scale=1 + change_loss`` makes the observed-data contribution to the
    ELBO ~30% larger when change_loss == 0.3 and ~20% smaller when it is
    -0.2.

    NOTE(review): this rescales only the likelihood term, not the
    prior/guide terms, so the *total* SVI loss does not change by exactly
    that percentage — confirm this matches the intent.
    """
    # Random per-call weight in [-0.5, 0.5]; .item() makes it a plain float.
    change_loss = torch.FloatTensor(1).uniform_(-0.5, 0.5)

    # Uniform prior over the unknown mean.
    mu = pyro.sample("mu", dist.Uniform(torch.tensor(-25.), torch.tensor(+25.)))
    # Fixed precision tau = 0.25, i.e. sd = 2 — matches sigma_ used to generate X.
    tau = torch.tensor(1 / 4)

    # Scale the observed-data log-density by (1 + change_loss).
    with pyro.poutine.scale(scale=1.0 + change_loss.item()):
        with pyro.plate("observed_data", size=len(X)):
            sample = pyro.sample("gaussian_data", dist.Normal(mu, 1 / torch.sqrt(tau)), obs=X)
    return sample
def guide(X):
    """Variational family: a Normal distribution over the latent ``mu``.

    Learnable parameters: ``guide_mu_mean`` (location) and
    ``guide_mu_scale`` (scale, constrained positive).
    """
    loc_init = torch.randn((1))
    scale_init = torch.tensor(0.001)
    loc = pyro.param("guide_mu_mean", loc_init)
    scale = pyro.param("guide_mu_scale", scale_init, constraint=constraints.positive)
    pyro.sample("mu", dist.Normal(loc, scale))
# Start from a clean parameter store, then optimize the ELBO with Adam.
pyro.clear_param_store()

optimizer = optim.Adam({"lr": 0.003, "betas": (0.95, 0.999)})
svi = pyro.infer.SVI(model=model,
                     guide=guide,
                     optim=optimizer,
                     loss=pyro.infer.Trace_ELBO())

X_ = torch.tensor(X, dtype=dtype)
losses = []
for step in range(10000):
    step_loss = svi.step(X_)
    losses.append(step_loss)
    # Progress report every 100 steps.
    if step % 100 == 0:
        print(step, "\t", step_loss)