Hi again,

I found time to have another attempt, and I’ve managed to get MLE working for a 1D case with F = Hg+n (I’ve used x and y instead of g and F). This seems to be working well; however, I’d just like to know how one would handle the case of F = H*(g+n), as torch.matmul doesn’t work with tensor distributions. Do I have to manage the multiplication manually myself — should I propagate a dist.* to each element in the tensor? Can I run a pyro.sample of the distribution before I apply H and then use a condition? <- I tried this latter one but it didn’t work, code below.

Thanks for your advice so far. Truncating the Poisson distribution is very sensible for this use case, as there is a physical upper limit in the imaging system.

```
# %% Imports
# Borrowed from: https://pyro.ai/examples/mle_map.html
import pyro
from torch.distributions import constraints
import pyro.distributions as dist
import matplotlib.pyplot as plt
import torch.distributions.constraints as constraints
import math
import os
import torch
import torch.distributions.constraints as constraints
from pyro.optim import Adam
from pyro.infer import SVI, Trace_ELBO
import pyro.distributions as dist
from scipy.linalg import circulant
import numpy as np
pyro.set_rng_seed(42)
# pyro.enable_validation(True)
# %% Setup tensors: ground truth, initial guess, and circulant blur operator H
x_true = torch.tensor([0.1, 0.2, 0.3, 0.4, 0.5], dtype=torch.float64)
x_0 = torch.full((5,), 0.5, dtype=torch.float64)
H = torch.tensor(circulant([1, 0.5, 0, 0, 0.5]), dtype=torch.float64)
y = H @ x_true  # noise-free measurement of the true signal
y_0 = H @ x_0   # measurement of the initial guess
# %% Define forward model
# %% y = H*(x+n)
@pyro.condition(data={"Hx": y})
def model(y, H):
    """Forward model for y = H @ (x + n), n drawn via a Poisson site.

    BUG FIX: `pyro.condition` only binds data to *sample* sites with a
    matching name.  The original body computed `Hx` as a plain matmul and
    never registered a site called "Hx", so the condition was a silent
    no-op and the model had no observation likelihood at all.  The fix is
    to make "Hx" an explicit `pyro.sample` site; a narrow Normal around
    the deterministic H @ x_prior gives a tractable likelihood (a Delta
    would assign -inf log-prob whenever y != Hx exactly).
    """
    x = pyro.param("x", x_0, constraint=constraints.positive)
    # Latent noisy signal: elementwise Poisson with rate x.
    x_prior = pyro.sample("x_prior", dist.Poisson(x))
    Hx = torch.matmul(H, x_prior)
    # Observation site -- this is what @pyro.condition ties to y.
    # NOTE(review): sigma=0.01 is an arbitrary observation-noise scale;
    # tune it (or make it a model argument) for your imaging system.
    pyro.sample("Hx", dist.Normal(Hx, 0.01).to_event(1))
# %% y = (H*x)+n
def model(y, H):
    """Forward model for y = (H @ x) + n with Poisson observation noise.

    MLE target: find positive x such that Poisson(H @ x) best explains
    the observed counts y.  (Note: this redefines `model`, replacing the
    H*(x+n) variant above -- only this version is used by SVI below.)
    """
    # FIX: x_0 is already a tensor; re-wrapping it in torch.tensor()
    # makes a needless copy and raises a UserWarning in recent torch.
    # Pass it directly, consistent with the first model variant.
    x = pyro.param("x", x_0, constraint=constraints.positive)
    Hx = torch.matmul(H, x)
    # Observed site: ties the Poisson likelihood to the data y.
    pyro.sample("f", dist.Poisson(Hx), obs=y)
# %% MLE means we can ignore the guide function
def guide(y, H):
    """Empty guide: MLE has no latent posterior to approximate."""
# %%
# Configure the Adam optimiser and the SVI inference engine.
optimizer = Adam({"lr": 0.005})
svi = SVI(model, guide, optimizer, loss=Trace_ELBO())
# Running record of the ELBO loss, one entry per SVI step.
loss = []
# %% Begin training
pyro.clear_param_store()
n_steps = 10000
for i in range(n_steps):
    # Each step returns the ELBO loss estimate for this iteration.
    loss.append(svi.step(y, H))
    if i % 100 == 0:
        print(".", end="")  # lightweight progress indicator
print(f'Current guess: {pyro.param("x")}')
print(f"True x: {x_true}")
plt.plot(loss)
```