Skip to content
6 changes: 3 additions & 3 deletions pymc/distributions/timeseries.py
Original file line number Diff line number Diff line change
Expand Up @@ -905,11 +905,11 @@ class EulerMaruyama(Distribution):

Parameters
----------
dt: float
dt : float
time step of discretization
sde_fn: callable
sde_fn : callable
function returning the drift and diffusion coefficients of SDE
sde_pars: tuple
sde_pars : tuple
parameters of the SDE, passed as ``*args`` to ``sde_fn``
init_dist : unnamed distribution, optional
Scalar distribution for initial values. Unnamed refers to distributions created with
Expand Down
22 changes: 14 additions & 8 deletions pymc/tests/distributions/test_timeseries.py
Original file line number Diff line number Diff line change
Expand Up @@ -835,8 +835,11 @@ class TestEulerMaruyama:
@pytest.mark.parametrize("batched_param", [1, 2])
@pytest.mark.parametrize("explicit_shape", (True, False))
def test_batched_size(self, explicit_shape, batched_param):
RANDOM_SEED = 42
numpy_rng = np.random.default_rng(RANDOM_SEED)

steps, batch_size = 100, 5
param_val = np.square(np.random.randn(batch_size))
param_val = np.square(numpy_rng.standard_normal(batch_size))
if explicit_shape:
kwargs = {"shape": (batch_size, steps)}
else:
Expand All @@ -853,9 +856,9 @@ def sde_fn(x, k, d, s):
"y", dt=0.02, sde_fn=sde_fn, sde_pars=sde_pars, init_dist=init_dist, **kwargs
)

y_eval = draw(y, draws=2)
y_eval = draw(y, draws=2, random_seed=RANDOM_SEED)
assert y_eval[0].shape == (batch_size, steps)
assert not np.any(np.isclose(y_eval[0], y_eval[1]))
assert np.any(~np.isclose(y_eval[0], y_eval[1]))
Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

np.any(~np.isclose([1, 2, 3, 4, 5], [1, 2, 3, 4, 5]))   # -> False since all the same
np.any(~np.isclose([5, 4, 3, 2, 1], [1, 2, 3, 4, 5]))   # -> True since at least one different 
np.any(~np.isclose([1, 2, 3, 4, 5], [6, 7, 8, 9, 10]))  # -> True since all different 

Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Do you reckon this check is better, than checking all are different?

Copy link
Contributor Author

@williambdean williambdean Nov 18, 2022

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Surprisingly, I had the previous test (checking that all values are different) fail on my end. Not sure what the chances of that happening are, but it did: the two arrays were the same in at least one spot. In the case of such an intersection between the two ys, I think this check better captures that they are different arrays.

Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

The test should now be deterministic no?

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Yes. There are two changes:

  1. Deterministic because of the random seed
  2. Change in the assert which, I thought, better checks that the two draws are different based on the logic described. If you disagree, I will change back

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

It still passes under the seed with the previous assert as well — though any test can be catered to pass under a fixed seed.

Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I don't have a strong preference, but all values being different is certainly stronger evidence that it's being random :D

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Fair. 😃


if explicit_shape:
kwargs["shape"] = steps
Expand All @@ -873,7 +876,7 @@ def sde_fn(x, k, d, s):
**kwargs,
)

t0_init = t0.initial_point()
t0_init = t0.initial_point(random_seed=RANDOM_SEED)
t1_init = {f"y_{i}": t0_init["y"][i] for i in range(batch_size)}
np.testing.assert_allclose(
t0.compile_logp()(t0_init),
Expand Down Expand Up @@ -919,17 +922,20 @@ def test_linear_model(self):
N = 300
dt = 1e-1

RANDOM_SEED = 42
numpy_rng = np.random.default_rng(RANDOM_SEED)

def _gen_sde_path(sde, pars, dt, n, x0):
xs = [x0]
wt = np.random.normal(size=(n,) if isinstance(x0, float) else (n, x0.size))
wt = numpy_rng.normal(size=(n,) if isinstance(x0, float) else (n, x0.size))
for i in range(n):
f, g = sde(xs[-1], *pars)
xs.append(xs[-1] + f * dt + np.sqrt(dt) * g * wt[i])
return np.array(xs)

sde = lambda x, lam: (lam * x, sig2)
x = floatX(_gen_sde_path(sde, (lam,), dt, N, 5.0))
z = x + np.random.randn(x.size) * sig2
z = x + numpy_rng.standard_normal(size=x.size) * sig2
# build model
with Model() as model:
lamh = Flat("lamh")
Expand All @@ -939,9 +945,9 @@ def _gen_sde_path(sde, pars, dt, n, x0):
Normal("zh", mu=xh, sigma=sig2, observed=z)
# invert
with model:
trace = sample(chains=1)
trace = sample(chains=1, random_seed=RANDOM_SEED)

ppc = sample_posterior_predictive(trace, model=model)
ppc = sample_posterior_predictive(trace, model=model, random_seed=RANDOM_SEED)

p95 = [2.5, 97.5]
lo, hi = np.percentile(trace.posterior["lamh"], p95, axis=[0, 1])
Expand Down