Skip to content

Commit b9337e6

Browse files
committed
Added alternative scale parameterization with unit tests to exponential
1 parent aae97a2 commit b9337e6

File tree

2 files changed

+43
-2
lines changed

2 files changed

+43
-2
lines changed

pymc/distributions/continuous.py

Lines changed: 11 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1347,13 +1347,22 @@ class Exponential(PositiveContinuous):
13471347
----------
13481348
lam : tensor_like of float
13491349
Rate or inverse scale (``lam`` > 0).
1350+
scale: tensor_like of float
1351+
Alternative parameter (scale = 1/lam).
13501352
"""
13511353
rv_op = exponential
13521354

13531355
@classmethod
def dist(cls, lam=None, scale=None, *args, **kwargs):
    """Create an Exponential RV from either a rate or a scale.

    Exactly one of ``lam`` and ``scale`` must be provided; they are
    related by ``scale = 1 / lam``.

    Parameters
    ----------
    lam : tensor_like of float, optional
        Rate or inverse scale (``lam`` > 0).
    scale : tensor_like of float, optional
        Alternative parametrization (``scale = 1 / lam``).

    Raises
    ------
    ValueError
        If both or neither of ``lam`` and ``scale`` are specified.
    """
    if lam is not None and scale is not None:
        raise ValueError("Incompatible parametrization. Can't specify both lam and scale.")
    elif lam is None and scale is None:
        raise ValueError("Incompatible parametrization. Must specify either lam or scale.")

    # The PyTensor exponential op is parametrized by mu = 1/lam, which is
    # exactly `scale`.  When `scale` is given, use it directly instead of
    # building 1/(1/scale): that keeps the symbolic graph one op smaller
    # and avoids a redundant reciprocal round-trip.
    if scale is None:
        scale = pt.reciprocal(lam)
    scale = pt.as_tensor_variable(floatX(scale))
    return super().dist([scale], **kwargs)
13591368

tests/distributions/test_continuous.py

Lines changed: 32 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -432,18 +432,43 @@ def test_exponential(self):
432432
{"lam": Rplus},
433433
lambda value, lam: st.expon.logpdf(value, 0, 1 / lam),
434434
)
435+
check_logp(
436+
pm.Exponential,
437+
Rplus,
438+
{"scale": Rplus},
439+
lambda value, scale: st.expon.logpdf(value, 0, scale),
440+
)
435441
check_logcdf(
436442
pm.Exponential,
437443
Rplus,
438444
{"lam": Rplus},
439445
lambda value, lam: st.expon.logcdf(value, 0, 1 / lam),
440446
)
447+
check_logcdf(
448+
pm.Exponential,
449+
Rplus,
450+
{"scale": Rplus},
451+
lambda value, scale: st.expon.logcdf(value, 0, scale),
452+
)
441453
check_icdf(
442454
pm.Exponential,
443455
{"lam": Rplus},
444456
lambda q, lam: st.expon.ppf(q, loc=0, scale=1 / lam),
445457
)
446458

459+
def test_exponential_wrong_arguments(self):
    """Specifying both or neither of ``lam``/``scale`` must raise ValueError."""
    model = pm.Model()

    # Over-specified: both parametrizations at once.
    with model:
        with pytest.raises(
            ValueError,
            match="Incompatible parametrization. Can't specify both lam and scale",
        ):
            pm.Exponential("x", lam=0.5, scale=5)

    # Under-specified: neither parametrization given.
    with model:
        with pytest.raises(
            ValueError,
            match="Incompatible parametrization. Must specify either lam or scale",
        ):
            pm.Exponential("x")
447472
def test_laplace(self):
448473
check_logp(
449474
pm.Laplace,
@@ -2091,6 +2116,13 @@ class TestExponential(BaseTestDistributionRandom):
20912116
]
20922117

20932118

2119+
class TestExponentialScale(BaseTestDistributionRandom):
    """Check that the ``scale`` parametrization maps straight onto the rv_op's ``mu``."""

    pymc_dist = pm.Exponential
    pymc_dist_params = {"scale": 5.0}
    # mu is the rv_op's scale parameter, so it equals the supplied scale verbatim.
    expected_rv_op_params = {"mu": 5.0}
    checks_to_run = ["check_pymc_params_match_rv_op"]
2124+
2125+
20942126
class TestCauchy(BaseTestDistributionRandom):
20952127
pymc_dist = pm.Cauchy
20962128
pymc_dist_params = {"alpha": 2.0, "beta": 5.0}

0 commit comments

Comments
 (0)