From 394ebe43687b045676ef007cd91b7f316e0978ea Mon Sep 17 00:00:00 2001
From: Ricardo Vieira
Date: Wed, 4 Dec 2024 13:02:44 +0100
Subject: [PATCH 1/2] Exponential scale default to 1.0

---
 pymc/distributions/continuous.py       | 11 +++++------
 tests/distributions/test_continuous.py | 21 ++++++++++++---------
 2 files changed, 17 insertions(+), 15 deletions(-)

diff --git a/pymc/distributions/continuous.py b/pymc/distributions/continuous.py
index 7a0d044cce..d5e69fc799 100644
--- a/pymc/distributions/continuous.py
+++ b/pymc/distributions/continuous.py
@@ -1373,13 +1373,12 @@ class Exponential(PositiveContinuous):
     rv_op = exponential
 
     @classmethod
-    def dist(cls, lam=None, scale=None, *args, **kwargs):
-        if lam is not None and scale is not None:
+    def dist(cls, lam=None, *, scale=None, **kwargs):
+        if lam is None and scale is None:
+            scale = 1.0
+        elif lam is not None and scale is not None:
             raise ValueError("Incompatible parametrization. Can't specify both lam and scale.")
-        elif lam is None and scale is None:
-            raise ValueError("Incompatible parametrization. Must specify either lam or scale.")
-
-        if scale is None:
+        elif lam is not None:
             scale = pt.reciprocal(lam)
 
         scale = pt.as_tensor_variable(scale)
diff --git a/tests/distributions/test_continuous.py b/tests/distributions/test_continuous.py
index 41504816ae..2864335e34 100644
--- a/tests/distributions/test_continuous.py
+++ b/tests/distributions/test_continuous.py
@@ -461,15 +461,6 @@ def test_exponential(self):
             lambda q, lam: st.expon.ppf(q, loc=0, scale=1 / lam),
         )
 
-    def test_exponential_wrong_arguments(self):
-        msg = "Incompatible parametrization. Can't specify both lam and scale"
-        with pytest.raises(ValueError, match=msg):
-            pm.Exponential.dist(lam=0.5, scale=5)
-
-        msg = "Incompatible parametrization. Must specify either lam or scale"
-        with pytest.raises(ValueError, match=msg):
-            pm.Exponential.dist()
-
     def test_laplace(self):
         check_logp(
             pm.Laplace,
@@ -2274,8 +2265,20 @@ class TestExponential(BaseTestDistributionRandom):
     checks_to_run = [
         "check_pymc_params_match_rv_op",
         "check_pymc_draws_match_reference",
+        "check_both_lam_scale_raises",
+        "check_default_scale",
     ]
 
+    def check_both_lam_scale_raises(self):
+        msg = "Incompatible parametrization. Can't specify both lam and scale"
+        with pytest.raises(ValueError, match=msg):
+            pm.Exponential.dist(lam=0.5, scale=5)
+
+    def check_default_scale(self):
+        rv = self.pymc_dist.dist()
+        [scale] = rv.owner.op.dist_params(rv.owner)
+        assert scale.data == 1.0
+
 
 class TestExponentialScale(BaseTestDistributionRandom):
     pymc_dist = pm.Exponential

From 6a45ef69b5dd730a835acdcc023abdea48c9c337 Mon Sep 17 00:00:00 2001
From: Ricardo Vieira
Date: Thu, 5 Dec 2024 09:48:18 +0100
Subject: [PATCH 2/2] Fix pre-commit

---
 pymc/step_methods/hmc/nuts.py | 24 ++++++++++++------------
 1 file changed, 12 insertions(+), 12 deletions(-)

diff --git a/pymc/step_methods/hmc/nuts.py b/pymc/step_methods/hmc/nuts.py
index cc29e0334a..770605f4b7 100644
--- a/pymc/step_methods/hmc/nuts.py
+++ b/pymc/step_methods/hmc/nuts.py
@@ -242,24 +242,24 @@ def competence(var, has_grad):
 
 class _Tree:
     __slots__ = (
-        "ndim",
-        "integrator",
-        "start",
-        "step_size",
         "Emax",
-        "start_energy",
-        "rng",
-        "left",
-        "right",
-        "proposal",
         "depth",
-        "log_size",
+        "floatX",
+        "integrator",
+        "left",
         "log_accept_sum",
+        "log_size",
+        "max_energy_change",
         "mean_tree_accept",
         "n_proposals",
+        "ndim",
         "p_sum",
-        "max_energy_change",
-        "floatX",
+        "proposal",
+        "right",
+        "rng",
+        "start",
+        "start_energy",
+        "step_size",
     )
 
     def __init__(