Describe the issue:
The same model raises an error depending on whether mu is wrapped in a Deterministic. See the examples below.
Reproducible code example:
import numpy as np
import pymc as pm
import pytensor.tensor as pt
rng = np.random.default_rng(1234)
n1, n2 = 30, 70
y = np.concatenate([np.zeros(n1), rng.poisson(3, size=n2)]).astype(int)
coords={"__obs__": np.arange(n1 + n2)}
# Works
with pm.Model(coords=coords) as model:
a = pm.Normal("Intercept", mu=0, sigma=2.5)
psi = pm.Beta("psi", alpha=2, beta=2)
pm.HurdlePoisson("y", mu=pt.exp(a), psi=psi, observed=y, dims="__obs__")
# Raises error
with pm.Model(coords=coords) as model:
a = pm.Normal("Intercept", mu=0, sigma=2.5)
mu = pm.Deterministic("mu", pt.exp(a))
psi = pm.Beta("psi", alpha=2, beta=2)
pm.HurdlePoisson("y", mu=mu, psi=psi, observed=y, dims="__obs__")
Error message:
---------------------------------------------------------------------------
ValueError Traceback (most recent call last)
Cell In[3], line 5
3 mu = pm.Deterministic("mu", pt.exp(a))
4 psi = pm.Beta("psi", alpha=2, beta=2)
----> 5 pm.HurdlePoisson("y", mu=mu, psi=psi, observed=y, dims="__obs__")
File ~/anaconda3/envs/bambi-dev/lib/python3.11/site-packages/pymc/distributions/mixture.py:866, in HurdlePoisson.__new__(cls, name, psi, mu, **kwargs)
865 def __new__(cls, name, psi, mu, **kwargs):
--> 866 return _hurdle_mixture(
867 name=name, nonzero_p=psi, nonzero_dist=Poisson.dist(mu=mu), dtype="int", **kwargs
868 )
File ~/anaconda3/envs/bambi-dev/lib/python3.11/site-packages/pymc/distributions/mixture.py:822, in _hurdle_mixture(name, nonzero_p, nonzero_dist, dtype, **kwargs)
818 nonzero_p = pt.as_tensor_variable(nonzero_p)
819 weights = pt.stack([1 - nonzero_p, nonzero_p], axis=-1)
820 comp_dists = [
821 DiracDelta.dist(zero),
--> 822 Truncated.dist(nonzero_dist, lower=lower),
823 ]
825 if name is not None:
826 return Mixture(name, weights, comp_dists, **kwargs)
File ~/anaconda3/envs/bambi-dev/lib/python3.11/site-packages/pymc/distributions/truncated.py:316, in Truncated.dist(cls, dist, lower, upper, max_n_steps, **kwargs)
313 if lower is None and upper is None:
314 raise ValueError("lower and upper cannot both be None")
--> 316 return super().dist([dist, lower, upper, max_n_steps], **kwargs)
File ~/anaconda3/envs/bambi-dev/lib/python3.11/site-packages/pymc/distributions/distribution.py:633, in Distribution.dist(cls, dist_params, shape, **kwargs)
631 ndim_supp = getattr(cls.rv_type, "ndim_supp", None)
632 if ndim_supp is None:
--> 633 ndim_supp = cls.rv_op(*dist_params, **kwargs).owner.op.ndim_supp
634 create_size = find_size(shape=shape, size=size, ndim_supp=ndim_supp)
635 rv_out = cls.rv_op(*dist_params, size=create_size, **kwargs)
File ~/anaconda3/envs/bambi-dev/lib/python3.11/site-packages/pymc/distributions/truncated.py:183, in TruncatedRV.rv_op(cls, dist, lower, upper, max_n_steps, size)
179 return graph_inputs.index(rng)
181 next_rngs = [next_rng for rng, next_rng in sorted(updates.items(), key=sort_updates)]
--> 183 return TruncatedRV(
184 base_rv_op=dist.owner.op,
185 inputs=graph_inputs,
186 outputs=[truncated_rv, *next_rngs],
187 ndim_supp=0,
188 max_n_steps=max_n_steps,
189 )(*graph_inputs)
File ~/anaconda3/envs/bambi-dev/lib/python3.11/site-packages/pymc/distributions/truncated.py:77, in TruncatedRV.__init__(self, base_rv_op, max_n_steps, *args, **kwargs)
72 self.max_n_steps = max_n_steps
73 self._print_name = (
74 f"Truncated{self.base_rv_op._print_name[0]}",
75 f"\\operatorname{{{self.base_rv_op._print_name[1]}}}",
76 )
---> 77 super().__init__(*args, **kwargs)
File ~/anaconda3/envs/bambi-dev/lib/python3.11/site-packages/pymc/distributions/distribution.py:422, in SymbolicRandomVariable.__init__(self, *args, **kwargs)
420 kwargs.setdefault("inline", True)
421 kwargs.setdefault("strict", True)
--> 422 super().__init__(*args, **kwargs)
File ~/anaconda3/envs/bambi-dev/lib/python3.11/site-packages/pytensor/compile/builders.py:423, in OpFromGraph.__init__(self, inputs, outputs, inline, lop_overrides, grad_overrides, rop_overrides, connection_pattern, strict, name, **kwargs)
418 self.fgraph, self.shared_inputs, _, _ = construct_nominal_fgraph(
419 inputs, outputs
420 )
422 if strict and self.shared_inputs:
--> 423 raise ValueError(
424 "All variables needed to compute inner-graph must be provided as inputs under strict=True. "
425 f"The inner-graph implicitly depends on the following shared variables {self.shared_inputs}"
426 )
428 self.kwargs = kwargs
429 self.input_types = [inp.type for inp in inputs]
ValueError: All variables needed to compute inner-graph must be provided as inputs under strict=True. The inner-graph implicitly depends on the following shared variables [RandomGeneratorSharedVariable(<Generator(PCG64) at 0x71D9FE9BE420>)]
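From the traceback, the failure originates in Truncated.dist when _hurdle_mixture wraps the Poisson component. If it helps to isolate the problem, here is a smaller snippet that I would expect to hit the same ValueError (an assumption based on the traceback, not verified separately):

# Assumed minimal trigger (based on the traceback, not verified independently):
# build the same Truncated Poisson component that _hurdle_mixture builds,
# with mu coming from a registered Deterministic.
with pm.Model() as m:
    a = pm.Normal("Intercept", mu=0, sigma=2.5)
    mu = pm.Deterministic("mu", pt.exp(a))
    trunc = pm.Truncated.dist(pm.Poisson.dist(mu=mu), lower=1)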
PyMC version information:
PyMC = 5.14.0
PyTensor = 2.20.0
Context for the issue:
I'm trying to finalize a large refactor in Bambi, and some tests involving HurdlePoisson failed, which is how I found this issue.
The same thing happened with HurdleNegativeBinomial.
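For reference, a sketch of the HurdleNegativeBinomial case, assuming the same pattern triggers it (the HalfNormal prior on alpha is just for illustration):

# Same pattern with HurdleNegativeBinomial (sketch; the alpha prior is an
# arbitrary choice for illustration).
with pm.Model(coords=coords) as model:
    a = pm.Normal("Intercept", mu=0, sigma=2.5)
    mu = pm.Deterministic("mu", pt.exp(a))
    psi = pm.Beta("psi", alpha=2, beta=2)
    alpha = pm.HalfNormal("alpha", sigma=1)
    pm.HurdleNegativeBinomial("y", mu=mu, alpha=alpha, psi=psi, observed=y, dims="__obs__")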