Skip to content

Implementation of Maxwell RV #440

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Closed
wants to merge 2 commits into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 3 additions & 0 deletions doc/library/tensor/random/basic.rst
Original file line number Diff line number Diff line change
Expand Up @@ -124,6 +124,9 @@ PyTensor can produce :class:`RandomVariable`\s that draw samples from many diffe
.. autoclass:: pytensor.tensor.random.basic.LogNormalRV
:members: __call__

.. autoclass:: pytensor.tensor.random.basic.MaxwellRV
:members: __call__

.. autoclass:: pytensor.tensor.random.basic.MultinomialRV
:members: __call__

Expand Down
1 change: 1 addition & 0 deletions pytensor/link/jax/dispatch/random.py
Original file line number Diff line number Diff line change
Expand Up @@ -146,6 +146,7 @@ def sample_fn(rng, size, dtype, *parameters):
@jax_sample_fn.register(aer.LogisticRV)
@jax_sample_fn.register(aer.NormalRV)
@jax_sample_fn.register(aer.StandardNormalRV)
@jax_sample_fn.register(aer.MaxwellRV)
def jax_sample_fn_loc_scale(op):
"""JAX implementation of random variables in the loc-scale families.

Expand Down
1 change: 1 addition & 0 deletions pytensor/link/numba/dispatch/random.py
Original file line number Diff line number Diff line change
Expand Up @@ -194,6 +194,7 @@ def {sized_fn_name}({random_fn_input_names}):
@numba_funcify.register(aer.BetaRV)
@numba_funcify.register(aer.NormalRV)
@numba_funcify.register(aer.LogNormalRV)
@numba_funcify.register(aer.MaxwellRV)
@numba_funcify.register(aer.GammaRV)
@numba_funcify.register(aer.ChiSquareRV)
@numba_funcify.register(aer.ParetoRV)
Expand Down
58 changes: 58 additions & 0 deletions pytensor/tensor/random/basic.py
Original file line number Diff line number Diff line change
Expand Up @@ -419,6 +419,63 @@ def __call__(self, mean=0.0, sigma=1.0, size=None, **kwargs):
lognormal = LogNormalRV()


class MaxwellRV(ScipyRandomVariable):
    r"""A Maxwellian continuous random variable.

    The probability density function for `maxwell` in terms of its parameters :math:`\mu`
    and :math:`\sigma` is:

    .. math::

        f(x; \mu, \sigma) = \sqrt{\frac{2}{\pi}}\frac{(x-\mu)^2 e^{-(x-\mu)^2/(2\sigma^2)}}{\sigma^3}

    for :math:`x \geq \mu` and :math:`\sigma > 0`

    """
    # `RandomVariable` Op metadata.
    name = "maxwell"
    # Each draw is a scalar (0-dimensional support).
    ndim_supp = 0
    # Both parameters (loc, scale) are scalars.
    ndims_params = [0, 0]
    # Output dtype follows PyTensor's configured float precision.
    dtype = "floatX"
    _print_name = ("Maxwell", "\\operatorname{Maxwell}")

    def __call__(self, loc, scale, size=None, **kwargs):
        r"""Draw samples from a Maxwell distribution.

        Signature
        ---------

        `(), () -> ()`

        Parameters
        ----------
        loc
            Location parameter :math:`\mu` of the distribution.
        scale
            Scale parameter :math:`\sigma` of the distribution. Must be
            positive.
        size
            Sample shape. If the given size is, e.g. `(m, n, k)` then `m * n * k`
            independent, identically distributed random variables are
            returned. Default is `None` in which case a single random variable
            is returned.

        """
        return super().__call__(loc, scale, size=size, **kwargs)

    @classmethod
    def rng_fn_scipy(
        cls,
        rng: Union[np.random.Generator, np.random.RandomState],
        loc: Union[np.ndarray, float],
        scale: Union[np.ndarray, float],
        size: Optional[Union[List[int], int]],
    ) -> np.ndarray:
        # Delegate sampling to SciPy, threading the PyTensor-managed RNG
        # through `random_state` so draws are reproducible.
        return stats.maxwell.rvs(loc=loc, scale=scale, size=size, random_state=rng)


maxwell = MaxwellRV()


class GammaRV(ScipyRandomVariable):
r"""A gamma continuous random variable.

Expand Down Expand Up @@ -2157,6 +2214,7 @@ def permutation(x, **kwargs):
"lognormal",
"halfnormal",
"normal",
"maxwell",
"beta",
"triangular",
"uniform",
Expand Down
16 changes: 16 additions & 0 deletions tests/link/jax/test_random.py
Original file line number Diff line number Diff line change
Expand Up @@ -213,6 +213,22 @@ def test_random_updates_input_storage_order():
"lognorm",
lambda mu, sigma: (sigma, 0, np.exp(mu)),
),
(
aer.maxwell,
[
set_test_value(
at.lvector(),
np.array([1, 2], dtype=np.int64),
),
set_test_value(
at.dscalar(),
np.array(1.0, dtype=np.float64),
),
],
(2,),
"maxwell",
lambda *args: args,
),
(
aer.normal,
[
Expand Down
16 changes: 16 additions & 0 deletions tests/link/numba/test_random.py
Original file line number Diff line number Diff line change
Expand Up @@ -85,6 +85,22 @@
],
at.as_tensor([3, 2]),
),
(
aer.maxwell,
[
set_test_value(
at.dvector(),
np.array([1.0, 2.0], dtype=np.float64),
),
set_test_value(
at.dscalar(),
np.array(1.0, dtype=np.float64),
),
],
(2,),
"maxwell",
lambda *args: args,
),
pytest.param(
aer.pareto,
[
Expand Down
19 changes: 19 additions & 0 deletions tests/tensor/random/test_basic.py
Original file line number Diff line number Diff line change
Expand Up @@ -40,6 +40,7 @@
laplace,
logistic,
lognormal,
maxwell,
multinomial,
multivariate_normal,
nbinom,
Expand Down Expand Up @@ -338,6 +339,24 @@ def test_lognormal_samples(mean, sigma, size):
compare_sample_values(lognormal, mean, sigma, size=size)


@pytest.mark.parametrize(
    "loc, sigma, size",
    [
        # Scalar parameters, default size (single draw).
        (np.array(0, dtype=config.floatX), np.array(1, dtype=config.floatX), None),
        # Scalar parameters with an explicit empty size.
        (np.array(0, dtype=config.floatX), np.array(1, dtype=config.floatX), []),
        # Broadcastable (1, 2) loc against a scalar scale.
        (
            np.full((1, 2), 0, dtype=config.floatX),
            np.array(1, dtype=config.floatX),
            None,
        ),
    ],
)
def test_maxwell_samples(loc, sigma, size):
    # Compare draws from the PyTensor `maxwell` RV against
    # `scipy.stats.maxwell` with the same parameters.
    compare_sample_values(
        maxwell, loc, sigma, size=size, test_fn=fixed_scipy_rvs("maxwell")
    )


@pytest.mark.parametrize(
"a, b, size",
[
Expand Down