diff --git a/pytensor/tensor/basic.py b/pytensor/tensor/basic.py
index e8964ccdf9..758b00803f 100644
--- a/pytensor/tensor/basic.py
+++ b/pytensor/tensor/basic.py
@@ -757,7 +757,34 @@ def cast(x, dtype: str | np.dtype) -> TensorVariable:
 
 @scalar_elemwise
 def switch(cond, ift, iff):
-    """if cond then ift else iff"""
+    """
+    Conditionally select elements from two tensors based on a condition tensor.
+
+    This op is similar to NumPy's `np.where` function.
+
+    Parameters
+    ----------
+    cond : TensorVariable
+        A boolean-type tensor determining which output value to choose.
+        Should be broadcastable to the shapes of `ift` and `iff`.
+    ift : TensorVariable
+        Values selected at `True` elements of `cond`.
+    iff : TensorVariable
+        Values selected at `False` elements of `cond`.
+
+    Examples
+    --------
+    This example demonstrates how `switch` can be used in PyMC to model a
+    categorical variable.
+
+    .. code:: python
+
+        import pymc as pm
+
+        with pm.Model():
+            x = pm.Categorical('x', [0.1, 0.9])
+            y = pm.Bernoulli('y', p=pm.math.switch(x, 0.9, 0.1), shape=10)
+    """
 
 
 where = switch
diff --git a/pytensor/tensor/extra_ops.py b/pytensor/tensor/extra_ops.py
index 06a82744b2..4fc7f385b9 100644
--- a/pytensor/tensor/extra_ops.py
+++ b/pytensor/tensor/extra_ops.py
@@ -410,9 +410,23 @@ def cumsum(x, axis=None):
     axis
         The axis along which the cumulative sum is computed.
         The default (None) is to compute the cumsum over the flattened array.
 
+    Example
+    -------
+    Usage in PyMC:
+
+    .. code-block:: python
+
+        import pymc as pm
+
+        with pm.Model() as model:
+            x0 = pm.Normal('x0')
+            x = pm.Normal('x', mu=0, sigma=1, shape=10)
+            # Gaussian random walk
+            grw = pm.Deterministic('grw', x0 + pm.math.cumsum(x))
+
     .. versionadded:: 0.7
 
     """
     return CumOp(axis=axis, mode="add")(x)
 
@@ -430,7 +444,20 @@ def cumprod(x, axis=None):
     axis
         The axis along which the cumulative product is computed.
         The default (None) is to compute the `cumprod` over the flattened array.
 
+    Example
+    -------
+    Usage in PyMC:
+
+    .. code-block:: python
+
+        import pymc as pm
+
+        with pm.Model() as model:
+            x = pm.Normal('x', shape=(10, 3))
+            # Product of x
+            prod_x = pm.Deterministic('prod_x', pm.math.cumprod(x, axis=0))
+
     .. versionadded:: 0.7
 
     """
diff --git a/pytensor/tensor/math.py b/pytensor/tensor/math.py
index d515d51c3a..ec3a20954d 100644
--- a/pytensor/tensor/math.py
+++ b/pytensor/tensor/math.py
@@ -1945,6 +1945,18 @@ def dot(l, r):
     """Return a symbolic dot product.
 
     This is designed to work with both sparse and dense tensors types.
+
+    Example usage with PyMC:
+
+    .. code:: python
+
+        import pymc as pm
+        import pytensor.tensor as pt
+
+        with pm.Model() as model:
+            x = pm.Normal('x', mu=0, sigma=1, shape=2)
+            y = pm.Normal('y', mu=0, sigma=1, shape=2)
+            z = pt.dot(x, y)
     """
 
     if not isinstance(l, Variable):
@@ -2664,6 +2676,17 @@ def prod(
         If this is set to True, the axes which are reduced are left in the
         result as dimensions with size one. With this option, the result
         will broadcast correctly against the original tensor.
+
+    Example
+    -------
+
+    .. code-block:: python
+
+        import pymc as pm
+
+        with pm.Model() as model:
+            n = pm.Poisson('n', 1, shape=(2, 3))
+            prod_n = pm.Deterministic('prod_n', pm.math.prod(n, axis=0))
 
     """
 
diff --git a/pytensor/tensor/special.py b/pytensor/tensor/special.py
index a2f02fabd8..b82988fb77 100644
--- a/pytensor/tensor/special.py
+++ b/pytensor/tensor/special.py
@@ -485,6 +485,35 @@ def c_code_cache_version():
 
 
 def softmax(c, axis=None):
+    """
+    Compute the softmax of a vector along a specified axis.
+
+    Parameters
+    ----------
+    c : TensorVariable
+        The input tensor.
+    axis : int or None
+        The axis along which to compute the softmax.
+
+    Returns
+    -------
+    TensorVariable
+        The softmax of the input tensor along the specified axis.
+
+    Examples
+    --------
+    In PyMC, you can use this function to compute a softmax over a vector of
+    probabilities representing the likelihood of each class in a multiclass
+    classification problem. Here is an example::
+
+        import pymc as pm
+
+        with pm.Model() as model:
+            weights = pm.Gamma('weights', 1, 1, shape=3)
+            softmax_prob = pm.math.softmax(weights)
+            outcome = pm.Categorical('outcome', p=softmax_prob)
+
+    """
     c = as_tensor_variable(c)
     return Softmax(axis=axis)(c)
 