
Commit 39bda72

Remove deprecation warning on softmax functions
1 parent 60bc368 commit 39bda72

File tree: 4 files changed, +8 −38 lines

- pytensor/tensor/special.py
- tests/d3viz/models.py
- tests/tensor/rewriting/test_special.py
- tests/test_rop.py

pytensor/tensor/special.py

Lines changed: 2 additions & 34 deletions
```diff
@@ -1,4 +1,3 @@
-import warnings
 from textwrap import dedent
 
 import numpy as np
@@ -483,25 +482,8 @@ def c_code_cache_version():
         return (4,)
 
 
-UNSET_AXIS = object()
-
-
-def softmax(c, axis=UNSET_AXIS):
-    if axis is UNSET_AXIS:
-        warnings.warn(
-            "Softmax now accepts an axis argument. For backwards-compatibility it defaults to -1 when not specified, "
-            "but in the future the default will be `None`.\nTo suppress this warning specify axis explicitly.",
-            FutureWarning,
-        )
-        axis = -1
-
+def softmax(c, axis=None):
     c = as_tensor_variable(c)
-    if c.ndim == 1:
-        # TODO: Create Specific warning type that can be suppressed?
-        warnings.warn(
-            "Softmax no longer converts a vector to a row matrix.",
-            UserWarning,
-        )
     return Softmax(axis=axis)(c)
 
 
@@ -749,22 +731,8 @@ def c_code_cache_version():
         return (1,)
 
 
-def log_softmax(c, axis=UNSET_AXIS):
-    if axis is UNSET_AXIS:
-        warnings.warn(
-            "logsoftmax now accepts an axis argument. For backwards-compatibility it defaults to -1 when not specified, "
-            "but in the future the default will be `None`.\nTo suppress this warning specify axis explicitly.",
-            FutureWarning,
-        )
-        axis = -1
-
+def log_softmax(c, axis=None):
     c = as_tensor_variable(c)
-    if c.ndim == 1:
-        # TODO: Create Specific warning type that can be suppressed?
-        warnings.warn(
-            "Softmax no longer converts a vector to a row matrix.",
-            UserWarning,
-        )
     return LogSoftmax(axis=axis)(c)
 
 
```

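For context, a minimal usage sketch of the changed signature (my illustration, not part of the commit): with the FutureWarning gone, `axis=None` is simply the default, which I read as the softmax being normalized over all elements, as the removed warning's wording implied; callers who want the previous row-wise behaviour pass `axis=-1` explicitly, exactly as the test updates below do.

```python
# Minimal sketch (not part of this commit). Assumes softmax(x, axis=None)
# normalizes over all elements, while axis=-1 reproduces the old row-wise default.
import numpy as np
import pytensor
from pytensor.tensor import matrix
from pytensor.tensor.special import softmax

x = matrix("x")

# Explicit axis: softmax over the last axis (the behaviour the warning used to default to).
f_rowwise = pytensor.function([x], softmax(x, axis=-1))

# New default axis=None: softmax taken over every element of the input.
f_flat = pytensor.function([x], softmax(x))

data = np.random.default_rng(0).normal(size=(3, 4)).astype(pytensor.config.floatX)
print(f_rowwise(data).sum(axis=-1))  # each row sums to 1
print(f_flat(data).sum())            # the whole array sums to 1
```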
tests/d3viz/models.py

Lines changed: 1 addition & 1 deletion
```diff
@@ -25,7 +25,7 @@ def __init__(self, nfeatures=100, noutputs=10, nhiddens=50, rng=None):
 
         wy = shared(self.rng.normal(0, 1, (nhiddens, noutputs)))
         by = shared(np.zeros(noutputs), borrow=True)
-        y = softmax(at.dot(h, wy) + by)
+        y = softmax(at.dot(h, wy) + by, axis=-1)
         self.inputs = [x]
         self.outputs = [y]
 
```
tests/tensor/rewriting/test_special.py

Lines changed: 2 additions & 2 deletions
```diff
@@ -72,7 +72,7 @@ def test_logsoftmax_grad_true_div_elemwise(self):
         """
 
         x = matrix("x")
-        y = log(softmax(x))
+        y = log(softmax(x, axis=-1))
         g = pytensor.tensor.grad(y.sum(), x)
 
         softmax_grad_node = g.owner
@@ -96,7 +96,7 @@ def test_log_softmax_stabilization():
     mode = mode.including("local_log_softmax", "specialize")
 
     x = matrix()
-    y = softmax(x)
+    y = softmax(x, axis=-1)
     z = log(y)
 
     fgraph = FunctionGraph([x], [z])
```

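As a rough illustration of what `test_log_softmax_stabilization` exercises (my sketch, not from the commit): `log(softmax(x, axis=-1))` is expected to be replaced by the numerically stable log-softmax form once the `local_log_softmax` rewrite runs; whether it fires outside the test depends on the compilation mode.

```python
# Rough sketch (not from the commit): look for a LogSoftmax node after compilation.
# Whether the local_log_softmax rewrite fires depends on the compilation mode.
import pytensor
from pytensor.tensor import log, matrix
from pytensor.tensor.special import LogSoftmax, softmax

x = matrix("x")
f = pytensor.function([x], log(softmax(x, axis=-1)))

ops = [node.op for node in f.maker.fgraph.toposort()]
print(any(isinstance(op, LogSoftmax) for op in ops))
```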
tests/test_rop.py

Lines changed: 3 additions & 1 deletion
```diff
@@ -272,7 +272,9 @@ def test_sum(self):
         self.check_mat_rop_lop(self.mx.sum(axis=1), (self.mat_in_shape[0],))
 
     def test_softmax(self):
-        self.check_rop_lop(pytensor.tensor.special.softmax(self.x), self.in_shape)
+        self.check_rop_lop(
+            pytensor.tensor.special.softmax(self.x, axis=-1), self.in_shape
+        )
 
     def test_alloc(self):
         # Alloc of the sum of x into a vector
```

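And a hedged sketch of the kind of R-operator call the updated `test_softmax` exercises (my illustration, not from the commit; it assumes `pytensor.gradient.Rop` with the usual `(f, wrt, eval_points)` signature that `check_rop_lop` is built around):

```python
# Hedged sketch (not from the commit): a Jacobian-vector product through softmax
# with an explicit axis, roughly what check_rop_lop verifies numerically.
import numpy as np
import pytensor
from pytensor.gradient import Rop
from pytensor.tensor import vector
from pytensor.tensor.special import softmax

x = vector("x")
v = vector("v")  # direction vector for the R-operator
y = softmax(x, axis=-1)
jv = Rop(y, x, v)  # Jacobian of y w.r.t. x, applied to v

f = pytensor.function([x, v], jv)
floatX = pytensor.config.floatX
print(f(np.array([1.0, 2.0, 3.0], dtype=floatX), np.array([1.0, 0.0, 0.0], dtype=floatX)))
```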