diff --git a/pytensor/graph/op.py b/pytensor/graph/op.py
index 160a65dd7a..c58258c920 100644
--- a/pytensor/graph/op.py
+++ b/pytensor/graph/op.py
@@ -13,6 +13,7 @@
 
 import pytensor
 from pytensor.configdefaults import config
+from pytensor.gradient import grad_not_implemented
 from pytensor.graph.basic import Apply, Variable
 from pytensor.graph.utils import (
     MetaObject,
@@ -366,7 +367,14 @@ def grad(
         .. [1] Giles, Mike. 2008. "An Extended Collection of Matrix Derivative Results
            for Forward and Reverse Mode Automatic Differentiation."
         """
-        raise NotImplementedError()
+        # Return a grad_not_implemented placeholder for the single input
+        # instead of raising eagerly in this default implementation.
+        (a,) = inputs
+        a_grad = grad_not_implemented(
+            self,
+            0,
+            a,
+        )
+        return [a_grad]
 
     def L_op(
         self,
diff --git a/pytensor/tensor/rewriting/linalg.py b/pytensor/tensor/rewriting/linalg.py
index cdb1e59101..31959df419 100644
--- a/pytensor/tensor/rewriting/linalg.py
+++ b/pytensor/tensor/rewriting/linalg.py
@@ -377,3 +377,8 @@ def local_lift_through_linalg(
         return [block_diag(*inner_matrices)]
     else:
         raise NotImplementedError # pragma: no cover
+
+
+def det_diag_rewrite(node: Apply):
+    """Stub: presumably rewrites det(diag(x)) into prod(x) — TODO implement/confirm."""
+    pass