diff --git a/coverage/coverage-.xml b/coverage/coverage-.xml
new file mode 100644
index 0000000000..5192121c8d
--- /dev/null
+++ b/coverage/coverage-.xml
@@ -0,0 +1,48682 @@
+ [generated coverage XML report (source root "pytensor/"); 48,682 lines of machine-generated markup elided]
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
diff --git a/pytensor/compile/sharedvalue.py b/pytensor/compile/sharedvalue.py
index 8c6f0726a4..80f93b79bd 100644
--- a/pytensor/compile/sharedvalue.py
+++ b/pytensor/compile/sharedvalue.py
@@ -1,6 +1,7 @@
 """Provide a simple user friendly API to PyTensor-managed memory."""
 import copy
+import warnings
 from contextlib import contextmanager
 from functools import singledispatch
 from typing import TYPE_CHECKING
@@ -134,6 +135,10 @@ def set_value(self, new_value, borrow=False):
             self.container.value = copy.deepcopy(new_value)
     def get_test_value(self):
+        warnings.warn(
+            "test_value machinery is deprecated and will stop working in the future.",
+            FutureWarning,
+        )
         return self.get_value(borrow=True, return_internal_type=True)
     def clone(self, **kwargs):
diff --git a/pytensor/configdefaults.py b/pytensor/configdefaults.py
index a81fd63905..105a647591 100644
--- a/pytensor/configdefaults.py
+++ b/pytensor/configdefaults.py
@@ -7,6 +7,7 @@
 import socket
 import sys
 import textwrap
+import warnings
 from pathlib import Path
 import numpy as np
@@ -1282,6 +1283,12 @@ def add_caching_dir_configvars():
 else:
     gcc_version_str = "GCC_NOT_FOUND"
+if config.compute_test_value != "off":
+    warnings.warn(
+        "test_value machinery is deprecated and will stop working in the future.",
+        FutureWarning,
+    )
+
 # TODO: The caching dir resolution is a procedural mess of helper functions, local variables
 # and config definitions. And the result is also not particularly pretty..
 add_caching_dir_configvars()
diff --git a/pytensor/graph/basic.py b/pytensor/graph/basic.py
index 6b4ca7570d..ee001df796 100644
--- a/pytensor/graph/basic.py
+++ b/pytensor/graph/basic.py
@@ -790,6 +790,10 @@ def __init__(self, type: _TypeType, data: Any, name: str | None = None):
         add_tag_trace(self)
     def get_test_value(self):
+        warnings.warn(
+            "test_value machinery is deprecated and will stop working in the future.",
+            FutureWarning,
+        )
         return self.data
     def signature(self):
diff --git a/pytensor/graph/op.py b/pytensor/graph/op.py
index 684add6308..024dd299f1 100644
--- a/pytensor/graph/op.py
+++ b/pytensor/graph/op.py
@@ -717,6 +717,11 @@ def get_test_values(*args: Variable) -> Any | list[Any]:
     if config.compute_test_value == "off":
         return []
+    warnings.warn(
+        "test_value machinery is deprecated and will stop working in the future.",
+        FutureWarning,
+    )
+
     rval = []
     for i, arg in enumerate(args):
diff --git a/pytensor/graph/utils.py b/pytensor/graph/utils.py
index d797504ae6..69efa9d3b0 100644
--- a/pytensor/graph/utils.py
+++ b/pytensor/graph/utils.py
@@ -1,6 +1,7 @@
 import linecache
 import sys
 import traceback
+import warnings
 from abc import ABCMeta
 from collections.abc import Sequence
 from io import StringIO
@@ -282,9 +283,19 @@ def info(self):
     # These two methods have been added to help Mypy
     def __getattribute__(self, name):
+        if name == "test_value":
+            warnings.warn(
+                "test_value machinery is deprecated and will stop working in the future.",
+                FutureWarning,
+            )
         return super().__getattribute__(name)
     def __setattr__(self, name: str, value: Any) -> None:
+        if name == "test_value":
+            warnings.warn(
+                "test_value machinery is deprecated and will stop working in the future.",
+                FutureWarning,
+            )
         self.__dict__[name] = value
@@ -299,6 +310,11 @@ def __init__(self, attr, attr_filter):
     def __setattr__(self, attr, obj):
         if getattr(self, "attr", None) == attr:
+            if attr == "test_value":
+                warnings.warn(
+                    "test_value machinery is deprecated and will stop working in the future.",
+                    FutureWarning,
+                )
             obj = self.attr_filter(obj)
         return object.__setattr__(self, attr, obj)
diff --git a/pytensor/scalar/basic.py b/pytensor/scalar/basic.py
index ca58006d24..f4f9bf169c 100644
--- a/pytensor/scalar/basic.py
+++ b/pytensor/scalar/basic.py
@@ -1231,7 +1231,7 @@ def supports_c_code(self, inputs, outputs):
             tmp_s_input.append(tmp)
             mapping[ii] = tmp_s_input[-1]
-        with config.change_flags(compute_test_value="ignore"):
+        with config.change_flags(compute_test_value="off"):
             s_op = self(*tmp_s_input, return_list=True)
         # if the scalar_op don't have a c implementation,
diff --git a/tests/compile/test_builders.py b/tests/compile/test_builders.py
index d99b13edfc..265a2f65b9 100644
--- a/tests/compile/test_builders.py
+++ b/tests/compile/test_builders.py
@@ -523,11 +523,12 @@ def test_infer_shape(self):
     @config.change_flags(compute_test_value="raise")
     def test_compute_test_value(self):
-        x = scalar("x")
-        x.tag.test_value = np.array(1.0, dtype=config.floatX)
-        op = OpFromGraph([x], [x**3])
-        y = scalar("y")
-        y.tag.test_value = np.array(1.0, dtype=config.floatX)
+        with pytest.warns(FutureWarning):
+            x = scalar("x")
+            x.tag.test_value = np.array(1.0, dtype=config.floatX)
+            op = OpFromGraph([x], [x**3])
+            y = scalar("y")
+            y.tag.test_value = np.array(1.0, dtype=config.floatX)
         f = op(y)
         grad_f = grad(f, y)
         assert grad_f.tag.test_value is not None
diff --git a/tests/compile/test_ops.py b/tests/compile/test_ops.py
index 461c7793ad..d9819ef621 100644
--- a/tests/compile/test_ops.py
+++ b/tests/compile/test_ops.py
@@ -33,9 +33,7 @@ def cumprod(x):
     def test_2arg(self):
         x = dmatrix("x")
-        x.tag.test_value = np.zeros((2, 2))
         y = dvector("y")
-        y.tag.test_value = [0, 0, 0, 0]
         @as_op([dmatrix, dvector], dvector)
         def cumprod_plus(x, y):
@@ -49,9 +47,7 @@ def cumprod_plus(x, y):
     def test_infer_shape(self):
         x = dmatrix("x")
-        x.tag.test_value = np.zeros((2, 2))
         y = dvector("y")
-        y.tag.test_value = [0, 0, 0, 0]
         def infer_shape(fgraph, node, shapes):
             x, y = shapes
diff --git a/tests/graph/test_compute_test_value.py b/tests/graph/test_compute_test_value.py
index ea59ff68f8..0a2c2bf830 100644
--- a/tests/graph/test_compute_test_value.py
+++ b/tests/graph/test_compute_test_value.py
@@ -67,7 +67,8 @@ def perform(self, node, inputs, outputs):
         test_input = SomeType()()
         orig_object = object()
-        test_input.tag.test_value = orig_object
+        with pytest.warns(FutureWarning):
+            test_input.tag.test_value = orig_object
         res = InplaceOp(False)(test_input)
         assert res.tag.test_value is orig_object
@@ -76,10 +77,11 @@
         assert res.tag.test_value is not orig_object
     def test_variable_only(self):
-        x = matrix("x")
-        x.tag.test_value = np.random.random((3, 4)).astype(config.floatX)
-        y = matrix("y")
-        y.tag.test_value = np.random.random((4, 5)).astype(config.floatX)
+        with pytest.warns(FutureWarning):
+            x = matrix("x")
+            x.tag.test_value = np.random.random((3, 4)).astype(config.floatX)
+            y = matrix("y")
+            y.tag.test_value = np.random.random((4, 5)).astype(config.floatX)
         # should work
         z = dot(x, y)
@@ -88,14 +90,16 @@
         assert _allclose(f(x.tag.test_value, y.tag.test_value), z.tag.test_value)
         # this test should fail
-        y.tag.test_value = np.random.random((6, 5)).astype(config.floatX)
+        with pytest.warns(FutureWarning):
+            y.tag.test_value = np.random.random((6, 5)).astype(config.floatX)
         with pytest.raises(ValueError):
             dot(x, y)
     def test_compute_flag(self):
         x = matrix("x")
         y = matrix("y")
-        y.tag.test_value = np.random.random((4, 5)).astype(config.floatX)
+        with pytest.warns(FutureWarning):
+            y.tag.test_value = np.random.random((4, 5)).astype(config.floatX)
         # should skip computation of test value
         with config.change_flags(compute_test_value="off"):
@@ -111,10 +115,11 @@
             dot(x, y)
     def test_string_var(self):
-        x = matrix("x")
-        x.tag.test_value = np.random.random((3, 4)).astype(config.floatX)
-        y = matrix("y")
-        y.tag.test_value = np.random.random((4, 5)).astype(config.floatX)
+        with pytest.warns(FutureWarning):
+            x = matrix("x")
+            x.tag.test_value = np.random.random((3, 4)).astype(config.floatX)
+            y = matrix("y")
+            y.tag.test_value = np.random.random((4, 5)).astype(config.floatX)
         z = pytensor.shared(np.random.random((5, 6)).astype(config.floatX))
@@ -134,7 +139,8 @@ def f(x, y, z):
     def test_shared(self):
         x = matrix("x")
-        x.tag.test_value = np.random.random((3, 4)).astype(config.floatX)
+        with pytest.warns(FutureWarning):
+            x.tag.test_value = np.random.random((3, 4)).astype(config.floatX)
         y = pytensor.shared(np.random.random((4, 6)).astype(config.floatX), "y")
         # should work
@@ -190,21 +196,21 @@ def test_constant(self):
     def test_incorrect_type(self):
         x = vector("x")
         with pytest.raises(TypeError):
-            # Incorrect shape for test value
-            x.tag.test_value = np.empty((2, 2))
+            with pytest.warns(FutureWarning):
+                # Incorrect shape for test value
+                x.tag.test_value = np.empty((2, 2))
         x = fmatrix("x")
         with pytest.raises(TypeError):
-            # Incorrect dtype (float64) for test value
-            x.tag.test_value = np.random.random((3, 4))
+            with pytest.warns(FutureWarning):
+                # Incorrect dtype (float64) for test value
+                x.tag.test_value = np.random.random((3, 4))
     def test_overided_function(self):
         # We need to test those as they mess with Exception
         # And we don't want the exception to be changed.
         x = matrix()
-        x.tag.test_value = np.zeros((2, 3), dtype=config.floatX)
         y = matrix()
-        y.tag.test_value = np.zeros((2, 2), dtype=config.floatX)
         with pytest.raises(ValueError):
             x.__mul__(y)
@@ -212,8 +218,9 @@ def test_scan(self):
         # Test the compute_test_value mechanism Scan.
         k = iscalar("k")
         A = vector("A")
-        k.tag.test_value = 3
-        A.tag.test_value = np.random.random((5,)).astype(config.floatX)
+        with pytest.warns(FutureWarning):
+            k.tag.test_value = 3
+            A.tag.test_value = np.random.random((5,)).astype(config.floatX)
         def fx(prior_result, A):
             return prior_result * A
@@ -233,8 +240,9 @@ def test_scan_err1(self):
         # This test should fail when building fx for the first time
         k = iscalar("k")
         A = matrix("A")
-        k.tag.test_value = 3
-        A.tag.test_value = np.random.random((5, 3)).astype(config.floatX)
+        with pytest.warns(FutureWarning):
+            k.tag.test_value = 3
+            A.tag.test_value = np.random.random((5, 3)).astype(config.floatX)
         def fx(prior_result, A):
             return dot(prior_result, A)
@@ -253,8 +261,9 @@ def test_scan_err2(self):
         # but when calling the scan's perform()
         k = iscalar("k")
         A = matrix("A")
-        k.tag.test_value = 3
-        A.tag.test_value = np.random.random((5, 3)).astype(config.floatX)
+        with pytest.warns(FutureWarning):
+            k.tag.test_value = 3
+            A.tag.test_value = np.random.random((5, 3)).astype(config.floatX)
         def fx(prior_result, A):
             return dot(prior_result, A)
@@ -288,7 +297,8 @@ def perform(self, node, inputs, outputs):
                 output[0] = input + 1
         i = ps.int32("i")
-        i.tag.test_value = 3
+        with pytest.warns(FutureWarning):
+            i.tag.test_value = 3
         o = IncOnePython()(i)
@@ -304,7 +314,8 @@ def perform(self, node, inputs, outputs):
         )
     def test_no_perform(self):
         i = ps.int32("i")
-        i.tag.test_value = 3
+        with pytest.warns(FutureWarning):
+            i.tag.test_value = 3
         # Class IncOneC is defined outside of the TestComputeTestValue
         # so it can be pickled and unpickled
diff --git a/tests/graph/test_destroyhandler.py b/tests/graph/test_destroyhandler.py
index 16a654da26..9549503e25 100644
--- a/tests/graph/test_destroyhandler.py
+++ b/tests/graph/test_destroyhandler.py
@@ -2,7 +2,6 @@
 import pytest
-from pytensor.configdefaults import config
 from pytensor.graph.basic import Apply, Constant, Variable, clone
 from pytensor.graph.destroyhandler import DestroyHandler
 from pytensor.graph.features import ReplaceValidate
@@ -408,7 +407,6 @@ def test_value_repl():
     assert g.consistent()
-@config.change_flags(compute_test_value="off")
 def test_value_repl_2():
     x, y, z = inputs()
     sy = sigmoid(y)
diff --git a/tests/graph/test_fg.py b/tests/graph/test_fg.py
index f2550d348e..63ad67eb57 100644
--- a/tests/graph/test_fg.py
+++ b/tests/graph/test_fg.py
@@ -241,18 +241,19 @@ def test_change_input(self):
     @config.change_flags(compute_test_value="raise")
     def test_replace_test_value(self):
-        var1 = MyVariable("var1")
-        var1.tag.test_value = 1
-        var2 = MyVariable("var2")
-        var2.tag.test_value = 2
-        var3 = op1(var2, var1)
-        var4 = op2(var3, var2)
-        var4.tag.test_value = np.array([1, 2])
-        var5 = op3(var4, var2, var2)
-        fg = FunctionGraph([var1, var2], [var3, var5], clone=False)
-
-        var6 = op3()
-        var6.tag.test_value = np.array(0)
+        with pytest.warns(FutureWarning):
+            var1 = MyVariable("var1")
+            var1.tag.test_value = 1
+            var2 = MyVariable("var2")
+            var2.tag.test_value = 2
+            var3 = op1(var2, var1)
+            var4 = op2(var3, var2)
+            var4.tag.test_value = np.array([1, 2])
+            var5 = op3(var4, var2, var2)
+            fg = FunctionGraph([var1, var2], [var3, var5], clone=False)
+
+            var6 = op3()
+            var6.tag.test_value = np.array(0)
         assert var6.tag.test_value.shape != var4.tag.test_value.shape
diff --git a/tests/graph/test_op.py b/tests/graph/test_op.py
index 5ec545015b..0c92017349 100644
--- a/tests/graph/test_op.py
+++ b/tests/graph/test_op.py
@@ -131,26 +131,30 @@ def perform(self, node, inputs, outputs):
 def test_test_value_python_objects():
-    for x in ([0, 1, 2], 0, 0.5, 1):
-        assert np.all(op.get_test_value(x) == x)
+    with pytest.warns(FutureWarning):
+        for x in ([0, 1, 2], 0, 0.5, 1):
+            assert np.all(op.get_test_value(x) == x)
 def test_test_value_ndarray():
     x = np.zeros((5, 5))
-    v = op.get_test_value(x)
+    with pytest.warns(FutureWarning):
+        v = op.get_test_value(x)
     assert np.all(v == x)
 def test_test_value_constant():
     x = pt.as_tensor_variable(np.zeros((5, 5)))
-    v = op.get_test_value(x)
+    with pytest.warns(FutureWarning):
+        v = op.get_test_value(x)
     assert np.all(v == np.zeros((5, 5)))
 def test_test_value_shared():
     x = shared(np.zeros((5, 5)))
-    v = op.get_test_value(x)
+    with pytest.warns(FutureWarning):
+        v = op.get_test_value(x)
     assert np.all(v == np.zeros((5, 5)))
@@ -158,12 +162,12 @@ def test_test_value_shared():
 @config.change_flags(compute_test_value="raise")
 def test_test_value_op():
     x = log(np.ones((5, 5)))
-    v = op.get_test_value(x)
+    with pytest.warns(FutureWarning):
+        v = op.get_test_value(x)
     assert np.allclose(v, np.zeros((5, 5)))
-@config.change_flags(compute_test_value="off")
 def test_get_test_values_no_debugger():
     """Tests that `get_test_values` returns `[]` when debugger is off."""
@@ -176,7 +180,8 @@ def test_get_test_values_ignore():
     """Tests that `get_test_values` returns `[]` when debugger is set to "ignore" and some values are missing."""
     x = vector()
-    assert op.get_test_values(x) == []
+    with pytest.warns(FutureWarning):
+        assert op.get_test_values(x) == []
 def test_get_test_values_success():
@@ -184,19 +189,20 @@
     for mode in ["ignore", "warn", "raise"]:
         with config.change_flags(compute_test_value=mode):
-            x = vector()
-            x.tag.test_value = np.zeros((4,), dtype=config.floatX)
-            y = np.zeros((5, 5))
+            with pytest.warns(FutureWarning):
+                x = vector()
+                x.tag.test_value = np.zeros((4,), dtype=config.floatX)
+                y = np.zeros((5, 5))
-            iters = 0
+                iters = 0
-            for x_val, y_val in op.get_test_values(x, y):
-                assert x_val.shape == (4,)
-                assert y_val.shape == (5, 5)
+                for x_val, y_val in op.get_test_values(x, y):
+                    assert x_val.shape == (4,)
+                    assert y_val.shape == (5, 5)
-                iters += 1
+                    iters += 1
-            assert iters == 1
+                assert iters == 1
 @config.change_flags(compute_test_value="raise")
@@ -204,8 +210,9 @@ def test_get_test_values_exc():
     """Tests that `get_test_values` raises an exception when debugger is set to raise and a value is missing."""
     with pytest.raises(TestValueError):
-        x = vector()
-        assert op.get_test_values(x) == []
+        with pytest.warns(FutureWarning):
+            x = vector()
+            assert op.get_test_values(x) == []
 def test_op_invalid_input_types():
diff --git a/tests/link/jax/test_basic.py b/tests/link/jax/test_basic.py
index 5e783984e0..7c0cfbe5dc 100644
--- a/tests/link/jax/test_basic.py
+++ b/tests/link/jax/test_basic.py
@@ -12,7 +12,7 @@
 from pytensor.graph import RewriteDatabaseQuery
 from pytensor.graph.basic import Apply
 from pytensor.graph.fg import FunctionGraph
-from pytensor.graph.op import Op, get_test_value
+from pytensor.graph.op import Op
 from pytensor.ifelse import ifelse
 from pytensor.link.jax import JAXLinker
 from pytensor.raise_op import assert_op
@@ -192,16 +192,14 @@ def test_jax_ifelse():
     compare_jax_and_py(x_fg, [])
     a = dscalar("a")
-    a.tag.test_value = np.array(0.2, dtype=config.floatX)
     x = ifelse(a < 0.5, true_vals, false_vals)
     x_fg = FunctionGraph([a], [x])  # I.e. False
-    compare_jax_and_py(x_fg, [get_test_value(i) for i in x_fg.inputs])
+    compare_jax_and_py(x_fg, [np.array(0.2, dtype=config.floatX)])
 def test_jax_checkandraise():
     p = scalar()
-    p.tag.test_value = 0
     res = assert_op(p, p < 1.0)
@@ -210,8 +208,8 @@
 def set_test_value(x, v):
-    x.tag.test_value = v
-    return x
+    test_values_dict = {x: v}
+    return test_values_dict
 def test_OpFromGraph():
diff --git a/tests/link/jax/test_elemwise.py b/tests/link/jax/test_elemwise.py
index 856d8c4881..de0b618639 100644
--- a/tests/link/jax/test_elemwise.py
+++ b/tests/link/jax/test_elemwise.py
@@ -7,7 +7,6 @@
 from pytensor.compile import get_mode
 from pytensor.configdefaults import config
 from pytensor.graph.fg import FunctionGraph
-from pytensor.graph.op import get_test_value
 from pytensor.tensor import elemwise as pt_elemwise
 from pytensor.tensor.math import all as pt_all
 from pytensor.tensor.math import prod
@@ -46,7 +45,6 @@ def test_jax_Dimshuffle():
 def test_jax_CAReduce():
     a_pt = vector("a")
-    a_pt.tag.test_value = np.r_[1, 2, 3].astype(config.floatX)
     x = pt_sum(a_pt, axis=None)
     x_fg = FunctionGraph([a_pt], [x])
@@ -54,7 +52,6 @@
     compare_jax_and_py(x_fg, [np.r_[1, 2, 3].astype(config.floatX)])
     a_pt = matrix("a")
-    a_pt.tag.test_value = np.c_[[1, 2, 3], [1, 2, 3]].astype(config.floatX)
     x = pt_sum(a_pt, axis=0)
     x_fg = FunctionGraph([a_pt], [x])
@@ -67,7 +64,6 @@
     compare_jax_and_py(x_fg, [np.c_[[1, 2, 3], [1, 2, 3]].astype(config.floatX)])
     a_pt = matrix("a")
-    a_pt.tag.test_value = np.c_[[1, 2, 3], [1, 2, 3]].astype(config.floatX)
     x = prod(a_pt, axis=0)
     x_fg = FunctionGraph([a_pt], [x])
@@ -83,30 +79,30 @@
 @pytest.mark.parametrize("axis", [None, 0, 1])
 def test_softmax(axis):
     x = matrix("x")
-    x.tag.test_value = np.arange(6, dtype=config.floatX).reshape(2, 3)
     out = softmax(x, axis=axis)
     fgraph = FunctionGraph([x], [out])
-    compare_jax_and_py(fgraph, [get_test_value(i) for i in fgraph.inputs])
+    compare_jax_and_py(fgraph, [np.arange(6, dtype=config.floatX).reshape(2, 3)])
 @pytest.mark.parametrize("axis", [None, 0, 1])
 def test_logsoftmax(axis):
     x = matrix("x")
-    x.tag.test_value = np.arange(6, dtype=config.floatX).reshape(2, 3)
     out = log_softmax(x, axis=axis)
     fgraph = FunctionGraph([x], [out])
-    compare_jax_and_py(fgraph, [get_test_value(i) for i in fgraph.inputs])
+    compare_jax_and_py(fgraph, [np.arange(6, dtype=config.floatX).reshape(2, 3)])
 @pytest.mark.parametrize("axis", [None, 0, 1])
 def test_softmax_grad(axis):
     dy = matrix("dy")
-    dy.tag.test_value = np.array([[1, 1, 1], [0, 0, 0]], dtype=config.floatX)
     sm = matrix("sm")
-    sm.tag.test_value = np.arange(6, dtype=config.floatX).reshape(2, 3)
+    test_value = [
+        np.array([[1, 1, 1], [0, 0, 0]], dtype=config.floatX),
+        np.arange(6, dtype=config.floatX).reshape(2, 3),
+    ]
     out = SoftmaxGrad(axis=axis)(dy, sm)
     fgraph = FunctionGraph([dy, sm], [out])
-    compare_jax_and_py(fgraph, [get_test_value(i) for i in fgraph.inputs])
+    compare_jax_and_py(fgraph, list(test_value))
 @pytest.mark.parametrize("size", [(10, 10), (1000, 1000), (10000, 10000)])
diff --git a/tests/link/jax/test_extra_ops.py b/tests/link/jax/test_extra_ops.py
index 1427413379..ddb72a0e4c 100644
--- a/tests/link/jax/test_extra_ops.py
+++ b/tests/link/jax/test_extra_ops.py
@@ -4,7 +4,6 @@
 import pytensor.tensor.basic as ptb
 from pytensor.configdefaults import config
 from pytensor.graph.fg import FunctionGraph
-from pytensor.graph.op import get_test_value
 from pytensor.tensor import extra_ops as pt_extra_ops
 from pytensor.tensor.type import matrix, tensor
 from tests.link.jax.test_basic import compare_jax_and_py
@@ -52,7 +51,9 @@ def test_extra_ops():
     out = pt_extra_ops.unravel_index(indices, (3, 4), order="C")
     fgraph = FunctionGraph([], out)
     compare_jax_and_py(
-        fgraph, [get_test_value(i) for i in fgraph.inputs], must_be_device_array=False
+        fgraph,
+        [],
+        must_be_device_array=False,
     )
diff --git a/tests/link/jax/test_random.py b/tests/link/jax/test_random.py
index f9ae5d00c1..840045537a 100644
--- a/tests/link/jax/test_random.py
+++ b/tests/link/jax/test_random.py
@@ -474,17 +474,19 @@ def test_random_RandomVariable(rv_op, dist_params, base_size, cdf_name, params_c
     """
     rng = shared(np.random.default_rng(29403))
+    test_values = {k: v for d in dist_params for k, v in d.items() if d}
+    dist_params = list(test_values.keys())
     g = rv_op(*dist_params, size=(10000, *base_size), rng=rng)
     g_fn = compile_random_function(dist_params, g, mode=jax_mode)
     samples = g_fn(
         *[
-            i.tag.test_value
-            for i in g_fn.maker.fgraph.inputs
+            test_values[i]
+            for i in test_values
             if not isinstance(i, SharedVariable | Constant)
         ]
     )
-    bcast_dist_args = np.broadcast_arrays(*[i.tag.test_value for i in dist_params])
+    bcast_dist_args = np.broadcast_arrays(*[test_values[i] for i in test_values])
     for idx in np.ndindex(*base_size):
         cdf_params = params_conv(*(arg[idx] for arg in bcast_dist_args))
diff --git a/tests/link/jax/test_scalar.py b/tests/link/jax/test_scalar.py
index 0469301791..4eac6b625b 100644
--- a/tests/link/jax/test_scalar.py
+++ b/tests/link/jax/test_scalar.py
@@ -5,7 +5,6 @@
 import pytensor.tensor as pt
 from pytensor.configdefaults import config
 from pytensor.graph.fg import FunctionGraph
-from pytensor.graph.op import get_test_value
 from pytensor.scalar.basic import Composite
 from pytensor.tensor import as_tensor
 from pytensor.tensor.elemwise import Elemwise
@@ -80,11 +79,11 @@ def test_second_constant_scalar():
 def test_identity():
     a = scalar("a")
-    a.tag.test_value = 10
+    a_test_value = 10
     out = ps.identity(a)
     fgraph = FunctionGraph([a], [out])
-    compare_jax_and_py(fgraph, [get_test_value(i) for i in fgraph.inputs])
+    compare_jax_and_py(fgraph, [a_test_value])
 @pytest.mark.parametrize(
@@ -238,34 +237,36 @@ def test_log1mexp():
 def test_nnet():
     x = vector("x")
-    x.tag.test_value = np.r_[1.0, 2.0].astype(config.floatX)
+    x_test_value = np.r_[1.0, 2.0].astype(config.floatX)
     out = sigmoid(x)
     fgraph = FunctionGraph([x], [out])
-    compare_jax_and_py(fgraph, [get_test_value(i) for i in fgraph.inputs])
+    compare_jax_and_py(fgraph, [x_test_value])
     out = softplus(x)
     fgraph = FunctionGraph([x], [out])
-    compare_jax_and_py(fgraph, [get_test_value(i) for i in fgraph.inputs])
+    compare_jax_and_py(fgraph, [x_test_value])
 def test_jax_variadic_Scalar():
     mu = vector("mu", dtype=config.floatX)
-    mu.tag.test_value = np.r_[0.1, 1.1].astype(config.floatX)
     tau = vector("tau", dtype=config.floatX)
-    tau.tag.test_value = np.r_[1.0, 2.0].astype(config.floatX)
+    test_values = {
+        mu: np.r_[0.1, 1.1].astype(config.floatX),
+        tau: np.r_[1.0, 2.0].astype(config.floatX),
+    }
     res = -tau * mu
     fgraph = FunctionGraph([mu, tau], [res])
fgraph.inputs]) + compare_jax_and_py(fgraph, [test_values[i] for i in test_values]) res = -tau * (tau - mu) ** 2 fgraph = FunctionGraph([mu, tau], [res]) - compare_jax_and_py(fgraph, [get_test_value(i) for i in fgraph.inputs]) + compare_jax_and_py(fgraph, [test_values[i] for i in test_values]) def test_add_scalars(): @@ -306,27 +307,31 @@ def test_mod_scalars(): def test_jax_multioutput(): x = vector("x") - x.tag.test_value = np.r_[1.0, 2.0].astype(config.floatX) y = vector("y") - y.tag.test_value = np.r_[3.0, 4.0].astype(config.floatX) + test_values = { + x: np.r_[1.0, 2.0].astype(config.floatX), + y: np.r_[3.0, 4.0].astype(config.floatX), + } w = cosh(x**2 + y / 3.0) v = cosh(x / 3.0 + y**2) fgraph = FunctionGraph([x, y], [w, v]) - compare_jax_and_py(fgraph, [get_test_value(i) for i in fgraph.inputs]) + compare_jax_and_py(fgraph, [test_values[i] for i in test_values]) def test_jax_logp(): mu = vector("mu") - mu.tag.test_value = np.r_[0.0, 0.0].astype(config.floatX) tau = vector("tau") - tau.tag.test_value = np.r_[1.0, 1.0].astype(config.floatX) sigma = vector("sigma") - sigma.tag.test_value = (1.0 / get_test_value(tau)).astype(config.floatX) value = vector("value") - value.tag.test_value = np.r_[0.1, -10].astype(config.floatX) + test_values = { + mu: np.r_[0.0, 0.0].astype(config.floatX), + tau: np.r_[1.0, 1.0].astype(config.floatX), + sigma: (1.0 / np.r_[1.0, 1.0].astype(config.floatX)).astype(config.floatX), + value: np.r_[0.1, -10].astype(config.floatX), + } logp = (-tau * (value - mu) ** 2 + log(tau / np.pi / 2.0)) / 2.0 conditions = [sigma > 0] @@ -335,4 +340,4 @@ def test_jax_logp(): fgraph = FunctionGraph([mu, tau, sigma, value], [normal_logp]) - compare_jax_and_py(fgraph, [get_test_value(i) for i in fgraph.inputs]) + compare_jax_and_py(fgraph, [test_values[i] for i in test_values]) diff --git a/tests/link/jax/test_tensor_basic.py b/tests/link/jax/test_tensor_basic.py index 0ee4a236d9..183e3f08ba 100644 --- a/tests/link/jax/test_tensor_basic.py +++ b/tests/link/jax/test_tensor_basic.py @@ -11,7 +11,6 @@ import pytensor.tensor.basic as ptb from pytensor.configdefaults import config from pytensor.graph.fg import FunctionGraph -from pytensor.graph.op import get_test_value from pytensor.tensor.type import iscalar, matrix, scalar, vector from tests.link.jax.test_basic import compare_jax_and_py from tests.tensor.test_basic import TestAlloc @@ -81,12 +80,12 @@ def test_arange_nonconcrete(): """JAX cannot JIT-compile `jax.numpy.arange` when arguments are not concrete values.""" a = scalar("a") - a.tag.test_value = 10 + a_test_value = 10 out = ptb.arange(a) with pytest.raises(NotImplementedError): fgraph = FunctionGraph([a], [out]) - compare_jax_and_py(fgraph, [get_test_value(i) for i in fgraph.inputs]) + compare_jax_and_py(fgraph, [a_test_value]) def test_jax_Join(): @@ -230,9 +229,7 @@ def test_tri_nonconcrete(): scalar("n", dtype="int64"), scalar("k", dtype="int64"), ) - m.tag.test_value = 10 - n.tag.test_value = 10 - k.tag.test_value = 0 + test_values = {m: 10, n: 10, k: 0} out = ptb.tri(m, n, k) @@ -240,4 +237,4 @@ def test_tri_nonconcrete(): # the error handler raises an Attribute error first, so that's what this test needs to pass with pytest.raises(AttributeError): fgraph = FunctionGraph([m, n, k], [out]) - compare_jax_and_py(fgraph, [get_test_value(i) for i in fgraph.inputs]) + compare_jax_and_py(fgraph, [test_values[i] for i in test_values]) diff --git a/tests/link/numba/test_basic.py b/tests/link/numba/test_basic.py index cfbc61eaca..22591570ba 100644 --- 
a/tests/link/numba/test_basic.py +++ b/tests/link/numba/test_basic.py @@ -24,7 +24,7 @@ from pytensor.compile.sharedvalue import SharedVariable from pytensor.graph.basic import Apply, Constant from pytensor.graph.fg import FunctionGraph -from pytensor.graph.op import Op, get_test_value +from pytensor.graph.op import Op from pytensor.graph.rewriting.db import RewriteDatabaseQuery from pytensor.graph.type import Type from pytensor.ifelse import ifelse @@ -129,8 +129,8 @@ def perform(self, node, inputs, outputs): def set_test_value(x, v): - x.tag.test_value = v - return x + test_values_dict = {x: v} + return test_values_dict def compare_shape_dtype(x, y): @@ -384,7 +384,7 @@ def test_Shape(x, i): @pytest.mark.parametrize( - "v, shape, ndim", + "test_values, shape, ndim", [ (set_test_value(pt.vector(), np.array([4], dtype=config.floatX)), (), 0), (set_test_value(pt.vector(), np.arange(4, dtype=config.floatX)), (2, 2), 2), @@ -395,14 +395,18 @@ def test_Shape(x, i): ), ], ) -def test_Reshape(v, shape, ndim): +def test_Reshape(test_values, shape, ndim): + v = next(iter(test_values.keys())) + if isinstance(shape, dict): + test_values.update(shape) + shape = next(iter(shape.keys())) g = Reshape(ndim)(v, shape) g_fg = FunctionGraph(outputs=[g]) compare_numba_and_py( g_fg, [ - i.tag.test_value - for i in g_fg.inputs + test_values[i] + for i in test_values if not isinstance(i, SharedVariable | Constant) ], ) @@ -410,21 +414,21 @@ def test_Reshape(v, shape, ndim): def test_Reshape_scalar(): v = pt.vector() - v.tag.test_value = np.array([1.0], dtype=config.floatX) + test_values = {v: np.array([1.0], dtype=config.floatX)} g = Reshape(1)(v[0], (1,)) g_fg = FunctionGraph(outputs=[g]) compare_numba_and_py( g_fg, [ - i.tag.test_value - for i in g_fg.inputs + test_values[i] + for i in test_values if not isinstance(i, SharedVariable | Constant) ], ) @pytest.mark.parametrize( - "v, shape, fails", + "test_values, shape, fails", [ ( set_test_value(pt.matrix(), np.array([[1.0]], dtype=config.floatX)), @@ -443,7 +447,8 @@ def test_Reshape_scalar(): ), ], ) -def test_SpecifyShape(v, shape, fails): +def test_SpecifyShape(test_values, shape, fails): + v = next(iter(test_values.keys())) g = SpecifyShape()(v, *shape) g_fg = FunctionGraph(outputs=[g]) cm = contextlib.suppress() if not fails else pytest.raises(AssertionError) @@ -451,34 +456,35 @@ def test_SpecifyShape(v, shape, fails): compare_numba_and_py( g_fg, [ - i.tag.test_value - for i in g_fg.inputs + test_values[i] + for i in test_values if not isinstance(i, SharedVariable | Constant) ], ) @pytest.mark.parametrize( - "v", + "test_values", [ set_test_value(pt.vector(), np.arange(4, dtype=config.floatX)), ], ) -def test_ViewOp(v): +def test_ViewOp(test_values): + v = next(iter(test_values.keys())) g = ViewOp()(v) g_fg = FunctionGraph(outputs=[g]) compare_numba_and_py( g_fg, [ - i.tag.test_value - for i in g_fg.inputs + test_values[i] + for i in test_values if not isinstance(i, SharedVariable | Constant) ], ) @pytest.mark.parametrize( - "inputs, op, exc", + "test_values, op, exc", [ ( [ @@ -502,7 +508,9 @@ def test_ViewOp(v): ), ], ) -def test_perform(inputs, op, exc): +def test_perform(test_values, op, exc): + test_values = {k: v for d in test_values for k, v in d.items()} + inputs = list(test_values.keys()) g = op()(*inputs) if isinstance(g, list): @@ -515,8 +523,8 @@ def test_perform(inputs, op, exc): compare_numba_and_py( g_fg, [ - i.tag.test_value - for i in g_fg.inputs + test_values[i] + for i in test_values if not isinstance(i, SharedVariable | Constant) ], 
) @@ -526,7 +534,8 @@ def test_perform_params(): """This tests for `Op.perform` implementations that require the `params` arguments.""" x = pt.vector() - x.tag.test_value = np.array([1.0, 2.0], dtype=config.floatX) + + test_values = {x: np.array([1.0, 2.0], dtype=config.floatX)} out = assert_op(x, np.array(True)) @@ -534,7 +543,7 @@ def test_perform_params(): out = [out] out_fg = FunctionGraph([x], out) - compare_numba_and_py(out_fg, [get_test_value(i) for i in out_fg.inputs]) + compare_numba_and_py(out_fg, [test_values[i] for i in test_values]) def test_perform_type_convert(): @@ -545,7 +554,8 @@ def test_perform_type_convert(): """ x = pt.vector() - x.tag.test_value = np.array([1.0, 2.0], dtype=config.floatX) + + test_values = {x: np.array([1.0, 2.0], dtype=config.floatX)} out = assert_op(x.sum(), np.array(True)) @@ -553,40 +563,55 @@ def test_perform_type_convert(): out = [out] out_fg = FunctionGraph([x], out) - compare_numba_and_py(out_fg, [get_test_value(i) for i in out_fg.inputs]) + compare_numba_and_py(out_fg, [test_values[i] for i in test_values]) @pytest.mark.parametrize( - "x, y, exc", + "inputs, exc", [ ( - set_test_value(pt.matrix(), rng.random(size=(3, 2)).astype(config.floatX)), - set_test_value(pt.vector(), rng.random(size=(2,)).astype(config.floatX)), + [ + set_test_value( + pt.matrix(), rng.random(size=(3, 2)).astype(config.floatX) + ), + set_test_value( + pt.vector(), rng.random(size=(2,)).astype(config.floatX) + ), + ], None, ), ( - set_test_value( - pt.matrix(dtype="float64"), rng.random(size=(3, 2)).astype("float64") - ), - set_test_value( - pt.vector(dtype="float32"), rng.random(size=(2,)).astype("float32") - ), + [ + set_test_value( + pt.matrix(dtype="float64"), + rng.random(size=(3, 2)).astype("float64"), + ), + set_test_value( + pt.vector(dtype="float32"), rng.random(size=(2,)).astype("float32") + ), + ], None, ), ( - set_test_value(pt.lmatrix(), rng.poisson(size=(3, 2))), - set_test_value(pt.fvector(), rng.random(size=(2,)).astype("float32")), + [ + set_test_value(pt.lmatrix(), rng.poisson(size=(3, 2))), + set_test_value(pt.fvector(), rng.random(size=(2,)).astype("float32")), + ], None, ), ( - set_test_value(pt.lvector(), rng.random(size=(2,)).astype(np.int64)), - set_test_value(pt.lvector(), rng.random(size=(2,)).astype(np.int64)), + [ + set_test_value(pt.lvector(), rng.random(size=(2,)).astype(np.int64)), + set_test_value(pt.lvector(), rng.random(size=(2,)).astype(np.int64)), + ], None, ), ], ) -def test_Dot(x, y, exc): - g = ptm.Dot()(x, y) +def test_Dot(inputs, exc): + test_values = {k: v for d in inputs for k, v in d.items()} + inputs = list(test_values.keys()) + g = ptm.Dot()(*inputs) g_fg = FunctionGraph(outputs=[g]) cm = contextlib.suppress() if exc is None else pytest.warns(exc) @@ -594,15 +619,15 @@ def test_Dot(x, y, exc): compare_numba_and_py( g_fg, [ - i.tag.test_value - for i in g_fg.inputs + test_values[i] + for i in test_values if not isinstance(i, SharedVariable | Constant) ], ) @pytest.mark.parametrize( - "x, exc", + "test_values, exc", [ ( set_test_value(ps.float64(), np.array(0.0, dtype="float64")), @@ -630,7 +655,8 @@ def test_Dot(x, y, exc): ), ], ) -def test_Softplus(x, exc): +def test_Softplus(test_values, exc): + x = next(iter(test_values)) g = psm.Softplus(ps.upgrade_to_float)(x) g_fg = FunctionGraph(outputs=[g]) @@ -639,42 +665,48 @@ def test_Softplus(x, exc): compare_numba_and_py( g_fg, [ - i.tag.test_value - for i in g_fg.inputs + test_values[i] + for i in test_values if not isinstance(i, SharedVariable | Constant) ], ) 
@pytest.mark.parametrize( - "x, y, exc", + "inputs, exc", [ ( - set_test_value( - pt.dtensor3(), - rng.random(size=(2, 3, 3)).astype("float64"), - ), - set_test_value( - pt.dtensor3(), - rng.random(size=(2, 3, 3)).astype("float64"), - ), + [ + set_test_value( + pt.dtensor3(), + rng.random(size=(2, 3, 3)).astype("float64"), + ), + set_test_value( + pt.dtensor3(), + rng.random(size=(2, 3, 3)).astype("float64"), + ), + ], None, ), ( - set_test_value( - pt.dtensor3(), - rng.random(size=(2, 3, 3)).astype("float64"), - ), - set_test_value( - pt.ltensor3(), - rng.poisson(size=(2, 3, 3)).astype("int64"), - ), + [ + set_test_value( + pt.dtensor3(), + rng.random(size=(2, 3, 3)).astype("float64"), + ), + set_test_value( + pt.ltensor3(), + rng.poisson(size=(2, 3, 3)).astype("int64"), + ), + ], None, ), ], ) -def test_BatchedDot(x, y, exc): - g = blas.BatchedDot()(x, y) +def test_BatchedDot(inputs, exc): + test_values = {k: v for d in inputs for k, v in d.items()} + inputs = list(test_values.keys()) + g = blas.BatchedDot()(*inputs) if isinstance(g, list): g_fg = FunctionGraph(outputs=g) @@ -686,8 +718,8 @@ def test_BatchedDot(x, y, exc): compare_numba_and_py( g_fg, [ - i.tag.test_value - for i in g_fg.inputs + test_values[i] + for i in test_values if not isinstance(i, SharedVariable | Constant) ], ) @@ -804,6 +836,8 @@ def test_shared_updates(): ], ) def test_IfElse(inputs, cond_fn, true_vals, false_vals): + test_values = {k: v for d in inputs for k, v in d.items()} + inputs = list(test_values.keys()) out = ifelse(cond_fn(*inputs), true_vals, false_vals) if not isinstance(out, list): @@ -811,7 +845,7 @@ def test_IfElse(inputs, cond_fn, true_vals, false_vals): out_fg = FunctionGraph(inputs, out) - compare_numba_and_py(out_fg, [get_test_value(i) for i in out_fg.inputs]) + compare_numba_and_py(out_fg, [test_values[i] for i in test_values]) @pytest.mark.xfail(reason="https://github.com/numba/numba/issues/7409") diff --git a/tests/link/numba/test_elemwise.py b/tests/link/numba/test_elemwise.py index 4c13004409..6e4bcd468f 100644 --- a/tests/link/numba/test_elemwise.py +++ b/tests/link/numba/test_elemwise.py @@ -146,7 +146,7 @@ def test_elemwise_speed(benchmark): @pytest.mark.parametrize( - "v, new_order", + "test_values, new_order", [ # `{'drop': [], 'shuffle': [], 'augment': [0, 1]}` ( @@ -204,14 +204,15 @@ def test_elemwise_speed(benchmark): ), ], ) -def test_Dimshuffle(v, new_order): +def test_Dimshuffle(test_values, new_order): + v = next(iter(test_values.keys())) g = v.dimshuffle(new_order) g_fg = FunctionGraph(outputs=[g]) compare_numba_and_py( g_fg, [ - i.tag.test_value - for i in g_fg.inputs + test_values[i] + for i in test_values if not isinstance(i, SharedVariable | Constant) ], ) @@ -237,7 +238,7 @@ def test_Dimshuffle_non_contiguous(): @pytest.mark.parametrize( - "careduce_fn, axis, v", + "careduce_fn, axis, test_values", [ ( lambda x, axis=None, dtype=None, acc_dtype=None: Sum( @@ -375,15 +376,16 @@ def test_Dimshuffle_non_contiguous(): ), ], ) -def test_CAReduce(careduce_fn, axis, v): +def test_CAReduce(careduce_fn, axis, test_values): + v = next(iter(test_values.keys())) g = careduce_fn(v, axis=axis) g_fg = FunctionGraph(outputs=[g]) compare_numba_and_py( g_fg, [ - i.tag.test_value - for i in g_fg.inputs + test_values[i] + for i in test_values if not isinstance(i, SharedVariable | Constant) ], ) @@ -401,36 +403,50 @@ def test_scalar_Elemwise_Clip(): @pytest.mark.parametrize( - "dy, sm, axis, exc", + "inputs, axis, exc", [ ( - set_test_value( - pt.matrix(), np.array([[1, 1, 
1], [0, 0, 0]], dtype=config.floatX) - ), - set_test_value(pt.matrix(), rng.random(size=(2, 3)).astype(config.floatX)), + [ + set_test_value( + pt.matrix(), np.array([[1, 1, 1], [0, 0, 0]], dtype=config.floatX) + ), + set_test_value( + pt.matrix(), rng.random(size=(2, 3)).astype(config.floatX) + ), + ], None, None, ), ( - set_test_value( - pt.matrix(), np.array([[1, 1, 1], [0, 0, 0]], dtype=config.floatX) - ), - set_test_value(pt.matrix(), rng.random(size=(2, 3)).astype(config.floatX)), + [ + set_test_value( + pt.matrix(), np.array([[1, 1, 1], [0, 0, 0]], dtype=config.floatX) + ), + set_test_value( + pt.matrix(), rng.random(size=(2, 3)).astype(config.floatX) + ), + ], 0, None, ), ( - set_test_value( - pt.matrix(), np.array([[1, 1, 1], [0, 0, 0]], dtype=config.floatX) - ), - set_test_value(pt.matrix(), rng.random(size=(2, 3)).astype(config.floatX)), + [ + set_test_value( + pt.matrix(), np.array([[1, 1, 1], [0, 0, 0]], dtype=config.floatX) + ), + set_test_value( + pt.matrix(), rng.random(size=(2, 3)).astype(config.floatX) + ), + ], 1, None, ), ], ) -def test_SoftmaxGrad(dy, sm, axis, exc): - g = SoftmaxGrad(axis=axis)(dy, sm) +def test_SoftmaxGrad(inputs, axis, exc): + test_values = {k: v for d in inputs for k, v in d.items()} + inputs = list(test_values.keys()) + g = SoftmaxGrad(axis=axis)(*inputs) g_fg = FunctionGraph(outputs=[g]) cm = contextlib.suppress() if exc is None else pytest.warns(exc) @@ -438,8 +454,8 @@ def test_SoftmaxGrad(dy, sm, axis, exc): compare_numba_and_py( g_fg, [ - i.tag.test_value - for i in g_fg.inputs + test_values[i] + for i in test_values if not isinstance(i, SharedVariable | Constant) ], ) @@ -456,7 +472,7 @@ def test_SoftMaxGrad_constant_dy(): @pytest.mark.parametrize( - "x, axis, exc", + "test_values, axis, exc", [ ( set_test_value(pt.vector(), rng.random(size=(2,)).astype(config.floatX)), @@ -475,7 +491,8 @@ def test_SoftMaxGrad_constant_dy(): ), ], ) -def test_Softmax(x, axis, exc): +def test_Softmax(test_values, axis, exc): + x = next(iter(test_values.keys())) g = Softmax(axis=axis)(x) g_fg = FunctionGraph(outputs=[g]) @@ -484,15 +501,15 @@ def test_Softmax(x, axis, exc): compare_numba_and_py( g_fg, [ - i.tag.test_value - for i in g_fg.inputs + test_values[i] + for i in test_values if not isinstance(i, SharedVariable | Constant) ], ) @pytest.mark.parametrize( - "x, axis, exc", + "test_values, axis, exc", [ ( set_test_value(pt.vector(), rng.random(size=(2,)).astype(config.floatX)), @@ -511,7 +528,8 @@ def test_Softmax(x, axis, exc): ), ], ) -def test_LogSoftmax(x, axis, exc): +def test_LogSoftmax(test_values, axis, exc): + x = next(iter(test_values.keys())) g = LogSoftmax(axis=axis)(x) g_fg = FunctionGraph(outputs=[g]) @@ -520,15 +538,15 @@ def test_LogSoftmax(x, axis, exc): compare_numba_and_py( g_fg, [ - i.tag.test_value - for i in g_fg.inputs + test_values[i] + for i in test_values if not isinstance(i, SharedVariable | Constant) ], ) @pytest.mark.parametrize( - "x, axes, exc", + "test_values, axes, exc", [ ( set_test_value(pt.dscalar(), np.array(0.0, dtype="float64")), @@ -552,7 +570,8 @@ def test_LogSoftmax(x, axis, exc): ), ], ) -def test_Max(x, axes, exc): +def test_Max(test_values, axes, exc): + x = next(iter(test_values.keys())) g = ptm.Max(axes)(x) if isinstance(g, list): @@ -565,15 +584,15 @@ def test_Max(x, axes, exc): compare_numba_and_py( g_fg, [ - i.tag.test_value - for i in g_fg.inputs + test_values[i] + for i in test_values if not isinstance(i, SharedVariable | Constant) ], ) @pytest.mark.parametrize( - "x, axes, exc", + "test_values, axes, 
exc", [ ( set_test_value(pt.dscalar(), np.array(0.0, dtype="float64")), @@ -597,7 +616,8 @@ def test_Max(x, axes, exc): ), ], ) -def test_Argmax(x, axes, exc): +def test_Argmax(test_values, axes, exc): + x = next(iter(test_values.keys())) g = ptm.Argmax(axes)(x) if isinstance(g, list): @@ -610,8 +630,8 @@ def test_Argmax(x, axes, exc): compare_numba_and_py( g_fg, [ - i.tag.test_value - for i in g_fg.inputs + test_values[i] + for i in test_values if not isinstance(i, SharedVariable | Constant) ], ) diff --git a/tests/link/numba/test_extra_ops.py b/tests/link/numba/test_extra_ops.py index e61862ffdf..e39ce180c1 100644 --- a/tests/link/numba/test_extra_ops.py +++ b/tests/link/numba/test_extra_ops.py @@ -16,20 +16,21 @@ @pytest.mark.parametrize( - "val", + "test_values", [ set_test_value(pt.lscalar(), np.array(6, dtype="int64")), ], ) -def test_Bartlett(val): +def test_Bartlett(test_values): + val = next(iter(test_values.keys())) g = extra_ops.bartlett(val) g_fg = FunctionGraph(outputs=[g]) compare_numba_and_py( g_fg, [ - i.tag.test_value - for i in g_fg.inputs + test_values[i] + for i in test_values if not isinstance(i, SharedVariable | Constant) ], assert_fn=lambda x, y: np.testing.assert_allclose(x, y, atol=1e-15), @@ -37,7 +38,7 @@ def test_Bartlett(val): @pytest.mark.parametrize( - "val, axis, mode", + "test_values, axis, mode", [ ( set_test_value( @@ -97,22 +98,23 @@ def test_Bartlett(val): ), ], ) -def test_CumOp(val, axis, mode): +def test_CumOp(test_values, axis, mode): + val = next(iter(test_values.keys())) g = extra_ops.CumOp(axis=axis, mode=mode)(val) g_fg = FunctionGraph(outputs=[g]) compare_numba_and_py( g_fg, [ - i.tag.test_value - for i in g_fg.inputs + test_values[i] + for i in test_values if not isinstance(i, SharedVariable | Constant) ], ) @pytest.mark.parametrize( - "a, val", + "inputs", [ ( set_test_value(pt.lmatrix(), np.zeros((10, 2), dtype="int64")), @@ -120,22 +122,26 @@ def test_CumOp(val, axis, mode): ) ], ) -def test_FillDiagonal(a, val): - g = extra_ops.FillDiagonal()(a, val) +def test_FillDiagonal(inputs): + print(inputs) + # assert 0 + test_values = {k: v for d in inputs for k, v in d.items()} + inputs = list(test_values.keys()) + g = extra_ops.FillDiagonal()(*inputs) g_fg = FunctionGraph(outputs=[g]) compare_numba_and_py( g_fg, [ - i.tag.test_value - for i in g_fg.inputs + test_values[i] + for i in test_values if not isinstance(i, SharedVariable | Constant) ], ) @pytest.mark.parametrize( - "a, val, offset", + "inputs", [ ( set_test_value(pt.lmatrix(), np.zeros((10, 2), dtype="int64")), @@ -154,15 +160,17 @@ def test_FillDiagonal(a, val): ), ], ) -def test_FillDiagonalOffset(a, val, offset): - g = extra_ops.FillDiagonalOffset()(a, val, offset) +def test_FillDiagonalOffset(inputs): + test_values = {k: v for d in inputs for k, v in d.items()} + inputs = list(test_values.keys()) + g = extra_ops.FillDiagonalOffset()(*inputs) g_fg = FunctionGraph(outputs=[g]) compare_numba_and_py( g_fg, [ - i.tag.test_value - for i in g_fg.inputs + test_values[i] + for i in test_values if not isinstance(i, SharedVariable | Constant) ], ) @@ -238,6 +246,10 @@ def test_FillDiagonalOffset(a, val, offset): ], ) def test_RavelMultiIndex(arr, shape, mode, order, exc): + test_values = {k: v for d in arr for k, v in d.items()} + arr = tuple(test_values.keys()) + test_values.update(shape) + shape = next(iter(shape.keys())) g = extra_ops.RavelMultiIndex(mode, order)(*((*arr, shape))) g_fg = FunctionGraph(outputs=[g]) @@ -246,15 +258,15 @@ def test_RavelMultiIndex(arr, shape, mode, order, exc): 
compare_numba_and_py( g_fg, [ - i.tag.test_value - for i in g_fg.inputs + test_values[i] + for i in test_values if not isinstance(i, SharedVariable | Constant) ], ) @pytest.mark.parametrize( - "x, repeats, axis, exc", + "test_values, repeats, axis, exc", [ ( set_test_value(pt.lscalar(), np.array(1, dtype="int64")), @@ -282,7 +294,10 @@ def test_RavelMultiIndex(arr, shape, mode, order, exc): ), ], ) -def test_Repeat(x, repeats, axis, exc): +def test_Repeat(test_values, repeats, axis, exc): + x = next(iter(test_values.keys())) + test_values.update(repeats) + repeats = next(iter(repeats.keys())) g = extra_ops.Repeat(axis)(x, repeats) g_fg = FunctionGraph(outputs=[g]) @@ -291,15 +306,15 @@ def test_Repeat(x, repeats, axis, exc): compare_numba_and_py( g_fg, [ - i.tag.test_value - for i in g_fg.inputs + test_values[i] + for i in test_values if not isinstance(i, SharedVariable | Constant) ], ) @pytest.mark.parametrize( - "x, axis, return_index, return_inverse, return_counts, exc", + "test_values, axis, return_index, return_inverse, return_counts, exc", [ ( set_test_value(pt.lscalar(), np.array(1, dtype="int64")), @@ -347,7 +362,8 @@ def test_Repeat(x, repeats, axis, exc): ), ], ) -def test_Unique(x, axis, return_index, return_inverse, return_counts, exc): +def test_Unique(test_values, axis, return_index, return_inverse, return_counts, exc): + x = next(iter(test_values.keys())) g = extra_ops.Unique(return_index, return_inverse, return_counts, axis)(x) if isinstance(g, list): @@ -360,15 +376,15 @@ def test_Unique(x, axis, return_index, return_inverse, return_counts, exc): compare_numba_and_py( g_fg, [ - i.tag.test_value - for i in g_fg.inputs + test_values[i] + for i in test_values if not isinstance(i, SharedVariable | Constant) ], ) @pytest.mark.parametrize( - "arr, shape, order, exc", + "test_values, shape, order, exc", [ ( set_test_value(pt.lvector(), np.array([9, 15, 1], dtype="int64")), @@ -390,7 +406,8 @@ def test_Unique(x, axis, return_index, return_inverse, return_counts, exc): ), ], ) -def test_UnravelIndex(arr, shape, order, exc): +def test_UnravelIndex(test_values, shape, order, exc): + arr = next(iter(test_values.keys())) g = extra_ops.UnravelIndex(order)(arr, shape) if isinstance(g, list): @@ -403,53 +420,71 @@ def test_UnravelIndex(arr, shape, order, exc): compare_numba_and_py( g_fg, [ - i.tag.test_value - for i in g_fg.inputs + test_values[i] + for i in test_values if not isinstance(i, SharedVariable | Constant) ], ) @pytest.mark.parametrize( - "a, v, side, sorter, exc", + "inputs, side, sorter, exc", [ ( - set_test_value(pt.vector(), np.array([1.0, 2.0, 3.0], dtype=config.floatX)), - set_test_value(pt.matrix(), rng.random((3, 2)).astype(config.floatX)), + [ + set_test_value( + pt.vector(), np.array([1.0, 2.0, 3.0], dtype=config.floatX) + ), + set_test_value(pt.matrix(), rng.random((3, 2)).astype(config.floatX)), + ], "left", None, None, ), pytest.param( - set_test_value( - pt.vector(), - np.array([0.29769574, 0.71649186, 0.20475563]).astype(config.floatX), - ), - set_test_value( - pt.matrix(), - np.array( - [ - [0.18847123, 0.39659508], - [0.56220006, 0.57428752], - [0.86720994, 0.44522637], - ] - ).astype(config.floatX), - ), + [ + set_test_value( + pt.vector(), + np.array([0.29769574, 0.71649186, 0.20475563]).astype( + config.floatX + ), + ), + set_test_value( + pt.matrix(), + np.array( + [ + [0.18847123, 0.39659508], + [0.56220006, 0.57428752], + [0.86720994, 0.44522637], + ] + ).astype(config.floatX), + ), + ], "left", None, None, ), ( - set_test_value(pt.vector(), 
np.array([1.0, 2.0, 3.0], dtype=config.floatX)), - set_test_value(pt.matrix(), rng.random((3, 2)).astype(config.floatX)), + [ + set_test_value( + pt.vector(), np.array([1.0, 2.0, 3.0], dtype=config.floatX) + ), + set_test_value(pt.matrix(), rng.random((3, 2)).astype(config.floatX)), + ], "right", set_test_value(pt.lvector(), np.array([0, 2, 1])), UserWarning, ), ], ) -def test_Searchsorted(a, v, side, sorter, exc): - g = extra_ops.SearchsortedOp(side)(a, v, sorter) +def test_Searchsorted(inputs, side, sorter, exc): + test_values = {k: v for d in inputs for k, v in d.items()} + inputs = list(test_values.keys()) + if isinstance(sorter, dict): + test_values.update(sorter) + sorter = next(iter(sorter.keys())) + inputs.append(sorter) + g = extra_ops.SearchsortedOp(side)(*inputs) g_fg = FunctionGraph(outputs=[g]) cm = contextlib.suppress() if exc is None else pytest.warns(exc) @@ -457,8 +492,8 @@ def test_Searchsorted(a, v, side, sorter, exc): compare_numba_and_py( g_fg, [ - i.tag.test_value - for i in g_fg.inputs + test_values[i] + for i in test_values if not isinstance(i, SharedVariable | Constant) ], ) diff --git a/tests/link/numba/test_nlinalg.py b/tests/link/numba/test_nlinalg.py index 6fbb6e6c58..334c189b6b 100644 --- a/tests/link/numba/test_nlinalg.py +++ b/tests/link/numba/test_nlinalg.py @@ -15,32 +15,38 @@ @pytest.mark.parametrize( - "A, x, lower, exc", + "inputs, lower, exc", [ ( - set_test_value( - pt.dmatrix(), - (lambda x: x.T.dot(x))(rng.random(size=(3, 3)).astype("float64")), - ), - set_test_value(pt.dvector(), rng.random(size=(3,)).astype("float64")), + [ + set_test_value( + pt.dmatrix(), + (lambda x: x.T.dot(x))(rng.random(size=(3, 3)).astype("float64")), + ), + set_test_value(pt.dvector(), rng.random(size=(3,)).astype("float64")), + ], "gen", None, ), ( - set_test_value( - pt.lmatrix(), - (lambda x: x.T.dot(x))( - rng.integers(1, 10, size=(3, 3)).astype("int64") + [ + set_test_value( + pt.lmatrix(), + (lambda x: x.T.dot(x))( + rng.integers(1, 10, size=(3, 3)).astype("int64") + ), ), - ), - set_test_value(pt.dvector(), rng.random(size=(3,)).astype("float64")), + set_test_value(pt.dvector(), rng.random(size=(3,)).astype("float64")), + ], "gen", None, ), ], ) -def test_Solve(A, x, lower, exc): - g = slinalg.Solve(lower=lower, b_ndim=1)(A, x) +def test_Solve(inputs, lower, exc): + test_values = {k: v for d in inputs for k, v in d.items()} + inputs = list(test_values.keys()) + g = slinalg.Solve(lower=lower, b_ndim=1)(*inputs) if isinstance(g, list): g_fg = FunctionGraph(outputs=g) @@ -52,15 +58,15 @@ def test_Solve(A, x, lower, exc): compare_numba_and_py( g_fg, [ - i.tag.test_value - for i in g_fg.inputs + test_values[i] + for i in test_values if not isinstance(i, SharedVariable | Constant) ], ) @pytest.mark.parametrize( - "x, exc", + "test_values, exc", [ ( set_test_value( @@ -78,7 +84,8 @@ def test_Solve(A, x, lower, exc): ), ], ) -def test_Det(x, exc): +def test_Det(test_values, exc): + x = next(iter(test_values.keys())) g = nlinalg.Det()(x) g_fg = FunctionGraph(outputs=[g]) @@ -87,15 +94,15 @@ def test_Det(x, exc): compare_numba_and_py( g_fg, [ - i.tag.test_value - for i in g_fg.inputs + test_values[i] + for i in test_values if not isinstance(i, SharedVariable | Constant) ], ) @pytest.mark.parametrize( - "x, exc", + "test_values, exc", [ ( set_test_value( @@ -113,7 +120,8 @@ def test_Det(x, exc): ), ], ) -def test_SLogDet(x, exc): +def test_SLogDet(test_values, exc): + x = next(iter(test_values.keys())) g = nlinalg.SLogDet()(x) g_fg = FunctionGraph(outputs=g) @@ -122,8 +130,8 
@@ def test_SLogDet(x, exc): compare_numba_and_py( g_fg, [ - i.tag.test_value - for i in g_fg.inputs + test_values[i] + for i in test_values if not isinstance(i, SharedVariable | Constant) ], ) @@ -154,7 +162,7 @@ def test_SLogDet(x, exc): @pytest.mark.parametrize( - "x, exc", + "test_values, exc", [ ( set_test_value( @@ -181,7 +189,8 @@ def test_SLogDet(x, exc): ), ], ) -def test_Eig(x, exc): +def test_Eig(test_values, exc): + x = next(iter(test_values.keys())) g = nlinalg.Eig()(x) if isinstance(g, list): @@ -194,15 +203,15 @@ def test_Eig(x, exc): compare_numba_and_py( g_fg, [ - i.tag.test_value - for i in g_fg.inputs + test_values[i] + for i in test_values if not isinstance(i, SharedVariable | Constant) ], ) @pytest.mark.parametrize( - "x, uplo, exc", + "test_values, uplo, exc", [ ( set_test_value( @@ -224,7 +233,8 @@ def test_Eig(x, exc): ), ], ) -def test_Eigh(x, uplo, exc): +def test_Eigh(test_values, uplo, exc): + x = next(iter(test_values.keys())) g = nlinalg.Eigh(uplo)(x) if isinstance(g, list): @@ -237,15 +247,15 @@ def test_Eigh(x, uplo, exc): compare_numba_and_py( g_fg, [ - i.tag.test_value - for i in g_fg.inputs + test_values[i] + for i in test_values if not isinstance(i, SharedVariable | Constant) ], ) @pytest.mark.parametrize( - "op, x, exc, op_args", + "op, test_values, exc, op_args", [ ( nlinalg.MatrixInverse, @@ -289,7 +299,8 @@ def test_Eigh(x, uplo, exc): ), ], ) -def test_matrix_inverses(op, x, exc, op_args): +def test_matrix_inverses(op, test_values, exc, op_args): + x = next(iter(test_values.keys())) g = op(*op_args)(x) g_fg = FunctionGraph(outputs=[g]) @@ -298,15 +309,15 @@ def test_matrix_inverses(op, x, exc, op_args): compare_numba_and_py( g_fg, [ - i.tag.test_value - for i in g_fg.inputs + test_values[i] + for i in test_values if not isinstance(i, SharedVariable | Constant) ], ) @pytest.mark.parametrize( - "x, mode, exc", + "test_values, mode, exc", [ ( set_test_value( @@ -346,7 +357,8 @@ def test_matrix_inverses(op, x, exc, op_args): ), ], ) -def test_QRFull(x, mode, exc): +def test_QRFull(test_values, mode, exc): + x = next(iter(test_values.keys())) g = nlinalg.QRFull(mode)(x) if isinstance(g, list): @@ -359,15 +371,15 @@ def test_QRFull(x, mode, exc): compare_numba_and_py( g_fg, [ - i.tag.test_value - for i in g_fg.inputs + test_values[i] + for i in test_values if not isinstance(i, SharedVariable | Constant) ], ) @pytest.mark.parametrize( - "x, full_matrices, compute_uv, exc", + "test_values, full_matrices, compute_uv, exc", [ ( set_test_value( @@ -411,7 +423,8 @@ def test_QRFull(x, mode, exc): ), ], ) -def test_SVD(x, full_matrices, compute_uv, exc): +def test_SVD(test_values, full_matrices, compute_uv, exc): + x = next(iter(test_values.keys())) g = nlinalg.SVD(full_matrices, compute_uv)(x) if isinstance(g, list): @@ -424,8 +437,8 @@ def test_SVD(x, full_matrices, compute_uv, exc): compare_numba_and_py( g_fg, [ - i.tag.test_value - for i in g_fg.inputs + test_values[i] + for i in test_values if not isinstance(i, SharedVariable | Constant) ], ) diff --git a/tests/link/numba/test_random.py b/tests/link/numba/test_random.py index b966ed2870..5dbbdefcd8 100644 --- a/tests/link/numba/test_random.py +++ b/tests/link/numba/test_random.py @@ -327,7 +327,7 @@ def test_multivariate_normal(): np.array(1.0, dtype=np.float64), ), ], - pt.as_tensor(tuple(set_test_value(pt.lscalar(), v) for v in [3, 2])), + [set_test_value(pt.lscalar(), v) for v in [3, 2]], ), ( ptr.poisson, @@ -523,15 +523,22 @@ def test_multivariate_normal(): ) def test_aligned_RandomVariable(rv_op, 
dist_args, size): """Tests for Numba samplers that are one-to-one with PyTensor's/NumPy's samplers.""" + inputs = {k: v for d in dist_args for k, v in d.items()} + dist_args = list(inputs.keys()) rng = shared(np.random.default_rng(29402)) + test_values = {} + if isinstance(size, list): + size = {k: v for d in size for k, v in d.items()} + test_values.update(size) + size = pt.as_tensor(tuple(size.keys())) + test_values.update(inputs) g = rv_op(*dist_args, size=size, rng=rng) g_fg = FunctionGraph(outputs=[g]) - compare_numba_and_py( g_fg, [ - i.tag.test_value - for i in g_fg.inputs + test_values[i] + for i in test_values if not isinstance(i, SharedVariable | Constant) ], eval_obj_mode=False, # No python impl @@ -577,18 +584,20 @@ def test_aligned_RandomVariable(rv_op, dist_args, size): ) def test_unaligned_RandomVariable(rv_op, dist_args, base_size, cdf_name, params_conv): """Tests for Numba samplers that are not one-to-one with PyTensor's/NumPy's samplers.""" + test_values = {k: v for d in dist_args for k, v in d.items()} + dist_args = list(test_values.keys()) rng = shared(np.random.default_rng(29402)) g = rv_op(*dist_args, size=(2000, *base_size), rng=rng) g_fn = function(dist_args, g, mode=numba_mode) samples = g_fn( *[ - i.tag.test_value - for i in g_fn.maker.fgraph.inputs + test_values[i] + for i in test_values if not isinstance(i, SharedVariable | Constant) ] ) - bcast_dist_args = np.broadcast_arrays(*[i.tag.test_value for i in dist_args]) + bcast_dist_args = np.broadcast_arrays(*[test_values[i] for i in test_values]) for idx in np.ndindex(*base_size): cdf_params = params_conv(*(arg[idx] for arg in bcast_dist_args)) @@ -599,7 +608,7 @@ def test_unaligned_RandomVariable(rv_op, dist_args, base_size, cdf_name, params_ @pytest.mark.parametrize( - "a, size, cm", + "test_values, size, cm", [ pytest.param( set_test_value( @@ -636,13 +645,14 @@ def test_unaligned_RandomVariable(rv_op, dist_args, base_size, cdf_name, params_ ), ], ) -def test_DirichletRV(a, size, cm): +def test_DirichletRV(test_values, size, cm): + a = next(iter(test_values.keys())) rng = shared(np.random.default_rng(29402)) g = ptr.dirichlet(a, size=size, rng=rng) g_fn = function([a], g, mode=numba_mode) with cm: - a_val = a.tag.test_value + a_val = test_values[a] all_samples = [] for i in range(1000): diff --git a/tests/link/numba/test_scalar.py b/tests/link/numba/test_scalar.py index 437956bdc0..8588188da9 100644 --- a/tests/link/numba/test_scalar.py +++ b/tests/link/numba/test_scalar.py @@ -17,49 +17,58 @@ @pytest.mark.parametrize( - "x, y", + "inputs", [ ( - set_test_value(pt.lvector(), np.arange(4, dtype="int64")), - set_test_value(pt.dvector(), np.arange(4, dtype="float64")), + [ + set_test_value(pt.lvector(), np.arange(4, dtype="int64")), + set_test_value(pt.dvector(), np.arange(4, dtype="float64")), + ] ), ( - set_test_value(pt.dmatrix(), np.arange(4, dtype="float64").reshape((2, 2))), - set_test_value(pt.lscalar(), np.array(4, dtype="int64")), + [ + set_test_value( + pt.dmatrix(), np.arange(4, dtype="float64").reshape((2, 2)) + ), + set_test_value(pt.lscalar(), np.array(4, dtype="int64")), + ] ), ], ) -def test_Second(x, y): +def test_Second(inputs): # We use the `Elemwise`-wrapped version of `Second` - g = pt.second(x, y) + test_values = {k: v for d in inputs for k, v in d.items()} + inputs = list(test_values.keys()) + g = pt.second(*inputs) g_fg = FunctionGraph(outputs=[g]) compare_numba_and_py( g_fg, [ - i.tag.test_value - for i in g_fg.inputs + test_values[i] + for i in test_values if not isinstance(i, 
SharedVariable | Constant) ], ) @pytest.mark.parametrize( - "v, min, max", + "test_values, min, max", [ (set_test_value(pt.scalar(), np.array(10, dtype=config.floatX)), 3.0, 7.0), (set_test_value(pt.scalar(), np.array(1, dtype=config.floatX)), 3.0, 7.0), (set_test_value(pt.scalar(), np.array(10, dtype=config.floatX)), 7.0, 3.0), ], ) -def test_Clip(v, min, max): +def test_Clip(test_values, min, max): + v = next(iter(test_values.keys())) g = ps.clip(v, min, max) g_fg = FunctionGraph(outputs=[g]) compare_numba_and_py( g_fg, [ - i.tag.test_value - for i in g_fg.inputs + test_values[i] + for i in test_values if not isinstance(i, SharedVariable | Constant) ], ) @@ -104,39 +113,41 @@ def test_Composite(inputs, input_values, scalar_fn): @pytest.mark.parametrize( - "v, dtype", + "test_values, dtype", [ (set_test_value(pt.fscalar(), np.array(1.0, dtype="float32")), psb.float64), (set_test_value(pt.dscalar(), np.array(1.0, dtype="float64")), psb.float32), ], ) -def test_Cast(v, dtype): +def test_Cast(test_values, dtype): + v = next(iter(test_values.keys())) g = psb.Cast(dtype)(v) g_fg = FunctionGraph(outputs=[g]) compare_numba_and_py( g_fg, [ - i.tag.test_value - for i in g_fg.inputs + test_values[i] + for i in test_values if not isinstance(i, SharedVariable | Constant) ], ) @pytest.mark.parametrize( - "v, dtype", + "test_values, dtype", [ (set_test_value(pt.iscalar(), np.array(10, dtype="int32")), psb.float64), ], ) -def test_reciprocal(v, dtype): +def test_reciprocal(test_values, dtype): + v = next(iter(test_values.keys())) g = psb.reciprocal(v) g_fg = FunctionGraph(outputs=[g]) compare_numba_and_py( g_fg, [ - i.tag.test_value - for i in g_fg.inputs + test_values[i] + for i in test_values if not isinstance(i, SharedVariable | Constant) ], ) diff --git a/tests/link/numba/test_scan.py b/tests/link/numba/test_scan.py index 5db0f24222..4d2ba6900d 100644 --- a/tests/link/numba/test_scan.py +++ b/tests/link/numba/test_scan.py @@ -248,10 +248,8 @@ def seir_one_step(ct0, dt0, st0, et0, it0, logp_c, logp_d, beta, gamma, delta): benchmark(scan_fn, *test_input_vals) -@config.change_flags(compute_test_value="raise") def test_scan_tap_output(): a_pt = pt.scalar("a") - a_pt.tag.test_value = 10.0 b_pt = pt.arange(11).astype(config.floatX) b_pt.name = "b" diff --git a/tests/link/numba/test_tensor_basic.py b/tests/link/numba/test_tensor_basic.py index 269fc57940..62555f4f4b 100644 --- a/tests/link/numba/test_tensor_basic.py +++ b/tests/link/numba/test_tensor_basic.py @@ -36,14 +36,18 @@ ], ) def test_Alloc(v, shape): + test_values = {} + if isinstance(v, dict): + test_values = v + v = next(iter(v.keys())) g = pt.alloc(v, *shape) g_fg = FunctionGraph(outputs=[g]) _, (numba_res,) = compare_numba_and_py( g_fg, [ - i.tag.test_value - for i in g_fg.inputs + test_values[i] + for i in test_values if not isinstance(i, SharedVariable | Constant) ], ) @@ -64,56 +68,59 @@ def test_AllocEmpty(): @pytest.mark.parametrize( - "v", [set_test_value(ps.float64(), np.array(1.0, dtype="float64"))] + "test_values", [set_test_value(ps.float64(), np.array(1.0, dtype="float64"))] ) -def test_TensorFromScalar(v): +def test_TensorFromScalar(test_values): + v = next(iter(test_values.keys())) g = ptb.TensorFromScalar()(v) g_fg = FunctionGraph(outputs=[g]) compare_numba_and_py( g_fg, [ - i.tag.test_value - for i in g_fg.inputs + test_values[i] + for i in test_values if not isinstance(i, SharedVariable | Constant) ], ) @pytest.mark.parametrize( - "v", + "test_values", [ set_test_value(pt.scalar(), np.array(1.0, dtype=config.floatX)), ], ) -def 
test_ScalarFromTensor(v): +def test_ScalarFromTensor(test_values): + v = next(iter(test_values)) g = ptb.ScalarFromTensor()(v) g_fg = FunctionGraph(outputs=[g]) compare_numba_and_py( g_fg, [ - i.tag.test_value - for i in g_fg.inputs + test_values[i] + for i in test_values if not isinstance(i, SharedVariable | Constant) ], ) def test_Unbroadcast(): - v = set_test_value(pt.row(), np.array([[1.0, 2.0]], dtype=config.floatX)) + test_values = set_test_value(pt.row(), np.array([[1.0, 2.0]], dtype=config.floatX)) + v = next(iter(test_values)) g = Unbroadcast(0)(v) g_fg = FunctionGraph(outputs=[g]) compare_numba_and_py( g_fg, [ - i.tag.test_value - for i in g_fg.inputs + test_values[i] + for i in test_values if not isinstance(i, SharedVariable | Constant) ], ) @pytest.mark.parametrize( - "vals, dtype", + "inputs, dtype", [ ( ( @@ -140,47 +147,53 @@ def test_Unbroadcast(): ), ], ) -def test_MakeVector(vals, dtype): +def test_MakeVector(inputs, dtype): + test_values = {k: v for d in inputs for k, v in d.items()} + vals = list(test_values.keys()) g = ptb.MakeVector(dtype)(*vals) g_fg = FunctionGraph(outputs=[g]) compare_numba_and_py( g_fg, [ - i.tag.test_value - for i in g_fg.inputs + test_values[i] + for i in test_values if not isinstance(i, SharedVariable | Constant) ], ) @pytest.mark.parametrize( - "start, stop, step, dtype", + "inputs, dtype", [ ( - set_test_value(pt.lscalar(), np.array(1)), - set_test_value(pt.lscalar(), np.array(10)), - set_test_value(pt.lscalar(), np.array(3)), + [ + set_test_value(pt.lscalar(), np.array(1)), + set_test_value(pt.lscalar(), np.array(10)), + set_test_value(pt.lscalar(), np.array(3)), + ], config.floatX, ), ], ) -def test_ARange(start, stop, step, dtype): - g = ptb.ARange(dtype)(start, stop, step) +def test_ARange(inputs, dtype): + test_values = {k: v for d in inputs for k, v in d.items()} + inputs = list(test_values.keys()) + g = ptb.ARange(dtype)(*inputs) g_fg = FunctionGraph(outputs=[g]) compare_numba_and_py( g_fg, [ - i.tag.test_value - for i in g_fg.inputs + test_values[i] + for i in test_values if not isinstance(i, SharedVariable | Constant) ], ) @pytest.mark.parametrize( - "vals, axis", + "inputs, axis", [ ( ( @@ -228,25 +241,28 @@ def test_ARange(start, stop, step, dtype): ), ], ) -def test_Join(vals, axis): +def test_Join(inputs, axis): + test_values = {k: v for d in inputs for k, v in d.items()} + vals = list(test_values.keys()) g = pt.join(axis, *vals) g_fg = FunctionGraph(outputs=[g]) compare_numba_and_py( g_fg, [ - i.tag.test_value - for i in g_fg.inputs + test_values[i] + for i in test_values if not isinstance(i, SharedVariable | Constant) ], ) def test_Join_view(): - vals = ( - set_test_value(pt.matrix(), rng.normal(size=(2, 2)).astype(config.floatX)), - set_test_value(pt.matrix(), rng.normal(size=(2, 2)).astype(config.floatX)), - ) + test_values = { + pt.matrix(): rng.normal(size=(2, 2)).astype(config.floatX), + pt.matrix(): rng.normal(size=(2, 2)).astype(config.floatX), + } + vals = list(test_values.keys()) g = ptb.Join(view=1)(1, *vals) g_fg = FunctionGraph(outputs=[g]) @@ -254,15 +270,15 @@ def test_Join_view(): compare_numba_and_py( g_fg, [ - i.tag.test_value - for i in g_fg.inputs + test_values[i] + for i in test_values if not isinstance(i, SharedVariable | Constant) ], ) @pytest.mark.parametrize( - "n_splits, axis, values, sizes", + "n_splits, axis, test_values, sizes", [ ( 0, @@ -304,7 +320,10 @@ def test_Join_view(): ), ], ) -def test_Split(n_splits, axis, values, sizes): +def test_Split(n_splits, axis, test_values, sizes): + values = 
next(iter(test_values.keys())) + test_values.update(sizes) + sizes = next(iter(sizes.keys())) g = pt.split(values, sizes, n_splits, axis=axis) assert len(g) == n_splits if n_splits == 0: @@ -314,8 +333,8 @@ def test_Split(n_splits, axis, values, sizes): compare_numba_and_py( g_fg, [ - i.tag.test_value - for i in g_fg.inputs + test_values[i] + for i in test_values if not isinstance(i, SharedVariable | Constant) ], ) @@ -346,7 +365,7 @@ def test_Split_view(): @pytest.mark.parametrize( - "val, offset", + "test_values, offset", [ ( set_test_value( @@ -366,15 +385,16 @@ def test_Split_view(): ), ], ) -def test_ExtractDiag(val, offset): +def test_ExtractDiag(test_values, offset): + val = next(iter(test_values.keys())) g = pt.diag(val, offset) g_fg = FunctionGraph(outputs=[g]) compare_numba_and_py( g_fg, [ - i.tag.test_value - for i in g_fg.inputs + test_values[i] + for i in test_values if not isinstance(i, SharedVariable | Constant) ], ) @@ -423,14 +443,19 @@ def wrap(x): ], ) def test_Eye(n, m, k, dtype): + test_values = n + if m: + test_values.update(m) + m = next(iter(m)) + n = next(iter(n)) g = pt.eye(n, m, k, dtype=dtype) g_fg = FunctionGraph(outputs=[g]) compare_numba_and_py( g_fg, [ - i.tag.test_value - for i in g_fg.inputs + test_values[i] + for i in test_values if not isinstance(i, SharedVariable | Constant) ], ) diff --git a/tests/scan/test_basic.py b/tests/scan/test_basic.py index 7bdf490b68..fac500192f 100644 --- a/tests/scan/test_basic.py +++ b/tests/scan/test_basic.py @@ -1426,11 +1426,12 @@ def test_grad_multiple_outs_taps(self): x0 = vector("x0") y0 = vector("y0") - W_in1.tag.test_value = vW_in1 - u1.tag.test_value = v_u1 - u2.tag.test_value = v_u2 - x0.tag.test_value = v_x0 - y0.tag.test_value = v_y0 + with pytest.warns(FutureWarning): + W_in1.tag.test_value = vW_in1 + u1.tag.test_value = v_u1 + u2.tag.test_value = v_u2 + x0.tag.test_value = v_x0 + y0.tag.test_value = v_y0 def f_rnn_cmpl(u1_t, u2_tm1, u2_t, u2_tp1, x_tm1, y_tm1, y_tm3, W_in1): return [ @@ -2210,7 +2211,8 @@ def f_py(): def test_compute_test_values(): """Verify that test values can be used with scan.""" x = vector("x") - x.tag.test_value = np.ones(3, dtype=config.floatX) + with pytest.warns(FutureWarning): + x.tag.test_value = np.ones(3, dtype=config.floatX) y = shared(np.arange(3, dtype=config.floatX), name="y") @@ -2242,38 +2244,39 @@ def test_compute_test_value_grad(): WEIGHT = np.array([1, 2, 1, 3, 4, 1, 5, 6, 1, 7, 8, 1], dtype="float32") with config.change_flags(compute_test_value="raise", exception_verbosity="high"): - W_flat = fvector(name="W") - W_flat.tag.test_value = WEIGHT - W = W_flat.reshape((2, 2, 3)) - - outputs_mi = pt.as_tensor_variable(np.asarray(0, dtype="float32")) - outputs_mi.tag.test_value = np.asarray(0, dtype="float32") - - def loss_mi(mi, sum_mi, W): - outputs_ti = pt.as_tensor_variable(np.asarray(0, dtype="float32")) - outputs_ti.tag.test_value = np.asarray(0, dtype="float32") - - def loss_ti(ti, sum_ti, mi, W): - return W.sum().sum().sum() + sum_ti + with pytest.warns(FutureWarning): + W_flat = fvector(name="W") + W_flat.tag.test_value = WEIGHT + W = W_flat.reshape((2, 2, 3)) + + outputs_mi = pt.as_tensor_variable(np.asarray(0, dtype="float32")) + outputs_mi.tag.test_value = np.asarray(0, dtype="float32") + + def loss_mi(mi, sum_mi, W): + outputs_ti = pt.as_tensor_variable(np.asarray(0, dtype="float32")) + outputs_ti.tag.test_value = np.asarray(0, dtype="float32") + + def loss_ti(ti, sum_ti, mi, W): + return W.sum().sum().sum() + sum_ti + + result_ti, _ = scan( + fn=loss_ti, + 
outputs_info=outputs_ti, + sequences=pt.arange(W.shape[1], dtype="int32"), + non_sequences=[mi, W], + ) + lossmi = result_ti[-1] + return sum_mi + lossmi - result_ti, _ = scan( - fn=loss_ti, - outputs_info=outputs_ti, - sequences=pt.arange(W.shape[1], dtype="int32"), - non_sequences=[mi, W], + result_mi, _ = scan( + fn=loss_mi, + outputs_info=outputs_mi, + sequences=pt.arange(W.shape[0], dtype="int32"), + non_sequences=[W], ) - lossmi = result_ti[-1] - return sum_mi + lossmi - result_mi, _ = scan( - fn=loss_mi, - outputs_info=outputs_mi, - sequences=pt.arange(W.shape[0], dtype="int32"), - non_sequences=[W], - ) - - loss = result_mi[-1] - grad(loss, W_flat) + loss = result_mi[-1] + grad(loss, W_flat) @pytest.mark.xfail(reason="NominalVariables don't support test values") @@ -2283,24 +2286,27 @@ def test_compute_test_value_grad_cast(): See https://groups.google.com/d/topic/theano-users/o4jK9xDe5WI/discussion """ with config.change_flags(compute_test_value="raise"): - h = matrix("h") - h.tag.test_value = np.array([[1, 2, 3, 4], [5, 6, 7, 8]], dtype=config.floatX) + with pytest.warns(FutureWarning): + h = matrix("h") + h.tag.test_value = np.array( + [[1, 2, 3, 4], [5, 6, 7, 8]], dtype=config.floatX + ) - w = shared( - np.random.default_rng(utt.fetch_seed()) - .random((4, 3)) - .astype(config.floatX), - name="w", - ) + w = shared( + np.random.default_rng(utt.fetch_seed()) + .random((4, 3)) + .astype(config.floatX), + name="w", + ) - outputs, _ = scan( - lambda i, h, w: (dot(h[i], w), i), - outputs_info=[None, 0], - non_sequences=[h, w], - n_steps=3, - ) + outputs, _ = scan( + lambda i, h, w: (dot(h[i], w), i), + outputs_info=[None, 0], + non_sequences=[h, w], + n_steps=3, + ) - grad(outputs[0].sum(), w) + grad(outputs[0].sum(), w) def test_constant_folding_n_steps(): diff --git a/tests/scan/test_utils.py b/tests/scan/test_utils.py index a26c2cbd4b..ccc6526940 100644 --- a/tests/scan/test_utils.py +++ b/tests/scan/test_utils.py @@ -77,7 +77,8 @@ def test_ScanArgs(): with pytest.raises(TypeError): ScanArgs.from_node(pt.ones(2).owner) - hmm_model_env = create_test_hmm() + with pytest.warns(FutureWarning): + hmm_model_env = create_test_hmm() scan_args = hmm_model_env["scan_args"] scan_op = hmm_model_env["scan_op"] @@ -136,15 +137,16 @@ def test_ScanArgs(): def test_ScanArgs_basics_mit_sot(): srng = pt.random.RandomStream() - N_pt = pt.iscalar("N") - N_pt.tag.test_value = 10 - M_pt = pt.iscalar("M") - M_pt.tag.test_value = 2 + with pytest.warns(FutureWarning): + N_pt = pt.iscalar("N") + N_pt.tag.test_value = 10 + M_pt = pt.iscalar("M") + M_pt.tag.test_value = 2 - mus_pt = pt.matrix("mus") - mus_pt.tag.test_value = np.stack( - [np.arange(0.0, 10), np.arange(0.0, -10, -1)], axis=-1 - ).astype(pytensor.config.floatX) + mus_pt = pt.matrix("mus") + mus_pt.tag.test_value = np.stack( + [np.arange(0.0, 10), np.arange(0.0, -10, -1)], axis=-1 + ).astype(pytensor.config.floatX) sigmas_pt = pt.ones((N_pt,)) sigmas_pt.name = "sigmas" @@ -199,7 +201,8 @@ def scan_fn(mus_t, sigma_t, S_tm2, S_tm1, Gamma_t): def test_ScanArgs_remove_inner_input(): - hmm_model_env = create_test_hmm() + with pytest.warns(FutureWarning): + hmm_model_env = create_test_hmm() scan_args = hmm_model_env["scan_args"] hmm_model_env["scan_op"] Y_t = hmm_model_env["Y_t"] @@ -269,7 +272,8 @@ def test_ScanArgs_remove_inner_input(): def test_ScanArgs_remove_outer_input(): - hmm_model_env = create_test_hmm() + with pytest.warns(FutureWarning): + hmm_model_env = create_test_hmm() scan_args = hmm_model_env["scan_args"] hmm_model_env["scan_op"] Y_t = 
hmm_model_env["Y_t"] @@ -315,7 +319,8 @@ def test_ScanArgs_remove_outer_input(): def test_ScanArgs_remove_inner_output(): - hmm_model_env = create_test_hmm() + with pytest.warns(FutureWarning): + hmm_model_env = create_test_hmm() scan_args = hmm_model_env["scan_args"] hmm_model_env["scan_op"] Y_t = hmm_model_env["Y_t"] @@ -353,7 +358,8 @@ def test_ScanArgs_remove_inner_output(): def test_ScanArgs_remove_outer_output(): - hmm_model_env = create_test_hmm() + with pytest.warns(FutureWarning): + hmm_model_env = create_test_hmm() scan_args = hmm_model_env["scan_args"] hmm_model_env["scan_op"] Y_t = hmm_model_env["Y_t"] @@ -390,7 +396,8 @@ def test_ScanArgs_remove_outer_output(): def test_ScanArgs_remove_nonseq_outer_input(): - hmm_model_env = create_test_hmm() + with pytest.warns(FutureWarning): + hmm_model_env = create_test_hmm() scan_args = hmm_model_env["scan_args"] hmm_model_env["scan_op"] Y_t = hmm_model_env["Y_t"] @@ -427,7 +434,8 @@ def test_ScanArgs_remove_nonseq_outer_input(): def test_ScanArgs_remove_nonseq_inner_input(): - hmm_model_env = create_test_hmm() + with pytest.warns(FutureWarning): + hmm_model_env = create_test_hmm() scan_args = hmm_model_env["scan_args"] hmm_model_env["scan_op"] hmm_model_env["Y_t"] @@ -463,7 +471,8 @@ def test_ScanArgs_remove_nonseq_inner_input(): def test_ScanArgs_remove_shared_inner_output(): - hmm_model_env = create_test_hmm() + with pytest.warns(FutureWarning): + hmm_model_env = create_test_hmm() scan_args = hmm_model_env["scan_args"] hmm_model_env["scan_op"] hmm_model_env["Y_t"] diff --git a/tests/tensor/random/rewriting/test_basic.py b/tests/tensor/random/rewriting/test_basic.py index f342d5b81c..7ee0b155de 100644 --- a/tests/tensor/random/rewriting/test_basic.py +++ b/tests/tensor/random/rewriting/test_basic.py @@ -146,7 +146,6 @@ def test_inplace_rewrites(rv_op): assert check_stack_trace(f) -@config.change_flags(compute_test_value="raise") @pytest.mark.parametrize( "dist_op, dist_params, size", [ @@ -200,14 +199,18 @@ def test_inplace_rewrites(rv_op): def test_local_rv_size_lift(dist_op, dist_params, size): rng = shared(np.random.default_rng(1233532), borrow=False) - new_out, f_inputs, dist_st, f_rewritten = apply_local_rewrite_to_rv( - local_rv_size_lift, - lambda rv: rv, - dist_op, - dist_params, - size, - rng, - ) + with pytest.warns( + FutureWarning, + match="test_value machinery is deprecated and will stop working in the future.", + ): + new_out, f_inputs, dist_st, f_rewritten = apply_local_rewrite_to_rv( + local_rv_size_lift, + lambda rv: rv, + dist_op, + dist_params, + size, + rng, + ) assert new_out.owner.op.size_param(new_out.owner).data is None @@ -413,18 +416,18 @@ def test_local_rv_size_lift(dist_op, dist_params, size): ), ], ) -@config.change_flags(compute_test_value_opt="raise", compute_test_value="raise") def test_DimShuffle_lift(ds_order, lifted, dist_op, dist_params, size, rtol): rng = shared(np.random.default_rng(1233532), borrow=False) - new_out, f_inputs, dist_st, f_rewritten = apply_local_rewrite_to_rv( - local_dimshuffle_rv_lift, - lambda rv: rv.dimshuffle(ds_order), - dist_op, - dist_params, - size, - rng, - ) + with pytest.warns(FutureWarning): + new_out, f_inputs, dist_st, f_rewritten = apply_local_rewrite_to_rv( + local_dimshuffle_rv_lift, + lambda rv: rv.dimshuffle(ds_order), + dist_op, + dist_params, + size, + rng, + ) if lifted: assert isinstance(new_out.owner.op, type(dist_op)) @@ -443,7 +446,8 @@ def test_DimShuffle_lift(ds_order, lifted, dist_op, dist_params, size, rtol): mode=no_mode, ) - arg_values = 
[p.get_test_value() for p in f_inputs] + with pytest.warns(FutureWarning): + arg_values = [p.get_test_value() for p in f_inputs] res_base = f_base(*arg_values) res_rewritten, _ = f_rewritten(*arg_values) @@ -797,7 +801,6 @@ def rand_bool_mask(shape, rng=None): ), ], ) -@config.change_flags(compute_test_value_opt="raise", compute_test_value="raise") def test_Subtensor_lift(indices, lifted, dist_op, dist_params, size): from pytensor.tensor.subtensor import as_index_constant @@ -807,17 +810,19 @@ def test_Subtensor_lift(indices, lifted, dist_op, dist_params, size): for i in indices: i_pt = as_index_constant(i) if not isinstance(i_pt, slice): - i_pt.tag.test_value = i + with pytest.warns(FutureWarning): + i_pt.tag.test_value = i indices_pt += (i_pt,) - new_out, f_inputs, dist_st, f_rewritten = apply_local_rewrite_to_rv( - local_subtensor_rv_lift, - lambda rv: rv[indices_pt], - dist_op, - dist_params, - size, - rng, - ) + with pytest.warns(FutureWarning): + new_out, f_inputs, dist_st, f_rewritten = apply_local_rewrite_to_rv( + local_subtensor_rv_lift, + lambda rv: rv[indices_pt], + dist_op, + dist_params, + size, + rng, + ) def is_subtensor_or_dimshuffle_subtensor(inp) -> bool: subtensor_ops = Subtensor | AdvancedSubtensor | AdvancedSubtensor1 @@ -846,7 +851,8 @@ def is_subtensor_or_dimshuffle_subtensor(inp) -> bool: mode=no_mode, ) - arg_values = [p.get_test_value() for p in f_inputs] + with pytest.warns(FutureWarning): + arg_values = [p.get_test_value() for p in f_inputs] res_base = f_base(*arg_values) res_rewritten, _ = f_rewritten(*arg_values) @@ -857,7 +863,8 @@ def test_Subtensor_lift_restrictions(): rng = shared(np.random.default_rng(1233532), borrow=False) std = vector("std") - std.tag.test_value = np.array([1e-5, 2e-5, 3e-5], dtype=config.floatX) + with pytest.warns(FutureWarning): + std.tag.test_value = np.array([1e-5, 2e-5, 3e-5], dtype=config.floatX) x = normal(pt.arange(2), pt.ones(2), rng=rng) y = x[1] # The non-`Subtensor` client depends on the RNG state, so we can't perform @@ -960,14 +967,15 @@ def test_Dimshuffle_lift_restrictions(): def test_Dimshuffle_lift_rename(ds_order, lifted, dist_op, dist_params, size, rtol): rng = shared(np.random.default_rng(1233532), borrow=False) - new_out, *_ = apply_local_rewrite_to_rv( - local_dimshuffle_rv_lift, - lambda rv: rv.dimshuffle(ds_order), - dist_op, - dist_params, - size, - rng, - name="test_name", - ) + with pytest.warns(FutureWarning): + new_out, *_ = apply_local_rewrite_to_rv( + local_dimshuffle_rv_lift, + lambda rv: rv.dimshuffle(ds_order), + dist_op, + dist_params, + size, + rng, + name="test_name", + ) assert new_out.name == "test_name_lifted" diff --git a/tests/tensor/random/test_basic.py b/tests/tensor/random/test_basic.py index 7d24a49228..6c7fb49593 100644 --- a/tests/tensor/random/test_basic.py +++ b/tests/tensor/random/test_basic.py @@ -269,13 +269,14 @@ def test_normal_infer_shape(M, sd, size): @config.change_flags(compute_test_value="raise") def test_normal_ShapeFeature(): - M_pt = iscalar("M") - M_pt.tag.test_value = 3 - sd_pt = scalar("sd") - sd_pt.tag.test_value = np.array(1.0, dtype=config.floatX) + with pytest.warns(FutureWarning): + M_pt = iscalar("M") + M_pt.tag.test_value = 3 + sd_pt = scalar("sd") + sd_pt.tag.test_value = np.array(1.0, dtype=config.floatX) - d_rv = normal(pt.ones((M_pt,)), sd_pt, size=(2, M_pt)) - d_rv.tag.test_value + d_rv = normal(pt.ones((M_pt,)), sd_pt, size=(2, M_pt)) + d_rv.tag.test_value fg = FunctionGraph( [i for i in graph_inputs([d_rv]) if not isinstance(i, Constant)], @@ -627,7 
+628,8 @@ def test_mvnormal_default_args():
 @config.change_flags(compute_test_value="raise")
 def test_mvnormal_ShapeFeature():
     M_pt = iscalar("M")
-    M_pt.tag.test_value = 2
+    with pytest.warns(FutureWarning):
+        M_pt.tag.test_value = 2

     d_rv = multivariate_normal(pt.ones((M_pt,)), pt.eye(M_pt), size=2)

@@ -645,12 +647,14 @@ def test_mvnormal_ShapeFeature():

     # Test broadcasted shapes
     mean = tensor(dtype=config.floatX, shape=(1, None))
-    mean.tag.test_value = np.array([[0, 1, 2]], dtype=config.floatX)
+    with pytest.warns(FutureWarning):
+        mean.tag.test_value = np.array([[0, 1, 2]], dtype=config.floatX)

     test_covar = np.diag(np.array([1, 10, 100], dtype=config.floatX))
     test_covar = np.stack([test_covar, test_covar * 10.0])
     cov = pt.as_tensor(test_covar).type()
-    cov.tag.test_value = test_covar
+    with pytest.warns(FutureWarning):
+        cov.tag.test_value = test_covar

     d_rv = multivariate_normal(mean, cov, size=[2, 3, 2])

@@ -736,24 +740,25 @@ def test_dirichlet_infer_shape(M, size):
         fn_inputs, [pt.as_tensor(o) for o in [*rv_shape, rv]], mode=py_mode
     )

-    *rv_shape_val, rv_val = pytensor_fn(
-        *[
-            i.tag.test_value
-            for i in fn_inputs
-            if not isinstance(i, SharedVariable | Constant)
-        ]
-    )
+    with pytest.warns(FutureWarning):
+        *rv_shape_val, rv_val = pytensor_fn(
+            *[
+                i.tag.test_value
+                for i in fn_inputs
+                if not isinstance(i, SharedVariable | Constant)
+            ]
+        )

     assert tuple(rv_shape_val) == tuple(rv_val.shape)


-@config.change_flags(compute_test_value="raise")
 def test_dirichlet_ShapeFeature():
     """Make sure `RandomVariable.infer_shape` works with `ShapeFeature`."""
-    M_pt = iscalar("M")
-    M_pt.tag.test_value = 2
-    N_pt = iscalar("N")
-    N_pt.tag.test_value = 3
+    with pytest.warns(FutureWarning):
+        M_pt = iscalar("M")
+        M_pt.tag.test_value = 2
+        N_pt = iscalar("N")
+        N_pt.tag.test_value = 3

     d_rv = dirichlet(pt.ones((M_pt, N_pt)), name="Gamma")

@@ -1594,7 +1599,6 @@ def test_unnatural_batched_dims(batch_dims_tester):
         batch_dims_tester()


-@config.change_flags(compute_test_value="off")
 def test_pickle():
     # This is an interesting `Op` case, because it has a conditional dtype
     sample_a = choice(5, replace=False, size=(2, 3))
diff --git a/tests/tensor/random/test_op.py b/tests/tensor/random/test_op.py
index 8e74b06bd4..6b1ef6c44e 100644
--- a/tests/tensor/random/test_op.py
+++ b/tests/tensor/random/test_op.py
@@ -15,8 +15,9 @@

 @pytest.fixture(scope="function", autouse=False)
 def strict_test_value_flags():
-    with config.change_flags(cxx="", compute_test_value="raise"):
-        yield
+    with pytest.warns(FutureWarning):
+        with config.change_flags(cxx="", compute_test_value="raise"):
+            yield


 def test_RandomVariable_basics(strict_test_value_flags):
@@ -90,17 +91,17 @@ def test_RandomVariable_bcast(strict_test_value_flags):
     rv = RandomVariable("normal", 0, [0, 0], config.floatX, inplace=True)

     mu = tensor(dtype=config.floatX, shape=(1, None, None))
-    mu.tag.test_value = np.zeros((1, 2, 3)).astype(config.floatX)
-    sd = tensor(dtype=config.floatX, shape=(None, None))
-    sd.tag.test_value = np.ones((2, 3)).astype(config.floatX)
-
-    s1 = iscalar()
-    s1.tag.test_value = 1
-    s2 = iscalar()
-    s2.tag.test_value = 2
-    s3 = iscalar()
-    s3.tag.test_value = 3
-    s3 = Assert("testing")(s3, eq(s1, 1))
+    with pytest.warns(FutureWarning):
+        mu.tag.test_value = np.zeros((1, 2, 3)).astype(config.floatX)
+        sd = tensor(dtype=config.floatX, shape=(None, None))
+        sd.tag.test_value = np.ones((2, 3)).astype(config.floatX)
+        s1 = iscalar()
+        s1.tag.test_value = 1
+        s2 = iscalar()
+        s2.tag.test_value = 2
+        s3 = iscalar()
+        s3.tag.test_value = 3
+        s3 = Assert("testing")(s3, eq(s1, 1))

     res = rv(mu, sd, size=(s1, s2, s3))
     assert res.broadcastable == (False,) * 3
@@ -119,19 +120,20 @@ def test_RandomVariable_bcast(strict_test_value_flags):


 def test_RandomVariable_bcast_specify_shape(strict_test_value_flags):
     rv = RandomVariable("normal", 0, [0, 0], config.floatX, inplace=True)

-    s1 = pt.as_tensor(1, dtype=np.int64)
-    s2 = iscalar()
-    s2.tag.test_value = 2
-    s3 = iscalar()
-    s3.tag.test_value = 3
-    s3 = Assert("testing")(s3, eq(s1, 1))
+    with pytest.warns(FutureWarning):
+        s1 = pt.as_tensor(1, dtype=np.int64)
+        s2 = iscalar()
+        s2.tag.test_value = 2
+        s3 = iscalar()
+        s3.tag.test_value = 3
+        s3 = Assert("testing")(s3, eq(s1, 1))

-    size = specify_shape(pt.as_tensor([s1, s3, s2, s2, s1]), (5,))
-    mu = tensor(dtype=config.floatX, shape=(None, None, 1))
-    mu.tag.test_value = np.random.normal(size=(2, 2, 1)).astype(config.floatX)
+        size = specify_shape(pt.as_tensor([s1, s3, s2, s2, s1]), (5,))
+        mu = tensor(dtype=config.floatX, shape=(None, None, 1))
+        mu.tag.test_value = np.random.normal(size=(2, 2, 1)).astype(config.floatX)

-    std = tensor(dtype=config.floatX, shape=(None, 1, 1))
-    std.tag.test_value = np.ones((2, 1, 1)).astype(config.floatX)
+        std = tensor(dtype=config.floatX, shape=(None, 1, 1))
+        std.tag.test_value = np.ones((2, 1, 1)).astype(config.floatX)

     res = rv(mu, std, size=size)
     assert res.type.shape == (1, None, None, None, 1)
diff --git a/tests/tensor/random/test_utils.py b/tests/tensor/random/test_utils.py
index 3616b2fd24..453ac18743 100644
--- a/tests/tensor/random/test_utils.py
+++ b/tests/tensor/random/test_utils.py
@@ -17,7 +17,7 @@ def set_pytensor_flags():
     rewrites_query = RewriteDatabaseQuery(include=[None], exclude=[])
     py_mode = Mode("py", rewrites_query)
-    with config.change_flags(mode=py_mode, compute_test_value="warn"):
+    with config.change_flags(mode=py_mode):
         yield


@@ -72,16 +72,18 @@ def test_broadcast_params():

     # Try it in PyTensor
     with config.change_flags(compute_test_value="raise"):
-        mean = tensor(dtype=config.floatX, shape=(None, 1))
-        mean.tag.test_value = np.array([[0], [10], [100]], dtype=config.floatX)
-        cov = matrix()
-        cov.tag.test_value = np.diag(np.array([1e-6], dtype=config.floatX))
-        params = [mean, cov]
-        res = broadcast_params(params, ndims_params)
-        assert np.array_equal(res[0].get_test_value(), mean.get_test_value())
-        assert np.array_equal(
-            res[1].get_test_value(), np.broadcast_to(cov.get_test_value(), (3, 1, 1))
-        )
+        with pytest.warns(FutureWarning):
+            mean = tensor(dtype=config.floatX, shape=(None, 1))
+            mean.tag.test_value = np.array([[0], [10], [100]], dtype=config.floatX)
+            cov = matrix()
+            cov.tag.test_value = np.diag(np.array([1e-6], dtype=config.floatX))
+            params = [mean, cov]
+            res = broadcast_params(params, ndims_params)
+            assert np.array_equal(res[0].get_test_value(), mean.get_test_value())
+            assert np.array_equal(
+                res[1].get_test_value(),
+                np.broadcast_to(cov.get_test_value(), (3, 1, 1)),
+            )


 class TestSharedRandomStream:
diff --git a/tests/tensor/rewriting/test_elemwise.py b/tests/tensor/rewriting/test_elemwise.py
index 9488a9f688..07ce5d0dc2 100644
--- a/tests/tensor/rewriting/test_elemwise.py
+++ b/tests/tensor/rewriting/test_elemwise.py
@@ -1160,9 +1160,10 @@ def test_test_values(self, test_value):
         """
         x, y, z = dmatrices("xyz")

-        x.tag.test_value = test_value
-        y.tag.test_value = test_value
-        z.tag.test_value = test_value
+        with pytest.warns(FutureWarning):
+            x.tag.test_value = test_value
+            y.tag.test_value = test_value
+            z.tag.test_value = test_value

         with config.change_flags(
             compute_test_value="raise", compute_test_value_opt="raise"
diff --git a/tests/tensor/rewriting/test_subtensor.py b/tests/tensor/rewriting/test_subtensor.py
index 91575bc7da..3855ca926a 100644
--- a/tests/tensor/rewriting/test_subtensor.py
+++ b/tests/tensor/rewriting/test_subtensor.py
@@ -95,7 +95,9 @@ def test_local_replace_AdvancedSubtensor(indices, is_none):

     X_val = np.random.normal(size=(4, 4, 4))
     X = tensor(dtype=np.float64, shape=(None, None, None), name="X")
-    X.tag.test_value = X_val
+
+    with pytest.warns(FutureWarning):
+        X.tag.test_value = X_val

     Y = X[indices]

@@ -124,8 +126,12 @@ def test_local_replace_AdvancedSubtensor(indices, is_none):
         if v.owner
     )

-    res_val = res_fn(*[i.tag.test_value for i in inputs])
-    exp_res_val = exp_res_fn(*[i.tag.test_value for i in inputs])
+    with pytest.warns(
+        FutureWarning,
+        match="test_value machinery is deprecated and will stop working in the future.",
+    ):
+        res_val = res_fn(*[i.tag.test_value for i in inputs])
+        exp_res_val = exp_res_fn(*[i.tag.test_value for i in inputs])

     assert np.array_equal(res_val, exp_res_val)

diff --git a/tests/tensor/test_basic.py b/tests/tensor/test_basic.py
index 323d401f42..6dc7be6488 100644
--- a/tests/tensor/test_basic.py
+++ b/tests/tensor/test_basic.py
@@ -151,8 +151,6 @@
 )


-pytestmark = pytest.mark.filterwarnings("error")
-
 if config.mode == "FAST_COMPILE":
     mode_opt = "FAST_RUN"
 else:
@@ -1110,13 +1108,15 @@ class TestNonzero:
     def test_nonzero(self):
         def check(m):
             m_symb = tensor(dtype=m.dtype, shape=(None,) * m.ndim)
-            m_symb.tag.test_value = m
+            with pytest.warns(FutureWarning):
+                m_symb.tag.test_value = m

             res_tuple_pt = nonzero(m_symb, return_matrix=False)
             res_matrix_pt = nonzero(m_symb, return_matrix=True)

             res_tuple = tuple(r.tag.test_value for r in res_tuple_pt)
-            res_matrix = res_matrix_pt.tag.test_value
+            with pytest.warns(FutureWarning):
+                res_matrix = res_matrix_pt.tag.test_value

             assert np.allclose(res_matrix, np.vstack(np.nonzero(m)))

@@ -1139,11 +1139,13 @@ def check(m):
     def test_flatnonzero(self):
         def check(m):
             m_symb = tensor(dtype=m.dtype, shape=(None,) * m.ndim)
-            m_symb.tag.test_value = m
+            with pytest.warns(FutureWarning):
+                m_symb.tag.test_value = m

             res_pt = flatnonzero(m_symb)
-            result = res_pt.tag.test_value
+            with pytest.warns(FutureWarning):
+                result = res_pt.tag.test_value

             assert np.allclose(result, np.flatnonzero(m))

         rand0d = np.empty(())
@@ -1168,11 +1170,13 @@ def check(m):
     def test_nonzero_values(self):
         def check(m):
             m_symb = tensor(dtype=m.dtype, shape=(None,) * m.ndim)
-            m_symb.tag.test_value = m
+            with pytest.warns(FutureWarning):
+                m_symb.tag.test_value = m

             res_pt = nonzero_values(m_symb)
-            result = res_pt.tag.test_value
+            with pytest.warns(FutureWarning):
+                result = res_pt.tag.test_value

             assert np.allclose(result, m[np.nonzero(m)], equal_nan=True)

         rand0d = np.empty(())
@@ -3545,11 +3549,11 @@ def test_assert(self):
         a = Assert()(c, c > 1)
         assert get_underlying_scalar_constant_value(a) == 2

-        with config.change_flags(compute_test_value="off"):
-            # condition is always False
-            a = Assert()(c, c > 2)
-            with pytest.raises(NotScalarConstantError):
-                get_underlying_scalar_constant_value(a)
+        # with config.change_flags(compute_test_value="off"):
+        # condition is always False
+        a = Assert()(c, c > 2)
+        with pytest.raises(NotScalarConstantError):
+            get_underlying_scalar_constant_value(a)

         # condition is not constant
         a = Assert()(c, c > x)
diff --git a/tests/tensor/test_extra_ops.py b/tests/tensor/test_extra_ops.py
index 0da714c3bf..7d853f43d1 100644
--- a/tests/tensor/test_extra_ops.py
+++ b/tests/tensor/test_extra_ops.py
@@ -63,7 +63,8 @@


 def set_test_value(x, v):
-    x.tag.test_value = v
+    with pytest.warns(FutureWarning):
+        x.tag.test_value = v
     return x


@@ -1202,14 +1203,16 @@ def test_broadcast_shape_constants():
 def test_broadcast_shape_symbolic(s1_vals, s2_vals, exp_res):
     s1s = pt.lscalars(len(s1_vals))
     eval_point = {}
-    for s, s_val in zip(s1s, s1_vals):
-        eval_point[s] = s_val
-        s.tag.test_value = s_val
+    with pytest.warns(FutureWarning):
+        for s, s_val in zip(s1s, s1_vals):
+            eval_point[s] = s_val
+            s.tag.test_value = s_val

     s2s = pt.lscalars(len(s2_vals))
-    for s, s_val in zip(s2s, s2_vals):
-        eval_point[s] = s_val
-        s.tag.test_value = s_val
+    with pytest.warns(FutureWarning):
+        for s, s_val in zip(s2s, s2_vals):
+            eval_point[s] = s_val
+            s.tag.test_value = s_val

     res = broadcast_shape(s1s, s2s, arrays_are_shapes=True)
     res = pt.as_tensor(res)
diff --git a/tests/tensor/test_math_scipy.py b/tests/tensor/test_math_scipy.py
index 6ca9279bca..bf7331cb8e 100644
--- a/tests/tensor/test_math_scipy.py
+++ b/tests/tensor/test_math_scipy.py
@@ -226,7 +226,7 @@ def scipy_special_gammal(k, x):
     good=_good_broadcast_unary_gammaln,
     grad=_grad_broadcast_unary_gammaln,
     mode=mode_no_scipy,
-    eps=1e-5,
+    eps=1e-4,
 )
 TestGammaInplaceBroadcast = makeBroadcastTester(
     op=inplace.gamma_inplace,
diff --git a/tests/tensor/test_nlinalg.py b/tests/tensor/test_nlinalg.py
index 1a13992011..550fe81f48 100644
--- a/tests/tensor/test_nlinalg.py
+++ b/tests/tensor/test_nlinalg.py
@@ -528,7 +528,8 @@ def test_numpy_compare(self, n):
             config.floatX
         )
         A = matrix("A", dtype=config.floatX)
-        A.tag.test_value = a
+        with pytest.warns(FutureWarning):
+            A.tag.test_value = a
         Q = matrix_power(A, n)
         n_p = np.linalg.matrix_power(a, n)
         assert np.allclose(n_p, Q.get_test_value())
diff --git a/tests/tensor/test_shape.py b/tests/tensor/test_shape.py
index 2ffcb25fe5..0e22c0ff3f 100644
--- a/tests/tensor/test_shape.py
+++ b/tests/tensor/test_shape.py
@@ -620,10 +620,11 @@ def test_reshape(self):

 @config.change_flags(compute_test_value="raise")
 def test_nonstandard_shapes():
-    a = tensor3(config.floatX)
-    a.tag.test_value = np.random.random((2, 3, 4)).astype(config.floatX)
-    b = tensor3(config.floatX)
-    b.tag.test_value = np.random.random((2, 3, 4)).astype(config.floatX)
+    with pytest.warns(FutureWarning):
+        a = tensor3(config.floatX)
+        a.tag.test_value = np.random.random((2, 3, 4)).astype(config.floatX)
+        b = tensor3(config.floatX)
+        b.tag.test_value = np.random.random((2, 3, 4)).astype(config.floatX)

     tl = make_list([a, b])
     tl_shape = shape(tl)
diff --git a/tests/tensor/test_subtensor.py b/tests/tensor/test_subtensor.py
index d02880f543..08cc13512d 100644
--- a/tests/tensor/test_subtensor.py
+++ b/tests/tensor/test_subtensor.py
@@ -400,7 +400,6 @@ def test_err_invalid(self):
         with pytest.raises(IndexError):
             n.__getitem__(0)

-    @config.change_flags(compute_test_value="off")
     def test_err_bounds(self):
         n = self.shared(np.ones(3, dtype=self.dtype))
         t = n[7]
@@ -481,7 +480,6 @@ def test_ok_strided(self):
         assert tval.shape == (2,)
         assert (tval == [0.0, 2.0]).all()

-    @config.change_flags(compute_test_value="off")
     def test_err_bounds0(self):
         n = self.shared(np.ones((2, 3), dtype=self.dtype) * 5)
         for idx in [(0, 4), (0, -4)]:
@@ -497,7 +495,6 @@ def test_err_bounds0(self):
         finally:
             _logger.setLevel(oldlevel)

-    @config.change_flags(compute_test_value="off")
     def test_err_bounds1(self):
         n = self.shared(np.ones((2, 3), dtype=self.dtype) * 5)
         t = n[4:5, 3]
@@ -719,71 +716,70 @@ def numpy_inc_subtensor(x, idx, a):
             inc_subtensor(n4[test_array > 2, ..., 0, 1], 1).eval(),
         )

-        with config.change_flags(compute_test_value="off"):
-            # the boolean mask should have the correct shape
-            # - too large, padded with True
-            mask = np.array([True, False, True])
-            with pytest.raises(IndexError):
-                test_array[mask].eval()
-            with pytest.raises(IndexError):
-                test_array[mask, ...].eval()
-            with pytest.raises(IndexError):
-                inc_subtensor(test_array[mask], 1).eval()
-            with pytest.raises(IndexError):
-                inc_subtensor(test_array[mask, ...], 1).eval()
-            mask = np.array([[True, False, False, True], [False, True, False, True]])
-            with pytest.raises(IndexError):
-                test_array[mask].eval()
-            with pytest.raises(IndexError):
-                inc_subtensor(test_array[mask], 1).eval()
-            # - too large, padded with False (this works in NumPy < 0.13.0)
-            mask = np.array([True, False, False])
-            with pytest.raises(IndexError):
-                test_array[mask].eval()
-            with pytest.raises(IndexError):
-                test_array[mask, ...].eval()
-            with pytest.raises(IndexError):
-                inc_subtensor(test_array[mask], 1).eval()
-            with pytest.raises(IndexError):
-                inc_subtensor(test_array[mask, ...], 1).eval()
-            mask = np.array([[True, False, False, False], [False, True, False, False]])
-            with pytest.raises(IndexError):
-                test_array[mask].eval()
-            with pytest.raises(IndexError):
-                inc_subtensor(test_array[mask], 1).eval()
-            # - mask too small (this works in NumPy < 0.13.0)
-            mask = np.array([True])
-            with pytest.raises(IndexError):
-                test_array[mask].eval()
-            with pytest.raises(IndexError):
-                test_array[mask, ...].eval()
-            with pytest.raises(IndexError):
-                inc_subtensor(test_array[mask], 1).eval()
-            with pytest.raises(IndexError):
-                inc_subtensor(test_array[mask, ...], 1).eval()
-            mask = np.array([[True], [True]])
-            with pytest.raises(IndexError):
-                test_array[mask].eval()
-            with pytest.raises(IndexError):
-                inc_subtensor(test_array[mask], 1).eval()
-            # - too many dimensions
-            mask = np.array([[[True, False, False], [False, True, False]]])
-            with pytest.raises(IndexError):
-                test_array.__getitem__(mask)
-            with pytest.raises(IndexError):
-                test_array.__getitem__(mask)
-
-            # special cases: Python bools and bools nested in Python arrays are not supported
-            with pytest.raises(TypeError):
-                test_array.__getitem__((True,))
-            with pytest.raises(TypeError):
-                test_array.__getitem__((False,))
-            with pytest.raises(TypeError):
-                test_array.__getitem__((True, False))
-            with pytest.raises(TypeError):
-                test_array.__getitem__(([0, 1], [0, False]))
-            with pytest.raises(TypeError):
-                test_array.__getitem__(([0, 1], [0, pytensor.shared(True)]))
+        # the boolean mask should have the correct shape
+        # - too large, padded with True
+        mask = np.array([True, False, True])
+        with pytest.raises(IndexError):
+            test_array[mask].eval()
+        with pytest.raises(IndexError):
+            test_array[mask, ...].eval()
+        with pytest.raises(IndexError):
+            inc_subtensor(test_array[mask], 1).eval()
+        with pytest.raises(IndexError):
+            inc_subtensor(test_array[mask, ...], 1).eval()
+        mask = np.array([[True, False, False, True], [False, True, False, True]])
+        with pytest.raises(IndexError):
+            test_array[mask].eval()
+        with pytest.raises(IndexError):
+            inc_subtensor(test_array[mask], 1).eval()
+        # - too large, padded with False (this works in NumPy < 0.13.0)
+        mask = np.array([True, False, False])
+        with pytest.raises(IndexError):
+            test_array[mask].eval()
+        with pytest.raises(IndexError):
+            test_array[mask, ...].eval()
+        with pytest.raises(IndexError):
+            inc_subtensor(test_array[mask], 1).eval()
+        with pytest.raises(IndexError):
+            inc_subtensor(test_array[mask, ...], 1).eval()
+        mask = np.array([[True, False, False, False], [False, True, False, False]])
+        with pytest.raises(IndexError):
+            test_array[mask].eval()
+        with pytest.raises(IndexError):
+            inc_subtensor(test_array[mask], 1).eval()
+        # - mask too small (this works in NumPy < 0.13.0)
+        mask = np.array([True])
+        with pytest.raises(IndexError):
+            test_array[mask].eval()
+        with pytest.raises(IndexError):
+            test_array[mask, ...].eval()
+        with pytest.raises(IndexError):
+            inc_subtensor(test_array[mask], 1).eval()
+        with pytest.raises(IndexError):
+            inc_subtensor(test_array[mask, ...], 1).eval()
+        mask = np.array([[True], [True]])
+        with pytest.raises(IndexError):
+            test_array[mask].eval()
+        with pytest.raises(IndexError):
+            inc_subtensor(test_array[mask], 1).eval()
+        # - too many dimensions
+        mask = np.array([[[True, False, False], [False, True, False]]])
+        with pytest.raises(IndexError):
+            test_array.__getitem__(mask)
+        with pytest.raises(IndexError):
+            test_array.__getitem__(mask)
+
+        # special cases: Python bools and bools nested in Python arrays are not supported
+        with pytest.raises(TypeError):
+            test_array.__getitem__((True,))
+        with pytest.raises(TypeError):
+            test_array.__getitem__((False,))
+        with pytest.raises(TypeError):
+            test_array.__getitem__((True, False))
+        with pytest.raises(TypeError):
+            test_array.__getitem__(([0, 1], [0, False]))
+        with pytest.raises(TypeError):
+            test_array.__getitem__(([0, 1], [0, pytensor.shared(True)]))

     def test_grad_1d(self):
         subi = 0
@@ -2486,18 +2482,18 @@ def test_AdvancedSubtensor(self):
             AdvancedSubtensor,
         )

-        admat.tag.test_value = admat_val
-        aivec.tag.test_value = aivec_val
-        bivec.tag.test_value = bivec_val
+        with pytest.warns(FutureWarning):
+            admat.tag.test_value = admat_val
+            aivec.tag.test_value = aivec_val
+            bivec.tag.test_value = bivec_val

         # Make sure it doesn't complain about test values
-        with config.change_flags(compute_test_value="raise"):
-            self._compile_and_check(
-                [admat, aivec],
-                [admat[1:3, aivec]],
-                [admat_val, aivec_val],
-                AdvancedSubtensor,
-            )
+        self._compile_and_check(
+            [admat, aivec],
+            [admat[1:3, aivec]],
+            [admat_val, aivec_val],
+            AdvancedSubtensor,
+        )

     def test_AdvancedSubtensor_bool(self):
         n = dmatrix()
diff --git a/tests/tensor/test_variable.py b/tests/tensor/test_variable.py
index 50c36a05fc..86f38d80fa 100644
--- a/tests/tensor/test_variable.py
+++ b/tests/tensor/test_variable.py
@@ -405,6 +405,7 @@ def test_take(self):
         indices = [1, 0, 1]
         assert_array_equal(X.take(indices, 1).eval({X: x}), x.take(indices, 1))
         indices = np.array([-10, 5, 12], dtype="int32")
+
         assert_array_equal(
             X.take(indices, 1, mode="wrap").eval({X: x}),
             x.take(indices, 1, mode="wrap"),
diff --git a/tests/test_gradient.py b/tests/test_gradient.py
index c45d07662d..12198beb8c 100644
--- a/tests/test_gradient.py
+++ b/tests/test_gradient.py
@@ -910,7 +910,6 @@ def test_grad_scale():
     assert np.allclose(out, (8, 4))


-@config.change_flags(compute_test_value="off")
 def test_undefined_grad_opt():
     # Make sure that undefined grad get removed in optimized graph.
     random = RandomStream(np.random.default_rng().integers(1, 2147462579))
diff --git a/tests/test_ifelse.py b/tests/test_ifelse.py
index d506d96df6..afb59e1f6f 100644
--- a/tests/test_ifelse.py
+++ b/tests/test_ifelse.py
@@ -500,11 +500,11 @@ def test_merge_ifs_true_false(self):

     def test_grad_test_values(self):
         # Regression test for test values of `ifelse` gradient.
-        with pytensor.config.change_flags(compute_test_value="raise"):
-            x = scalar("x")
+        x = scalar("x")
+        with pytest.warns(FutureWarning):
             x.tag.test_value = 1
-            # Used to crash due to undefined test value.
-            pytensor.grad(ifelse(0, x, x), x)
+        # Used to crash due to undefined test value.
+        pytensor.grad(ifelse(0, x, x), x)

     def test_grad_int_value(self):
         w = pytensor.shared(np.random.random(10))