Bring back tests #4512

Merged: 4 commits, Mar 9, 2021

Changes from all commits
92 changes: 66 additions & 26 deletions .github/workflows/pytest.yml
@@ -7,58 +7,98 @@ on:

jobs:
pytest:
if: false
strategy:
matrix:
os: [ubuntu-18.04]
floatx: [float32, float64]
test-subset:
# Tests are split into multiple jobs to accelerate the CI.
# The first job (starting in the next block) doesn't list any tests to run;
# it only ignores tests that either don't work at all or run in other jobs.
# Any test that was not ignored runs in the first job.
# A pre-commit hook (scripts/check_all_tests_are_covered.py) enforces that
# each test runs exactly once.

# Because YAML doesn't allow comments in the blocks below, here they are:
# 1st block: These tests are temporarily disabled, because they are _very_ broken.
# 2nd block: The JAX tests run through their own workflow: jaxtests.yml
# 3rd & 4th blocks: These tests are covered by other matrix jobs.
# 5th block: These tests PASS without a single XFAIL
# 6th block: These have some XFAILs
- |
--ignore=pymc3/tests/test_dist_math.py
--ignore=pymc3/tests/test_distribution_defaults.py
--ignore=pymc3/tests/test_distributions.py
--ignore=pymc3/tests/test_distributions_random.py
--ignore=pymc3/tests/test_distributions_timeseries.py
--ignore=pymc3/tests/test_examples.py
--ignore=pymc3/tests/test_gp.py
--ignore=pymc3/tests/test_missing.py
--ignore=pymc3/tests/test_mixture.py
--ignore=pymc3/tests/test_ode.py
--ignore=pymc3/tests/test_model_graph.py
--ignore=pymc3/tests/test_modelcontext.py
--ignore=pymc3/tests/test_models_linear.py
--ignore=pymc3/tests/test_ndarray_backend.py
--ignore=pymc3/tests/test_parallel_sampling.py
--ignore=pymc3/tests/test_posterior_predictive.py
--ignore=pymc3/tests/test_posteriors.py
--ignore=pymc3/tests/test_quadpotential.py
--ignore=pymc3/tests/test_profile.py
--ignore=pymc3/tests/test_random.py
--ignore=pymc3/tests/test_sampling.py
--ignore=pymc3/tests/test_sampling_jax.py
--ignore=pymc3/tests/test_shape_handling.py
--ignore=pymc3/tests/test_shared.py
--ignore=pymc3/tests/test_smc.py
--ignore=pymc3/tests/test_starting.py
--ignore=pymc3/tests/test_step.py
--ignore=pymc3/tests/test_updates.py
--ignore=pymc3/tests/test_tracetab.py
--ignore=pymc3/tests/test_transforms.py
--ignore=pymc3/tests/test_tuning.py
--ignore=pymc3/tests/test_types.py
--ignore=pymc3/tests/test_util.py
--ignore=pymc3/tests/test_variational_inference.py

--ignore=pymc3/tests/test_sampling_jax.py

--ignore=pymc3/tests/test_dist_math.py
--ignore=pymc3/tests/test_minibatches.py
--ignore=pymc3/tests/test_pickling.py
--ignore=pymc3/tests/test_plots.py
--ignore=pymc3/tests/test_special_functions.py
--ignore=pymc3/tests/test_updates.py

--ignore=pymc3/tests/test_dist_math.py
--ignore=pymc3/tests/test_examples.py
--ignore=pymc3/tests/test_glm.py
--ignore=pymc3/tests/test_gp.py
--ignore=pymc3/tests/test_memo.py
--ignore=pymc3/tests/test_model.py
--ignore=pymc3/tests/test_model_func.py
--ignore=pymc3/tests/test_model_helpers.py
--ignore=pymc3/tests/test_models_utils.py
--ignore=pymc3/tests/test_ode.py
--ignore=pymc3/tests/test_posdef_sym.py
--ignore=pymc3/tests/test_quadpotential.py
--ignore=pymc3/tests/test_shape_handling.py

- |
pymc3/tests/test_dist_math.py
pymc3/tests/test_distribution_defaults.py
pymc3/tests/test_distributions_random.py
pymc3/tests/test_parallel_sampling.py
pymc3/tests/test_random.py
pymc3/tests/test_shared.py
pymc3/tests/test_smc.py
pymc3/tests/test_minibatches.py
pymc3/tests/test_pickling.py
pymc3/tests/test_plots.py
pymc3/tests/test_special_functions.py
pymc3/tests/test_updates.py

- |
pymc3/tests/test_dist_math.py
pymc3/tests/test_examples.py
pymc3/tests/test_mixture.py
pymc3/tests/test_glm.py
pymc3/tests/test_gp.py
pymc3/tests/test_memo.py
pymc3/tests/test_model.py
pymc3/tests/test_model_func.py
pymc3/tests/test_model_helpers.py
pymc3/tests/test_models_utils.py
pymc3/tests/test_ode.py
pymc3/tests/test_posteriors.py
pymc3/tests/test_posdef_sym.py
pymc3/tests/test_quadpotential.py
- |
pymc3/tests/test_distributions_timeseries.py
pymc3/tests/test_shape_handling.py
pymc3/tests/test_step.py
pymc3/tests/test_updates.py
pymc3/tests/test_variational_inference.py
- |
pymc3/tests/test_distributions.py
pymc3/tests/test_gp.py
pymc3/tests/test_sampling.py

fail-fast: false
runs-on: ${{ matrix.os }}
env:
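The comment block above mentions a pre-commit hook, scripts/check_all_tests_are_covered.py, that enforces each test is scheduled exactly once across the matrix. The following is a hypothetical sketch of that kind of check, not the actual script; it assumes the first `- |` block only ignores tests while the later blocks list the tests they run:

```python
# Hypothetical sketch of a coverage check in the spirit of
# scripts/check_all_tests_are_covered.py; the real script may differ.
from pathlib import Path

workflow = Path(".github/workflows/pytest.yml").read_text()
# Each `- |` entry under test-subset is one CI job's test selection.
blocks = workflow.split("- |")[1:]

all_tests = sorted(str(p) for p in Path("pymc3/tests").glob("test_*.py"))
runs = {}
for test in all_tests:
    n = 0
    for i, block in enumerate(blocks):
        if i == 0:
            # The first job runs every test it does NOT ignore.
            n += f"--ignore={test}" not in block
        else:
            # Later jobs run exactly the tests they list.
            n += test in block
    runs[test] = n

doubled = [t for t, n in runs.items() if n > 1]
assert not doubled, f"Tests scheduled in more than one job: {doubled}"
```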
28 changes: 14 additions & 14 deletions pymc3/gp/gp.py
@@ -137,10 +137,10 @@ def _build_prior(self, name, X, reparameterize=True, **kwargs):
cov = stabilize(self.cov_func(X))
shape = infer_shape(X, kwargs.pop("shape", None))
if reparameterize:
- v = pm.Normal(name + "_rotated_", mu=0.0, sigma=1.0, shape=shape, **kwargs)
+ v = pm.Normal(name + "_rotated_", mu=0.0, sigma=1.0, size=shape, **kwargs)
f = pm.Deterministic(name, mu + cholesky(cov).dot(v))
else:
- f = pm.MvNormal(name, mu=mu, cov=cov, shape=shape, **kwargs)
+ f = pm.MvNormal(name, mu=mu, cov=cov, size=shape, **kwargs)
return f

def prior(self, name, X, reparameterize=True, **kwargs):
@@ -231,7 +231,7 @@ def conditional(self, name, Xnew, given=None, **kwargs):
givens = self._get_given_vals(given)
mu, cov = self._build_conditional(Xnew, *givens)
shape = infer_shape(Xnew, kwargs.pop("shape", None))
- return pm.MvNormal(name, mu=mu, cov=cov, shape=shape, **kwargs)
+ return pm.MvNormal(name, mu=mu, cov=cov, size=shape, **kwargs)


@conditioned_vars(["X", "f", "nu"])
@@ -279,10 +279,10 @@ def _build_prior(self, name, X, reparameterize=True, **kwargs):
shape = infer_shape(X, kwargs.pop("shape", None))
if reparameterize:
chi2 = pm.ChiSquared(name + "_chi2_", self.nu)
- v = pm.Normal(name + "_rotated_", mu=0.0, sigma=1.0, shape=shape, **kwargs)
+ v = pm.Normal(name + "_rotated_", mu=0.0, sigma=1.0, size=shape, **kwargs)
f = pm.Deterministic(name, (aet.sqrt(self.nu) / chi2) * (mu + cholesky(cov).dot(v)))
else:
- f = pm.MvStudentT(name, nu=self.nu, mu=mu, cov=cov, shape=shape, **kwargs)
+ f = pm.MvStudentT(name, nu=self.nu, mu=mu, cov=cov, size=shape, **kwargs)
return f

def prior(self, name, X, reparameterize=True, **kwargs):
@@ -349,7 +349,7 @@ def conditional(self, name, Xnew, **kwargs):
f = self.f
nu2, mu, cov = self._build_conditional(Xnew, X, f)
shape = infer_shape(Xnew, kwargs.pop("shape", None))
- return pm.MvStudentT(name, nu=nu2, mu=mu, cov=cov, shape=shape, **kwargs)
+ return pm.MvStudentT(name, nu=nu2, mu=mu, cov=cov, size=shape, **kwargs)


@conditioned_vars(["X", "y", "noise"])
@@ -447,7 +447,7 @@ def marginal_likelihood(self, name, X, y, noise, is_observed=True, **kwargs):
return pm.MvNormal(name, mu=mu, cov=cov, observed=y, **kwargs)
else:
shape = infer_shape(X, kwargs.pop("shape", None))
- return pm.MvNormal(name, mu=mu, cov=cov, shape=shape, **kwargs)
+ return pm.MvNormal(name, mu=mu, cov=cov, size=shape, **kwargs)

def _get_given_vals(self, given):
if given is None:
@@ -525,7 +525,7 @@ def conditional(self, name, Xnew, pred_noise=False, given=None, **kwargs):
givens = self._get_given_vals(given)
mu, cov = self._build_conditional(Xnew, pred_noise, False, *givens)
shape = infer_shape(Xnew, kwargs.pop("shape", None))
- return pm.MvNormal(name, mu=mu, cov=cov, shape=shape, **kwargs)
+ return pm.MvNormal(name, mu=mu, cov=cov, size=shape, **kwargs)

def predict(self, Xnew, point=None, diag=False, pred_noise=False, given=None):
R"""
@@ -740,7 +740,7 @@ def marginal_likelihood(self, name, X, Xu, y, noise=None, is_observed=True, **kw
return pm.DensityDist(name, logp, observed=y, **kwargs)
else:
shape = infer_shape(X, kwargs.pop("shape", None))
- return pm.DensityDist(name, logp, shape=shape, **kwargs)
+ return pm.DensityDist(name, logp, size=shape, **kwargs)

def _build_conditional(self, Xnew, pred_noise, diag, X, Xu, y, sigma, cov_total, mean_total):
sigma2 = aet.square(sigma)
@@ -819,7 +819,7 @@ def conditional(self, name, Xnew, pred_noise=False, given=None, **kwargs):
givens = self._get_given_vals(given)
mu, cov = self._build_conditional(Xnew, pred_noise, False, *givens)
shape = infer_shape(Xnew, kwargs.pop("shape", None))
- return pm.MvNormal(name, mu=mu, cov=cov, shape=shape, **kwargs)
+ return pm.MvNormal(name, mu=mu, cov=cov, size=shape, **kwargs)


@conditioned_vars(["Xs", "f"])
@@ -892,7 +892,7 @@ def _build_prior(self, name, Xs, **kwargs):
mu = self.mean_func(cartesian(*Xs))
chols = [cholesky(stabilize(cov(X))) for cov, X in zip(self.cov_funcs, Xs)]
# remove reparameterization option
- v = pm.Normal(name + "_rotated_", mu=0.0, sigma=1.0, shape=self.N, **kwargs)
+ v = pm.Normal(name + "_rotated_", mu=0.0, sigma=1.0, size=self.N, **kwargs)
f = pm.Deterministic(name, mu + aet.flatten(kron_dot(chols, v)))
return f

@@ -971,7 +971,7 @@ def conditional(self, name, Xnew, **kwargs):
"""
mu, cov = self._build_conditional(Xnew)
shape = infer_shape(Xnew, kwargs.pop("shape", None))
- return pm.MvNormal(name, mu=mu, cov=cov, shape=shape, **kwargs)
+ return pm.MvNormal(name, mu=mu, cov=cov, size=shape, **kwargs)


@conditioned_vars(["Xs", "y", "sigma"])
@@ -1095,7 +1095,7 @@ def marginal_likelihood(self, name, Xs, y, sigma, is_observed=True, **kwargs):
return pm.KroneckerNormal(name, mu=mu, covs=covs, sigma=sigma, observed=y, **kwargs)
else:
shape = np.prod([len(X) for X in Xs])
- return pm.KroneckerNormal(name, mu=mu, covs=covs, sigma=sigma, shape=shape, **kwargs)
+ return pm.KroneckerNormal(name, mu=mu, covs=covs, sigma=sigma, size=shape, **kwargs)

def _build_conditional(self, Xnew, pred_noise, diag):
Xs, y, sigma = self.Xs, self.y, self.sigma
@@ -1172,7 +1172,7 @@ def conditional(self, name, Xnew, pred_noise=False, **kwargs):
"""
mu, cov = self._build_conditional(Xnew, pred_noise, False)
shape = infer_shape(Xnew, kwargs.pop("shape", None))
- return pm.MvNormal(name, mu=mu, cov=cov, shape=shape, **kwargs)
+ return pm.MvNormal(name, mu=mu, cov=cov, size=shape, **kwargs)

def predict(self, Xnew, point=None, diag=False, pred_noise=False):
R"""
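The recurring edit in this file swaps the `shape` keyword for `size` when constructing random variables, following the distribution API on the v4 development branch; the same rename appears in the test files below (test_data_container.py, test_missing.py). A rough illustration of the new spelling, assuming v4 semantics where `size` requests independent draws:

```python
import pymc3 as pm

with pm.Model():
    # v3 spelling:
    #   v = pm.Normal("v", mu=0.0, sigma=1.0, shape=10)
    # v4 spelling, as used throughout this diff:
    v = pm.Normal("v", mu=0.0, sigma=1.0, size=10)
```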
2 changes: 1 addition & 1 deletion pymc3/tests/models.py
@@ -30,7 +30,7 @@ def simple_model():
mu = -2.1
tau = 1.3
with Model() as model:
Normal("x", mu, tau=tau, size=2, testval=np.ones(2) * 0.1)
Normal("x", mu, tau=tau, size=2, testval=floatX_array([0.1, 0.1]))

return model.test_point, model, (mu, tau ** -0.5)

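`np.ones(2) * 0.1` is always float64, which trips up the float32 jobs re-enabled in the workflow above. The `floatX_array` helper used here presumably casts to the configured precision; a hypothetical sketch of what such a helper does, inferred from its usage in this diff:

```python
import numpy as np
import aesara

def floatX_array(x):
    # Cast to the active float dtype so float32 CI jobs get float32
    # test values instead of NumPy's float64 default.
    return np.array(x, dtype=aesara.config.floatX)

print(floatX_array([0.1, 0.1]).dtype)  # float32 or float64, per aesara.config.floatX
```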
2 changes: 1 addition & 1 deletion pymc3/tests/test_coords.py
@@ -4,7 +4,7 @@
import pymc3 as pm


@pytest.mark.xfail("Arviz incompatibilities")
@pytest.mark.xfail(reason="Arviz incompatibilities")
def test_coords():
chains = 2
n_features = 3
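The fix here is subtle: the first positional argument of `pytest.mark.xfail` is a *condition*, so a bare string like "Arviz incompatibilities" is treated as a condition expression to evaluate, not displayed as the reason. The explanation has to go through the `reason` keyword; the same correction appears in pymc3/tests/test_missing.py below.

```python
import pytest

# Wrong: pytest interprets a positional string as a condition
# expression to evaluate, not as an explanation.
@pytest.mark.xfail("Arviz incompatibilities")
def test_old_style():
    ...

# Right: pass the explanation via the reason keyword.
@pytest.mark.xfail(reason="Arviz incompatibilities")
def test_new_style():
    ...
```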
4 changes: 2 additions & 2 deletions pymc3/tests/test_data_container.py
@@ -131,7 +131,7 @@ def test_shared_data_as_rv_input(self):
"""
with pm.Model() as m:
x = pm.Data("x", [1.0, 2.0, 3.0])
- _ = pm.Normal("y", mu=x, shape=3)
+ _ = pm.Normal("y", mu=x, size=3)
trace = pm.sample(chains=1)

np.testing.assert_allclose(np.array([1.0, 2.0, 3.0]), x.get_value(), atol=1e-1)
@@ -148,7 +148,7 @@ def test_shared_scalar_as_rv_input(self):
# See https://github.com/pymc-devs/pymc3/issues/3139
with pm.Model() as m:
shared_var = shared(5.0)
- v = pm.Normal("v", mu=shared_var, shape=1)
+ v = pm.Normal("v", mu=shared_var, size=1)

np.testing.assert_allclose(
logpt(v, 5.0).eval(),
10 changes: 8 additions & 2 deletions pymc3/tests/test_gp.py
@@ -26,8 +26,6 @@

from pymc3.math import cartesian, kronecker

- pytestmark = pytest.mark.xfail(reason="GP not refactored")

np.random.seed(101)


@@ -769,6 +767,7 @@ def test_raises3(self):
B = pm.gp.cov.Coregion(1)


@pytest.mark.xfail(reason="MvNormal was not yet refactored")
class TestMarginalVsLatent:
R"""
Compare the logp of models Marginal, noise=0 and Latent.
@@ -814,6 +813,7 @@ def testLatent2(self):
npt.assert_allclose(latent_logp, self.logp, atol=5)


@pytest.mark.xfail(reason="MvNormal was not yet refactored")
class TestMarginalVsMarginalSparse:
R"""
Compare logp of models Marginal and MarginalSparse.
@@ -888,6 +888,7 @@ def setup_method(self):
)
self.means = (pm.gp.mean.Constant(0.5), pm.gp.mean.Constant(0.5), pm.gp.mean.Constant(0.5))

@pytest.mark.xfail(reason="MvNormal was not yet refactored")
def testAdditiveMarginal(self):
with pm.Model() as model1:
gp1 = pm.gp.Marginal(self.means[0], self.covs[0])
@@ -914,6 +915,7 @@ def testAdditiveMarginal(self):
fp = np.random.randn(self.Xnew.shape[0])
npt.assert_allclose(fp1.logp({"fp1": fp}), fp2.logp({"fp2": fp}), atol=0, rtol=1e-2)

@pytest.mark.xfail(reason="DensityDist was not yet refactored")
@pytest.mark.parametrize("approx", ["FITC", "VFE", "DTC"])
def testAdditiveMarginalSparse(self, approx):
Xu = np.random.randn(10, 3)
@@ -947,6 +949,7 @@ def testAdditiveMarginalSparse(self, approx):
fp = np.random.randn(self.Xnew.shape[0])
npt.assert_allclose(fp1.logp({"fp1": fp}), fp2.logp({"fp2": fp}), atol=0, rtol=1e-2)

@pytest.mark.xfail(reason="MvNormal was not yet refactored")
def testAdditiveLatent(self):
with pm.Model() as model1:
gp1 = pm.gp.Latent(self.means[0], self.covs[0])
@@ -1002,6 +1005,7 @@ def testAdditiveTypeRaises2(self):
gp1 + gp2


@pytest.mark.xfail(reason="MvNormal was not yet refactored")
class TestTP:
R"""
Compare TP with high degrees of freedom to GP
@@ -1054,6 +1058,7 @@ def testAdditiveTPRaises(self):
gp1 + gp2


@pytest.mark.xfail(reason="MvNormal was not yet refactored")
class TestLatentKron:
"""
Compare gp.LatentKron to gp.Latent, both with Gaussian noise.
@@ -1109,6 +1114,7 @@ def testLatentKronRaisesSizes(self):
gp.prior("f", Xs=[np.linspace(0, 1, 7)[:, None], np.linspace(0, 1, 5)[:, None]])


@pytest.mark.xfail(reason="MvNormal was not yet refactored")
class TestMarginalKron:
"""
Compare gp.MarginalKron to gp.Marginal.
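Instead of a module-wide `pytestmark` that xfails every test in the file, the marks are now applied per class or per test, so the GP tests that already pass run again. The two granularities look like this:

```python
import pytest

# Module level: every test in the file is expected to fail.
# pytestmark = pytest.mark.xfail(reason="GP not refactored")

# Class level: only the tests inside this class are expected to fail,
# and they are reported as XFAIL rather than as failures.
@pytest.mark.xfail(reason="MvNormal was not yet refactored")
class TestSomething:
    def test_case(self):
        assert False
```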
2 changes: 1 addition & 1 deletion pymc3/tests/test_minibatches.py
@@ -198,7 +198,7 @@ def true_dens():

for i in range(10):
_1, _2, _t = p1(), p2(), next(t)
- decimals = select_by_precision(float64=7, float32=2)
+ decimals = select_by_precision(float64=7, float32=1)
np.testing.assert_almost_equal(_1, _t, decimal=decimals) # Value O(-50,000)
np.testing.assert_almost_equal(_1, _2)
# Done
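The float32 tolerance is relaxed from 2 decimal places to 1. Since the compared values are around -50,000 (per the inline comment), even one decimal place of absolute agreement is already a demanding relative tolerance for float32. A sketch of the helper this test relies on, shown here as an assumption since its actual implementation lives elsewhere in the test suite:

```python
import aesara

def select_by_precision(float64, float32):
    # Pick a tolerance depending on the precision the job runs at.
    return float64 if aesara.config.floatX == "float64" else float32

decimals = select_by_precision(float64=7, float32=1)
```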
4 changes: 2 additions & 2 deletions pymc3/tests/test_missing.py
@@ -21,7 +21,7 @@
from pymc3 import ImputationWarning, Model, Normal, sample, sample_prior_predictive


@pytest.mark.xfail("Missing values not fully refactored")
@pytest.mark.xfail(reason="Missing values not fully refactored")
def test_missing():
data = ma.masked_values([1, 2, -1, 4, -1], value=-1)
with Model() as model:
@@ -82,7 +82,7 @@ def test_missing_dual_observations():
obs2 = ma.masked_values([-1, -1, 6, -1, 8], value=-1)
beta1 = Normal("beta1", 1, 1)
beta2 = Normal("beta2", 2, 1)
- latent = Normal("theta", shape=5)
+ latent = Normal("theta", size=5)
with pytest.warns(ImputationWarning):
ovar1 = Normal("o1", mu=beta1 * latent, observed=obs1)
with pytest.warns(ImputationWarning):
2 changes: 1 addition & 1 deletion pymc3/tests/test_model.py
@@ -155,7 +155,7 @@ def test_observed_rv_fail(self):
Normal("n", observed=x)

def test_observed_type(self):
- X_ = np.random.randn(100, 5)
+ X_ = np.random.randn(100, 5).astype(aesara.config.floatX)
X = pm.floatX(aesara.shared(X_))
with pm.Model():
x1 = pm.Normal("x1", observed=X_)
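`np.random.randn` always returns float64, so under a float32 matrix job the raw observed array's dtype would disagree with the model's tensors. Casting through `aesara.config.floatX`, with `pm.floatX` for the shared variable as in the diff, keeps the dtypes aligned; roughly:

```python
import aesara
import numpy as np
import pymc3 as pm

X_ = np.random.randn(100, 5).astype(aesara.config.floatX)  # match the job's precision
X = pm.floatX(aesara.shared(X_))  # pm.floatX casts a tensor to floatX
```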