Fedora Linux is testing pytest 7.0.0rc1 against its existing Python packages, and this revealed new test failures in pytest-bdd. The failures were bisected to pytest-dev/pytest@6d6cfd8 by @hroncok.
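For context, the bisected commit makes RunResult.assert_outcomes() also compare counts such as deselected, so the default of 0 no longer matches runs filtered with -k. A minimal sketch of how the affected pytest-bdd tests could be adjusted, assuming the expected deselection counts stay stable (the test name below is illustrative, not the actual patch), is to state the count explicitly; note that older pytest versions do not accept the deselected keyword:

def test_not_found_by_ini_outcomes(testdir):
    # Sketch only: 'testdir' is pytest's built-in fixture used throughout
    # these tests. Under pytest 7, assert_outcomes() also checks the
    # 'deselected' count, so a -k filtered run must declare it.
    result = testdir.runpytest("-k", "test_not_found_by_ini")
    result.assert_outcomes(passed=2, deselected=8)  # previously assert_outcomes(passed=2)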
=================================== FAILURES ===================================
________________________ test_feature_path_not_found[.] ________________________
testdir = <Testdir local('/tmp/pytest-of-mockbuild/pytest-0/test_feature_path_not_found0')>
base_dir = '.'
@pytest.mark.parametrize("base_dir", NOT_EXISTING_FEATURE_PATHS)
def test_feature_path_not_found(testdir, base_dir):
"""Test feature base dir."""
prepare_testdir(testdir, base_dir)
result = testdir.runpytest("-k", "test_not_found_by_ini")
> result.assert_outcomes(passed=2)
E AssertionError: assert {'deselected': 8,\n 'errors': 0,\n 'failed': 0,\n 'passed': 2,\n 'skipped': 0,\n 'warnings': 0,\n 'xfailed': 0,\n 'xpassed': 0} == {'deselected': 0,\n 'errors': 0,\n 'failed': 0,\n 'passed': 2,\n 'skipped': 0,\n 'warnings': 0,\n 'xfailed': 0,\n 'xpassed': 0}
E Common items:
E {'errors': 0,
E 'failed': 0,
E 'passed': 2,
E 'skipped': 0,
E 'warnings': 0,
E 'xfailed': 0,
E 'xpassed': 0}
E Differing items:
E {'deselected': 8} != {'deselected': 0}
E Full diff:
E {
E - 'deselected': 0,
E ? ^
E + 'deselected': 8,
E ? ^
E 'errors': 0,
E 'failed': 0,
E 'passed': 2,
E 'skipped': 0,
E 'warnings': 0,
E 'xfailed': 0,
E 'xpassed': 0,
E }
base_dir = '.'
result = <RunResult ret=ExitCode.OK len(stdout.lines)=9 len(stderr.lines)=0 duration=0.05s>
testdir = <Testdir local('/tmp/pytest-of-mockbuild/pytest-0/test_feature_path_not_found0')>
/builddir/build/BUILD/pytest-bdd-5.0.0/_empty/tests/feature/test_feature_base_dir.py:13: AssertionError
----------------------------- Captured stdout call -----------------------------
============================= test session starts ==============================
platform linux -- Python 3.10.1, pytest-7.0.0rc1, pluggy-1.0.0
rootdir: /tmp/pytest-of-mockbuild/pytest-0/test_feature_path_not_found0, configfile: tox.ini
plugins: bdd-5.0.0, forked-1.4.0, xdist-2.5.0
collected 10 items / 8 deselected / 2 selected
test_feature_path_not_found.py .. [100%]
======================= 2 passed, 8 deselected in 0.02s ========================
________________ test_feature_path_not_found[/does/not/exist/] _________________
testdir = <Testdir local('/tmp/pytest-of-mockbuild/pytest-0/test_feature_path_not_found1')>
base_dir = '/does/not/exist/'
@pytest.mark.parametrize("base_dir", NOT_EXISTING_FEATURE_PATHS)
def test_feature_path_not_found(testdir, base_dir):
"""Test feature base dir."""
prepare_testdir(testdir, base_dir)
result = testdir.runpytest("-k", "test_not_found_by_ini")
> result.assert_outcomes(passed=2)
E AssertionError: assert {'deselected': 8,\n 'errors': 0,\n 'failed': 0,\n 'passed': 2,\n 'skipped': 0,\n 'warnings': 0,\n 'xfailed': 0,\n 'xpassed': 0} == {'deselected': 0,\n 'errors': 0,\n 'failed': 0,\n 'passed': 2,\n 'skipped': 0,\n 'warnings': 0,\n 'xfailed': 0,\n 'xpassed': 0}
E Common items:
E {'errors': 0,
E 'failed': 0,
E 'passed': 2,
E 'skipped': 0,
E 'warnings': 0,
E 'xfailed': 0,
E 'xpassed': 0}
E Differing items:
E {'deselected': 8} != {'deselected': 0}
E Full diff:
E {
E - 'deselected': 0,
E ? ^
E + 'deselected': 8,
E ? ^
E 'errors': 0,
E 'failed': 0,
E 'passed': 2,
E 'skipped': 0,
E 'warnings': 0,
E 'xfailed': 0,
E 'xpassed': 0,
E }
base_dir = '/does/not/exist/'
result = <RunResult ret=ExitCode.OK len(stdout.lines)=9 len(stderr.lines)=0 duration=0.05s>
testdir = <Testdir local('/tmp/pytest-of-mockbuild/pytest-0/test_feature_path_not_found1')>
/builddir/build/BUILD/pytest-bdd-5.0.0/_empty/tests/feature/test_feature_base_dir.py:13: AssertionError
----------------------------- Captured stdout call -----------------------------
============================= test session starts ==============================
platform linux -- Python 3.10.1, pytest-7.0.0rc1, pluggy-1.0.0
rootdir: /tmp/pytest-of-mockbuild/pytest-0/test_feature_path_not_found1, configfile: tox.ini
plugins: bdd-5.0.0, forked-1.4.0, xdist-2.5.0
collected 10 items / 8 deselected / 2 selected
test_feature_path_not_found.py .. [100%]
======================= 2 passed, 8 deselected in 0.02s ========================
_____________________________ test_feature_path_ok _____________________________
testdir = <Testdir local('/tmp/pytest-of-mockbuild/pytest-0/test_feature_path_ok0')>
def test_feature_path_ok(testdir):
base_dir = "features"
prepare_testdir(testdir, base_dir)
result = testdir.runpytest("-k", "test_ok_by_ini")
> result.assert_outcomes(passed=2)
E AssertionError: assert {'deselected': 8,\n 'errors': 0,\n 'failed': 0,\n 'passed': 2,\n 'skipped': 0,\n 'warnings': 0,\n 'xfailed': 0,\n 'xpassed': 0} == {'deselected': 0,\n 'errors': 0,\n 'failed': 0,\n 'passed': 2,\n 'skipped': 0,\n 'warnings': 0,\n 'xfailed': 0,\n 'xpassed': 0}
E Common items:
E {'errors': 0,
E 'failed': 0,
E 'passed': 2,
E 'skipped': 0,
E 'warnings': 0,
E 'xfailed': 0,
E 'xpassed': 0}
E Differing items:
E {'deselected': 8} != {'deselected': 0}
E Full diff:
E {
E - 'deselected': 0,
E ? ^
E + 'deselected': 8,
E ? ^
E 'errors': 0,
E 'failed': 0,
E 'passed': 2,
E 'skipped': 0,
E 'warnings': 0,
E 'xfailed': 0,
E 'xpassed': 0,
E }
base_dir = 'features'
result = <RunResult ret=ExitCode.OK len(stdout.lines)=9 len(stderr.lines)=0 duration=0.05s>
testdir = <Testdir local('/tmp/pytest-of-mockbuild/pytest-0/test_feature_path_ok0')>
/builddir/build/BUILD/pytest-bdd-5.0.0/_empty/tests/feature/test_feature_base_dir.py:21: AssertionError
----------------------------- Captured stdout call -----------------------------
============================= test session starts ==============================
platform linux -- Python 3.10.1, pytest-7.0.0rc1, pluggy-1.0.0
rootdir: /tmp/pytest-of-mockbuild/pytest-0/test_feature_path_ok0, configfile: tox.ini
plugins: bdd-5.0.0, forked-1.4.0, xdist-2.5.0
collected 10 items / 8 deselected / 2 selected
test_feature_path_ok.py .. [100%]
======================= 2 passed, 8 deselected in 0.02s ========================
_____________________ test_feature_path_by_param_not_found _____________________
testdir = <Testdir local('/tmp/pytest-of-mockbuild/pytest-0/test_feature_path_by_param_not_found0')>
def test_feature_path_by_param_not_found(testdir):
"""As param takes precedence even if ini config is correct it should fail
if passed param is incorrect"""
base_dir = "features"
prepare_testdir(testdir, base_dir)
result = testdir.runpytest("-k", "test_not_found_by_param")
> result.assert_outcomes(passed=4)
E AssertionError: assert {'deselected': 6,\n 'errors': 0,\n 'failed': 0,\n 'passed': 4,\n 'skipped': 0,\n 'warnings': 0,\n 'xfailed': 0,\n 'xpassed': 0} == {'deselected': 0,\n 'errors': 0,\n 'failed': 0,\n 'passed': 4,\n 'skipped': 0,\n 'warnings': 0,\n 'xfailed': 0,\n 'xpassed': 0}
E Common items:
E {'errors': 0,
E 'failed': 0,
E 'passed': 4,
E 'skipped': 0,
E 'warnings': 0,
E 'xfailed': 0,
E 'xpassed': 0}
E Differing items:
E {'deselected': 6} != {'deselected': 0}
E Full diff:
E {
E - 'deselected': 0,
E ? ^
E + 'deselected': 6,
E ? ^
E 'errors': 0,
E 'failed': 0,
E 'passed': 4,
E 'skipped': 0,
E 'warnings': 0,
E 'xfailed': 0,
E 'xpassed': 0,
E }
base_dir = 'features'
result = <RunResult ret=ExitCode.OK len(stdout.lines)=9 len(stderr.lines)=0 duration=0.05s>
testdir = <Testdir local('/tmp/pytest-of-mockbuild/pytest-0/test_feature_path_by_param_not_found0')>
/builddir/build/BUILD/pytest-bdd-5.0.0/_empty/tests/feature/test_feature_base_dir.py:31: AssertionError
----------------------------- Captured stdout call -----------------------------
============================= test session starts ==============================
platform linux -- Python 3.10.1, pytest-7.0.0rc1, pluggy-1.0.0
rootdir: /tmp/pytest-of-mockbuild/pytest-0/test_feature_path_by_param_not_found0, configfile: tox.ini
plugins: bdd-5.0.0, forked-1.4.0, xdist-2.5.0
collected 10 items / 6 deselected / 4 selected
test_feature_path_by_param_not_found.py .... [100%]
======================= 4 passed, 6 deselected in 0.02s ========================
_______________________ test_feature_path_by_param_ok[.] _______________________
testdir = <Testdir local('/tmp/pytest-of-mockbuild/pytest-0/test_feature_path_by_param_ok0')>
base_dir = '.'
@pytest.mark.parametrize("base_dir", NOT_EXISTING_FEATURE_PATHS)
def test_feature_path_by_param_ok(testdir, base_dir):
"""If ini config is incorrect but param path is fine it should be able
to find features"""
prepare_testdir(testdir, base_dir)
result = testdir.runpytest("-k", "test_ok_by_param")
> result.assert_outcomes(passed=2)
E AssertionError: assert {'deselected': 8,\n 'errors': 0,\n 'failed': 0,\n 'passed': 2,\n 'skipped': 0,\n 'warnings': 0,\n 'xfailed': 0,\n 'xpassed': 0} == {'deselected': 0,\n 'errors': 0,\n 'failed': 0,\n 'passed': 2,\n 'skipped': 0,\n 'warnings': 0,\n 'xfailed': 0,\n 'xpassed': 0}
E Common items:
E {'errors': 0,
E 'failed': 0,
E 'passed': 2,
E 'skipped': 0,
E 'warnings': 0,
E 'xfailed': 0,
E 'xpassed': 0}
E Differing items:
E {'deselected': 8} != {'deselected': 0}
E Full diff:
E {
E - 'deselected': 0,
E ? ^
E + 'deselected': 8,
E ? ^
E 'errors': 0,
E 'failed': 0,
E 'passed': 2,
E 'skipped': 0,
E 'warnings': 0,
E 'xfailed': 0,
E 'xpassed': 0,
E }
base_dir = '.'
result = <RunResult ret=ExitCode.OK len(stdout.lines)=9 len(stderr.lines)=0 duration=0.05s>
testdir = <Testdir local('/tmp/pytest-of-mockbuild/pytest-0/test_feature_path_by_param_ok0')>
/builddir/build/BUILD/pytest-bdd-5.0.0/_empty/tests/feature/test_feature_base_dir.py:41: AssertionError
----------------------------- Captured stdout call -----------------------------
============================= test session starts ==============================
platform linux -- Python 3.10.1, pytest-7.0.0rc1, pluggy-1.0.0
rootdir: /tmp/pytest-of-mockbuild/pytest-0/test_feature_path_by_param_ok0, configfile: tox.ini
plugins: bdd-5.0.0, forked-1.4.0, xdist-2.5.0
collected 10 items / 8 deselected / 2 selected
test_feature_path_by_param_ok.py .. [100%]
======================= 2 passed, 8 deselected in 0.02s ========================
_______________ test_feature_path_by_param_ok[/does/not/exist/] ________________
testdir = <Testdir local('/tmp/pytest-of-mockbuild/pytest-0/test_feature_path_by_param_ok1')>
base_dir = '/does/not/exist/'
@pytest.mark.parametrize("base_dir", NOT_EXISTING_FEATURE_PATHS)
def test_feature_path_by_param_ok(testdir, base_dir):
"""If ini config is incorrect but param path is fine it should be able
to find features"""
prepare_testdir(testdir, base_dir)
result = testdir.runpytest("-k", "test_ok_by_param")
> result.assert_outcomes(passed=2)
E AssertionError: assert {'deselected': 8,\n 'errors': 0,\n 'failed': 0,\n 'passed': 2,\n 'skipped': 0,\n 'warnings': 0,\n 'xfailed': 0,\n 'xpassed': 0} == {'deselected': 0,\n 'errors': 0,\n 'failed': 0,\n 'passed': 2,\n 'skipped': 0,\n 'warnings': 0,\n 'xfailed': 0,\n 'xpassed': 0}
E Common items:
E {'errors': 0,
E 'failed': 0,
E 'passed': 2,
E 'skipped': 0,
E 'warnings': 0,
E 'xfailed': 0,
E 'xpassed': 0}
E Differing items:
E {'deselected': 8} != {'deselected': 0}
E Full diff:
E {
E - 'deselected': 0,
E ? ^
E + 'deselected': 8,
E ? ^
E 'errors': 0,
E 'failed': 0,
E 'passed': 2,
E 'skipped': 0,
E 'warnings': 0,
E 'xfailed': 0,
E 'xpassed': 0,
E }
base_dir = '/does/not/exist/'
result = <RunResult ret=ExitCode.OK len(stdout.lines)=9 len(stderr.lines)=0 duration=0.05s>
testdir = <Testdir local('/tmp/pytest-of-mockbuild/pytest-0/test_feature_path_by_param_ok1')>
/builddir/build/BUILD/pytest-bdd-5.0.0/_empty/tests/feature/test_feature_base_dir.py:41: AssertionError
----------------------------- Captured stdout call -----------------------------
============================= test session starts ==============================
platform linux -- Python 3.10.1, pytest-7.0.0rc1, pluggy-1.0.0
rootdir: /tmp/pytest-of-mockbuild/pytest-0/test_feature_path_by_param_ok1, configfile: tox.ini
plugins: bdd-5.0.0, forked-1.4.0, xdist-2.5.0
collected 10 items / 8 deselected / 2 selected
test_feature_path_by_param_ok.py .. [100%]
======================= 2 passed, 8 deselected in 0.02s ========================
_______________________________ test_step_trace ________________________________
testdir = <Testdir local('/tmp/pytest-of-mockbuild/pytest-0/test_step_trace2')>
def test_step_trace(testdir):
"""Test step trace."""
testdir.makeini(
"""
[pytest]
console_output_style=classic
"""
)
testdir.makefile(
".feature",
test="""
Scenario: When step has failure
Given I have a bar
When it fails
Scenario: When step is not found
Given not found
Scenario: When step validation error happens
Given foo
And foo
""",
)
testdir.makepyfile(
"""
import pytest
from pytest_bdd import given, when, scenario
@given('I have a bar')
def i_have_bar():
return 'bar'
@when('it fails')
def when_it_fails():
raise Exception('when fails')
@scenario('test.feature', 'When step has failure')
def test_when_fails_inline():
pass
@scenario('test.feature', 'When step has failure')
def test_when_fails_decorated():
pass
@scenario('test.feature', 'When step is not found')
def test_when_not_found():
pass
@when('foo')
def foo():
return 'foo'
@scenario('test.feature', 'When step validation error happens')
def test_when_step_validation_error():
pass
"""
)
result = testdir.runpytest("-k test_when_fails_inline", "-vv")
> result.assert_outcomes(failed=1)
E AssertionError: assert {'deselected': 3,\n 'errors': 0,\n 'failed': 1,\n 'passed': 0,\n 'skipped': 0,\n 'warnings': 0,\n 'xfailed': 0,\n 'xpassed': 0} == {'deselected': 0,\n 'errors': 0,\n 'failed': 1,\n 'passed': 0,\n 'skipped': 0,\n 'warnings': 0,\n 'xfailed': 0,\n 'xpassed': 0}
E Common items:
E {'errors': 0,
E 'failed': 1,
E 'passed': 0,
E 'skipped': 0,
E 'warnings': 0,
E 'xfailed': 0,
E 'xpassed': 0}
E Differing items:
E {'deselected': 3} != {'deselected': 0}
E Full diff:
E {
E - 'deselected': 0,
E ? ^
E + 'deselected': 3,
E ? ^
E 'errors': 0,
E 'failed': 1,
E 'passed': 0,
E 'skipped': 0,
E 'warnings': 0,
E 'xfailed': 0,
E 'xpassed': 0,
E }
result = <RunResult ret=ExitCode.TESTS_FAILED len(stdout.lines)=37 len(stderr.lines)=0 duration=0.05s>
testdir = <Testdir local('/tmp/pytest-of-mockbuild/pytest-0/test_step_trace2')>
/builddir/build/BUILD/pytest-bdd-5.0.0/_empty/tests/feature/test_steps.py:469: AssertionError
----------------------------- Captured stdout call -----------------------------
============================= test session starts ==============================
platform linux -- Python 3.10.1, pytest-7.0.0rc1, pluggy-1.0.0 -- /usr/bin/python3
cachedir: .pytest_cache
rootdir: /tmp/pytest-of-mockbuild/pytest-0/test_step_trace2, configfile: tox.ini
plugins: bdd-5.0.0, forked-1.4.0, xdist-2.5.0
collecting ... collected 4 items / 3 deselected / 1 selected
test_step_trace.py::test_when_fails_inline <- ../../../../builddir/build/BUILDROOT/python-pytest-bdd-5.0.0-3.fc36.x86_64/usr/lib/python3.10/site-packages/pytest_bdd/scenario.py FAILED
=================================== FAILURES ===================================
____________________________ test_when_fails_inline ____________________________
request = <FixtureRequest for <Function test_when_fails_inline>>
_pytest_bdd_example = {}
@pytest.mark.usefixtures(*args)
def scenario_wrapper(request, _pytest_bdd_example):
scenario = templated_scenario.render(_pytest_bdd_example)
> _execute_scenario(feature, scenario, request)
/builddir/build/BUILDROOT/python-pytest-bdd-5.0.0-3.fc36.x86_64/usr/lib/python3.10/site-packages/pytest_bdd/scenario.py:174:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
/builddir/build/BUILDROOT/python-pytest-bdd-5.0.0-3.fc36.x86_64/usr/lib/python3.10/site-packages/pytest_bdd/scenario.py:144: in _execute_scenario
_execute_step_function(request, scenario, step, step_func)
/builddir/build/BUILDROOT/python-pytest-bdd-5.0.0-3.fc36.x86_64/usr/lib/python3.10/site-packages/pytest_bdd/scenario.py:114: in _execute_step_function
return_value = step_func(**kwargs)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
@when('it fails')
def when_it_fails():
> raise Exception('when fails')
E Exception: when fails
test_step_trace.py:10: Exception
=========================== short test summary info ============================
FAILED test_step_trace.py::test_when_fails_inline - Exception: when fails
======================= 1 failed, 3 deselected in 0.02s ========================
=========================== short test summary info ============================
FAILED tests/feature/test_feature_base_dir.py::test_feature_path_not_found[.]
FAILED tests/feature/test_feature_base_dir.py::test_feature_path_not_found[/does/not/exist/]
FAILED tests/feature/test_feature_base_dir.py::test_feature_path_ok - Asserti...
FAILED tests/feature/test_feature_base_dir.py::test_feature_path_by_param_not_found
FAILED tests/feature/test_feature_base_dir.py::test_feature_path_by_param_ok[.]
FAILED tests/feature/test_feature_base_dir.py::test_feature_path_by_param_ok[/does/not/exist/]
FAILED tests/feature/test_steps.py::test_step_trace - AssertionError: assert ...
======================== 7 failed, 108 passed in 12.85s ========================
ERROR: InvocationError for command /usr/bin/pytest -vvl (exited with code 1)
___________________________________ summary ____________________________________
ERROR: py310: commands failed