The following is a normal build, install, and test cycle used when building the package from a non-root account:
+ PATH=/home/tkloczko/rpmbuild/BUILDROOT/python-pytest_check-1.0.1-2.fc35.x86_64/usr/bin:/usr/bin:/usr/sbin:/usr/local/sbin
+ PYTHONPATH=/home/tkloczko/rpmbuild/BUILDROOT/python-pytest_check-1.0.1-2.fc35.x86_64/usr/lib64/python3.8/site-packages:/home/tkloczko/rpmbuild/BUILDROOT/python-pytest_check-1.0.1-2.fc35.x86_64/usr/lib/python3.8/site-packages
+ PYTHONDONTWRITEBYTECODE=1
+ /usr/bin/pytest -ra
=========================================================================== test session starts ============================================================================
platform linux -- Python 3.8.9, pytest-6.2.4, py-1.10.0, pluggy-0.13.1
benchmark: 3.4.1 (defaults: timer=time.perf_counter disable_gc=False min_rounds=5 min_time=0.000005 max_time=1.0 calibration_precision=10 warmup=False warmup_iterations=100000)
rootdir: /home/tkloczko/rpmbuild/BUILD/pytest_check-1.0.1
plugins: forked-1.3.0, shutil-1.7.0, virtualenv-1.7.0, expect-1.1.0, httpbin-1.0.0, flake8-1.0.7, timeout-1.4.2, betamax-0.8.1, freezegun-0.4.2, case-1.5.3, isort-1.3.0, aspectlib-1.5.2, asyncio-0.15.1, toolbox-0.5, xprocess-0.17.1, aiohttp-0.3.0, checkdocs-2.7.0, mock-3.6.1, rerunfailures-9.1.1, requests-mock-1.9.3, cov-2.12.1, pyfakefs-4.5.0, cases-3.6.1, flaky-3.7.0, hypothesis-6.14.0, benchmark-3.4.1, xdist-2.3.0, Faker-8.8.1
collected 40 items
. . [ 2%]
tests/test_check.py ..................FFF..FF [ 66%]
tests/test_check_context_manager.py ..FFFF [ 82%]
tests/test_check_errors.py FFF [ 89%]
tests/test_check_fixture.py E [ 92%]
tests/test_check_func_decorator.py ..F [100%]
================================================================================== ERRORS ==================================================================================
___________________________________________________________________ ERROR at setup of test_check_fixture ___________________________________________________________________
file /home/tkloczko/rpmbuild/BUILD/pytest_check-1.0.1/tests/test_check_fixture.py, line 1
def test_check_fixture(check):
E fixture 'check' not found
> available fixtures: LineMatcher, _config_for_test, _pytest, _session_faker, _sys_snapshot, aiohttp_client, aiohttp_raw_server, aiohttp_server, aiohttp_unused_port, benchmark, benchmark_weave, betamax_parametrized_recorder, betamax_parametrized_session, betamax_recorder, betamax_session, cache, capfd, capfdbinary, caplog, capsys, capsysbinary, class_based_httpbin, class_based_httpbin_secure, class_mocker, cov, current_cases, doctest_namespace, event_loop, faker, fast, freezer, fs, httpbin, httpbin_both, httpbin_ca_bundle, httpbin_secure, linecomp, loop, loop_debug, mocker, module_mocker, monkeypatch, no_cover, package_mocker, patching, proactor_loop, pytestconfig, pytester, raw_test_server, record_property, record_testsuite_property, record_xml_attribute, recwarn, requests_mock, session_mocker, smart_caplog, stdouts, test_client, test_server, testdir, testrun_uid, tmp_path, tmp_path_factory, tmpdir, tmpdir_factory, tmpworkdir, unused_port, unused_tcp_port, unused_tcp_port_factory, virtualenv, weave, worker_id, workspace, xprocess
> use 'pytest --fixtures [testpath]' for help on them.
/home/tkloczko/rpmbuild/BUILD/pytest_check-1.0.1/tests/test_check_fixture.py:1
================================================================================= FAILURES =================================================================================
_________________________________________________________________________ test_watch_them_all_fail _________________________________________________________________________
testdir = <Testdir local('/tmp/pytest-of-tkloczko/pytest-117/test_watch_them_all_fail0')>
def test_watch_them_all_fail(testdir):
testdir.makepyfile(
"""
import pytest_check as check
def test_equal():
check.equal(1,2)
def test_not_equal():
check.not_equal(1,1)
def test_is():
x = ["foo"]
y = ["foo"]
check.is_(x, y)
def test_is_not():
x = ["foo"]
y = x
check.is_not(x, y)
def test_is_true():
check.is_true(False)
def test_is_false():
check.is_false(True)
def test_is_none():
a = 1
check.is_none(a)
def test_is_not_none():
a = None
check.is_not_none(a)
def test_is_in():
check.is_in(4, [1, 2, 3])
def test_is_not_in():
check.is_not_in(2, [1, 2, 3])
def test_is_instance():
check.is_instance(1, str)
def test_is_not_instance():
check.is_not_instance(1, int)
def test_almost_equal():
check.almost_equal(1, 2)
check.almost_equal(1, 2.1, abs=0.1)
check.almost_equal(1, 3, rel=1)
def test_not_almost_equal():
check.not_almost_equal(1, 1)
check.not_almost_equal(1, 1.1, abs=0.1)
check.not_almost_equal(1, 2, rel=1)
def test_greater():
check.greater(1, 2)
check.greater(1, 1)
def test_greater_equal():
check.greater_equal(1, 2)
def test_less():
check.less(2, 1)
check.less(1, 1)
def test_less_equal():
check.less_equal(2, 1)
#check.equal(2, 1)
"""
)
result = testdir.runpytest()
> result.assert_outcomes(failed=18, passed=0)
E AssertionError: assert {'errors': 0,...pped': 0, ...} == {'errors': 0,...pped': 0, ...}
E Omitting 4 identical items, use -vv to show
E Differing items:
E {'failed': 0} != {'failed': 18}
E {'passed': 18} != {'passed': 0}
E Use -v to get the full diff
/home/tkloczko/rpmbuild/BUILD/pytest_check-1.0.1/tests/test_check.py:183: AssertionError
--------------------------------------------------------------------------- Captured stdout call ---------------------------------------------------------------------------
============================= test session starts ==============================
platform linux -- Python 3.8.9, pytest-6.2.4, py-1.10.0, pluggy-0.13.1
benchmark: 3.4.1 (defaults: timer=time.perf_counter disable_gc=False min_rounds=5 min_time=0.000005 max_time=1.0 calibration_precision=10 warmup=False warmup_iterations=100000)
rootdir: /tmp/pytest-of-tkloczko/pytest-117/test_watch_them_all_fail0
plugins: forked-1.3.0, shutil-1.7.0, virtualenv-1.7.0, expect-1.1.0, httpbin-1.0.0, flake8-1.0.7, timeout-1.4.2, betamax-0.8.1, freezegun-0.4.2, case-1.5.3, isort-1.3.0, aspectlib-1.5.2, asyncio-0.15.1, toolbox-0.5, xprocess-0.17.1, aiohttp-0.3.0, checkdocs-2.7.0, mock-3.6.1, rerunfailures-9.1.1, requests-mock-1.9.3, cov-2.12.1, pyfakefs-4.5.0, cases-3.6.1, flaky-3.7.0, hypothesis-6.14.0, benchmark-3.4.1, xdist-2.3.0, Faker-8.8.1
collected 18 items
test_watch_them_all_fail.py .................. [100%]
============================== 18 passed in 0.26s ==============================
_____________________________________________________________________________ test_check_xfail _____________________________________________________________________________
testdir = <Testdir local('/tmp/pytest-of-tkloczko/pytest-117/test_check_xfail0')>
def test_check_xfail(testdir):
testdir.makepyfile(
"""
import pytest_check as check
import pytest
@pytest.mark.xfail()
def test_fail():
check.equal(1, 2)
"""
)
result = testdir.runpytest()
> result.assert_outcomes(xfailed=1)
E AssertionError: assert {'errors': 0,...pped': 0, ...} == {'errors': 0,...pped': 0, ...}
E Omitting 4 identical items, use -vv to show
E Differing items:
E {'xfailed': 0} != {'xfailed': 1}
E {'xpassed': 1} != {'xpassed': 0}
E Use -v to get the full diff
/home/tkloczko/rpmbuild/BUILD/pytest_check-1.0.1/tests/test_check.py:199: AssertionError
--------------------------------------------------------------------------- Captured stdout call ---------------------------------------------------------------------------
============================= test session starts ==============================
platform linux -- Python 3.8.9, pytest-6.2.4, py-1.10.0, pluggy-0.13.1
benchmark: 3.4.1 (defaults: timer=time.perf_counter disable_gc=False min_rounds=5 min_time=0.000005 max_time=1.0 calibration_precision=10 warmup=False warmup_iterations=100000)
rootdir: /tmp/pytest-of-tkloczko/pytest-117/test_check_xfail0
plugins: forked-1.3.0, shutil-1.7.0, virtualenv-1.7.0, expect-1.1.0, httpbin-1.0.0, flake8-1.0.7, timeout-1.4.2, betamax-0.8.1, freezegun-0.4.2, case-1.5.3, isort-1.3.0, aspectlib-1.5.2, asyncio-0.15.1, toolbox-0.5, xprocess-0.17.1, aiohttp-0.3.0, checkdocs-2.7.0, mock-3.6.1, rerunfailures-9.1.1, requests-mock-1.9.3, cov-2.12.1, pyfakefs-4.5.0, cases-3.6.1, flaky-3.7.0, hypothesis-6.14.0, benchmark-3.4.1, xdist-2.3.0, Faker-8.8.1
collected 1 item
test_check_xfail.py X [100%]
============================== 1 xpassed in 0.09s ==============================
_________________________________________________________________________ test_check_xfail_strict __________________________________________________________________________
testdir = <Testdir local('/tmp/pytest-of-tkloczko/pytest-117/test_check_xfail_strict0')>
def test_check_xfail_strict(testdir):
testdir.makepyfile(
"""
import pytest_check as check
import pytest
@pytest.mark.xfail(strict=True)
def test_fail():
check.equal(1, 2)
"""
)
result = testdir.runpytest()
> result.assert_outcomes(xfailed=1)
E AssertionError: assert {'errors': 0,...pped': 0, ...} == {'errors': 0,...pped': 0, ...}
E Omitting 4 identical items, use -vv to show
E Differing items:
E {'xfailed': 0} != {'xfailed': 1}
E {'failed': 1} != {'failed': 0}
E Use -v to get the full diff
/home/tkloczko/rpmbuild/BUILD/pytest_check-1.0.1/tests/test_check.py:215: AssertionError
--------------------------------------------------------------------------- Captured stdout call ---------------------------------------------------------------------------
============================= test session starts ==============================
platform linux -- Python 3.8.9, pytest-6.2.4, py-1.10.0, pluggy-0.13.1
benchmark: 3.4.1 (defaults: timer=time.perf_counter disable_gc=False min_rounds=5 min_time=0.000005 max_time=1.0 calibration_precision=10 warmup=False warmup_iterations=100000)
rootdir: /tmp/pytest-of-tkloczko/pytest-117/test_check_xfail_strict0
plugins: forked-1.3.0, shutil-1.7.0, virtualenv-1.7.0, expect-1.1.0, httpbin-1.0.0, flake8-1.0.7, timeout-1.4.2, betamax-0.8.1, freezegun-0.4.2, case-1.5.3, isort-1.3.0, aspectlib-1.5.2, asyncio-0.15.1, toolbox-0.5, xprocess-0.17.1, aiohttp-0.3.0, checkdocs-2.7.0, mock-3.6.1, rerunfailures-9.1.1, requests-mock-1.9.3, cov-2.12.1, pyfakefs-4.5.0, cases-3.6.1, flaky-3.7.0, hypothesis-6.14.0, benchmark-3.4.1, xdist-2.3.0, Faker-8.8.1
collected 1 item
test_check_xfail_strict.py F [100%]
=================================== FAILURES ===================================
__________________________________ test_fail ___________________________________
[XPASS(strict)]
=========================== short test summary info ============================
FAILED test_check_xfail_strict.py::test_fail
============================== 1 failed in 0.09s ===============================
__________________________________________________________________________ test_check_and_assert ___________________________________________________________________________
testdir = <Testdir local('/tmp/pytest-of-tkloczko/pytest-117/test_check_and_assert0')>
def test_check_and_assert(testdir):
testdir.makepyfile(
"""
import pytest_check as check
import pytest
def test_fail_check():
check.equal(1, 2)
def test_fail_assert():
assert 1 == 2
"""
)
result = testdir.runpytest()
> result.assert_outcomes(failed=2)
E AssertionError: assert {'errors': 0,...pped': 0, ...} == {'errors': 0,...pped': 0, ...}
E Omitting 4 identical items, use -vv to show
E Differing items:
E {'failed': 1} != {'failed': 2}
E {'passed': 1} != {'passed': 0}
E Use -v to get the full diff
/home/tkloczko/rpmbuild/BUILD/pytest_check-1.0.1/tests/test_check.py:271: AssertionError
--------------------------------------------------------------------------- Captured stdout call ---------------------------------------------------------------------------
============================= test session starts ==============================
platform linux -- Python 3.8.9, pytest-6.2.4, py-1.10.0, pluggy-0.13.1
benchmark: 3.4.1 (defaults: timer=time.perf_counter disable_gc=False min_rounds=5 min_time=0.000005 max_time=1.0 calibration_precision=10 warmup=False warmup_iterations=100000)
rootdir: /tmp/pytest-of-tkloczko/pytest-117/test_check_and_assert0
plugins: forked-1.3.0, shutil-1.7.0, virtualenv-1.7.0, expect-1.1.0, httpbin-1.0.0, flake8-1.0.7, timeout-1.4.2, betamax-0.8.1, freezegun-0.4.2, case-1.5.3, isort-1.3.0, aspectlib-1.5.2, asyncio-0.15.1, toolbox-0.5, xprocess-0.17.1, aiohttp-0.3.0, checkdocs-2.7.0, mock-3.6.1, rerunfailures-9.1.1, requests-mock-1.9.3, cov-2.12.1, pyfakefs-4.5.0, cases-3.6.1, flaky-3.7.0, hypothesis-6.14.0, benchmark-3.4.1, xdist-2.3.0, Faker-8.8.1
collected 2 items
test_check_and_assert.py .F [100%]
=================================== FAILURES ===================================
_______________________________ test_fail_assert _______________________________
def test_fail_assert():
> assert 1 == 2
E assert 1 == 2
test_check_and_assert.py:8: AssertionError
=========================== short test summary info ============================
FAILED test_check_and_assert.py::test_fail_assert - assert 1 == 2
========================= 1 failed, 1 passed in 0.09s ==========================
____________________________________________________________________________ test_stop_on_fail _____________________________________________________________________________
testdir = <Testdir local('/tmp/pytest-of-tkloczko/pytest-117/test_stop_on_fail0')>
def test_stop_on_fail(testdir):
testdir.makepyfile(
"""
import pytest_check as check
class TestStopOnFail():
def test_1(self):
check.equal(1, 1)
check.equal(1, 2)
check.equal(1, 3)
def test_2(self):
check.equal(1, 1)
check.equal(1, 2)
check.equal(1, 3)
"""
)
result = testdir.runpytest("-x")
> result.assert_outcomes(failed=1)
E AssertionError: assert {'errors': 0,...pped': 0, ...} == {'errors': 0,...pped': 0, ...}
E Omitting 4 identical items, use -vv to show
E Differing items:
E {'failed': 0} != {'failed': 1}
E {'passed': 2} != {'passed': 0}
E Use -v to get the full diff
/home/tkloczko/rpmbuild/BUILD/pytest_check-1.0.1/tests/test_check.py:296: AssertionError
--------------------------------------------------------------------------- Captured stdout call ---------------------------------------------------------------------------
============================= test session starts ==============================
platform linux -- Python 3.8.9, pytest-6.2.4, py-1.10.0, pluggy-0.13.1
benchmark: 3.4.1 (defaults: timer=time.perf_counter disable_gc=False min_rounds=5 min_time=0.000005 max_time=1.0 calibration_precision=10 warmup=False warmup_iterations=100000)
rootdir: /tmp/pytest-of-tkloczko/pytest-117/test_stop_on_fail0
plugins: forked-1.3.0, shutil-1.7.0, virtualenv-1.7.0, expect-1.1.0, httpbin-1.0.0, flake8-1.0.7, timeout-1.4.2, betamax-0.8.1, freezegun-0.4.2, case-1.5.3, isort-1.3.0, aspectlib-1.5.2, asyncio-0.15.1, toolbox-0.5, xprocess-0.17.1, aiohttp-0.3.0, checkdocs-2.7.0, mock-3.6.1, rerunfailures-9.1.1, requests-mock-1.9.3, cov-2.12.1, pyfakefs-4.5.0, cases-3.6.1, flaky-3.7.0, hypothesis-6.14.0, benchmark-3.4.1, xdist-2.3.0, Faker-8.8.1
collected 2 items
test_stop_on_fail.py .. [100%]
============================== 2 passed in 0.10s ===============================
________________________________________________________________________ test_context_manager_fail _________________________________________________________________________
testdir = <Testdir local('/tmp/pytest-of-tkloczko/pytest-117/test_context_manager_fail0')>
def test_context_manager_fail(testdir):
testdir.makepyfile(
"""
from pytest_check import check
def test_failures():
with check: assert 1 == 0
with check: assert 1 > 2
with check: assert 1 < 5 < 4
"""
)
result = testdir.runpytest()
> result.assert_outcomes(failed=1, passed=0)
E AssertionError: assert {'errors': 0,...pped': 0, ...} == {'errors': 0,...pped': 0, ...}
E Omitting 4 identical items, use -vv to show
E Differing items:
E {'failed': 0} != {'failed': 1}
E {'passed': 1} != {'passed': 0}
E Use -v to get the full diff
/home/tkloczko/rpmbuild/BUILD/pytest_check-1.0.1/tests/test_check_context_manager.py:34: AssertionError
--------------------------------------------------------------------------- Captured stdout call ---------------------------------------------------------------------------
============================= test session starts ==============================
platform linux -- Python 3.8.9, pytest-6.2.4, py-1.10.0, pluggy-0.13.1
benchmark: 3.4.1 (defaults: timer=time.perf_counter disable_gc=False min_rounds=5 min_time=0.000005 max_time=1.0 calibration_precision=10 warmup=False warmup_iterations=100000)
rootdir: /tmp/pytest-of-tkloczko/pytest-117/test_context_manager_fail0
plugins: forked-1.3.0, shutil-1.7.0, virtualenv-1.7.0, expect-1.1.0, httpbin-1.0.0, flake8-1.0.7, timeout-1.4.2, betamax-0.8.1, freezegun-0.4.2, case-1.5.3, isort-1.3.0, aspectlib-1.5.2, asyncio-0.15.1, toolbox-0.5, xprocess-0.17.1, aiohttp-0.3.0, checkdocs-2.7.0, mock-3.6.1, rerunfailures-9.1.1, requests-mock-1.9.3, cov-2.12.1, pyfakefs-4.5.0, cases-3.6.1, flaky-3.7.0, hypothesis-6.14.0, benchmark-3.4.1, xdist-2.3.0, Faker-8.8.1
collected 1 item
test_context_manager_fail.py . [100%]
============================== 1 passed in 0.10s ===============================
____________________________________________________________________ test_context_manager_fail_with_msg ____________________________________________________________________
testdir = <Testdir local('/tmp/pytest-of-tkloczko/pytest-117/test_context_manager_fail_with_msg0')>
def test_context_manager_fail_with_msg(testdir):
testdir.makepyfile(
"""
from pytest_check import check
def test_failures():
with check("first fail"): assert 1 == 0
with check("second fail"): assert 1 > 2
with check("third fail"): assert 1 < 5 < 4
"""
)
result = testdir.runpytest()
> result.assert_outcomes(failed=1, passed=0)
E AssertionError: assert {'errors': 0,...pped': 0, ...} == {'errors': 0,...pped': 0, ...}
E Omitting 4 identical items, use -vv to show
E Differing items:
E {'failed': 0} != {'failed': 1}
E {'passed': 1} != {'passed': 0}
E Use -v to get the full diff
/home/tkloczko/rpmbuild/BUILD/pytest_check-1.0.1/tests/test_check_context_manager.py:57: AssertionError
--------------------------------------------------------------------------- Captured stdout call ---------------------------------------------------------------------------
============================= test session starts ==============================
platform linux -- Python 3.8.9, pytest-6.2.4, py-1.10.0, pluggy-0.13.1
benchmark: 3.4.1 (defaults: timer=time.perf_counter disable_gc=False min_rounds=5 min_time=0.000005 max_time=1.0 calibration_precision=10 warmup=False warmup_iterations=100000)
rootdir: /tmp/pytest-of-tkloczko/pytest-117/test_context_manager_fail_with_msg0
plugins: forked-1.3.0, shutil-1.7.0, virtualenv-1.7.0, expect-1.1.0, httpbin-1.0.0, flake8-1.0.7, timeout-1.4.2, betamax-0.8.1, freezegun-0.4.2, case-1.5.3, isort-1.3.0, aspectlib-1.5.2, asyncio-0.15.1, toolbox-0.5, xprocess-0.17.1, aiohttp-0.3.0, checkdocs-2.7.0, mock-3.6.1, rerunfailures-9.1.1, requests-mock-1.9.3, cov-2.12.1, pyfakefs-4.5.0, cases-3.6.1, flaky-3.7.0, hypothesis-6.14.0, benchmark-3.4.1, xdist-2.3.0, Faker-8.8.1
collected 1 item
test_context_manager_fail_with_msg.py . [100%]
============================== 1 passed in 0.09s ===============================
____________________________________________________________________________ test_stop_on_fail _____________________________________________________________________________
testdir = <Testdir local('/tmp/pytest-of-tkloczko/pytest-117/test_stop_on_fail1')>
def test_stop_on_fail(testdir):
testdir.makepyfile(
"""
from pytest_check import check
def test_failures():
with check: assert 1 == 0
with check: assert 1 > 2
with check: assert 1 < 5 < 4
"""
)
result = testdir.runpytest('-x')
> result.assert_outcomes(failed=1, passed=0)
E AssertionError: assert {'errors': 0,...pped': 0, ...} == {'errors': 0,...pped': 0, ...}
E Omitting 4 identical items, use -vv to show
E Differing items:
E {'failed': 0} != {'failed': 1}
E {'passed': 1} != {'passed': 0}
E Use -v to get the full diff
/home/tkloczko/rpmbuild/BUILD/pytest_check-1.0.1/tests/test_check_context_manager.py:80: AssertionError
--------------------------------------------------------------------------- Captured stdout call ---------------------------------------------------------------------------
============================= test session starts ==============================
platform linux -- Python 3.8.9, pytest-6.2.4, py-1.10.0, pluggy-0.13.1
benchmark: 3.4.1 (defaults: timer=time.perf_counter disable_gc=False min_rounds=5 min_time=0.000005 max_time=1.0 calibration_precision=10 warmup=False warmup_iterations=100000)
rootdir: /tmp/pytest-of-tkloczko/pytest-117/test_stop_on_fail1
plugins: forked-1.3.0, shutil-1.7.0, virtualenv-1.7.0, expect-1.1.0, httpbin-1.0.0, flake8-1.0.7, timeout-1.4.2, betamax-0.8.1, freezegun-0.4.2, case-1.5.3, isort-1.3.0, aspectlib-1.5.2, asyncio-0.15.1, toolbox-0.5, xprocess-0.17.1, aiohttp-0.3.0, checkdocs-2.7.0, mock-3.6.1, rerunfailures-9.1.1, requests-mock-1.9.3, cov-2.12.1, pyfakefs-4.5.0, cases-3.6.1, flaky-3.7.0, hypothesis-6.14.0, benchmark-3.4.1, xdist-2.3.0, Faker-8.8.1
collected 1 item
test_stop_on_fail.py . [100%]
============================== 1 passed in 0.09s ===============================
________________________________________________________________________ test_stop_on_fail_with_msg ________________________________________________________________________
testdir = <Testdir local('/tmp/pytest-of-tkloczko/pytest-117/test_stop_on_fail_with_msg0')>
def test_stop_on_fail_with_msg(testdir):
testdir.makepyfile(
"""
from pytest_check import check
def test_failures():
with check("first fail"): assert 1 == 0
with check("second fail"): assert 1 > 2
with check("third fail"): assert 1 < 5 < 4
"""
)
result = testdir.runpytest('-x')
> result.assert_outcomes(failed=1, passed=0)
E AssertionError: assert {'errors': 0,...pped': 0, ...} == {'errors': 0,...pped': 0, ...}
E Omitting 4 identical items, use -vv to show
E Differing items:
E {'failed': 0} != {'failed': 1}
E {'passed': 1} != {'passed': 0}
E Use -v to get the full diff
/home/tkloczko/rpmbuild/BUILD/pytest_check-1.0.1/tests/test_check_context_manager.py:99: AssertionError
--------------------------------------------------------------------------- Captured stdout call ---------------------------------------------------------------------------
============================= test session starts ==============================
platform linux -- Python 3.8.9, pytest-6.2.4, py-1.10.0, pluggy-0.13.1
benchmark: 3.4.1 (defaults: timer=time.perf_counter disable_gc=False min_rounds=5 min_time=0.000005 max_time=1.0 calibration_precision=10 warmup=False warmup_iterations=100000)
rootdir: /tmp/pytest-of-tkloczko/pytest-117/test_stop_on_fail_with_msg0
plugins: forked-1.3.0, shutil-1.7.0, virtualenv-1.7.0, expect-1.1.0, httpbin-1.0.0, flake8-1.0.7, timeout-1.4.2, betamax-0.8.1, freezegun-0.4.2, case-1.5.3, isort-1.3.0, aspectlib-1.5.2, asyncio-0.15.1, toolbox-0.5, xprocess-0.17.1, aiohttp-0.3.0, checkdocs-2.7.0, mock-3.6.1, rerunfailures-9.1.1, requests-mock-1.9.3, cov-2.12.1, pyfakefs-4.5.0, cases-3.6.1, flaky-3.7.0, hypothesis-6.14.0, benchmark-3.4.1, xdist-2.3.0, Faker-8.8.1
collected 1 item
test_stop_on_fail_with_msg.py . [100%]
============================== 1 passed in 0.09s ===============================
____________________________________________________________________________ test_setup_failure ____________________________________________________________________________
testdir = <Testdir local('/tmp/pytest-of-tkloczko/pytest-117/test_setup_failure0')>
def test_setup_failure(testdir):
testdir.makepyfile(
"""
import pytest
import pytest_check as check
@pytest.fixture()
def a_fixture():
check.equal(1, 2)
def test_1(a_fixture):
pass
"""
)
result = testdir.runpytest()
> result.assert_outcomes(errors=1)
E AssertionError: assert {'errors': 0,...pped': 0, ...} == {'errors': 1,...pped': 0, ...}
E Omitting 4 identical items, use -vv to show
E Differing items:
E {'errors': 0} != {'errors': 1}
E {'passed': 1} != {'passed': 0}
E Use -v to get the full diff
/home/tkloczko/rpmbuild/BUILD/pytest_check-1.0.1/tests/test_check_errors.py:16: AssertionError
--------------------------------------------------------------------------- Captured stdout call ---------------------------------------------------------------------------
============================= test session starts ==============================
platform linux -- Python 3.8.9, pytest-6.2.4, py-1.10.0, pluggy-0.13.1
benchmark: 3.4.1 (defaults: timer=time.perf_counter disable_gc=False min_rounds=5 min_time=0.000005 max_time=1.0 calibration_precision=10 warmup=False warmup_iterations=100000)
rootdir: /tmp/pytest-of-tkloczko/pytest-117/test_setup_failure0
plugins: forked-1.3.0, shutil-1.7.0, virtualenv-1.7.0, expect-1.1.0, httpbin-1.0.0, flake8-1.0.7, timeout-1.4.2, betamax-0.8.1, freezegun-0.4.2, case-1.5.3, isort-1.3.0, aspectlib-1.5.2, asyncio-0.15.1, toolbox-0.5, xprocess-0.17.1, aiohttp-0.3.0, checkdocs-2.7.0, mock-3.6.1, rerunfailures-9.1.1, requests-mock-1.9.3, cov-2.12.1, pyfakefs-4.5.0, cases-3.6.1, flaky-3.7.0, hypothesis-6.14.0, benchmark-3.4.1, xdist-2.3.0, Faker-8.8.1
collected 1 item
test_setup_failure.py . [100%]
============================== 1 passed in 0.09s ===============================
__________________________________________________________________________ test_teardown_failure ___________________________________________________________________________
testdir = <Testdir local('/tmp/pytest-of-tkloczko/pytest-117/test_teardown_failure0')>
def test_teardown_failure(testdir):
testdir.makepyfile(
"""
import pytest
import pytest_check as check
@pytest.fixture()
def a_fixture():
yield
check.equal(1, 2)
def test_1(a_fixture):
pass
"""
)
result = testdir.runpytest()
> result.assert_outcomes(passed=1, errors=1)
E AssertionError: assert {'errors': 0,...pped': 0, ...} == {'errors': 1,...pped': 0, ...}
E Omitting 5 identical items, use -vv to show
E Differing items:
E {'errors': 0} != {'errors': 1}
E Use -v to get the full diff
/home/tkloczko/rpmbuild/BUILD/pytest_check-1.0.1/tests/test_check_errors.py:36: AssertionError
--------------------------------------------------------------------------- Captured stdout call ---------------------------------------------------------------------------
============================= test session starts ==============================
platform linux -- Python 3.8.9, pytest-6.2.4, py-1.10.0, pluggy-0.13.1
benchmark: 3.4.1 (defaults: timer=time.perf_counter disable_gc=False min_rounds=5 min_time=0.000005 max_time=1.0 calibration_precision=10 warmup=False warmup_iterations=100000)
rootdir: /tmp/pytest-of-tkloczko/pytest-117/test_teardown_failure0
plugins: forked-1.3.0, shutil-1.7.0, virtualenv-1.7.0, expect-1.1.0, httpbin-1.0.0, flake8-1.0.7, timeout-1.4.2, betamax-0.8.1, freezegun-0.4.2, case-1.5.3, isort-1.3.0, aspectlib-1.5.2, asyncio-0.15.1, toolbox-0.5, xprocess-0.17.1, aiohttp-0.3.0, checkdocs-2.7.0, mock-3.6.1, rerunfailures-9.1.1, requests-mock-1.9.3, cov-2.12.1, pyfakefs-4.5.0, cases-3.6.1, flaky-3.7.0, hypothesis-6.14.0, benchmark-3.4.1, xdist-2.3.0, Faker-8.8.1
collected 1 item
test_teardown_failure.py . [100%]
============================== 1 passed in 0.09s ===============================
_________________________________________________________________________________ test_mix _________________________________________________________________________________
testdir = <Testdir local('/tmp/pytest-of-tkloczko/pytest-117/test_mix0')>
def test_mix(testdir):
testdir.makepyfile(
"""
from pytest_check import check
def test_fail_and_error(check):
check.equal(1, 2)
assert 2 == 3
"""
)
result = testdir.runpytest()
> result.assert_outcomes(failed=1, passed=0)
E AssertionError: assert {'errors': 1,...pped': 0, ...} == {'errors': 0,...pped': 0, ...}
E Omitting 4 identical items, use -vv to show
E Differing items:
E {'errors': 1} != {'errors': 0}
E {'failed': 0} != {'failed': 1}
E Use -v to get the full diff
/home/tkloczko/rpmbuild/BUILD/pytest_check-1.0.1/tests/test_check_errors.py:51: AssertionError
--------------------------------------------------------------------------- Captured stdout call ---------------------------------------------------------------------------
============================= test session starts ==============================
platform linux -- Python 3.8.9, pytest-6.2.4, py-1.10.0, pluggy-0.13.1
benchmark: 3.4.1 (defaults: timer=time.perf_counter disable_gc=False min_rounds=5 min_time=0.000005 max_time=1.0 calibration_precision=10 warmup=False warmup_iterations=100000)
rootdir: /tmp/pytest-of-tkloczko/pytest-117/test_mix0
plugins: forked-1.3.0, shutil-1.7.0, virtualenv-1.7.0, expect-1.1.0, httpbin-1.0.0, flake8-1.0.7, timeout-1.4.2, betamax-0.8.1, freezegun-0.4.2, case-1.5.3, isort-1.3.0, aspectlib-1.5.2, asyncio-0.15.1, toolbox-0.5, xprocess-0.17.1, aiohttp-0.3.0, checkdocs-2.7.0, mock-3.6.1, rerunfailures-9.1.1, requests-mock-1.9.3, cov-2.12.1, pyfakefs-4.5.0, cases-3.6.1, flaky-3.7.0, hypothesis-6.14.0, benchmark-3.4.1, xdist-2.3.0, Faker-8.8.1
collected 1 item
test_mix.py E [100%]
==================================== ERRORS ====================================
____________________ ERROR at setup of test_fail_and_error _____________________
file /tmp/pytest-of-tkloczko/pytest-117/test_mix0/test_mix.py, line 3
def test_fail_and_error(check):
E fixture 'check' not found
> available fixtures: _session_faker, aiohttp_client, aiohttp_raw_server, aiohttp_server, aiohttp_unused_port, benchmark, benchmark_weave, betamax_parametrized_recorder, betamax_parametrized_session, betamax_recorder, betamax_session, cache, capfd, capfdbinary, caplog, capsys, capsysbinary, class_based_httpbin, class_based_httpbin_secure, class_mocker, cov, current_cases, doctest_namespace, event_loop, faker, fast, freezer, fs, httpbin, httpbin_both, httpbin_ca_bundle, httpbin_secure, loop, loop_debug, mocker, module_mocker, monkeypatch, no_cover, package_mocker, patching, proactor_loop, pytestconfig, raw_test_server, record_property, record_testsuite_property, record_xml_attribute, recwarn, requests_mock, session_mocker, smart_caplog, stdouts, test_client, test_server, testrun_uid, tmp_path, tmp_path_factory, tmpdir, tmpdir_factory, tmpworkdir, unused_port, unused_tcp_port, unused_tcp_port_factory, virtualenv, weave, worker_id, workspace, xprocess
> use 'pytest --fixtures [testpath]' for help on them.
/tmp/pytest-of-tkloczko/pytest-117/test_mix0/test_mix.py:3
=========================== short test summary info ============================
ERROR test_mix.py::test_fail_and_error
=============================== 1 error in 0.08s ===============================
________________________________________________________________________________ test_fail _________________________________________________________________________________
testdir = <Testdir local('/tmp/pytest-of-tkloczko/pytest-117/test_fail0')>
def test_fail(testdir):
testdir.makepyfile(
"""
from pytest_check import check_func
@check_func
def is_four(a):
assert a == 4
def test_all_four():
is_four(1)
is_four(2)
should_be_False = is_four(3)
should_be_True = is_four(4)
print('should_be_True={}'.format(should_be_True))
print('should_be_False={}'.format(should_be_False))
"""
)
result = testdir.runpytest("-s")
> result.assert_outcomes(failed=1, passed=0)
E AssertionError: assert {'errors': 0,...pped': 0, ...} == {'errors': 0,...pped': 0, ...}
E Omitting 4 identical items, use -vv to show
E Differing items:
E {'failed': 0} != {'failed': 1}
E {'passed': 1} != {'passed': 0}
E Use -v to get the full diff
/home/tkloczko/rpmbuild/BUILD/pytest_check-1.0.1/tests/test_check_func_decorator.py:41: AssertionError
--------------------------------------------------------------------------- Captured stdout call ---------------------------------------------------------------------------
============================= test session starts ==============================
platform linux -- Python 3.8.9, pytest-6.2.4, py-1.10.0, pluggy-0.13.1
benchmark: 3.4.1 (defaults: timer=time.perf_counter disable_gc=False min_rounds=5 min_time=0.000005 max_time=1.0 calibration_precision=10 warmup=False warmup_iterations=100000)
rootdir: /tmp/pytest-of-tkloczko/pytest-117/test_fail0
plugins: forked-1.3.0, shutil-1.7.0, virtualenv-1.7.0, expect-1.1.0, httpbin-1.0.0, flake8-1.0.7, timeout-1.4.2, betamax-0.8.1, freezegun-0.4.2, case-1.5.3, isort-1.3.0, aspectlib-1.5.2, asyncio-0.15.1, toolbox-0.5, xprocess-0.17.1, aiohttp-0.3.0, checkdocs-2.7.0, mock-3.6.1, rerunfailures-9.1.1, requests-mock-1.9.3, cov-2.12.1, pyfakefs-4.5.0, cases-3.6.1, flaky-3.7.0, hypothesis-6.14.0, benchmark-3.4.1, xdist-2.3.0, Faker-8.8.1
collected 1 item
test_fail.py should_be_True=True
should_be_False=False
.
============================== 1 passed in 0.09s ===============================
========================================================================= short test summary info ==========================================================================
ERROR tests/test_check_fixture.py::test_check_fixture
FAILED tests/test_check.py::test_watch_them_all_fail - AssertionError: assert {'errors': 0,...pped': 0, ...} == {'errors': 0,...pped': 0, ...}
FAILED tests/test_check.py::test_check_xfail - AssertionError: assert {'errors': 0,...pped': 0, ...} == {'errors': 0,...pped': 0, ...}
FAILED tests/test_check.py::test_check_xfail_strict - AssertionError: assert {'errors': 0,...pped': 0, ...} == {'errors': 0,...pped': 0, ...}
FAILED tests/test_check.py::test_check_and_assert - AssertionError: assert {'errors': 0,...pped': 0, ...} == {'errors': 0,...pped': 0, ...}
FAILED tests/test_check.py::test_stop_on_fail - AssertionError: assert {'errors': 0,...pped': 0, ...} == {'errors': 0,...pped': 0, ...}
FAILED tests/test_check_context_manager.py::test_context_manager_fail - AssertionError: assert {'errors': 0,...pped': 0, ...} == {'errors': 0,...pped': 0, ...}
FAILED tests/test_check_context_manager.py::test_context_manager_fail_with_msg - AssertionError: assert {'errors': 0,...pped': 0, ...} == {'errors': 0,...pped': 0, ...}
FAILED tests/test_check_context_manager.py::test_stop_on_fail - AssertionError: assert {'errors': 0,...pped': 0, ...} == {'errors': 0,...pped': 0, ...}
FAILED tests/test_check_context_manager.py::test_stop_on_fail_with_msg - AssertionError: assert {'errors': 0,...pped': 0, ...} == {'errors': 0,...pped': 0, ...}
FAILED tests/test_check_errors.py::test_setup_failure - AssertionError: assert {'errors': 0,...pped': 0, ...} == {'errors': 1,...pped': 0, ...}
FAILED tests/test_check_errors.py::test_teardown_failure - AssertionError: assert {'errors': 0,...pped': 0, ...} == {'errors': 1,...pped': 0, ...}
FAILED tests/test_check_errors.py::test_mix - AssertionError: assert {'errors': 1,...pped': 0, ...} == {'errors': 0,...pped': 0, ...}
FAILED tests/test_check_func_decorator.py::test_fail - AssertionError: assert {'errors': 0,...pped': 0, ...} == {'errors': 0,...pped': 0, ...}
================================================================== 13 failed, 25 passed, 1 error in 9.47s ==================================================================