# test_04_automark.py
"""Test the automark_dependency option.
"""
import pytest


def test_not_set(ctestdir):
    """No pytest.ini file, i.e. automark_dependency is not set.

    Since automark_dependency defaults to false and test_a is not
    marked, the outcome of test_a will not be recorded. As a result,
    test_b will be skipped due to a missing dependency.
    """
    ctestdir.makepyfile("""
        import pytest

        def test_a():
            pass

        @pytest.mark.dependency(depends=["test_a"])
        def test_b():
            pass
    """)
    result = ctestdir.runpytest("--verbose", "-rs")
    result.assert_outcomes(passed=1, skipped=1, failed=0)
    result.stdout.re_match_lines(r"""
        .*::test_a PASSED
        .*::test_b SKIPPED(?:\s+\(.*\))?
    """)


@pytest.mark.parametrize(
    "false_value", ["0", "no", "n", "False", "false", "f", "off"]
)
def test_set_false(ctestdir, false_value):
    """A pytest.ini is present, automark_dependency is set to false.

    Since automark_dependency is set to false and test_a is not
    marked, the outcome of test_a will not be recorded. As a result,
    test_b will be skipped due to a missing dependency.
    """
    ctestdir.makefile('.ini', pytest="""
        [pytest]
        automark_dependency = %s
        console_output_style = classic
    """ % false_value)
    ctestdir.makepyfile("""
        import pytest

        def test_a():
            pass

        @pytest.mark.dependency(depends=["test_a"])
        def test_b():
            pass
    """)
    result = ctestdir.runpytest("--verbose", "-rs")
    result.assert_outcomes(passed=1, skipped=1, failed=0)
    result.stdout.re_match_lines(r"""
        .*::test_a PASSED
        .*::test_b SKIPPED(?:\s+\(.*\))?
    """)


@pytest.mark.parametrize(
    "true_value", ["1", "yes", "y", "True", "true", "t", "on"]
)
def test_set_true(ctestdir, true_value):
    """A pytest.ini is present, automark_dependency is set to true.

    Since automark_dependency is set to true, the outcome of test_a
    will be recorded, even though it is not marked. As a result,
    test_b will not be skipped.
    """
    ctestdir.makefile('.ini', pytest="""
        [pytest]
        automark_dependency = %s
        console_output_style = classic
    """ % true_value)
    ctestdir.makepyfile("""
        import pytest

        def test_a():
            pass

        @pytest.mark.dependency(depends=["test_a"])
        def test_b():
            pass
    """)
    result = ctestdir.runpytest("--verbose", "-rs")
    result.assert_outcomes(passed=2, skipped=0, failed=0)
    result.stdout.re_match_lines(r"""
        .*::test_a PASSED
        .*::test_b PASSED
    """)