I am working with pytest and want the results in JSON format; here is the problem.
There is a single file which is tested using monkeypatch for custom inputs, and now I need the result for each test case. I know we can use `pytest test_example.py -v --junitxml="result.xml"`
to get the results in XML format, but I want the results in JSON format.
# example.py
# Read three values from standard input (the prompts are "x", "y", "z").
x = input("x")
y = input("y")
z = input("z")

# Echo x only when it parses to a non-zero integer.
# NOTE: int() raises ValueError for non-integer strings such as "0.9".
if int(x) != 0:
    print(x)
Above is the file which is going to be tested, and
# test_example.py
@pytest.mark.parametrize(
    "test_input, expected_output",
    [
        ("456", "456"),
        ("-999", "-999"),
        ("0", ""),  # int("0") is 0, i.e. falsy, so nothing is printed
        ("0.9", "0")
    ],
)
def test_example(monkeypatch: MonkeyPatch, test_input: str, expected_output: str):
    """Re-import example.py with stdin/stdout mocked and check what it prints."""
    def fake_input(prompt=""):
        return test_input

    captured = io.StringIO()
    with monkeypatch.context() as patcher:
        patcher.setattr(builtins, "input", fake_input)
        patcher.setattr(sys, "stdout", captured)
        # Drop any cached copy so the module body re-runs on import.
        sys.modules.pop("example", None)
        importlib.import_module(name="example", package="files")
        assert captured.getvalue().strip() == expected_output
This is the test file, and I use `pytest test_example.py -v --junitxml="result.xml"`,
which gives me a result like this:
<?xml version="1.0" encoding="utf-8"?>
<testsuites>
<testsuite name="pytest" errors="0" failures="1" skipped="0" tests="4" time="0.397" timestamp="2022-08-20T23:07:01.073263" hostname="HP">
<testcase classname="test_example" name="test_example[456-456]" time="0.005" />
<testcase classname="test_example" name="test_example[-999--999]" time="0.006" />
<testcase classname="test_example" name="test_example[0-]" time="0.005" />
<testcase classname="test_example" name="test_example[0.9-0]" time="0.007">
<failure message="ValueError: invalid literal for int() with base 10: '0.9'">monkeypatch = <_pytest.monkeypatch.MonkeyPatch object at 0x000001962E6322E0>, test_input = '0.9', expected_output = '0'
@pytest.mark.parametrize(
"test_input, expected_output",
[
("456", "456"),
("-999", "-999"),
("0", ""), # Expect int("0") to be 0, so it is False-y
("0.9", "0")
],
)
def test_example(monkeypatch: MonkeyPatch, test_input: str, expected_output: str):
mocked_input = lambda prompt="": test_input
mocked_stdout = io.StringIO()
with monkeypatch.context() as m:
m.setattr(builtins, "input", mocked_input)
m.setattr(sys, "stdout", mocked_stdout)
sys.modules.pop("example", None)
> importlib.import_module(name="example", package="files")
test_example.py:27:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
C:\Users\hp\AppData\Local\Programs\Python\Python39\lib\importlib\__init__.py:127: in import_module
return _bootstrap._gcd_import(name[level:], package, level)
<frozen importlib._bootstrap>:1030: in _gcd_import
???
<frozen importlib._bootstrap>:1007: in _find_and_load
???
<frozen importlib._bootstrap>:986: in _find_and_load_unlocked
???
<frozen importlib._bootstrap>:680: in _load_unlocked
???
<frozen importlib._bootstrap_external>:850: in exec_module
???
<frozen importlib._bootstrap>:228: in _call_with_frames_removed
???
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
x = input("x")
y = input("y")
z = input("z")
> if int(x):
E ValueError: invalid literal for int() with base 10: '0.9'
example.py:4: ValueError</failure>
</testcase>
</testsuite>
</testsuites>
How do we get this info in JSON format, like the following?
{"name": "test_example[456-456]", "time": "0.005", "result": "pass", "inputs": "456", "user_output": "456", "expected_output": "456", "failure": "reason"}
{"name": "test_example[-999--999]", "time": "0.006", "result": "pass", "inputs": "-999", "user_output": "-999", "expected_output": "-999", "failure": "reason"}
:
:
{"name": "some_test", "time": "some_time", "result": "pass or fail", "inputs": "custom-inputs", "user_output": "user-output", "expected_output": "custom-outputs", "failure": "reason"}
Is there any module that can do this type of thing? I have already tried pytest-json-report (https://pypi.org/project/pytest-json-report/), but it didn't work for me.