
Commit 34c0812

Merge pull request #19 from ESA-APEx/apex5-html-report
add basic HTML+JSON reporting
2 parents 54c9080 + 42de3ac commit 34c0812

8 files changed: +227 −1 lines changed


.github/workflows/benchmarks.yaml

Lines changed: 11 additions & 1 deletion
@@ -21,7 +21,17 @@ jobs:
       - name: Test with pytest
         run: |
           cd qa/benchmarks
-          pytest --random-subset=1
+          mkdir report
+          pytest \
+            --random-subset=1 \
+            --html report/report.html --self-contained-html \
+            --track-metrics-report=report/metrics.json
         env:
           OPENEO_AUTH_METHOD: client_credentials
           OPENEO_AUTH_CLIENT_CREDENTIALS_CDSEFED: ${{ secrets.OPENEO_AUTH_CLIENT_CREDENTIALS_CDSEFED }}
+      - name: upload report
+        uses: actions/upload-artifact@v4
+        if: always()
+        with:
+          name: report
+          path: qa/benchmarks/report/
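
For local debugging, the CI step above can be reproduced outside GitHub Actions. The following is a minimal sketch (not part of this commit), assuming it is run from `qa/benchmarks` with the requirements installed; `--random-subset` comes from this repo's own conftest, `--html`/`--self-contained-html` from pytest-html, and `--track-metrics-report` from the new plugin.

```python
# Minimal local-run sketch (assumption: executed from qa/benchmarks with
# dependencies from requirements.txt installed); mirrors the CI step above.
from pathlib import Path

import pytest

Path("report").mkdir(exist_ok=True)  # same as `mkdir report` in the workflow
exit_code = pytest.main(
    [
        "--random-subset=1",                           # option from this repo's conftest.py
        "--html=report/report.html",                   # pytest-html report
        "--self-contained-html",                       # inline assets into the HTML file
        "--track-metrics-report=report/metrics.json",  # JSON metrics from the new plugin
    ]
)
raise SystemExit(exit_code)
```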

.gitignore

Lines changed: 4 additions & 0 deletions
@@ -166,3 +166,7 @@ poetry.toml
 
 # LSP config files
 pyrightconfig.json
+
+
+tmp*
+temp*

qa/benchmarks/.gitignore

Lines changed: 1 addition & 0 deletions
@@ -0,0 +1 @@
+report/

qa/benchmarks/requirements.txt

Lines changed: 1 addition & 0 deletions
@@ -7,3 +7,4 @@ requests>=2.32.0
 xarray>=2024.6.0
 netCDF4>=1.7.1
 rioxarray>=0.15.7
+pytest-html>=4.1.1

qa/benchmarks/tests/conftest.py

Lines changed: 4 additions & 0 deletions
@@ -10,6 +10,10 @@
 # TODO: how to make sure the logging/printing from this plugin is actually visible by default?
 _log = logging.getLogger(__name__)
 
+pytest_plugins = [
+    "apex_algorithm_qa_tools.pytest_track_metrics",
+]
+
 
 def pytest_addoption(parser):
     parser.addoption(
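
With the plugin registered via `pytest_plugins`, benchmark tests in this suite can request the `track_metric` fixture directly. A hypothetical benchmark-style usage sketch (the test name, metric names, and workload are illustrative assumptions, not code from this commit):

```python
# Hypothetical benchmark-style test; metric names and the workload are
# illustrative assumptions, not part of this commit.
import time


def test_benchmark_sketch(track_metric):
    start = time.time()
    result = sum(i * i for i in range(1_000_000))  # stand-in for real benchmark work
    track_metric("elapsed_seconds", time.time() - start)
    track_metric("result_mod_97", result % 97)
    assert result > 0
```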
apex_algorithm_qa_tools/pytest_track_metrics.py

Lines changed: 140 additions & 0 deletions
@@ -0,0 +1,140 @@
"""
Pytest plugin to track test/benchmark metrics and report them with a JSON file.


Usage:

- Enable the plugin in `conftest.py`:

    ```python
    pytest_plugins = [
        "apex_algorithm_qa_tools.pytest_track_metrics",
    ]
    ```

- Use the `track_metric` fixture to record metrics during tests:

    ```python
    def test_dummy(track_metric):
        x = 3
        track_metric("x squared", x*x)
        ...
    ```

- Run the tests with `--track-metrics-report=path/to/metrics.json`
  to store metrics in a JSON file.
"""

import json
import warnings
from pathlib import Path
from typing import Any, Callable, List, Tuple, Union

import pytest

_TRACK_METRICS_PATH = "track_metrics_path"
_TRACK_METRICS_NAME = "track_metrics"


def pytest_addoption(parser):
    parser.addoption(
        "--track-metrics-report",
        metavar="PATH",
        action="store",
        dest=_TRACK_METRICS_PATH,
        default=None,
        help="Path to JSON file to store test/benchmark metrics.",
    )


def pytest_configure(config):
    track_metrics_path = config.getoption(_TRACK_METRICS_PATH)
    if (
        track_metrics_path
        # Don't register on xdist worker nodes
        and not hasattr(config, "workerinput")
    ):
        config.pluginmanager.register(
            TrackMetricsReporter(path=track_metrics_path),
            name=_TRACK_METRICS_NAME,
        )


def pytest_unconfigure(config):
    if config.pluginmanager.hasplugin(_TRACK_METRICS_NAME):
        config.pluginmanager.unregister(name=_TRACK_METRICS_NAME)


class TrackMetricsReporter:
    def __init__(
        self, path: Union[str, Path], user_properties_key: str = "track_metrics"
    ):
        self.path = Path(path)
        self.metrics: List[dict] = []
        self.user_properties_key = user_properties_key

    def pytest_runtest_logreport(self, report: pytest.TestReport):
        if report.when == "call":
            self.metrics.append(
                {
                    "nodeid": report.nodeid,
                    "report": {
                        "outcome": report.outcome,
                        "duration": report.duration,
                        "start": report.start,
                        "stop": report.stop,
                    },
                    "metrics": self.get_metrics(report.user_properties),
                }
            )

    def pytest_sessionfinish(self, session):
        with self.path.open("w", encoding="utf8") as f:
            json.dump(self.metrics, f, indent=2)

    def pytest_terminal_summary(self, terminalreporter):
        terminalreporter.write_sep("-", f"Generated track_metrics report: {self.path}")

    def get_metrics(
        self, user_properties: List[Tuple[str, Any]]
    ) -> List[Tuple[str, Any]]:
        """
        Extract existing test metrics items from user properties,
        or create a new one.
        """
        for name, value in user_properties:
            if name == self.user_properties_key:
                return value
        # Not found: create it
        metrics = []
        user_properties.append((self.user_properties_key, metrics))
        return metrics


@pytest.fixture
def track_metric(
    pytestconfig: pytest.Config, request: pytest.FixtureRequest
) -> Callable[[str, Any], None]:
    """
    Fixture to record a metric during tests/benchmarks,
    which will be stored in the pytest node's "user_properties".

    Returns a callable that expects a metric name and value.
    """

    reporter: Union[TrackMetricsReporter, None] = pytestconfig.pluginmanager.get_plugin(
        _TRACK_METRICS_NAME
    )

    if reporter:

        def append(name: str, value: Any):
            reporter.get_metrics(request.node.user_properties).append((name, value))
    else:
        warnings.warn(
            "The `track_metric` fixture is requested, but no output file is defined (e.g. with `--track-metrics-report=path/to/metrics.json`)."
        )

        def append(name: str, value: Any):
            pass

    return append
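
The metrics file written by `--track-metrics-report` is a plain JSON list of per-test entries, as pinned down by the unit test further below. A minimal consumer sketch (a hypothetical helper script, not part of this commit), assuming the `report/metrics.json` path used in the workflow:

```python
# Minimal sketch of a metrics.json consumer (hypothetical helper script,
# not part of this commit); the JSON layout follows TrackMetricsReporter.
import json
from pathlib import Path

with Path("report/metrics.json").open(encoding="utf8") as f:
    entries = json.load(f)

for entry in entries:
    report = entry["report"]
    print(f"{entry['nodeid']}: {report['outcome']} ({report['duration']:.2f}s)")
    for name, value in entry["metrics"]:
        print(f"  {name} = {value}")
```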

qa/unittests/tests/conftest.py

Lines changed: 4 additions & 0 deletions
@@ -1,3 +1,7 @@
 import pytest
 
+pytest_plugins = [
+    "pytester",
+]
+
 pytest.register_assert_rewrite("apex_algorithm_qa_tools.scenarios")
New unit test for the track_metrics plugin

Lines changed: 62 additions & 0 deletions
@@ -0,0 +1,62 @@
import json
import re
import time

import pytest


def test_track_metric_basics(pytester: pytest.Pytester, tmp_path):
    pytester.makeconftest(
        """
        pytest_plugins = [
            "apex_algorithm_qa_tools.pytest_track_metrics",
        ]
        """
    )
    pytester.makepyfile(
        test_addition="""
            import pytest

            @pytest.mark.parametrize("x", [5, 6])
            def test_3plus(track_metric, x):
                track_metric("x squared", x * x)
                assert 3 + x == 8
        """
    )

    metrics_path = tmp_path / "metrics.json"
    result = pytester.runpytest(f"--track-metrics-report={metrics_path}")
    result.assert_outcomes(passed=1, failed=1)

    assert metrics_path.exists()
    result.stdout.re_match_lines([f".*Generated.*{re.escape(str(metrics_path))}.*"])

    with metrics_path.open("r", encoding="utf8") as f:
        metrics = json.load(f)
    approx_now = pytest.approx(time.time(), abs=1)
    assert metrics == [
        {
            "nodeid": "test_addition.py::test_3plus[5]",
            "report": {
                "outcome": "passed",
                "duration": pytest.approx(0, abs=1),
                "start": approx_now,
                "stop": approx_now,
            },
            "metrics": [
                ["x squared", 25],
            ],
        },
        {
            "nodeid": "test_addition.py::test_3plus[6]",
            "report": {
                "outcome": "failed",
                "duration": pytest.approx(0, abs=1),
                "start": approx_now,
                "stop": approx_now,
            },
            "metrics": [
                ["x squared", 36],
            ],
        },
    ]
