Commit ed6e218

Issue #5/#7: refactor metrics plugin from conftest to own module

1 parent: 10d431a

4 files changed, +135 −89 lines

.github/workflows/benchmarks.yaml (+1 −1)

```diff
@@ -22,7 +22,7 @@ jobs:
       run: |
         cd qa/benchmarks
         mkdir report
-        pytest -k dummy --html report/report.html --self-contained-html --junit-xml=report/report.xml
+        pytest -k dummy --html report/report.html --self-contained-html --test-metrics=report/metrics.json
       env:
         OPENEO_AUTH_METHOD: client_credentials
         OPENEO_AUTH_CLIENT_CREDENTIALS_CDSEFED: ${{ secrets.OPENEO_AUTH_CLIENT_CREDENTIALS_CDSEFED }}
```
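Note the swap in the pytest invocation: the JUnit XML report (`--junit-xml`) is dropped in favor of the `--test-metrics` option provided by the new plugin module, which writes plain JSON (see the `test_metrics` module added below, and the consumption sketch after it).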

qa/benchmarks/tests/conftest.py (+5 −85)

```diff
@@ -1,9 +1,7 @@
-import json
 import logging
 import os
 import random
-from pathlib import Path
-from typing import Any, Callable, List, Tuple, Union
+from typing import Callable
 
 import openeo
 import pytest
@@ -12,6 +10,10 @@
 # TODO: how to make sure the logging/printing from this plugin is actually visible by default?
 _log = logging.getLogger(__name__)
 
+pytest_plugins = [
+    "apex_algorithm_qa_tools.test_metrics",
+]
+
 
 def pytest_addoption(parser):
     parser.addoption(
@@ -22,33 +24,6 @@ def pytest_addoption(parser):
         type=int,
         help="Only run random selected subset benchmarks.",
     )
-    parser.addoption(
-        "--openeo-metrics",
-        metavar="path",
-        action="store",
-        dest="openeo_metrics_path",
-        default=None,
-        help="File to store openEO metrics.",
-    )
-
-
-def pytest_configure(config):
-    openeo_metrics_path = config.getoption("openeo_metrics_path")
-    if (
-        openeo_metrics_path
-        # Don't register on xdist worker nodes
-        and not hasattr(config, "workerinput")
-    ):
-        config.pluginmanager.register(
-            # TODO: create config for this path
-            OpeneoMetricReporter(openeo_metrics_path),
-            name="openeo_metrics_reporter",
-        )
-
-
-def pytest_unconfigure(config):
-    if config.pluginmanager.hasplugin("openeo_metrics_report"):
-        config.pluginmanager.unregister(name="openeo_metrics_reporter")
 
 
 def pytest_collection_modifyitems(session, config, items):
@@ -66,61 +41,6 @@ def pytest_collection_modifyitems(session, config, items):
     items[:] = random.sample(items, k=subset_size)
 
 
-@pytest.fixture
-def openeo_metric(request: pytest.FixtureRequest) -> Callable[[str, Any], None]:
-    """
-    Fixture to record openEO metrics during openEO tests/benchmarks,
-    which will be stored in the pytest node's "user_properties".
-
-    Collect and export these metrics with OpeneoMetricReporter.
-    """
-
-    def append(name: str, value: Any):
-        _get_openeo_metrics(request.node.user_properties).append((name, value))
-
-    return append
-
-
-def _get_openeo_metrics(user_properties: List[Tuple[str, Any]]) -> List:
-    for name, value in user_properties:
-        if name == OpeneoMetricReporter.USER_PROPERTY_KEY:
-            return value
-    # Not found: create it
-    metrics = []
-    user_properties.append((OpeneoMetricReporter.USER_PROPERTY_KEY, metrics))
-    return metrics
-
-
-class OpeneoMetricReporter:
-    # TODO: isolate all this openeo_metrics stuff to proper plugin
-    USER_PROPERTY_KEY = "openeo_metrics"
-
-    def __init__(self, path: Union[str, Path]):
-        self.path = Path(path)
-        self.metrics = []
-
-    def pytest_runtest_logreport(self, report: pytest.TestReport):
-        if report.when == "call":
-            self.metrics.append(
-                {
-                    "nodeid": report.nodeid,
-                    "outcome": report.outcome,
-                    "openeo_metrics": _get_openeo_metrics(report.user_properties),
-                    "duration": report.duration,
-                    "start": report.start,
-                    "stop": report.stop,
-                    "longrepr": repr(report.longrepr),
-                }
-            )
-
-    def pytest_sessionfinish(self, session):
-        with self.path.open("w") as f:
-            json.dump(self.metrics, f, indent=2)
-
-    def pytest_terminal_summary(self, terminalreporter):
-        terminalreporter.write_sep("-", f"Generated openEO metrics report: {self.path}")
-
-
 def _get_client_credentials_env_var(url: str) -> str:
     """
     Get client credentials env var name for a given backend URL.
```
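For context: `pytest_plugins` in a root `conftest.py` is pytest's standard mechanism for loading a plugin module by its dotted name. Everything deleted here (the `--openeo-metrics` option, `OpeneoMetricReporter`, the `openeo_metric` fixture and its `_get_openeo_metrics` helper) reappears, renamed and slightly restructured, in the new `apex_algorithm_qa_tools.test_metrics` module below.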

qa/benchmarks/tests/test_dummy.py (+3 −3)

```diff
@@ -3,11 +3,11 @@
 """
 
 
-def test_dummy(openeo_metric):
+def test_dummy(test_metric):
     x = 3
     y = 5
-    openeo_metric("x", x)
-    openeo_metric("y", y)
+    test_metric("x squared", x * x)
+    test_metric("y", y)
     assert x + y == 7
 
 
```
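To make the recorded data concrete: a hand-written sketch (derived from the `MetricsReporter` logic in the new module below, not actual output; the nodeid and timings are illustrative) of the entry this test would produce in the metrics JSON:

```python
# Sketch of the MetricsReporter entry for test_dummy (illustrative values).
# The (name, value) tuples appended via the test_metric fixture end up
# under the "metrics" key, serialized by JSON as 2-element arrays.
{
    "nodeid": "tests/test_dummy.py::test_dummy",  # hypothetical nodeid
    "report": {
        "outcome": "passed",
        "duration": 0.001,  # illustrative timings (seconds / epoch floats)
        "start": 1718000000.0,
        "stop": 1718000000.001,
    },
    "metrics": [["x squared", 9], ["y", 5]],
}
```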

apex_algorithm_qa_tools/test_metrics.py (new file, +126 −0)

````python
"""
Pytest plugin to record test/benchmark metrics to a JSON file.


Usage:

- Enable the plugin in `conftest.py`:

  ```python
  pytest_plugins = [
      "apex_algorithm_qa_tools.test_metrics",
  ]
  ```

- Use the `test_metric` fixture to record test metrics:

  ```python
  def test_dummy(test_metric):
      x = 3
      test_metric("x squared", x * x)
      ...
  ```

- Run the tests with `--test-metrics=path/to/metrics.json`
  to store the metrics in a JSON file.
"""

import json
from pathlib import Path
from typing import Any, Callable, List, Tuple, Union

import pytest

_TEST_METRICS_PATH = "test_metrics_path"
_TEST_METRICS_REPORTER = "test_metrics_reporter"


def pytest_addoption(parser):
    parser.addoption(
        "--test-metrics",
        metavar="PATH",
        action="store",
        dest=_TEST_METRICS_PATH,
        default=None,
        help="Path to JSON file to store test/benchmark metrics.",
    )


def pytest_configure(config):
    test_metrics_path = config.getoption(_TEST_METRICS_PATH)
    if (
        test_metrics_path
        # Don't register on xdist worker nodes
        and not hasattr(config, "workerinput")
    ):
        config.pluginmanager.register(
            MetricsReporter(path=test_metrics_path),
            name=_TEST_METRICS_REPORTER,
        )


def pytest_unconfigure(config):
    if config.pluginmanager.hasplugin(_TEST_METRICS_REPORTER):
        config.pluginmanager.unregister(name=_TEST_METRICS_REPORTER)


class MetricsReporter:
    def __init__(
        self, path: Union[str, Path], user_properties_key: str = "test_metrics"
    ):
        self.path = Path(path)
        self.metrics: List[dict] = []
        self.user_properties_key = user_properties_key

    def pytest_runtest_logreport(self, report: pytest.TestReport):
        if report.when == "call":
            self.metrics.append(
                {
                    "nodeid": report.nodeid,
                    "report": {
                        "outcome": report.outcome,
                        "duration": report.duration,
                        "start": report.start,
                        "stop": report.stop,
                    },
                    "metrics": self.get_test_metrics(report.user_properties),
                }
            )

    def pytest_sessionfinish(self, session):
        with self.path.open("w", encoding="utf8") as f:
            json.dump(self.metrics, f, indent=2)

    def pytest_terminal_summary(self, terminalreporter):
        terminalreporter.write_sep("-", f"Generated test metrics report: {self.path}")

    def get_test_metrics(
        self, user_properties: List[Tuple[str, Any]]
    ) -> List[Tuple[str, Any]]:
        """
        Extract existing test metrics items from user properties,
        or create a new entry.
        """
        for name, value in user_properties:
            if name == self.user_properties_key:
                return value
        # Not found: create it
        metrics = []
        user_properties.append((self.user_properties_key, metrics))
        return metrics


@pytest.fixture
def test_metric(
    pytestconfig: pytest.Config, request: pytest.FixtureRequest
) -> Callable[[str, Any], None]:
    """
    Fixture to record test metrics during openEO tests/benchmarks,
    which will be stored in the pytest node's "user_properties".
    """

    reporter = pytestconfig.pluginmanager.get_plugin(_TEST_METRICS_REPORTER)

    def append(name: str, value: Any):
        reporter.get_test_metrics(request.node.user_properties).append((name, value))

    return append
````
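One caveat, as committed: the `test_metric` fixture assumes the reporter is registered, so tests using it need `--test-metrics` to be set (otherwise `get_plugin` returns `None` and the first recorded metric raises an `AttributeError`). The metrics file itself is plain JSON; a minimal consumer sketch (a hypothetical `summarize_metrics.py`, not part of this commit) could look like:

```python
# summarize_metrics.py (hypothetical): print one line per recorded test
# from the JSON file written by MetricsReporter.pytest_sessionfinish.
import json
from pathlib import Path


def summarize(path: str = "report/metrics.json") -> None:
    entries = json.loads(Path(path).read_text(encoding="utf8"))
    for entry in entries:
        # "metrics" holds the (name, value) pairs appended via the
        # test_metric fixture (serialized as 2-element lists in JSON).
        pairs = ", ".join(f"{name}={value!r}" for name, value in entry["metrics"])
        report = entry["report"]
        print(f"{entry['nodeid']}: {report['outcome']} ({report['duration']:.3f}s) {pairs}")


if __name__ == "__main__":
    summarize()
```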
