Skip to content

Commit 10d431a

Browse files
committed
Issue #5/#7 initial pytest plugin to collect metrics and dump to JSON
1 parent 56dbad6 commit 10d431a

File tree

2 files changed

+88
-2
lines changed

2 files changed

+88
-2
lines changed

qa/benchmarks/tests/conftest.py

Lines changed: 85 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,9 @@
1+
import json
12
import logging
23
import os
34
import random
4-
from typing import Callable
5+
from pathlib import Path
6+
from typing import Any, Callable, List, Tuple, Union
57

68
import openeo
79
import pytest
@@ -20,6 +22,33 @@ def pytest_addoption(parser):
2022
type=int,
2123
help="Only run random selected subset benchmarks.",
2224
)
25+
parser.addoption(
26+
"--openeo-metrics",
27+
metavar="path",
28+
action="store",
29+
dest="openeo_metrics_path",
30+
default=None,
31+
help="File to store openEO metrics.",
32+
)
33+
34+
35+
def pytest_configure(config):
    """
    Register an `OpeneoMetricReporter` plugin when `--openeo-metrics` was given.

    Registration is skipped on pytest-xdist worker nodes so that only the
    controller process collects and writes the metrics report.
    """
    metrics_path = config.getoption("openeo_metrics_path")
    if not metrics_path:
        return
    if hasattr(config, "workerinput"):
        # Running as an xdist worker node: don't register here.
        return
    # TODO: create config for this path
    reporter = OpeneoMetricReporter(metrics_path)
    config.pluginmanager.register(reporter, name="openeo_metrics_reporter")
47+
48+
49+
def pytest_unconfigure(config):
    """Unregister the openEO metrics reporter plugin (if it was registered)."""
    # Fix: the plugin is registered under the name "openeo_metrics_reporter"
    # (see `pytest_configure`), but this check previously looked for
    # "openeo_metrics_report", so the unregister branch could never run.
    if config.pluginmanager.hasplugin("openeo_metrics_reporter"):
        config.pluginmanager.unregister(name="openeo_metrics_reporter")
2352

2453

2554
def pytest_collection_modifyitems(session, config, items):
@@ -37,6 +66,61 @@ def pytest_collection_modifyitems(session, config, items):
3766
items[:] = random.sample(items, k=subset_size)
3867

3968

69+
@pytest.fixture
def openeo_metric(request: pytest.FixtureRequest) -> Callable[[str, Any], None]:
    """
    Fixture providing a callable to record openEO metrics (name-value pairs)
    during openEO tests/benchmarks. The metrics are stored on the pytest
    node's "user_properties".

    Collect and export these metrics with OpeneoMetricReporter.
    """

    def record(name: str, value: Any):
        # Lazily resolve/create the metrics list on first recorded metric.
        _get_openeo_metrics(request.node.user_properties).append((name, value))

    return record
82+
83+
84+
def _get_openeo_metrics(user_properties: List[Tuple[str, Any]]) -> List:
    """
    Look up the openEO metrics list in the given "user_properties" pairs,
    creating and attaching a fresh empty list when it's not present yet.
    """
    key = OpeneoMetricReporter.USER_PROPERTY_KEY
    for prop_name, prop_value in user_properties:
        if prop_name == key:
            return prop_value
    # Not found yet: attach a new metrics list so later calls find it.
    metrics: List = []
    user_properties.append((key, metrics))
    return metrics
92+
93+
94+
class OpeneoMetricReporter:
    """
    Pytest plugin that collects per-test openEO metrics (from the nodes'
    "user_properties") and dumps them, with basic test outcome/timing info,
    to a JSON file at the end of the session.
    """

    # TODO: isolate all this openeo_metrics stuff to proper plugin
    USER_PROPERTY_KEY = "openeo_metrics"

    def __init__(self, path: Union[str, Path]):
        # Target file for the JSON metrics report.
        self.path = Path(path)
        # One dict entry per executed test.
        self.metrics = []

    def pytest_runtest_logreport(self, report: pytest.TestReport):
        # Only record the actual test "call" phase (not setup/teardown).
        if report.when != "call":
            return
        entry = {
            "nodeid": report.nodeid,
            "outcome": report.outcome,
            "openeo_metrics": _get_openeo_metrics(report.user_properties),
            "duration": report.duration,
            "start": report.start,
            "stop": report.stop,
            "longrepr": repr(report.longrepr),
        }
        self.metrics.append(entry)

    def pytest_sessionfinish(self, session):
        # Write all collected metrics as a JSON document.
        with self.path.open("w") as f:
            json.dump(self.metrics, f, indent=2)

    def pytest_terminal_summary(self, terminalreporter):
        terminalreporter.write_sep("-", f"Generated openEO metrics report: {self.path}")
122+
123+
40124
def _get_client_credentials_env_var(url: str) -> str:
41125
"""
42126
Get client credentials env var name for a given backend URL.

qa/benchmarks/tests/test_dummy.py

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -3,9 +3,11 @@
33
"""
44

55

6-
def test_dummy():
6+
def test_dummy(openeo_metric):
77
x = 3
88
y = 5
9+
openeo_metric("x", x)
10+
openeo_metric("y", y)
911
assert x + y == 7
1012

1113

0 commit comments

Comments
 (0)