1
+ import json
1
2
import logging
2
3
import os
3
4
import random
4
- from typing import Callable
5
+ from pathlib import Path
6
+ from typing import Any , Callable , List , Tuple , Union
5
7
6
8
import openeo
7
9
import pytest
@@ -20,6 +22,33 @@ def pytest_addoption(parser):
20
22
type = int ,
21
23
help = "Only run random selected subset benchmarks." ,
22
24
)
25
+ parser .addoption (
26
+ "--openeo-metrics" ,
27
+ metavar = "path" ,
28
+ action = "store" ,
29
+ dest = "openeo_metrics_path" ,
30
+ default = None ,
31
+ help = "File to store openEO metrics." ,
32
+ )
33
+
34
+
35
def pytest_configure(config):
    """Register the openEO metrics reporter plugin when a metrics path is set.

    Registration is skipped on pytest-xdist worker nodes (detected through the
    ``workerinput`` attribute on the config) so that only the controller
    process produces the report.
    """
    path = config.getoption("openeo_metrics_path")
    if not path:
        return
    if hasattr(config, "workerinput"):
        # xdist worker node: leave reporting to the controller.
        return
    # TODO: create config for this path
    config.pluginmanager.register(
        OpeneoMetricReporter(path),
        name="openeo_metrics_reporter",
    )
47
+
48
+
49
def pytest_unconfigure(config):
    """Unregister the openEO metrics reporter plugin, if it was registered.

    Counterpart of ``pytest_configure``: the reporter is registered under the
    plugin name ``"openeo_metrics_reporter"``, so it must be looked up under
    that exact name. (The previous code checked for ``"openeo_metrics_report"``
    — a name that is never registered — so the plugin was never unregistered.)
    """
    if config.pluginmanager.hasplugin("openeo_metrics_reporter"):
        config.pluginmanager.unregister(name="openeo_metrics_reporter")
23
52
24
53
25
54
def pytest_collection_modifyitems (session , config , items ):
@@ -37,6 +66,61 @@ def pytest_collection_modifyitems(session, config, items):
37
66
items [:] = random .sample (items , k = subset_size )
38
67
39
68
69
@pytest.fixture
def openeo_metric(request: pytest.FixtureRequest) -> Callable[[str, Any], None]:
    """
    Fixture to record openEO metrics during openEO tests/benchmarks,
    which will be stored in the pytest node's "user_properties".

    Collect and export these metrics with OpeneoMetricReporter.
    """

    def record(name: str, value: Any):
        # Look up (creating lazily on first use) the metrics list attached to
        # this test node and append the (name, value) entry to it.
        _get_openeo_metrics(request.node.user_properties).append((name, value))

    return record
82
+
83
+
84
def _get_openeo_metrics(user_properties: List[Tuple[str, Any]]) -> List:
    """Return the openEO metrics list stored in ``user_properties``.

    If no metrics entry exists yet, attach a fresh empty list under
    ``OpeneoMetricReporter.USER_PROPERTY_KEY`` and return it, so callers can
    append to it directly.
    """
    key = OpeneoMetricReporter.USER_PROPERTY_KEY
    _missing = object()
    found = next((value for name, value in user_properties if name == key), _missing)
    if found is not _missing:
        return found
    # Not found: create and attach a new metrics list.
    fresh: List = []
    user_properties.append((key, fresh))
    return fresh
92
+
93
+
94
+ class OpeneoMetricReporter :
95
+ # TODO: isolate all this openeo_metrics stuff to proper plugin
96
+ USER_PROPERTY_KEY = "openeo_metrics"
97
+
98
+ def __init__ (self , path : Union [str , Path ]):
99
+ self .path = Path (path )
100
+ self .metrics = []
101
+
102
+ def pytest_runtest_logreport (self , report : pytest .TestReport ):
103
+ if report .when == "call" :
104
+ self .metrics .append (
105
+ {
106
+ "nodeid" : report .nodeid ,
107
+ "outcome" : report .outcome ,
108
+ "openeo_metrics" : _get_openeo_metrics (report .user_properties ),
109
+ "duration" : report .duration ,
110
+ "start" : report .start ,
111
+ "stop" : report .stop ,
112
+ "longrepr" : repr (report .longrepr ),
113
+ }
114
+ )
115
+
116
+ def pytest_sessionfinish (self , session ):
117
+ with self .path .open ("w" ) as f :
118
+ json .dump (self .metrics , f , indent = 2 )
119
+
120
+ def pytest_terminal_summary (self , terminalreporter ):
121
+ terminalreporter .write_sep ("-" , f"Generated openEO metrics report: { self .path } " )
122
+
123
+
40
124
def _get_client_credentials_env_var (url : str ) -> str :
41
125
"""
42
126
Get client credentials env var name for a given backend URL.
0 commit comments