| 1 | +""" |
| 2 | +Pytest plugin to track test/benchmark metrics and report them with a JSON file. |
| 3 | +
|
| 4 | +
|
| 5 | +Usage: |
| 6 | +
|
| 7 | +- Enable the plugin in `conftest.py`: |
| 8 | +
|
| 9 | + ```python |
| 10 | + pytest_plugins = [ |
| 11 | + "apex_algorithm_qa_tools.pytest_track_metrics", |
| 12 | + ] |
| 13 | + ``` |
| 14 | +
|
| 15 | +- Use the `track_metric` fixture to record metrics during tests: |
| 16 | +
|
| 17 | + ```python |
| 18 | + def test_dummy(track_metric): |
| 19 | + x = 3 |
| 20 | + track_metric("x squared", x*x) |
| 21 | + ... |
| 22 | +
|
| 23 | +- Run the tests with `--track-metrics-report=path/to/metrics.json` |
| 24 | + to store metrics in a JSON file |
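
- The report is a JSON array with one entry per test: the test outcome and
  timing plus the metrics recorded during that test. Illustrative example
  (values and node id are made up; the structure follows what
  `TrackMetricsReporter.pytest_runtest_logreport` collects):

    ```
    [
      {
        "nodeid": "tests/test_dummy.py::test_dummy",
        "report": {"outcome": "passed", "duration": 0.12, "start": 1718000000.0, "stop": 1718000000.12},
        "metrics": [["x squared", 9]]
      }
    ]
    ```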
"""

import json
import warnings
from pathlib import Path
from typing import Any, Callable, List, Tuple, Union

import pytest

_TRACK_METRICS_PATH = "track_metrics_path"
_TRACK_METRICS_NAME = "track_metrics"


def pytest_addoption(parser):
    parser.addoption(
        "--track-metrics-report",
        metavar="PATH",
        action="store",
        dest=_TRACK_METRICS_PATH,
        default=None,
        help="Path to JSON file to store test/benchmark metrics.",
    )


def pytest_configure(config):
    track_metrics_path = config.getoption(_TRACK_METRICS_PATH)
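    # Note (assuming standard pytest-xdist behavior): worker processes have
    # `config.workerinput` set, while the controller does not. Registering the
    # reporter only on the controller avoids duplicate reports; the controller
    # still receives the workers' test reports (and their `user_properties`)
    # through its own `pytest_runtest_logreport` hook.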
    if (
        track_metrics_path
        # Don't register on xdist worker nodes
        and not hasattr(config, "workerinput")
    ):
        config.pluginmanager.register(
            TrackMetricsReporter(path=track_metrics_path),
            name=_TRACK_METRICS_NAME,
        )


def pytest_unconfigure(config):
    if config.pluginmanager.hasplugin(_TRACK_METRICS_NAME):
        config.pluginmanager.unregister(name=_TRACK_METRICS_NAME)


class TrackMetricsReporter:
    def __init__(
        self, path: Union[str, Path], user_properties_key: str = "track_metrics"
    ):
        self.path = Path(path)
        self.metrics: List[dict] = []
        self.user_properties_key = user_properties_key

    def pytest_runtest_logreport(self, report: pytest.TestReport):
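        # Only record once per test: on the report of the "call" phase
        # (reports of the "setup" and "teardown" phases are ignored).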
        if report.when == "call":
            self.metrics.append(
                {
                    "nodeid": report.nodeid,
                    "report": {
                        "outcome": report.outcome,
                        "duration": report.duration,
                        "start": report.start,
                        "stop": report.stop,
                    },
                    "metrics": self.get_metrics(report.user_properties),
                }
            )

    def pytest_sessionfinish(self, session):
        with self.path.open("w", encoding="utf8") as f:
            json.dump(self.metrics, f, indent=2)

    def pytest_terminal_summary(self, terminalreporter):
        terminalreporter.write_sep("-", f"Generated track_metrics report: {self.path}")

    def get_metrics(
        self, user_properties: List[Tuple[str, Any]]
    ) -> List[Tuple[str, Any]]:
        """
        Extract the existing test metrics item from the user properties,
        or create a new one.
        """
        for name, value in user_properties:
            if name == self.user_properties_key:
                return value
        # Not found: create it
        metrics = []
        user_properties.append((self.user_properties_key, metrics))
        return metrics


@pytest.fixture
def track_metric(
    pytestconfig: pytest.Config, request: pytest.FixtureRequest
) -> Callable[[str, Any], None]:
    """
    Fixture to record a metric during tests/benchmarks,
    which will be stored in the pytest node's "user_properties".

    Returns a callable that expects a metric name and value.
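
    For example (metric names and values are free-form; these are illustrative):

        def test_dummy(track_metric):
            track_metric("x squared", 9)
            track_metric("model size", 1.23)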
    """

    reporter: Union[TrackMetricsReporter, None] = pytestconfig.pluginmanager.get_plugin(
        _TRACK_METRICS_NAME
    )

    if reporter:

        def append(name: str, value: Any):
            reporter.get_metrics(request.node.user_properties).append((name, value))
    else:
        warnings.warn(
            "The `track_metric` fixture is requested, but no output file is defined "
            "(e.g. with `--track-metrics-report=path/to/metrics.json`)."
        )

        def append(name: str, value: Any):
            pass

    return append