Commit f2224f2

Added endpoint tests

1 parent bfbcbd9 commit f2224f2

File tree: 1 file changed (+97 -0 lines)

backend/tests/unit/endpoints/test_pipelines.py

Lines changed: 97 additions & 0 deletions
@@ -1,6 +1,7 @@
 # Copyright (C) 2025 Intel Corporation
 # SPDX-License-Identifier: Apache-2.0
 
+from datetime import UTC, datetime
 from unittest.mock import MagicMock
 from uuid import uuid4
 
@@ -11,6 +12,7 @@
 from app.api.dependencies import get_pipeline_service
 from app.main import app
 from app.schemas import Pipeline, PipelineStatus
+from app.schemas.metrics import InferenceMetrics, LatencyMetrics, PipelineMetrics, TimeWindow
 from app.services import (
     PipelineService,
     ResourceAlreadyExistsError,
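The new tests construct PipelineMetrics objects from app.schemas.metrics, whose definitions are not part of this diff. Below is a minimal sketch of what those models plausibly look like, assuming Pydantic and inferring the fields solely from how the tests use them; the real module may differ:

# Hypothetical reconstruction of app/schemas/metrics.py; field names come
# from the tests in this diff, everything else is an assumption.
from datetime import datetime

from pydantic import BaseModel


class LatencyMetrics(BaseModel):
    # Optional on purpose: None signals "no samples in the window".
    avg_ms: float | None = None
    min_ms: float | None = None
    max_ms: float | None = None
    p95_ms: float | None = None
    latest_ms: float | None = None


class InferenceMetrics(BaseModel):
    latency: LatencyMetrics


class TimeWindow(BaseModel):
    start: datetime
    end: datetime
    time_window: int  # window length in seconds


class PipelineMetrics(BaseModel):
    time_window: TimeWindow
    inference: InferenceMetrics

Keeping every latency field optional lets the endpoint return a well-formed 200 body even when nothing was recorded, which the no-data test at the end of this diff depends on.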
@@ -199,3 +201,98 @@ def test_delete_pipeline_in_use(self, fxt_pipeline, fxt_pipeline_service, fxt_client):
 
         assert response.status_code == status.HTTP_409_CONFLICT
         assert str(err) == response.json()["detail"]
+
+    def test_get_pipeline_metrics_success(self, fxt_pipeline, fxt_pipeline_service, fxt_client):
+        """Test successful retrieval of pipeline metrics with default time window."""
+        mock_metrics = PipelineMetrics(
+            time_window=TimeWindow(start=datetime.now(UTC), end=datetime.now(UTC), time_window=60),
+            inference=InferenceMetrics(
+                latency=LatencyMetrics(avg_ms=100.5, min_ms=50.0, max_ms=200.0, p95_ms=180.0, latest_ms=120.0)
+            ),
+        )
+        fxt_pipeline_service.get_pipeline_metrics.return_value = mock_metrics
+
+        response = fxt_client.get(f"/api/pipelines/{str(fxt_pipeline.id)}/metrics")
+
+        assert response.status_code == status.HTTP_200_OK
+        fxt_pipeline_service.get_pipeline_metrics.assert_called_once_with(fxt_pipeline.id, 60)
+
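The fxt_pipeline, fxt_pipeline_service, and fxt_client fixtures are defined elsewhere in the test suite, not in this diff. A plausible conftest.py sketch, assuming the service is a MagicMock wired into the app through FastAPI's dependency_overrides (the real fixtures may be set up differently):

# Hypothetical conftest.py fixtures; only their usage in the tests above is
# known, the definitions here are assumptions.
from unittest.mock import MagicMock

import pytest
from fastapi.testclient import TestClient

from app.api.dependencies import get_pipeline_service
from app.main import app


@pytest.fixture
def fxt_pipeline_service():
    # Each test configures return_value or side_effect on this mock.
    return MagicMock()


@pytest.fixture
def fxt_client(fxt_pipeline_service):
    # Route handlers resolve get_pipeline_service to the mock instead of
    # the real service for the duration of the test.
    app.dependency_overrides[get_pipeline_service] = lambda: fxt_pipeline_service
    yield TestClient(app)
    app.dependency_overrides.clear()

The assert_called_once_with(fxt_pipeline.id, 60) check in the success test also pins down the endpoint's default time window of 60 seconds.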
+    def test_get_pipeline_metrics_invalid_pipeline_id(self, fxt_pipeline_service, fxt_client):
+        """Test metrics endpoint with invalid pipeline ID format."""
+        response = fxt_client.get("/api/pipelines/invalid-id/metrics")
+
+        assert response.status_code == status.HTTP_400_BAD_REQUEST
+        fxt_pipeline_service.get_pipeline_metrics.assert_not_called()
+
+    def test_get_pipeline_metrics_pipeline_not_found(self, fxt_pipeline, fxt_pipeline_service, fxt_client):
+        """Test metrics endpoint when pipeline doesn't exist."""
+        fxt_pipeline_service.get_pipeline_metrics.side_effect = ResourceNotFoundError(
+            ResourceType.PIPELINE, str(fxt_pipeline.id)
+        )
+
+        response = fxt_client.get(f"/api/pipelines/{str(fxt_pipeline.id)}/metrics")
+
+        assert response.status_code == status.HTTP_404_NOT_FOUND
+        fxt_pipeline_service.get_pipeline_metrics.assert_called_once_with(fxt_pipeline.id, 60)
+
+    def test_get_pipeline_metrics_pipeline_not_running(self, fxt_pipeline, fxt_pipeline_service, fxt_client):
+        """Test metrics endpoint when pipeline is not in running state."""
+        fxt_pipeline_service.get_pipeline_metrics.side_effect = ValueError(
+            "Cannot get metrics for a pipeline that is not running."
+        )
+
+        response = fxt_client.get(f"/api/pipelines/{str(fxt_pipeline.id)}/metrics")
+
+        assert response.status_code == status.HTTP_400_BAD_REQUEST
+        assert "Cannot get metrics for a pipeline that is not running" in response.json()["detail"]
+        fxt_pipeline_service.get_pipeline_metrics.assert_called_once_with(fxt_pipeline.id, 60)
+
+    @pytest.mark.parametrize("invalid_time_window", [0, -1, 3601, 7200])
+    def test_get_pipeline_metrics_invalid_time_window(
+        self, invalid_time_window, fxt_pipeline, fxt_pipeline_service, fxt_client
+    ):
+        """Test metrics endpoint with invalid time window values."""
+        response = fxt_client.get(f"/api/pipelines/{str(fxt_pipeline.id)}/metrics?time_window={invalid_time_window}")
+
+        assert response.status_code == status.HTTP_400_BAD_REQUEST
+        assert "Duration must be between 1 and 3600 seconds" in response.json()["detail"]
+        fxt_pipeline_service.get_pipeline_metrics.assert_not_called()
+
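Taken together, the error-path tests pin down a status-code contract: a malformed ID yields 400 before the service is touched, an unknown pipeline maps ResourceNotFoundError to 404, a stopped pipeline maps ValueError to 400, and an out-of-range time_window yields 400 with a fixed message. A hypothetical handler sketch consistent with that contract; the actual route implementation is not shown in this diff:

# Hypothetical sketch of the route under test, inferred from the assertions
# above; the repository's real handler may be structured differently.
from uuid import UUID

from fastapi import APIRouter, Depends, HTTPException, status

from app.api.dependencies import get_pipeline_service
from app.services import ResourceNotFoundError

router = APIRouter(prefix="/api/pipelines")


@router.get("/{pipeline_id}/metrics")
def get_pipeline_metrics(
    pipeline_id: str,
    time_window: int = 60,  # default matches assert_called_once_with(..., 60)
    pipeline_service=Depends(get_pipeline_service),
):
    try:
        pipeline_uuid = UUID(pipeline_id)
    except ValueError:
        # Malformed ID -> 400; the service mock must stay untouched.
        raise HTTPException(status.HTTP_400_BAD_REQUEST, detail="Invalid pipeline ID")
    if not 1 <= time_window <= 3600:
        # Validated by hand so the 400 detail matches the tests exactly.
        raise HTTPException(
            status.HTTP_400_BAD_REQUEST,
            detail="Duration must be between 1 and 3600 seconds",
        )
    try:
        return pipeline_service.get_pipeline_metrics(pipeline_uuid, time_window)
    except ResourceNotFoundError as err:
        raise HTTPException(status.HTTP_404_NOT_FOUND, detail=str(err))
    except ValueError as err:  # e.g. pipeline not running
        raise HTTPException(status.HTTP_400_BAD_REQUEST, detail=str(err))

One detail the parametrized test makes explicit: the range check cannot be left to Query(ge=1, le=3600), since FastAPI would then answer with 422 and its own error shape, while the tests expect 400 and the literal "Duration must be between 1 and 3600 seconds" detail.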
+    @pytest.mark.parametrize("valid_time_window", [1, 30, 300, 1800, 3600])
+    def test_get_pipeline_metrics_valid_time_windows(
+        self, valid_time_window, fxt_pipeline, fxt_pipeline_service, fxt_client
+    ):
+        """Test metrics endpoint with various valid time window values."""
+        mock_metrics = PipelineMetrics(
+            time_window=TimeWindow(start=datetime.now(UTC), end=datetime.now(UTC), time_window=valid_time_window),
+            inference=InferenceMetrics(
+                latency=LatencyMetrics(avg_ms=100.0, min_ms=50.0, max_ms=200.0, p95_ms=180.0, latest_ms=120.0)
+            ),
+        )
+        fxt_pipeline_service.get_pipeline_metrics.return_value = mock_metrics
+
+        response = fxt_client.get(f"/api/pipelines/{str(fxt_pipeline.id)}/metrics?time_window={valid_time_window}")
+
+        assert response.status_code == status.HTTP_200_OK
+        fxt_pipeline_service.get_pipeline_metrics.assert_called_once_with(fxt_pipeline.id, valid_time_window)
+
+    def test_get_pipeline_metrics_no_data_available(self, fxt_pipeline, fxt_pipeline_service, fxt_client):
+        """Test metrics endpoint when no latency data is available."""
+        mock_metrics = PipelineMetrics(
+            time_window=TimeWindow(start=datetime.now(UTC), end=datetime.now(UTC), time_window=60),
+            inference=InferenceMetrics(
+                latency=LatencyMetrics(avg_ms=None, min_ms=None, max_ms=None, p95_ms=None, latest_ms=None)
+            ),
+        )
+        fxt_pipeline_service.get_pipeline_metrics.return_value = mock_metrics
+
+        response = fxt_client.get(f"/api/pipelines/{str(fxt_pipeline.id)}/metrics")
+
+        assert response.status_code == status.HTTP_200_OK
+        response_data = response.json()
+        assert response_data["inference"]["latency"]["avg_ms"] is None
+        assert response_data["inference"]["latency"]["min_ms"] is None
+        assert response_data["inference"]["latency"]["max_ms"] is None
+        assert response_data["inference"]["latency"]["p95_ms"] is None
+        assert response_data["inference"]["latency"]["latest_ms"] is None
+        fxt_pipeline_service.get_pipeline_metrics.assert_called_once_with(fxt_pipeline.id, 60)
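The final test's null assertions rely on optional model fields serializing to JSON null. Under the Pydantic sketch assumed earlier, the field defaults give exactly that:

# Assuming the hypothetical LatencyMetrics model sketched above (Pydantic v2):
metrics = LatencyMetrics()
print(metrics.model_dump_json())
# {"avg_ms":null,"min_ms":null,"max_ms":null,"p95_ms":null,"latest_ms":null}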
