Commit 3af8204

feat: add telemetry export support with multiple backends
- Add support for OpenTelemetry (OTLP), Datadog, and Sentry exporters
- Enable telemetry-only mode (project_id now optional)
- Implement distributed tracing with W3C trace context propagation (sketched below)
- Add TelemetryManager for coordinating multiple exporters
- Support parallel export to multiple telemetry backends
- Add requests dependency for HTTP-based exporters
1 parent 6dda36d commit 3af8204
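The distributed-tracing item above refers to the W3C Trace Context format. The propagation code itself is not part of the hunks shown below, so the following is only an illustrative sketch of what a W3C traceparent value looks like; the make_traceparent helper is hypothetical and not part of this commit.

import secrets

def make_traceparent(trace_id: str | None = None, span_id: str | None = None) -> str:
    """Build a W3C Trace Context 'traceparent' header value.

    Layout: version-trace_id-parent_id-trace_flags
    (2 hex chars, 32 hex chars, 16 hex chars, 2 hex chars).
    """
    trace_id = trace_id or secrets.token_hex(16)  # 128-bit trace id
    span_id = span_id or secrets.token_hex(8)     # 64-bit parent/span id
    return f"00-{trace_id}-{span_id}-01"          # version 00, 'sampled' flag set

# e.g. a header an HTTP-based exporter could attach to outbound requests
headers = {"traceparent": make_traceparent()}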

10 files changed (+1282, −30 lines)

pyproject.toml

Lines changed: 1 addition & 0 deletions
@@ -21,6 +21,7 @@ dependencies = [
     "mcp>=1.2.0",
     "mcpcat-api==0.1.4",
     "pydantic>=2.0.0",
+    "requests>=2.31.0",
 ]
 
 [project.urls]

src/mcpcat/__init__.py

Lines changed: 46 additions & 9 deletions
@@ -21,11 +21,33 @@
 )
 
 
-def track(server: Any, project_id: str, options: MCPCatOptions | None = None) -> Any:
+def track(server: Any, project_id: str | None = None, options: MCPCatOptions | None = None) -> Any:
+    """
+    Initialize MCPCat tracking with optional telemetry export.
+
+    Args:
+        server: MCP server instance to track
+        project_id: MCPCat project ID (optional if using telemetry-only mode)
+        options: Configuration options including telemetry exporters
+
+    Returns:
+        The server instance with tracking enabled
+
+    Raises:
+        ValueError: If neither project_id nor exporters are provided
+        TypeError: If server is not a compatible MCP server instance
+    """
     # Use default options if not provided
     if options is None:
         options = MCPCatOptions()
 
+    # Validate configuration
+    if not project_id and not options.exporters:
+        raise ValueError(
+            "Either project_id or exporters must be provided. "
+            "Use project_id for MCPCat, exporters for telemetry-only mode, or both."
+        )
+
     # Validate server compatibility
     if not is_compatible_server(server):
         raise TypeError(
@@ -37,12 +59,25 @@ def track(server: Any, project_id: str, options: MCPCatOptions | None = None) -> Any:
     if is_fastmcp:
         lowlevel_server = server._mcp_server
 
+    # Initialize telemetry if exporters configured
+    if options.exporters:
+        from mcpcat.modules.telemetry import TelemetryManager
+        from mcpcat.modules.event_queue import event_queue, set_telemetry_manager
+
+        # Share the event queue's executor for consistency
+        telemetry_manager = TelemetryManager(
+            options.exporters,
+            event_queue.executor
+        )
+        set_telemetry_manager(telemetry_manager)
+        write_to_log(f"Telemetry initialized with {len(options.exporters)} exporter(s)")
+
     # Create and store tracking data
     session_id = new_session_id()
     session_info = get_session_info(lowlevel_server)
     data = MCPCatData(
         session_id=session_id,
-        project_id=project_id,
+        project_id=project_id or "",  # Use empty string if None for compatibility
         last_activity=datetime.now(timezone.utc),
         session_info=session_info,
         identified_sessions=dict(),
@@ -53,13 +88,12 @@ def track(server: Any, project_id: str, options: MCPCatOptions | None = None) -> Any:
     try:
         # Always initialize dynamic tracking for complete tool coverage
         from mcpcat.modules.overrides.monkey_patch import apply_monkey_patches
-
+
         # Initialize the dynamic tracking system by setting the flag
         if not data.tracker_initialized:
            data.tracker_initialized = True
-            from mcpcat.modules.logging import write_to_log
            write_to_log(f"Dynamic tracking initialized for server {id(lowlevel_server)}")
-
+
        # Apply appropriate tracking method based on server type
        if is_fastmcp:
            # For FastMCP servers, use monkey-patching for tool tracking
@@ -70,12 +104,15 @@ def track(server: Any, project_id: str, options: MCPCatOptions | None = None) -> Any:
        else:
            # For low-level servers, use the traditional overrides (no monkey patching needed)
            override_lowlevel_mcp_server(lowlevel_server, data)
-
-        write_to_log(f"MCPCat initialized with dynamic tracking for session {session_id} on project {project_id}")
-
+
+        if project_id:
+            write_to_log(f"MCPCat initialized with dynamic tracking for session {session_id} on project {project_id}")
+        else:
+            write_to_log(f"MCPCat initialized in telemetry-only mode for session {session_id}")
+
     except Exception as e:
         write_to_log(f"Error initializing MCPCat: {e}")
-
+
     return server
 
__all__ = [
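Read together, the new signature and the validation above allow three call patterns. A minimal sketch follows; my_server stands in for any compatible MCP server and exporter for any object implementing the Exporter interface shown at the bottom of this diff, and options.exporters is assumed here to be a list of exporter instances (its exact type is not visible in these hunks).

from mcpcat import MCPCatOptions, track

# 1. Classic mode (unchanged): events are sent to the MCPCat API.
track(my_server, project_id="proj_123")

# 2. Telemetry-only mode (new): project_id omitted, events go only to the exporters.
track(my_server, options=MCPCatOptions(exporters=[exporter]))

# 3. Both: MCPCat ingestion plus parallel export to every configured backend.
track(my_server, project_id="proj_123", options=MCPCatOptions(exporters=[exporter]))

# Passing neither project_id nor exporters raises the ValueError added above.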

src/mcpcat/modules/event_queue.py

Lines changed: 50 additions & 16 deletions
@@ -8,7 +8,10 @@
 import time
 from datetime import datetime, timezone
 from concurrent.futures import ThreadPoolExecutor
-from typing import Any
+from typing import Any, Optional, TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from .telemetry import TelemetryManager
 
 from mcpcat_api import ApiClient, Configuration, EventsApi
 from mcpcat.modules.constants import EVENT_ID_PREFIX, MCPCAT_API_URL
@@ -30,21 +33,21 @@ def __init__(self, api_client=None):
         self.max_retries = 3
         self.max_queue_size = 10000  # Prevent unbounded growth
         self.concurrency = 5  # Max parallel requests
-
+
         # Allow injection of api_client for testing
         if api_client is None:
             config = Configuration(host=MCPCAT_API_URL)
             api_client_instance = ApiClient(configuration=config)
             self.api_client = EventsApi(api_client=api_client_instance)
         else:
             self.api_client = api_client
-
+
         self._shutdown = False
         self._shutdown_event = threading.Event()
-
+
         # Thread pool for processing events
         self.executor = ThreadPoolExecutor(max_workers=self.concurrency)
-
+
         # Start worker thread
         self.worker_thread = threading.Thread(target=self._worker, daemon=True)
         self.worker_thread.start()
@@ -68,7 +71,7 @@ def _worker(self) -> None:
             try:
                 # Wait for an event with timeout
                 event = self.queue.get(timeout=0.1)
-
+
                 # Submit event processing to thread pool
                 # The executor will queue it if all workers are busy
                 try:
@@ -80,7 +83,7 @@ def _worker(self) -> None:
                        self.queue.put_nowait(event)
                    except queue.Full:
                        write_to_log(f"Could not requeue event {event.id or 'unknown'} - queue full")
-
+
            except queue.Empty:
                continue
            except Exception as e:
@@ -105,7 +108,20 @@ def _process_event(self, event: UnredactedEvent) -> None:
 
         if event:
             event.id = event.id or generate_prefixed_ksuid("evt")
-            self._send_event(event)
+
+            # Export to telemetry backends if configured (non-blocking)
+            if _telemetry_manager:
+                try:
+                    _telemetry_manager.export(event)
+                except Exception as e:
+                    write_to_log(f"Telemetry export submission failed: {e}")
+
+            # Send to MCPCat API only if project_id exists
+            if event.project_id:
+                self._send_event(event)
+            elif not _telemetry_manager:
+                # Only warn if we have neither MCPCat nor telemetry configured
+                write_to_log("Warning: Event has no project_id and no telemetry exporters configured")
 
     def _send_event(self, event: Event, retries: int = 0) -> None:
         """Send event to API."""
@@ -164,22 +180,39 @@ def destroy(self) -> None:
         write_to_log(f"Shutdown complete. {remaining} events were not processed.")
 
 
+# Global telemetry manager instance (optional)
+_telemetry_manager: Optional['TelemetryManager'] = None
+
+
+def set_telemetry_manager(manager: Optional['TelemetryManager']) -> None:
+    """
+    Set the global telemetry manager instance.
+
+    Args:
+        manager: TelemetryManager instance or None to disable telemetry
+    """
+    global _telemetry_manager
+    _telemetry_manager = manager
+    if manager:
+        write_to_log(f"Telemetry manager set with {manager.get_exporter_count()} exporter(s)")
+
+
 # Global event queue instance
 event_queue = EventQueue()
 
 
 def _shutdown_handler(signum, frame):
     """Handle shutdown signals."""
-
+
     write_to_log("Received shutdown signal, gracefully shutting down...")
-
+
     # Reset signal handlers to default behavior to avoid recursive calls
     signal.signal(signal.SIGINT, signal.SIG_DFL)
     signal.signal(signal.SIGTERM, signal.SIG_DFL)
-
+
     # Perform graceful shutdown
     event_queue.destroy()
-
+
     # Force exit after graceful shutdown
     os._exit(0)
 
@@ -215,15 +248,16 @@ def publish_event(server: Any, event: UnredactedEvent) -> None:
     session_info = get_session_info(server, data)
 
     # Create full event with all required fields
-    # Merge event data with session info
+    # Merge event data with session info
     event_data = event.model_dump(exclude_none=True)
     session_data = session_info.model_dump(exclude_none=True)
-
+
+    # Merge data, ensuring project_id from data takes precedence
     merged_data = {**event_data, **session_data}
-
+    merged_data['project_id'] = data.project_id  # Override with tracking data's project_id
+
     full_event = UnredactedEvent(
         **merged_data,
-        project_id=data.project_id,
         redaction_fn=data.options.redact_sensitive_information,
     )
 

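For reference, the wiring that track() performs (see the __init__.py hunk above) reduces to the calls below. This is only a sketch reusing names visible in these hunks; my_exporters stands in for a list of concrete Exporter instances.

from mcpcat.modules.event_queue import event_queue, set_telemetry_manager
from mcpcat.modules.telemetry import TelemetryManager

my_exporters = [...]  # placeholder: concrete Exporter instances

# Reuse the event queue's thread pool so exports share its workers, then register
# the manager globally; _process_event() fans each event out to it from then on.
manager = TelemetryManager(my_exporters, event_queue.executor)
set_telemetry_manager(manager)

# Passing None detaches telemetry again.
set_telemetry_manager(None)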
Lines changed: 22 additions & 0 deletions
@@ -0,0 +1,22 @@
+"""Base exporter interface for telemetry exporters."""
+
+from abc import ABC, abstractmethod
+from ...types import Event
+
+
+class Exporter(ABC):
+    """Abstract base class for telemetry exporters."""
+
+    @abstractmethod
+    def export(self, event: Event) -> None:
+        """
+        Export an event to the telemetry backend.
+
+        Args:
+            event: The MCPCat event to export
+
+        Note:
+            This method should handle all errors internally and never
+            raise exceptions that could affect the main MCP server.
+        """
+        pass

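Below is a sketch of the kind of concrete HTTP-based exporter this interface (and the new requests dependency) is presumably meant to support. The class name, endpoint handling, and the mcpcat.types import path are illustrative assumptions, not code from this commit.

import requests

from mcpcat.modules.logging import write_to_log  # logging helper used elsewhere in this commit
from mcpcat.types import Event                   # assumed location of the Event model

from .base import Exporter  # the abstract base class added above


class JSONHTTPExporter(Exporter):
    """Illustrative exporter that POSTs each event as JSON to an arbitrary collector."""

    def __init__(self, endpoint: str, timeout: float = 5.0) -> None:
        self.endpoint = endpoint
        self.timeout = timeout

    def export(self, event: Event) -> None:
        # Per the base-class contract, an export failure must never propagate
        # back into the MCP server, so everything is caught and logged.
        try:
            payload = event.model_dump_json(exclude_none=True)  # Event is a pydantic model
            requests.post(
                self.endpoint,
                data=payload,
                headers={"Content-Type": "application/json"},
                timeout=self.timeout,
            )
        except Exception as e:
            write_to_log(f"JSONHTTPExporter failed to export event: {e}")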