
Commit f3e74fa

#667 - fixes empty content handling (#668)
## Summary

Improve OpenTelemetry plugin stability and observability configuration.

Closes #667

## Changes

- Fixed a potential nil pointer dereference in the chat request parameter conversion by adding a nil check for message content
- Enhanced Tempo configuration to use a multiplexed HTTP/gRPC port (3200) for better compatibility
- Improved Grafana configuration with additional trace visualization options:
  - Added tracesToLogs, serviceMap, and search configurations
  - Enabled nodeGraph visualization
- Reduced Grafana log verbosity by setting log level to "warn"
- Disabled feature toggles in Grafana for more stable behavior

## Type of change

- [x] Bug fix
- [x] Feature
- [ ] Refactor
- [ ] Documentation
- [ ] Chore/CI

## Affected areas

- [ ] Core (Go)
- [ ] Transports (HTTP)
- [ ] Providers/Integrations
- [x] Plugins
- [ ] UI (Next.js)
- [ ] Docs

## How to test

1. Start the OpenTelemetry stack with docker-compose:
   ```sh
   cd plugins/otel
   docker-compose up -d
   ```
2. Access Grafana at http://localhost:4000 and verify:
   - Reduced log output in the Grafana container
   - Improved trace visualization options in the Tempo data source
   - Ability to navigate between traces, logs, and metrics
3. Test the nil pointer fix by sending a chat request with a null content field (a stand-in sketch of this path follows below)

## Breaking changes

- [ ] Yes
- [x] No

## Related issues

Fixes potential nil pointer panic in OpenTelemetry trace collection

## Security considerations

No security implications.

## Checklist

- [x] I read `docs/contributing/README.md` and followed the guidelines
- [x] I added/updated tests where appropriate
- [x] I updated documentation where needed
- [x] I verified builds succeed (Go and UI)
- [x] I verified the CI pipeline passes locally if applicable
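The nil-content scenario in test step 3 can be reproduced without the full stack. Below is a minimal, self-contained Go sketch of the guarded per-message loop; `chatMessage`, `chatContent`, `toSpanAttrs`, and the attribute key format are simplified stand-ins for illustration, not the actual `schemas` package types used in `plugins/otel/converter.go`.

```go
package main

import "fmt"

// Simplified stand-ins for the plugin's schema types (assumption for
// illustration only; the real types live in the schemas package).
type chatContent struct {
	ContentStr *string
}

type chatMessage struct {
	Role    string
	Content *chatContent // nil when a client sends "content": null
}

// toSpanAttrs mimics the converter's per-message loop. Without the nil check,
// dereferencing message.Content for a null-content message panics.
func toSpanAttrs(messages []chatMessage) map[string]string {
	attrs := map[string]string{}
	for i, message := range messages {
		if message.Content == nil {
			continue // the fix: skip messages that carry no content
		}
		if message.Content.ContentStr != nil {
			key := fmt.Sprintf("gen_ai.prompt.%d.%s", i, message.Role)
			attrs[key] = *message.Content.ContentStr
		}
	}
	return attrs
}

func main() {
	hello := "hello"
	msgs := []chatMessage{
		{Role: "user", Content: &chatContent{ContentStr: &hello}},
		{Role: "assistant", Content: nil}, // previously caused the panic
	}
	fmt.Println(toSpanAttrs(msgs)) // map[gen_ai.prompt.0.user:hello]
}
```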
2 parents 733a99f + f641d74 commit f3e74fa

File tree

2 files changed: +55 −33 lines changed


plugins/otel/converter.go

Lines changed: 41 additions & 30 deletions
```diff
@@ -75,23 +75,27 @@ func hexToBytes(hexStr string, length int) []byte {
 func getSpeechRequestParams(req *schemas.BifrostSpeechRequest) []*KeyValue {
 	params := []*KeyValue{}
 	if req.Params != nil {
-		if req.Params.VoiceConfig.Voice != nil {
-			params = append(params, kvStr("gen_ai.request.voice", *req.Params.VoiceConfig.Voice))
-		}
-		if len(req.Params.VoiceConfig.MultiVoiceConfig) > 0 {
-			multiVoiceConfigParams := []*KeyValue{}
-			for _, voiceConfig := range req.Params.VoiceConfig.MultiVoiceConfig {
-				multiVoiceConfigParams = append(multiVoiceConfigParams, kvStr("gen_ai.request.voice", voiceConfig.Voice))
+		if req.Params.VoiceConfig != nil {
+			if req.Params.VoiceConfig.Voice != nil {
+				params = append(params, kvStr("gen_ai.request.voice", *req.Params.VoiceConfig.Voice))
+			}
+			if len(req.Params.VoiceConfig.MultiVoiceConfig) > 0 {
+				multiVoiceConfigParams := []*KeyValue{}
+				for _, voiceConfig := range req.Params.VoiceConfig.MultiVoiceConfig {
+					multiVoiceConfigParams = append(multiVoiceConfigParams, kvStr("gen_ai.request.voice", voiceConfig.Voice))
+				}
+				params = append(params, kvAny("gen_ai.request.multi_voice_config", arrValue(listValue(multiVoiceConfigParams...))))
 			}
-			params = append(params, kvAny("gen_ai.request.multi_voice_config", arrValue(listValue(multiVoiceConfigParams...))))
 		}
 		params = append(params, kvStr("gen_ai.request.instructions", req.Params.Instructions))
 		params = append(params, kvStr("gen_ai.request.response_format", req.Params.ResponseFormat))
 		if req.Params.Speed != nil {
 			params = append(params, kvDbl("gen_ai.request.speed", *req.Params.Speed))
 		}
 	}
-	params = append(params, kvStr("gen_ai.input.speech", req.Input.Input))
+	if req.Input != nil {
+		params = append(params, kvStr("gen_ai.input.speech", req.Input.Input))
+	}
 	return params
 }
 
@@ -226,6 +230,9 @@ func getChatRequestParams(req *schemas.BifrostChatRequest) []*KeyValue {
 	if req.Input != nil {
 		messages := []*AnyValue{}
 		for _, message := range req.Input {
+			if message.Content == nil {
+				continue
+			}
 			switch message.Role {
 			case schemas.ChatMessageRoleUser:
 				kvs := []*KeyValue{kvStr("role", "user")}
@@ -504,18 +511,20 @@ func completeResourceSpan(span *ResourceSpan, timestamp time.Time, resp *schemas
 				continue
 			}
 			kvs := []*KeyValue{kvStr("role", string(*message.Role))}
-			if message.Content.ContentStr != nil && *message.Content.ContentStr != "" {
-				kvs = append(kvs, kvStr("content", *message.Content.ContentStr))
-			} else if message.Content.ContentBlocks != nil {
-				blockText := ""
-				for _, block := range message.Content.ContentBlocks {
-					if block.Text != nil {
-						blockText += *block.Text
+			if message.Content != nil {
+				if message.Content.ContentStr != nil && *message.Content.ContentStr != "" {
+					kvs = append(kvs, kvStr("content", *message.Content.ContentStr))
+				} else if message.Content.ContentBlocks != nil {
+					blockText := ""
+					for _, block := range message.Content.ContentBlocks {
+						if block.Text != nil {
+							blockText += *block.Text
+						}
 					}
+					kvs = append(kvs, kvStr("content", blockText))
 				}
-				kvs = append(kvs, kvStr("content", blockText))
 			}
-			if message.ResponsesReasoning != nil {
+			if message.ResponsesReasoning != nil && message.ResponsesReasoning.Summary != nil {
 				reasoningText := ""
 				for _, block := range message.ResponsesReasoning.Summary {
 					if block.Text != "" {
@@ -620,18 +629,20 @@ func completeResourceSpan(span *ResourceSpan, timestamp time.Time, resp *schemas
 			kvs := []*KeyValue{kvStr("text", resp.TranscriptionResponse.Text)}
 			outputMessages = append(outputMessages, listValue(kvs...))
 			params = append(params, kvAny("gen_ai.transcribe.output_messages", arrValue(outputMessages...)))
-			if resp.TranscriptionResponse.Usage.InputTokens != nil {
-				params = append(params, kvInt("gen_ai.usage.input_tokens", int64(*resp.TranscriptionResponse.Usage.InputTokens)))
-			}
-			if resp.TranscriptionResponse.Usage.OutputTokens != nil {
-				params = append(params, kvInt("gen_ai.usage.completion_tokens", int64(*resp.TranscriptionResponse.Usage.OutputTokens)))
-			}
-			if resp.TranscriptionResponse.Usage.TotalTokens != nil {
-				params = append(params, kvInt("gen_ai.usage.total_tokens", int64(*resp.TranscriptionResponse.Usage.TotalTokens)))
-			}
-			if resp.TranscriptionResponse.Usage.InputTokenDetails != nil {
-				params = append(params, kvInt("gen_ai.usage.input_token_details.text_tokens", int64(resp.TranscriptionResponse.Usage.InputTokenDetails.TextTokens)))
-				params = append(params, kvInt("gen_ai.usage.input_token_details.audio_tokens", int64(resp.TranscriptionResponse.Usage.InputTokenDetails.AudioTokens)))
+			if resp.TranscriptionResponse.Usage != nil {
+				if resp.TranscriptionResponse.Usage.InputTokens != nil {
+					params = append(params, kvInt("gen_ai.usage.input_tokens", int64(*resp.TranscriptionResponse.Usage.InputTokens)))
+				}
+				if resp.TranscriptionResponse.Usage.OutputTokens != nil {
+					params = append(params, kvInt("gen_ai.usage.completion_tokens", int64(*resp.TranscriptionResponse.Usage.OutputTokens)))
+				}
+				if resp.TranscriptionResponse.Usage.TotalTokens != nil {
+					params = append(params, kvInt("gen_ai.usage.total_tokens", int64(*resp.TranscriptionResponse.Usage.TotalTokens)))
+				}
+				if resp.TranscriptionResponse.Usage.InputTokenDetails != nil {
+					params = append(params, kvInt("gen_ai.usage.input_token_details.text_tokens", int64(resp.TranscriptionResponse.Usage.InputTokenDetails.TextTokens)))
+					params = append(params, kvInt("gen_ai.usage.input_token_details.audio_tokens", int64(resp.TranscriptionResponse.Usage.InputTokenDetails.AudioTokens)))
+				}
 			}
 		}
 	}
```

plugins/otel/docker-compose.yml

Lines changed: 14 additions & 3 deletions
```diff
@@ -26,7 +26,7 @@ services:
       - source: tempo-config
         target: /etc/tempo.yaml
     ports:
-      - "3200:3200" # tempo HTTP API
+      - "3200:3200" # tempo HTTP/gRPC API (multiplexed)
     expose:
       - "4317" # OTLP gRPC (internal)
     volumes:
@@ -67,6 +67,8 @@ services:
       GF_PLUGINS_ALLOW_LOADING_UNSIGNED_PLUGINS: "grafana-pyroscope-app,grafana-exploretraces-app,grafana-metricsdrilldown-app"
       GF_PLUGINS_ENABLE_ALPHA: "true"
       GF_INSTALL_PLUGINS: ""
+      GF_LOG_LEVEL: "warn"
+      GF_FEATURE_TOGGLES_ENABLE: ""
     ports:
       - "4000:3000"
     volumes:
@@ -138,6 +140,7 @@ configs:
     content: |
       server:
         http_listen_port: 3200
+        grpc_listen_port: 3200
         log_level: info
 
       distributor:
@@ -207,10 +210,18 @@ configs:
           url: http://tempo:3200
           editable: true
           jsonData:
-            tracesToMetrics:
-              datasourceUid: prometheus
             nodeGraph:
               enabled: true
+            tracesToLogs:
+              datasourceUid: prometheus
+            tracesToMetrics:
+              datasourceUid: prometheus
+            serviceMap:
+              datasourceUid: prometheus
+            search:
+              hide: false
+            lokiSearch:
+              datasourceUid: prometheus
 
 volumes:
   prometheus-data:
```
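After `docker-compose up -d`, the port changes above can be smoke-tested from the host. The sketch below assumes Grafana's default `/api/health` endpoint and Tempo's default `/ready` endpoint, reached through the port mappings in this compose file; adjust the paths if your versions differ.

```go
package main

import (
	"fmt"
	"net/http"
	"time"
)

func main() {
	client := &http.Client{Timeout: 5 * time.Second}
	// Host-side URLs follow the port mappings in this compose file.
	checks := map[string]string{
		"grafana": "http://localhost:4000/api/health", // host 4000 -> container 3000
		"tempo":   "http://localhost:3200/ready",      // multiplexed HTTP/gRPC port
	}
	for name, url := range checks {
		resp, err := client.Get(url)
		if err != nil {
			fmt.Printf("%s: unreachable: %v\n", name, err)
			continue
		}
		resp.Body.Close()
		fmt.Printf("%s: %s\n", name, resp.Status)
	}
}
```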
