generated from cloudwego/.github
-
Notifications
You must be signed in to change notification settings - Fork 138
Open
Description
我正在根据这个示例写一个客服MultiAgent的功能
https://github.com/cloudwego/eino-examples/tree/main/flow/agent/multiagent/host/journal
目前遇到的问题是
当选择了其中某个 agent 之后,没办法进行流式输出,我应该如何做?
示例代码:
payagent.go
package agents
import (
"context"
"fmt"
"github.com/cloudwego/eino/components/prompt"
"github.com/cloudwego/eino/compose"
"github.com/cloudwego/eino/flow/agent"
"github.com/cloudwego/eino/flow/agent/multiagent/host"
"github.com/cloudwego/eino/schema"
)
// NewPayGraph builds the "pay agent" specialist: a graph that extracts the
// user's question, looks up knowledge (RAG), fills a prompt template, and
// asks the chat model. It exposes both Invokable and Streamable entry points
// so the host multi-agent can stream its output.
//
// Returns the configured specialist, or an error if any graph node/edge
// fails to register or the graph fails to compile.
func NewPayGraph() (*host.Specialist, error) {
	ctx := context.Background()
	chatModel := NewOpenaiLLM(ctx)
	graph := compose.NewGraph[[]*schema.Message, *schema.Message]()
	chatTpl := prompt.FromMessages(schema.FString,
		schema.SystemMessage(`
你负责支付相关问题的回答,当你回答不了就直接返回'不好意思我不知道这个问题'
当前的获取的知识信息:{rag_content}
`),
		schema.UserMessage("{query}"),
	)
	var err error
	// Prompt-template node: consumes {rag_content} and {query}.
	if err = graph.AddChatTemplateNode("template", chatTpl); err != nil {
		return nil, err
	}
	// Chat-model node.
	if err = graph.AddChatModelNode("model", chatModel); err != nil {
		return nil, err
	}
	// Knowledge lookup; its string output is injected as {rag_content}.
	if err = graph.AddLambdaNode("search_rag", compose.InvokableLambda(func(ctx context.Context, input []*schema.Message) (string, error) {
		fmt.Println("search_rag", input)
		return "查询的知识", nil
	}), compose.WithOutputKey("rag_content")); err != nil {
		return nil, err
	}
	// The last message in the conversation is the user's question; exposed as {query}.
	if err = graph.AddLambdaNode("query_extractor", compose.InvokableLambda(func(ctx context.Context, input []*schema.Message) (string, error) {
		return input[len(input)-1].Content, nil
	}), compose.WithOutputKey("query")); err != nil {
		return nil, err
	}
	// Wire the graph. BUG FIX: AddEdge returns an error that was previously
	// discarded on every call — a mis-wired graph would only surface at Compile
	// (or not at all); check each edge explicitly.
	edges := [][2]string{
		{compose.START, "search_rag"},
		{compose.START, "query_extractor"},
		{"search_rag", "template"},      // knowledge -> template
		{"query_extractor", "template"}, // user question -> template
		{"template", "model"},
		{"model", compose.END},
	}
	for _, e := range edges {
		if err = graph.AddEdge(e[0], e[1]); err != nil {
			return nil, err
		}
	}
	r, err := graph.Compile(ctx,
		compose.WithGraphCompileCallbacks(&cb{}),
		compose.WithGraphName("top_level"))
	// BUG FIX: the compile error was previously ignored; on failure `r` would
	// be nil and the returned specialist would panic on first use.
	if err != nil {
		return nil, err
	}
	return &host.Specialist{
		AgentMeta: host.AgentMeta{
			Name:        "pay agent",
			IntendedUse: "当用户询问支付相关问题时使用",
		},
		Invokable: func(ctx context.Context, input []*schema.Message, opts ...agent.AgentOption) (*schema.Message, error) {
			return r.Invoke(ctx, input, agent.GetComposeOptions(opts...)...)
		},
		Streamable: func(ctx context.Context, input []*schema.Message, opts ...agent.AgentOption) (*schema.StreamReader[*schema.Message], error) {
			return r.Stream(ctx, input, agent.GetComposeOptions(opts...)...)
		},
	}, nil
}
// cb is a graph-compile callback used to dump the compiled graph's
// structure for debugging.
type cb struct{}

// OnFinish prints the compiled graph's name and its node set.
func (c *cb) OnFinish(ctx context.Context, info *compose.GraphInfo) {
	for _, v := range []any{info.Name, info.Nodes} {
		fmt.Println(v)
	}
}
helloagent.go
package agents
import (
"context"
"fmt"
"github.com/cloudwego/eino/compose"
"github.com/cloudwego/eino/flow/agent"
"github.com/cloudwego/eino/flow/agent/multiagent/host"
"github.com/cloudwego/eino/schema"
)
// NewHelloGraph builds the "hello agent" specialist: a chain that prepends a
// persona system message, calls the chat model, and wraps the model's reply
// in a fixed greeting. Used by the host when the user's intent is unclear.
func NewHelloGraph() (*host.Specialist, error) {
	ctx := context.Background()
	model := NewOpenaiLLM(ctx)

	// Prepend the persona system message to whatever the user sent.
	addSystem := compose.InvokableLambda(func(ctx context.Context, msgs []*schema.Message) ([]*schema.Message, error) {
		sys := &schema.Message{
			Role:    schema.System,
			Content: "你负责与用户友好的交流",
		}
		return append([]*schema.Message{sys}, msgs...), nil
	})

	// Wrap the model's reply with a greeting prefix.
	// NOTE(review): an InvokableLambda here forces the upstream stream to be
	// fully concatenated before this step runs — confirm against eino docs if
	// token-level streaming through this chain is desired.
	greet := compose.InvokableLambda(func(ctx context.Context, reply *schema.Message) (*schema.Message, error) {
		fmt.Println(`HelloGraph`, reply.Content)
		return &schema.Message{
			Role:    schema.Assistant,
			Content: "你好啊: " + reply.Content,
		}, nil
	})

	pipeline := compose.NewChain[[]*schema.Message, *schema.Message]()
	pipeline.AppendLambda(addSystem).AppendChatModel(model).AppendLambda(greet)

	runner, err := pipeline.Compile(ctx)
	if err != nil {
		return nil, err
	}
	return &host.Specialist{
		AgentMeta: host.AgentMeta{
			Name:        "hello agent",
			IntendedUse: "负责与用户友好的交流,当用户的意图不清晰的时候使用",
		},
		Invokable: func(ctx context.Context, input []*schema.Message, opts ...agent.AgentOption) (*schema.Message, error) {
			return runner.Invoke(ctx, input, agent.GetComposeOptions(opts...)...)
		},
		Streamable: func(ctx context.Context, input []*schema.Message, opts ...agent.AgentOption) (*schema.StreamReader[*schema.Message], error) {
			return runner.Stream(ctx, input, agent.GetComposeOptions(opts...)...)
		},
	}, nil
}
main.go
package main
import (
"bufio"
"context"
"eino_study_new/agents"
"fmt"
"github.com/cloudwego/eino/flow/agent/multiagent/host"
"github.com/cloudwego/eino/schema"
"io"
"os"
)
// main runs an interactive multi-turn chat REPL: it builds the two
// specialists and the routing host, then loops reading a line from stdin,
// streaming the multi-agent's answer chunk by chunk until the user types
// "exit" or stdin is closed.
func main() {
	ctx := context.Background()
	payAgent := initAgent(agents.NewPayGraph())
	hello := initAgent(agents.NewHelloGraph())
	// BUG FIX: the error from newHost was previously discarded.
	h, err := newHost(ctx)
	if err != nil {
		panic(err)
	}
	hostMA, err := host.NewMultiAgent(ctx, &host.MultiAgentConfig{
		Host: *h,
		Specialists: []*host.Specialist{
			hello,
			payAgent,
		},
	})
	if err != nil {
		panic(err)
	}
	cb := &logCallback{}
	// One scanner for the whole session (rebuilding it per turn risks losing
	// buffered input).
	scanner := bufio.NewScanner(os.Stdin)
	for { // multi-turn loop; "exit" or EOF quits
		println("\n\nYou: ")
		if !scanner.Scan() {
			if err := scanner.Err(); err != nil {
				panic(err)
			}
			return // stdin closed
		}
		message := scanner.Text()
		if message == "exit" {
			return
		}
		msg := &schema.Message{
			Role:    schema.User,
			Content: message,
		}
		out, err := hostMA.Stream(ctx, []*schema.Message{msg}, host.WithAgentCallbacks(cb))
		if err != nil {
			panic(err)
		}
		fmt.Print("Answer: ")
		for {
			chunk, err := out.Recv()
			if err != nil {
				if err == io.EOF {
					break
				}
				// BUG FIX: a non-EOF error previously fell through and
				// dereferenced a nil message, crashing with a nil-pointer
				// panic instead of reporting the real error.
				out.Close()
				panic(err)
			}
			// Print each delta as it arrives — this is what makes the output
			// visibly stream instead of appearing all at once.
			fmt.Print(chunk.Content)
		}
		fmt.Println()
		out.Close()
	}
}
// newHost builds the routing host: a tool-calling chat model whose system
// prompt tells it to analyze the user's intent and dispatch to the
// appropriate specialist (exposed to the model as tools).
func newHost(ctx context.Context) (*host.Host, error) {
	chatModel := agents.NewOpenaiLLM(ctx)
	return &host.Host{
		ToolCallingModel: chatModel,
		// BUG FIX: the prompt read "分析与的意图" (garbled); the intended
		// wording is "analyze the user's intent" — "分析用户的意图".
		SystemPrompt: "分析用户的意图,进行合适的工具使用",
	}, nil
}
// initAgent unwraps a (specialist, error) constructor result, aborting the
// program when construction failed. It lets the constructors be used inline:
// initAgent(agents.NewPayGraph()).
func initAgent(agent *host.Specialist, err error) *host.Specialist {
	if err == nil {
		return agent
	}
	panic(err)
}
// logCallback logs the host's hand-off decisions for debugging.
type logCallback struct{}

// OnHandOff reports which specialist the host routed to, and with what
// argument, then returns the context unchanged.
func (l *logCallback) OnHandOff(ctx context.Context, info *host.HandOffInfo) context.Context {
	target, arg := info.ToAgentName, info.Argument
	println("\nHandOff to", target, "with argument", arg)
	return ctx
}
Metadata
Metadata
Assignees
Labels
No labels