1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162
package main
import (
	"context"
	"fmt"
	"log"
	"os"
	"strings"
	"time"

	"github.com/cloudwego/eino-ext/components/model/openai"
	"github.com/cloudwego/eino/compose"
	"github.com/cloudwego/eino/schema"
	"github.com/joho/godotenv"
)
// GlobalClient is the process-wide OpenAI chat model client, set once by
// initGlobalClient before the chain runs.
// NOTE(review): mutable package-level state — assumed to be written only
// during startup and read afterwards; confirm no concurrent writers.
var GlobalClient *openai.ChatModel
func initGlobalClient() { err := godotenv.Load() if err != nil { panic(err) } key := os.Getenv("OPENAI_API_KEY") model := os.Getenv("OPENAI_MODEL") baseURL := os.Getenv("OPENAI_BASE_URL") GlobalClient, err = openai.NewChatModel(context.Background(), &openai.ChatModelConfig{ ByAzure: false, BaseURL: baseURL, APIKey: key, Model: model, }) if err != nil { panic(err) } }
func main() { ctx := context.Background() initGlobalClient()
parallel := compose.NewParallel()
parallel.AddLambda("current_emotion", compose.InvokableLambda(func(ctx context.Context, input map[string]any) (string, error) { userInput, ok := input["user_input"].(string) if !ok { return "neutral", nil }
switch { case contains(userInput, []string{"开心", "高兴", "愉快", "happy", "good"}): return "positive", nil case contains(userInput, []string{"难过", "沮丧", "生气", "sad", "angry"}): return "negative", nil default: return "neutral", nil } }))
parallel.AddLambda("context_history", compose.InvokableLambda(func(ctx context.Context, input map[string]any) ([]string, error) { history := []string{ "用户: 我失恋了怎么办啊", "助手: 时间会治愈一切, 专注自我成长, 你会遇到更好的。", "用户: 可是我放不下她啊", } return history, nil }))
parallel.AddLambda("current_time", compose.InvokableLambda(func(ctx context.Context, input map[string]any) (string, error) { return time.Now().Format("2006-01-02 15:04:05"), nil }))
parallel.AddLambda("user_input", compose.InvokableLambda(func(ctx context.Context, input map[string]any) (string, error) { if val, exists := input["user_input"]; exists { if val != nil { if str, ok := val.(string); ok { return str, nil } } } return "未知哦", nil }))
chain := compose.NewChain[map[string]any, *schema.Message]()
chain.AppendParallel(parallel)
chain.AppendLambda(compose.InvokableLambda(func(ctx context.Context, input map[string]any) ([]*schema.Message, error) { emotion := input["current_emotion"].(string) history := input["context_history"].([]string) currentTime := input["current_time"].(string) userInput := input["user_input"].(string)
systemPrompt := fmt.Sprintf( "你是一个智能助手, 根据以下信息来回答用户问题:\n\n"+ "1. 用户当前情绪状态: %s\n"+ "2. 最近的对话历史:\n %s\n"+ "3. 当前时间: %s\n\n"+ "请综合考虑以上信息, 给出恰当的回答。", emotion, fmt.Sprintf("%s", history), currentTime, )
messages := []*schema.Message{ schema.SystemMessage(systemPrompt), schema.UserMessage(userInput), }
return messages, nil }))
chain.AppendLambda(compose.InvokableLambda(func(ctx context.Context, messages []*schema.Message) (*schema.Message, error) { resp, err := GlobalClient.Generate(ctx, messages) if err != nil { return nil, fmt.Errorf("调用LLM失败: %w", err) } return resp, nil }))
r, err := chain.Compile(ctx) if err != nil { log.Fatalf("编译chain失败, err=%v", err) return }
input := make(map[string]any) input["user_input"] = string("夜深了, 还是想她")
result, err := r.Invoke(ctx, input) if err != nil { log.Fatalf("执行chain失败, err=%v", err) return } fmt.Println("最终输出:") fmt.Println(result.Content) }
// contains reports whether text contains any of the given keywords as a
// substring. The byte-wise comparison matches the original behavior, which
// works for the UTF-8 keyword lists used above.
func contains(text string, keywords []string) bool {
	for _, keyword := range keywords {
		// strings.Contains(text, "") is always true, so skip empty keywords
		// to preserve the original "empty keyword never matches" contract.
		if keyword != "" && strings.Contains(text, keyword) {
			return true
		}
	}
	return false
}
|