package headmaster

import (
	"context"
	"fmt"
	"log"
	"net"
	"net/http"
	"net/url"
	"strings"
	"time"

	"git.lxtend.com/qqbot/config"
	"git.lxtend.com/qqbot/constants"
	"git.lxtend.com/qqbot/handler"
	"git.lxtend.com/qqbot/model"
	"git.lxtend.com/qqbot/util"
	"github.com/sashabaranov/go-openai"
	"golang.org/x/net/proxy"
)

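// histories keeps the running conversation for each chat (group or private
// user), keyed by util.From. histories_time records when a conversation was
// last updated so stale histories can be reset (see GenRequestFromUsr).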
var (
	histories      = make(map[string][]openai.ChatCompletionMessage)
	histories_time = make(map[string]time.Time)
)

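// init registers the "校长" group command for ordinary users and also installs
// the same handler for private messages.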
func init() {
	handler.RegisterHandler("校长", headmasterHandler, constants.LEVEL_USER)
	handler.RegisterPrivateHandler(headmasterHandler)
}

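// headmasterHandler packs the incoming message and its metadata into a JSON
// envelope, forwards it to the language model via ask, and returns the
// model's answer as the reply.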
func headmasterHandler(msg model.Message) (reply *model.Reply) {
	var from string
	if msg.GroupInfo.IsGroupMsg {
		from = util.From(msg.GroupInfo.GroupId, 0)
	} else {
		from = util.From(0, msg.UserId)
	}
	// Strip the leading "校长 " command prefix when present ("校长" is two
	// three-byte runes plus a space, i.e. 7 bytes); other messages are
	// forwarded unchanged.
	content := msg.RawMsg
	const prefix = "校长 "
	if len(msg.RawMsg) > len(prefix) && strings.HasPrefix(msg.RawMsg, prefix) {
		content = msg.RawMsg[len(prefix):]
	}
	// Wrap the message and its metadata into a JSON envelope for the model.
	// Note that the field values are not JSON-escaped.
	question := fmt.Sprintf(
		"{\"qqid\":%d,\"group_nickname\":\"%s\",\"nickname\":\"%s\",\"referid\":%d,\"received_time\":\"%s\",\"msg\":\"%s\"}",
		msg.UserId, msg.GroupInfo.UserCard, msg.UserNickName, msg.OriginMsgId,
		time.Now().Format("2006-01-02 15:04:05"), content,
	)
	return &model.Reply{
		ReplyMsg:       ask(from, question),
		ReferOriginMsg: false,
		FromMsg:        msg,
	}
}

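// ask sends question to the configured OpenAI-compatible endpoint together
// with the conversation history for from, records the answer in that history,
// and returns the formatted reply. On error it returns an empty string.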
func ask(from string, question string) (reply string) {
	// Start from an Azure-style config for the key and base URL, then switch
	// it to the plain OpenAI API type.
	llmConfig := openai.DefaultAzureConfig(config.ConfigManager.GetConfig().OpenaiApiKey, config.ConfigManager.GetConfig().OpenaiApiBaseUrl)
	llmConfig.APIType = openai.APITypeOpenAI
	llmConfig.APIVersion = ""
	// Route requests through the configured proxy, if any.
	if config.ConfigManager.GetConfig().Management.ProxyAddr != "" {
		proxyURL, err := url.Parse(config.ConfigManager.GetConfig().Management.ProxyAddr)
		if err != nil {
			log.Printf("解析代理地址失败: %v\n", err) // failed to parse proxy address
		} else {
			if proxyURL.Scheme == "socks5" {
				log.Printf("使用SOCKS5代理: %s\n", proxyURL.Host) // using SOCKS5 proxy
				dialer, err := proxy.SOCKS5("tcp", proxyURL.Host, nil, proxy.Direct)
				if err == nil {
					llmConfig.HTTPClient = &http.Client{
						Timeout: 10 * time.Second,
						Transport: &http.Transport{
							Dial: func(network, addr string) (net.Conn, error) {
								return dialer.Dial(network, addr)
							},
						},
					}
				} else {
					log.Printf("创建SOCKS5代理失败: %v\n", err) // failed to create SOCKS5 dialer
				}
			} else {
				log.Printf("使用HTTP/HTTPS代理: %s\n", proxyURL.Host) // using HTTP/HTTPS proxy
				llmConfig.HTTPClient = &http.Client{
					Timeout: 10 * time.Second,
					Transport: &http.Transport{
						Proxy: http.ProxyURL(proxyURL),
					},
				}
			}
		}
	}
	client := openai.NewClientWithConfig(llmConfig)
	resp, err := client.CreateChatCompletion(
		context.Background(),
		openai.ChatCompletionRequest{
			Model:    config.ConfigManager.GetConfig().OpenaiModelName,
			Messages: GenRequestFromUsr(from, question),
			// Tools: []openai.Tool{
			// 	{
			// 		Type: openai.ToolTypeFunction,
			// 		Function: &openai.FunctionDefinition{
			// 			Name:        "try_get_qqid_by_nickname",
			// 			Description: "尝试在群内获取昵称对应的qq号",
			// 			Parameters:  json.RawMessage(`{"type": "object", "properties": {"nickname": {"type": "string", "description": "用户的昵称"}}}`),
			// 		},
			// 	},
			// },
			// ToolChoice: &openai.ToolChoice{
			// 	Type: openai.ToolTypeFunction,
			// 	Function: openai.ToolFunction{
			// 		Name: "try_get_qqid_by_nickname",
			// 	},
			// },
		},
	)

	if err != nil {
		log.Printf("ChatCompletion error: %v\n", err)
		return
	}
	AppendReplyToHistory(from, resp.Choices[0].Message.Content)
	return enterFormatter(resp.Choices[0].Message.Content)
}

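// enterFormatter converts literal "\n" sequences in the model output into
// real newlines before the reply is sent.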
func enterFormatter(msgIn string) string {
	return strings.ReplaceAll(msgIn, "\\n", "\n")
}

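// GenRequestFromUsr appends question to the history for from and returns the
// full message list to send. A history that does not exist yet, or that has
// been idle for more than 10 minutes, is reset to just the system prompt.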
func GenRequestFromUsr(from string, question string) []openai.ChatCompletionMessage {
	// Start a fresh conversation if none exists or the last one is stale.
	if _, ok := histories[from]; !ok || histories_time[from].Add(10*time.Minute).Before(time.Now()) {
		histories[from] = make([]openai.ChatCompletionMessage, 0)
		histories[from] = append(histories[from], openai.ChatCompletionMessage{
			Role:    openai.ChatMessageRoleSystem,
			Content: config.ConfigManager.GetConfig().OpenaiPrompt,
		})
	}
	histories[from] = append(histories[from], openai.ChatCompletionMessage{
		Role:    openai.ChatMessageRoleUser,
		Content: question,
	})
	return histories[from]
}

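// AppendReplyToHistory stores the assistant's reply in the history for from,
// refreshes its timestamp, and trims the history to at most 20 messages while
// keeping the system prompt in the first slot.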
func AppendReplyToHistory(from string, reply string) {
	histories[from] = append(histories[from], openai.ChatCompletionMessage{
		Role:    openai.ChatMessageRoleAssistant,
		Content: reply,
	})
	histories_time[from] = time.Now()
	// Cap the history at 20 messages: drop the oldest entry and overwrite the
	// new head with the system prompt so the prompt is never rotated out.
	for len(histories[from]) > 20 {
		histories[from] = histories[from][1:]
		histories[from][0] = openai.ChatCompletionMessage{
			Role:    openai.ChatMessageRoleSystem,
			Content: config.ConfigManager.GetConfig().OpenaiPrompt,
		}
	}
}