refactor: update headmasterHandler to handle user messages more flexibly and improve OpenAI client configuration with dynamic API key and model name from config
parent 9c589714ce
commit 03fe508d7b
@@ -6,6 +6,7 @@ import (
 	"strings"
 	"time"
 
+	"git.lxtend.com/qqbot/config"
 	"git.lxtend.com/qqbot/constants"
 	"git.lxtend.com/qqbot/handler"
 	"git.lxtend.com/qqbot/model"
@@ -24,35 +25,33 @@ func init() {
 }
 
 func headmasterHandler(msg model.Message) (reply model.Reply) {
-	from := util.From(msg.GroupInfo.GroupId, 0)
+	var from string
+	if msg.GroupInfo.IsGroupMsg {
+		from = util.From(msg.GroupInfo.GroupId, 0)
+	} else {
+		from = util.From(0, msg.UserId)
+	}
 	if len(msg.RawMsg) > 7 && msg.RawMsg[0:7] == "校长 " {
 		return model.Reply{
-			ReplyMsg:       ask(from, msg.RawMsg[7:]),
+			ReplyMsg:       ask(from, fmt.Sprintf("来自%d(%s)的消息:%s", msg.UserId, msg.UserNickName, msg.RawMsg[7:])),
 			ReferOriginMsg: true,
 			FromMsg:        msg,
 		}
 	}
-	// nickname := msg.UserNickName
 	return model.Reply{
-		ReplyMsg:       ask(from, msg.RawMsg),
+		ReplyMsg:       ask(from, fmt.Sprintf("来自%d(%s)的消息:%s", msg.UserId, msg.UserNickName, msg.RawMsg)),
 		ReferOriginMsg: true,
 		FromMsg:        msg,
 	}
 }
 
 func ask(from string, question string) (reply string) {
-	client := openai.NewClientWithConfig(openai.DefaultAzureConfig("none", "http://127.0.0.1:8000/v1"))
+	client := openai.NewClientWithConfig(openai.DefaultAzureConfig(config.ConfigManager.GetProperty("openai_api_key"), config.ConfigManager.GetProperty("openai_api_base_url")))
 	resp, err := client.CreateChatCompletion(
 		context.Background(),
 		openai.ChatCompletionRequest{
-			Model:    "minimind",
+			Model:    config.ConfigManager.GetProperty("openai_model_name"),
 			Messages: GenRequestFromUsr(from, question),
-			// Messages: []openai.ChatCompletionMessage{
-			// 	{
-			// 		Role:    openai.ChatMessageRoleUser,
-			// 		Content: question,
-			// 	},
-			// },
 		},
 	)
 
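For reference, below is a minimal standalone sketch of the reconfigured client path. It assumes the openai package used above is github.com/sashabaranov/go-openai (which these identifiers match) and that config.ConfigManager.GetProperty returns plain strings for openai_api_key, openai_api_base_url and openai_model_name; askOnce and the values passed to it in main are hypothetical, not repository code.

// Sketch only: the client is built from runtime settings instead of the
// previously hard-coded "none" key and http://127.0.0.1:8000/v1 endpoint.
package main

import (
	"context"
	"fmt"

	openai "github.com/sashabaranov/go-openai" // assumed library
)

// askOnce is a hypothetical helper mirroring ask() above, without history.
func askOnce(apiKey, baseURL, modelName, question string) (string, error) {
	client := openai.NewClientWithConfig(openai.DefaultAzureConfig(apiKey, baseURL))
	resp, err := client.CreateChatCompletion(
		context.Background(),
		openai.ChatCompletionRequest{
			Model: modelName,
			Messages: []openai.ChatCompletionMessage{
				{Role: openai.ChatMessageRoleUser, Content: question},
			},
		},
	)
	if err != nil {
		return "", fmt.Errorf("chat completion failed: %w", err)
	}
	if len(resp.Choices) == 0 {
		return "", fmt.Errorf("empty completion response")
	}
	return resp.Choices[0].Message.Content, nil
}

func main() {
	// Placeholder settings: the key is invented, the URL and model name are
	// the values previously hard-coded in ask().
	reply, err := askOnce("example-key", "http://127.0.0.1:8000/v1", "minimind", "hello")
	if err != nil {
		fmt.Println("error:", err)
		return
	}
	fmt.Println(reply)
}

Keeping DefaultAzureConfig means the request format is unchanged from the previous hard-coded local endpoint; only the key, base URL and model name now come from configuration.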
@@ -71,6 +70,11 @@ func enterFormatter(msgIn string) string {
 func GenRequestFromUsr(from string, question string) []openai.ChatCompletionMessage {
 	if _, ok := histories[from]; !ok || histories_time[from].Add(2*time.Minute).Before(time.Now()) {
 		histories[from] = make([]openai.ChatCompletionMessage, 0)
+		histories[from] = append(histories[from], openai.ChatCompletionMessage{
+			Role:    openai.ChatMessageRoleSystem,
+			Content: config.ConfigManager.GetProperty("openai_prompt"),
+		},
+		)
 	}
 	histories[from] = append(histories[from], openai.ChatCompletionMessage{
 		Role: openai.ChatMessageRoleUser,
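A short sketch of the history seeding added in this hunk, assuming the same go-openai import as in the sketch above plus the standard time package. The declarations of histories and histories_time are not shown in this diff, so the map types below are assumptions; seedIfStale is a hypothetical helper.

// Sketch only: assumed shapes for the package-level history state.
var (
	histories      = map[string][]openai.ChatCompletionMessage{}
	histories_time = map[string]time.Time{}
)

// seedIfStale mirrors the added lines: a missing history, or one idle for
// more than two minutes, is recreated and seeded with the configured
// system prompt before the caller appends the user's message.
func seedIfStale(from, systemPrompt string) {
	if _, ok := histories[from]; !ok || histories_time[from].Add(2*time.Minute).Before(time.Now()) {
		histories[from] = []openai.ChatCompletionMessage{
			{Role: openai.ChatMessageRoleSystem, Content: systemPrompt},
		}
	}
}

Because the reset also fires when histories_time[from] is older than two minutes, an idle conversation restarts from just the system prompt rather than carrying stale context.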
@@ -87,5 +91,9 @@ func AppendReplyToHistory(from string, reply string) {
 	histories_time[from] = time.Now()
 	for len(histories[from]) > 10 {
 		histories[from] = histories[from][1:]
+		histories[from][0] = openai.ChatCompletionMessage{
+			Role:    openai.ChatMessageRoleSystem,
+			Content: config.ConfigManager.GetProperty("openai_prompt"),
+		}
 	}
 }
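Finally, a sketch of the trimming behaviour added to AppendReplyToHistory, again assuming the go-openai import from the first sketch; trimHistory is a hypothetical helper used only to illustrate the loop.

// Sketch only: once the history exceeds 10 entries, the oldest entry is
// dropped and slot 0 is rewritten as the system prompt, so the configured
// prompt is never truncated away along with old messages.
func trimHistory(msgs []openai.ChatCompletionMessage, systemPrompt string) []openai.ChatCompletionMessage {
	for len(msgs) > 10 {
		msgs = msgs[1:]
		msgs[0] = openai.ChatCompletionMessage{
			Role:    openai.ChatMessageRoleSystem,
			Content: systemPrompt,
		}
	}
	return msgs
}

This trades away the oldest user or assistant turn on each pass, but guarantees the configured prompt stays at the head of the conversation.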
|