go-bot/workers/ai.go

164 lines
3.9 KiB
Go

package workers
import (
"go-bot/config"
"log"
"strings"
"github.com/goccy/go-json"
"github.com/parnurzeal/gorequest"
)
// AI handles the "!ai" chat command: it proxies user questions to an
// OpenAI-compatible chat-completions API and can list/switch the active
// model. It embeds *StdAns for the shared command context (Parms, UID,
// AllowUser are read by GetMsg below).
type AI struct {
*StdAns
}
// GetMsg dispatches the "!ai" command.
//
//	!ai <question>           — forwards the question to the chat-completions
//	                           endpoint and returns the model's reply.
//	!ai models               — lists models available at the endpoint,
//	                           marking the active one.
//	!ai models set <name>    — switches the active model (authorized users only).
//
// Configuration is read from the package-level cfg map:
// OPENAI_API_KEY (required), OPENAI_BaseURL and MODEL (optional, with
// defaults). All failures are reported as a user-facing string; this
// method never panics on malformed API responses.
func (a *AI) GetMsg() string {
	if len(a.Parms) < 2 {
		return "使用!ai xxx 向我提问吧"
	}
	ask := a.Parms[1]
	if ask == "" {
		return "不问问题你说个屁!"
	}

	// comma-ok assertions: the original `cfg[k] != nil` + bare .(string)
	// would panic if a config value were present but not a string.
	apiKey, ok := cfg["OPENAI_API_KEY"].(string)
	if !ok {
		log.Println("OPENAI_API_KEY 未配置")
		return "OPENAI_API_KEY 未配置"
	}
	baseURL, ok := cfg["OPENAI_BaseURL"].(string)
	if !ok {
		log.Println("OPENAI_BaseURL 未配置,使用openai默认配置")
		baseURL = "https://api.openai.com/v1"
	}
	model, ok := cfg["MODEL"].(string)
	if !ok {
		log.Println("模型 未配置,使用默认chatglm_pro模型")
		model = "chatglm_pro"
	}

	if strings.ToLower(a.Parms[1]) == "models" {
		return a.listModels(baseURL, apiKey, model)
	}
	return a.chat(baseURL, apiKey, model, ask)
}

// listModels fetches GET {baseURL}/models, renders the model list (the
// active model is marked with ✔) and, for "!ai models set <name>", updates
// the MODEL config entry when the caller is in AllowUser and the name is
// one of the listed models.
func (a *AI) listModels(baseURL, apiKey, activeModel string) string {
	resp, body, errs := gorequest.New().
		Get(baseURL + "/models").
		Set("Content-Type", "application/json").
		Set("Authorization", "Bearer "+apiKey).
		End()
	if len(errs) > 0 {
		log.Println(errs)
		return "请求失败"
	}
	if resp.StatusCode != 200 {
		log.Println("请求失败")
		return "请求模型列表失败"
	}

	var responseBody map[string]interface{}
	if err := json.Unmarshal([]byte(body), &responseBody); err != nil {
		log.Println(err)
		return "解析模型列表失败"
	}
	// Guarded assertion: an error payload has no "data" array; the original
	// bare .([]interface{}) would panic here.
	data, ok := responseBody["data"].([]interface{})
	if !ok || len(data) == 0 {
		return "没查到支持模型列表"
	}

	// models keeps the ids as []interface{} so the project-level
	// contains() helper accepts it unchanged.
	var models []interface{}
	var b strings.Builder
	b.WriteString("支持的模型列表:\n")
	for _, item := range data {
		entry, ok := item.(map[string]interface{})
		if !ok {
			continue // skip malformed entries instead of panicking
		}
		id, ok := entry["id"].(string)
		if !ok {
			continue
		}
		b.WriteString(id)
		if id == activeModel {
			b.WriteString("\t ✔")
		}
		b.WriteString("\n")
		models = append(models, id)
	}
	msg := b.String()

	if len(a.Parms) > 3 && strings.ToLower(a.Parms[2]) == "set" {
		// Only accounts listed in AllowUser may switch the model.
		if a.AllowUser != nil && contains(a.AllowUser, a.UID) {
			if contains(models, a.Parms[3]) {
				cfg["MODEL"] = a.Parms[3]
				msg = "已设置模型为 " + a.Parms[3]
				config.ModifyConfig("MODEL", a.Parms[3])
				config.ReloadConfig()
				config.PrintConfig(cfg, "")
			} else {
				msg = "不支持的模型"
			}
		} else {
			msg = "无权限设置模型"
		}
	}
	return msg
}

// chat POSTs the question to {baseURL}/chat/completions with an optional
// system PROMPT from cfg and returns the first choice's message content.
// Returns an empty string when the response carries no usable choice.
func (a *AI) chat(baseURL, apiKey, model, ask string) string {
	prompt, ok := cfg["PROMPT"].(string)
	if !ok {
		log.Println("PROMPT 未配置")
		prompt = ""
	}
	log.Println("PROMPT:", prompt)
	log.Println("ask:", ask)

	requestBody := map[string]interface{}{
		"model":  model,
		"stream": false,
		"messages": []map[string]string{
			{"role": "system", "content": prompt},
			{"role": "user", "content": ask},
		},
		"temperature": 0.7,
	}
	resp, body, errs := gorequest.New().
		Post(baseURL + "/chat/completions").
		Set("Content-Type", "application/json").
		Set("Authorization", "Bearer "+apiKey).
		Send(requestBody).
		End()
	if len(errs) > 0 {
		log.Println(errs)
		return "请求失败"
	}
	// The original silently returned "" on any non-200 status.
	if resp.StatusCode != 200 {
		log.Println("请求失败, 状态码:", resp.StatusCode)
		return "请求失败"
	}

	var responseBody map[string]interface{}
	if err := json.Unmarshal([]byte(body), &responseBody); err != nil {
		log.Println(err)
		return "解析失败"
	}
	// Every step below used a bare type assertion in the original and
	// would panic on an unexpected payload shape.
	choices, ok := responseBody["choices"].([]interface{})
	if !ok || len(choices) == 0 {
		log.Println("choices为空")
		return ""
	}
	choice, ok := choices[0].(map[string]interface{})
	if !ok {
		return ""
	}
	message, ok := choice["message"].(map[string]interface{})
	if !ok {
		return ""
	}
	content, _ := message["content"].(string)
	return content
}