feat(workers): 添加对OpenAI模型列表的支持

向AI工作者添加了查询OpenAI支持模型列表的功能。当用户输入特定的
命令时,AI将返回可用模型的列表。这增强了AI的实用性并为用户提供
了更多资源的信息。

同时,优化了配置加载方式:对 APIURL 配置项的类型断言增加了 ok 检查,加载失败时记录错误并立即退出,确保了配置的正确性和系统的稳定性。
还对AI的响应消息进行了改进,增加了语言的灵活性和友好性。

BREAKING CHANGE: 配置文件格式有所更改,新增了PROMPT配置项;OPENAI_BaseURL 现在应配置为 API 基础地址(例如 https://api.openai.com/v1),不再包含 /chat/completions 路径。需要更新配置文件以适配这些变化。
This commit is contained in:
liyp 2024-06-30 23:34:00 +08:00
parent 13483b9643
commit be83757074
2 changed files with 82 additions and 32 deletions

View file

@@ -116,7 +116,10 @@ func handlePost(w http.ResponseWriter, r *http.Request) {
func main() { func main() {
cfg := config.GetConfig() cfg := config.GetConfig()
APIURL := cfg["APIURL"].(string) APIURL, ok := cfg["APIURL"].(string)
if !ok {
log.Fatal("加载配置失败!")
}
// config.PrintConfig(cfg, "") // config.PrintConfig(cfg, "")
// print(cfg["AllowGroup"].([]interface{})[0].(string)) // print(cfg["AllowGroup"].([]interface{})[0].(string))

View file

@@ -2,6 +2,7 @@ package workers
import ( import (
"log" "log"
"strings"
"github.com/goccy/go-json" "github.com/goccy/go-json"
"github.com/parnurzeal/gorequest" "github.com/parnurzeal/gorequest"
@@ -16,6 +17,10 @@ func (a *AI) GetMsg() string {
return "使用!ai xxx 向我提问吧" return "使用!ai xxx 向我提问吧"
} }
ask := a.Parms[1]
if ask == "" {
return "不问问题你说个屁!"
}
var msg string var msg string
var OPENAI_API_KEY string var OPENAI_API_KEY string
if cfg["OPENAI_API_KEY"] != nil { if cfg["OPENAI_API_KEY"] != nil {
@@ -29,7 +34,7 @@ func (a *AI) GetMsg() string {
OPENAI_BaseURL = cfg["OPENAI_BaseURL"].(string) OPENAI_BaseURL = cfg["OPENAI_BaseURL"].(string)
} else { } else {
log.Println("OPENAI_BaseURL 未配置,使用openai默认配置") log.Println("OPENAI_BaseURL 未配置,使用openai默认配置")
OPENAI_BaseURL = "https://api.openai.com/v1/chat/completions" OPENAI_BaseURL = "https://api.openai.com/v1"
} }
var MODEL string var MODEL string
if cfg["MODEL"] != nil { if cfg["MODEL"] != nil {
@@ -38,37 +43,79 @@ func (a *AI) GetMsg() string {
log.Println("模型 未配置,使用默认chatglm_pro模型") log.Println("模型 未配置,使用默认chatglm_pro模型")
MODEL = "chatglm_pro" MODEL = "chatglm_pro"
} }
ask := a.Parms[1]
if ask == "" { if strings.ToLower(a.Parms[1]) == "models" {
return "不问问题你说个屁!" OPENAI_BaseURL = OPENAI_BaseURL + "/models"
} request := gorequest.New()
requestBody := map[string]interface{}{ resp, body, errs := request.Get(OPENAI_BaseURL).
"model": MODEL, Set("Content-Type", "application/json").
"messages": []map[string]string{{"role": "user", "content": ask}}, Set("Authorization", "Bearer "+OPENAI_API_KEY).
"temperature": 0.7, End()
} if errs != nil {
request := gorequest.New() log.Println(errs)
resp, body, errs := request.Post(OPENAI_BaseURL). return "请求失败"
Set("Content-Type", "application/json"). } else {
Set("Authorization", "Bearer "+OPENAI_API_KEY). if resp.StatusCode == 200 {
Send(requestBody). var responseBody map[string]interface{}
End() if err := json.Unmarshal([]byte(body), &responseBody); err != nil {
if errs != nil { log.Println(err)
log.Println(errs) return "解析模型列表失败"
return "请求失败" }
} else { choices := responseBody["data"].([]interface{})
if resp.StatusCode == 200 { if len(choices) > 0 {
var responseBody map[string]interface{} msg = "支持的模型列表:\n"
if err := json.Unmarshal([]byte(body), &responseBody); err != nil { for _, choice := range choices {
log.Println(err) msg = msg + choice.(map[string]interface{})["id"].(string) + "\n"
return "解析失败" }
} } else {
choices := responseBody["choices"].([]interface{}) msg = "没查到支持模型列表"
if len(choices) > 0 { }
choice := choices[0].(map[string]interface{}) return msg
msg = choice["message"].(map[string]interface{})["content"].(string)
} else { } else {
log.Println("choices为空") log.Println("请求失败")
return "请求模型列表失败"
}
}
} else {
OPENAI_BaseURL = OPENAI_BaseURL + "/chat/completions"
PROMPT, ok := cfg["PROMPT"].(string)
if !ok {
log.Println("PROMRT 未配置")
PROMPT = ""
}
println("PROMPT:", PROMPT)
requestBody := map[string]interface{}{
"model": MODEL,
"messages": []map[string]string{{
"role": "system",
"content": PROMPT,
},
{"role": "user", "content": ask}},
"temperature": 0.7,
}
request := gorequest.New()
resp, body, errs := request.Post(OPENAI_BaseURL).
Set("Content-Type", "application/json").
Set("Authorization", "Bearer "+OPENAI_API_KEY).
Send(requestBody).
End()
if errs != nil {
log.Println(errs)
return "请求失败"
} else {
if resp.StatusCode == 200 {
var responseBody map[string]interface{}
if err := json.Unmarshal([]byte(body), &responseBody); err != nil {
log.Println(err)
return "解析失败"
}
choices := responseBody["choices"].([]interface{})
if len(choices) > 0 {
choice := choices[0].(map[string]interface{})
msg = choice["message"].(map[string]interface{})["content"].(string)
} else {
log.Println("choices为空")
}
} }
} }
} }