package workers

import (
    "log"
    "strings"

    "github.com/goccy/go-json"
    "github.com/parnurzeal/gorequest"
)
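
// AI is a worker that answers "!ai" questions by calling an
// OpenAI-compatible API configured through the package-level cfg map.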
type AI struct {
    *StdAns
}
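
// GetMsg builds the reply for an "!ai" command. It reads API settings from
// cfg, handles the "!ai models" subcommand by listing the available models,
// and otherwise forwards the question to the chat completions endpoint.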
func (a *AI) GetMsg() string {
    if len(a.Parms) < 2 {
        return "使用!ai xxx 向我提问吧"
    }

    ask := a.Parms[1]
    if ask == "" {
        return "不问问题你说个屁!"
    }
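
    // Runtime settings come from the package-level cfg map:
    // OPENAI_API_KEY is required; OPENAI_BaseURL, MODEL and PROMPT are optional.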
    var msg string

    var OPENAI_API_KEY string
    if cfg["OPENAI_API_KEY"] != nil {
        OPENAI_API_KEY = cfg["OPENAI_API_KEY"].(string)
    } else {
        log.Println("OPENAI_API_KEY 未配置")
        return "OPENAI_API_KEY 未配置"
    }

    var OPENAI_BaseURL string
    if cfg["OPENAI_BaseURL"] != nil {
        OPENAI_BaseURL = cfg["OPENAI_BaseURL"].(string)
    } else {
        log.Println("OPENAI_BaseURL 未配置,使用openai默认配置")
        OPENAI_BaseURL = "https://api.openai.com/v1"
    }

    var MODEL string
    if cfg["MODEL"] != nil {
        MODEL = cfg["MODEL"].(string)
    } else {
        log.Println("模型 未配置,使用默认chatglm_pro模型")
        MODEL = "chatglm_pro"
    }
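
    // The "models" subcommand lists the models the configured endpoint
    // exposes via GET /models.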
    if strings.ToLower(a.Parms[1]) == "models" {
        OPENAI_BaseURL = OPENAI_BaseURL + "/models"

        request := gorequest.New()
        resp, body, errs := request.Get(OPENAI_BaseURL).
            Set("Content-Type", "application/json").
            Set("Authorization", "Bearer "+OPENAI_API_KEY).
            End()
        if len(errs) > 0 {
            log.Println(errs)
            return "请求失败"
        }
        if resp.StatusCode != 200 {
            log.Println("请求模型列表失败, status:", resp.StatusCode)
            return "请求模型列表失败"
        }

        var responseBody map[string]interface{}
        if err := json.Unmarshal([]byte(body), &responseBody); err != nil {
            log.Println(err)
            return "解析模型列表失败"
        }
        models, ok := responseBody["data"].([]interface{})
        if !ok || len(models) == 0 {
            return "没查到支持模型列表"
        }
        msg = "支持的模型列表:\n"
        for _, model := range models {
            msg = msg + model.(map[string]interface{})["id"].(string) + "\n"
        }
        return msg
    } else {
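        // Anything else is sent as a question to the chat completions
        // endpoint, together with the optional system PROMPT from cfg.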
        OPENAI_BaseURL = OPENAI_BaseURL + "/chat/completions"

        PROMPT, ok := cfg["PROMPT"].(string)
        if !ok {
            log.Println("PROMPT 未配置")
            PROMPT = ""
        }
        // println("PROMPT:", PROMPT)

        requestBody := map[string]interface{}{
            "model": MODEL,
            "messages": []map[string]string{
                {"role": "system", "content": PROMPT},
                {"role": "user", "content": ask},
            },
            "temperature": 0.7,
        }

        request := gorequest.New()
        resp, body, errs := request.Post(OPENAI_BaseURL).
            Set("Content-Type", "application/json").
            Set("Authorization", "Bearer "+OPENAI_API_KEY).
            Send(requestBody).
            End()
        if len(errs) > 0 {
            log.Println(errs)
            return "请求失败"
        }
        if resp.StatusCode != 200 {
            log.Println("请求失败, status:", resp.StatusCode)
            return "请求失败"
        }

        var responseBody map[string]interface{}
        if err := json.Unmarshal([]byte(body), &responseBody); err != nil {
            log.Println(err)
            return "解析失败"
        }
        choices, ok := responseBody["choices"].([]interface{})
        if !ok || len(choices) == 0 {
            log.Println("choices为空")
            return "解析失败"
        }
        choice := choices[0].(map[string]interface{})
        msg = choice["message"].(map[string]interface{})["content"].(string)
    }

    return msg
}