chat-backend/core/gpt.go

package core

import (
	"crypto/tls"
	"encoding/json"
	"io"
	"net/http"
	"net/url"
	"strings"
	"time"
)
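
// NOTE: the GPT receiver used below is declared elsewhere in the core
// package. Based on how its fields are accessed in this file (g.ApiKey,
// g.Proxy, g.Timeout), the type is assumed to look roughly like the sketch
// here; the exact field types and any additional fields are not shown in
// this file.
//
//	type GPT struct {
//		ApiKey  string // OpenAI API key, sent as the Bearer token
//		Proxy   string // optional proxy address, e.g. "http://127.0.0.1:7890"
//		Timeout int    // request timeout in seconds
//	}
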
// HttpClientWithProxy
// Returns an HTTP client, optionally routed through the given proxy.
//
// @receiver g
// @param proxy
// @return *http.Client
func (g *GPT) HttpClientWithProxy(proxy string) *http.Client {
	transport := &http.Transport{
		TLSClientConfig: &tls.Config{
			// Skip certificate verification so self-signed proxy certs work.
			InsecureSkipVerify: true,
		},
	}
	if proxy != "" {
		// Only install the proxy if the address parses; otherwise fall back
		// to a direct connection instead of passing a nil URL.
		if proxyAddr, err := url.Parse(proxy); err == nil {
			transport.Proxy = http.ProxyURL(proxyAddr)
		}
	}
	return &http.Client{
		Timeout:   time.Duration(g.Timeout) * time.Second,
		Transport: transport,
	}
}
// GetAnswer
// Sends the question to the OpenAI chat completions API and decodes the response.
//
// @receiver g
// @param question
// @return Answer, error
func (g *GPT) GetAnswer(question string) (Answer, error) {
	asr := Answer{}
	payload := BuildApiPayload(question)
	api := "https://api.openai.com/v1/chat/completions"
	req, err := http.NewRequest("POST", api, strings.NewReader(payload))
	if err != nil {
		return asr, err
	}
	req.Header.Set("Content-Type", "application/json")
	req.Header.Set("Authorization", "Bearer "+g.ApiKey)
	client := g.HttpClientWithProxy(g.Proxy)
	resp, err := client.Do(req)
	if err != nil {
		return asr, err
	}
	defer resp.Body.Close()
	body, err := io.ReadAll(resp.Body)
	if err != nil {
		return asr, err
	}
	if err := json.Unmarshal(body, &asr); err != nil {
		return asr, err
	}
	return asr, nil
}
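
A minimal caller might look like the sketch below. It assumes the module import path matches the chat-backend/core directory shown above and that the GPT fields are as described earlier; the API key and proxy values are placeholders, not part of this file.

package main

import (
	"fmt"
	"log"

	"chat-backend/core" // assumed module path, matching the directory above
)

func main() {
	g := &core.GPT{
		ApiKey:  "sk-...", // placeholder OpenAI API key
		Proxy:   "",       // optional, e.g. "http://127.0.0.1:7890"; empty means a direct connection
		Timeout: 30,       // seconds, used by HttpClientWithProxy
	}

	answer, err := g.GetAnswer("Hello!")
	if err != nil {
		log.Fatalf("request failed: %v", err)
	}
	// Answer's fields are defined elsewhere in the core package, so print the
	// whole struct rather than guessing at its shape.
	fmt.Printf("%+v\n", answer)
}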