├── code ├── .gitignore ├── services │ ├── openai │ │ ├── test_file │ │ │ ├── img.png │ │ │ ├── test.jpg │ │ │ └── test.wav │ │ ├── audio.go │ │ ├── billing.go │ │ ├── gpt3.go │ │ ├── gpt3_test.go │ │ ├── common.go │ │ └── picture.go │ ├── chatgpt │ │ ├── tokenizer.go │ │ ├── tokenizer_test.go │ │ ├── check.go │ │ ├── gpt3_test.go │ │ └── gpt3.go │ ├── msgCache.go │ ├── accesscontrol │ │ └── access_control.go │ ├── loadbalancer │ │ └── loadbalancer.go │ └── sessionCache.go ├── utils │ ├── commonUtils.go │ ├── logUtils.go │ ├── strings.go │ ├── audio │ │ ├── ogg.go │ │ └── wav.go │ └── strings_test.go ├── initialization │ ├── lark_client.go │ ├── roles_load.go │ ├── gin.go │ └── config.go ├── handlers │ ├── card_common_action.go │ ├── card_clear_action.go │ ├── init.go │ ├── card_role_action.go │ ├── common.go │ ├── card_pic_action.go │ ├── handler.go │ ├── event_common_action.go │ ├── event_msg_action.go │ └── msg.go ├── config.example.yaml ├── go.mod ├── main.go └── role_list.yaml ├── docs ├── help.png ├── img.png ├── img3.png └── talk.png ├── Dockerfile ├── .gitignore ├── docker-compose.yaml ├── s.yaml ├── readme.md └── LICENSE /code/.gitignore: -------------------------------------------------------------------------------- 1 | /apikey_usage.json 2 | *.pem 3 | -------------------------------------------------------------------------------- /docs/help.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ConnectAI-E/Feishu-OpenAI-Stream-Chatbot/HEAD/docs/help.png -------------------------------------------------------------------------------- /docs/img.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ConnectAI-E/Feishu-OpenAI-Stream-Chatbot/HEAD/docs/img.png -------------------------------------------------------------------------------- /docs/img3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ConnectAI-E/Feishu-OpenAI-Stream-Chatbot/HEAD/docs/img3.png -------------------------------------------------------------------------------- /docs/talk.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ConnectAI-E/Feishu-OpenAI-Stream-Chatbot/HEAD/docs/talk.png -------------------------------------------------------------------------------- /code/services/openai/test_file/img.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ConnectAI-E/Feishu-OpenAI-Stream-Chatbot/HEAD/code/services/openai/test_file/img.png -------------------------------------------------------------------------------- /code/services/openai/test_file/test.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ConnectAI-E/Feishu-OpenAI-Stream-Chatbot/HEAD/code/services/openai/test_file/test.jpg -------------------------------------------------------------------------------- /code/services/openai/test_file/test.wav: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ConnectAI-E/Feishu-OpenAI-Stream-Chatbot/HEAD/code/services/openai/test_file/test.wav -------------------------------------------------------------------------------- /code/utils/commonUtils.go: -------------------------------------------------------------------------------- 1 | package utils 2 | 3 
| import ( 4 | "time" 5 | ) 6 | 7 | func GetCurrentDateAsString() string { 8 | return time.Now().Format("2006-01-02") 9 | 10 | // 本地测试可以用这个。将1天缩短到10秒。 11 | //return strconv.Itoa((time.Now().Second() + 100000) / 10) 12 | } 13 | -------------------------------------------------------------------------------- /code/initialization/lark_client.go: -------------------------------------------------------------------------------- 1 | package initialization 2 | 3 | import ( 4 | lark "github.com/larksuite/oapi-sdk-go/v3" 5 | ) 6 | 7 | var larkClient *lark.Client 8 | 9 | func LoadLarkClient(config Config) { 10 | larkClient = lark.NewClient(config.FeishuAppId, config.FeishuAppSecret) 11 | } 12 | 13 | func GetLarkClient() *lark.Client { 14 | return larkClient 15 | } 16 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM golang:1.18 as golang 2 | 3 | ENV GO111MODULE=on \ 4 | CGO_ENABLED=1 \ 5 | GOPROXY=https://goproxy.cn,direct 6 | 7 | WORKDIR /build 8 | ADD /code /build 9 | 10 | RUN CGO_ENABLED=0 GOOS=linux go build -ldflags '-w -s' -o feishu_chatgpt 11 | 12 | FROM alpine:latest 13 | 14 | WORKDIR /app 15 | 16 | RUN apk add --no-cache bash 17 | COPY --from=golang /build/feishu_chatgpt /app 18 | COPY --from=golang /build/role_list.yaml /app 19 | EXPOSE 9000 20 | ENTRYPOINT ["/app/feishu_chatgpt"] 21 | -------------------------------------------------------------------------------- /code/services/chatgpt/tokenizer.go: -------------------------------------------------------------------------------- 1 | package chatgpt 2 | 3 | import ( 4 | "github.com/pandodao/tokenizer-go" 5 | "github.com/sashabaranov/go-openai" 6 | "strings" 7 | ) 8 | 9 | func CalcTokenLength(text string) int { 10 | text = strings.TrimSpace(text) 11 | return tokenizer.MustCalToken(text) 12 | } 13 | 14 | func CalcTokenFromMsgList(msgs []openai.ChatCompletionMessage) int { 15 | var total int 16 | for _, msg := range msgs { 17 | total += CalcTokenLength(msg.Content) 18 | } 19 | return total 20 | } 21 | -------------------------------------------------------------------------------- /code/utils/logUtils.go: -------------------------------------------------------------------------------- 1 | package utils 2 | 3 | import ( 4 | "fmt" 5 | "gopkg.in/natefinch/lumberjack.v2" 6 | "log" 7 | "time" 8 | ) 9 | 10 | type MyLogWriter struct { 11 | } 12 | 13 | func (writer MyLogWriter) Write(bytes []byte) (int, error) { 14 | return fmt.Print(time.Now().UTC().Format("2006-01-02T15:04:05.999Z") + string(bytes)) 15 | } 16 | 17 | func CloseLogger(logger *lumberjack.Logger) { 18 | err := logger.Close() 19 | if err != nil { 20 | log.Println(err) 21 | } else { 22 | log.Println("logger closed") 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /code/services/msgCache.go: -------------------------------------------------------------------------------- 1 | package services 2 | 3 | import ( 4 | "github.com/patrickmn/go-cache" 5 | "time" 6 | ) 7 | 8 | type MsgService struct { 9 | cache *cache.Cache 10 | } 11 | type MsgCacheInterface interface { 12 | IfProcessed(msgId string) bool 13 | TagProcessed(msgId string) 14 | Clear(userId string) bool 15 | } 16 | 17 | var msgService *MsgService 18 | 19 | func (u MsgService) IfProcessed(msgId string) bool { 20 | _, found := u.cache.Get(msgId) 21 | return found 22 | } 23 | func (u MsgService) TagProcessed(msgId string) { 24 | u.cache.Set(msgId, 
true, time.Minute*30) 25 | } 26 | 27 | func (u MsgService) Clear(userId string) bool { 28 | u.cache.Delete(userId) 29 | return true 30 | } 31 | 32 | func GetMsgCache() MsgCacheInterface { 33 | if msgService == nil { 34 | msgService = &MsgService{cache: cache.New(30*time.Minute, 30*time.Minute)} 35 | } 36 | return msgService 37 | } 38 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | ### Go template 2 | # If you prefer the allow list template instead of the deny list, see community template: 3 | # https://github.com/github/gitignore/blob/main/community/Golang/Go.AllowList.gitignore 4 | # 5 | # Binaries for programs and plugins 6 | *.exe 7 | *.exe~ 8 | *.dll 9 | *.so 10 | *.dylib 11 | 12 | # Test binary, built with `go test -c` 13 | *.test 14 | 15 | # Output of the go coverage tool, specifically when used with LiteIDE 16 | *.out 17 | 18 | # Dependency directories (remove the comment below to include it) 19 | # vendor/ 20 | 21 | # Go workspace file 22 | go.work 23 | ./code/target 24 | .idea 25 | .vscode 26 | .s 27 | 28 | config.yaml 29 | 30 | 31 | 32 | /code/target/ 33 | start-feishubot 34 | .env 35 | 36 | docker.md 37 | # Mac OS 38 | .DS_Store 39 | **/.DS_Store 40 | *.pem 41 | 42 | pkg 43 | # 方便在本地放一些不用上传给 github 的文档,只需要在本地建立一个 ignore 文件夹,然后把需要忽略的文件放进去就行了 44 | ignore 45 | logs -------------------------------------------------------------------------------- /code/utils/strings.go: -------------------------------------------------------------------------------- 1 | package utils 2 | 3 | import "strings" 4 | 5 | func CutPrefix(s, prefix string) (string, bool) { 6 | if strings.HasPrefix(s, prefix) { 7 | return strings.TrimPrefix(s, prefix), true 8 | } 9 | return s, false 10 | } 11 | 12 | func EitherCutPrefix(s string, prefix ...string) (string, bool) { 13 | // 任一前缀匹配则返回剩余部分 14 | for _, p := range prefix { 15 | if strings.HasPrefix(s, p) { 16 | return strings.TrimPrefix(s, p), true 17 | } 18 | } 19 | return s, false 20 | } 21 | 22 | // trim space and equal 23 | func TrimEqual(s, prefix string) (string, bool) { 24 | if strings.TrimSpace(s) == prefix { 25 | return "", true 26 | } 27 | return s, false 28 | } 29 | 30 | func EitherTrimEqual(s string, prefix ...string) (string, bool) { 31 | // 任一前缀匹配则返回剩余部分 32 | for _, p := range prefix { 33 | if strings.TrimSpace(s) == p { 34 | return "", true 35 | } 36 | } 37 | return s, false 38 | } 39 | -------------------------------------------------------------------------------- /code/services/chatgpt/tokenizer_test.go: -------------------------------------------------------------------------------- 1 | package chatgpt 2 | 3 | import "testing" 4 | 5 | func TestCalcTokenLength(t *testing.T) { 6 | type args struct { 7 | text string 8 | } 9 | tests := []struct { 10 | name string 11 | args args 12 | want int 13 | }{ 14 | { 15 | name: "eng", 16 | args: args{ 17 | text: "hello world", 18 | }, 19 | want: 2, 20 | }, 21 | { 22 | name: "cn", 23 | args: args{ 24 | text: "我和我的祖国", 25 | }, 26 | want: 13, 27 | }, 28 | { 29 | name: "empty", 30 | args: args{ 31 | text: "", 32 | }, 33 | want: 0, 34 | }, 35 | { 36 | name: "empty", 37 | args: args{ 38 | text: " ", 39 | }, 40 | want: 0, 41 | }, 42 | } 43 | for _, tt := range tests { 44 | t.Run(tt.name, func(t *testing.T) { 45 | if got := CalcTokenLength(tt.args.text); got != tt.want { 46 | t.Errorf("CalcTokenLength() = %v, want %v", got, tt.want) 47 | } 48 | }) 49 | } 50 | } 51 | 
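The token helpers in services/chatgpt/tokenizer.go above can be combined to keep a conversation inside a model's context budget. A minimal sketch, assuming the repository's module path; trimToBudget and the 2000-token budget are illustrative, not code from this project:

package main

import (
	"fmt"

	"github.com/sashabaranov/go-openai"
	"start-feishubot/services/chatgpt"
)

// trimToBudget is a hypothetical helper: it keeps the first message
// (typically the system prompt) and drops the oldest following turns
// until CalcTokenFromMsgList reports the history fits the budget.
func trimToBudget(msgs []openai.ChatCompletionMessage, maxTokens int) []openai.ChatCompletionMessage {
	for len(msgs) > 1 && chatgpt.CalcTokenFromMsgList(msgs) > maxTokens {
		msgs = append(msgs[:1], msgs[2:]...)
	}
	return msgs
}

func main() {
	history := []openai.ChatCompletionMessage{
		{Role: "system", Content: "You are a helpful assistant."},
		{Role: "user", Content: "hello world"},
	}
	history = trimToBudget(history, 2000) // 2000 is an illustrative budget
	fmt.Println(len(history), chatgpt.CalcTokenFromMsgList(history))
}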
-------------------------------------------------------------------------------- /code/services/chatgpt/check.go: -------------------------------------------------------------------------------- 1 | package chatgpt 2 | 3 | import ( 4 | "errors" 5 | "github.com/sashabaranov/go-openai" 6 | ) 7 | 8 | const ( 9 | ChatMessageRoleSystem = "system" 10 | ChatMessageRoleUser = "user" 11 | ChatMessageRoleAssistant = "assistant" 12 | ) 13 | 14 | func CheckChatCompletionMessages(messages []openai.ChatCompletionMessage) error { 15 | hasSystemMsg := false 16 | for _, msg := range messages { 17 | if msg.Role != ChatMessageRoleSystem && msg.Role != ChatMessageRoleUser && msg.Role != ChatMessageRoleAssistant { 18 | return errors.New("invalid message role") 19 | } 20 | if msg.Role == ChatMessageRoleSystem { 21 | if hasSystemMsg { 22 | return errors.New("more than one system message") 23 | } 24 | hasSystemMsg = true 25 | } else { 26 | // 对于非 system 角色的消息,Content 不能为空 27 | if msg.Content == "" { 28 | return errors.New("empty content in non-system message") 29 | } 30 | } 31 | } 32 | return nil 33 | } 34 | -------------------------------------------------------------------------------- /code/handlers/card_common_action.go: -------------------------------------------------------------------------------- 1 | package handlers 2 | 3 | import ( 4 | "context" 5 | "encoding/json" 6 | "fmt" 7 | larkcard "github.com/larksuite/oapi-sdk-go/v3/card" 8 | ) 9 | 10 | type CardHandlerMeta func(cardMsg CardMsg, m MessageHandler) CardHandlerFunc 11 | 12 | type CardHandlerFunc func(ctx context.Context, cardAction *larkcard.CardAction) ( 13 | interface{}, error) 14 | 15 | var ErrNextHandler = fmt.Errorf("next handler") 16 | 17 | func NewCardHandler(m MessageHandler) CardHandlerFunc { 18 | handlers := []CardHandlerMeta{ 19 | NewClearCardHandler, 20 | NewPicResolutionHandler, 21 | NewPicTextMoreHandler, 22 | NewPicModeChangeHandler, 23 | NewRoleTagCardHandler, 24 | NewRoleCardHandler, 25 | } 26 | 27 | return func(ctx context.Context, cardAction *larkcard.CardAction) (interface{}, error) { 28 | var cardMsg CardMsg 29 | actionValue := cardAction.Action.Value 30 | actionValueJson, _ := json.Marshal(actionValue) 31 | json.Unmarshal(actionValueJson, &cardMsg) 32 | //pp.Println(cardMsg) 33 | for _, handler := range handlers { 34 | h := handler(cardMsg, m) 35 | i, err := h(ctx, cardAction) 36 | if err == ErrNextHandler { 37 | continue 38 | } 39 | return i, err 40 | } 41 | return nil, nil 42 | } 43 | } 44 | -------------------------------------------------------------------------------- /code/utils/audio/ogg.go: -------------------------------------------------------------------------------- 1 | package audio 2 | 3 | import ( 4 | "bytes" 5 | "errors" 6 | "io" 7 | "os" 8 | 9 | "github.com/pion/opus" 10 | "github.com/pion/opus/pkg/oggreader" 11 | ) 12 | 13 | func OggToWavByPath(ogg string, wav string) error { 14 | input, err := os.Open(ogg) 15 | if err != nil { 16 | return err 17 | } 18 | defer input.Close() 19 | 20 | output, err := os.Create(wav) 21 | if err != nil { 22 | return err 23 | } 24 | 25 | defer output.Close() 26 | return OggToWav(input, output) 27 | } 28 | 29 | func OggToWav(input io.Reader, output io.WriteSeeker) error { 30 | ogg, _, err := oggreader.NewWith(input) 31 | if err != nil { 32 | return err 33 | } 34 | 35 | out := make([]byte, 1920) 36 | 37 | decoder := opus.NewDecoder() 38 | encoder := NewEncoder(output, 44100, 16) 39 | 40 | for { 41 | segments, _, err := ogg.ParseNextPage() 42 | if errors.Is(err, io.EOF) { 43 | break 
44 | } else if bytes.HasPrefix(segments[0], []byte("OpusTags")) { 45 | continue 46 | } 47 | 48 | if err != nil { 49 | panic(err) 50 | } 51 | 52 | for i := range segments { 53 | if _, _, err = decoder.Decode(segments[i], out); err != nil { 54 | panic(err) 55 | } 56 | encoder.Write(out) 57 | } 58 | } 59 | encoder.Close() 60 | return nil 61 | } 62 | -------------------------------------------------------------------------------- /code/handlers/card_clear_action.go: -------------------------------------------------------------------------------- 1 | package handlers 2 | 3 | import ( 4 | "context" 5 | larkcard "github.com/larksuite/oapi-sdk-go/v3/card" 6 | "start-feishubot/services" 7 | ) 8 | 9 | func NewClearCardHandler(cardMsg CardMsg, m MessageHandler) CardHandlerFunc { 10 | return func(ctx context.Context, cardAction *larkcard.CardAction) (interface{}, error) { 11 | if cardMsg.Kind == ClearCardKind { 12 | newCard, err, done := CommonProcessClearCache(cardMsg, m.sessionCache) 13 | if done { 14 | return newCard, err 15 | } 16 | return nil, nil 17 | } 18 | return nil, ErrNextHandler 19 | } 20 | } 21 | 22 | func CommonProcessClearCache(cardMsg CardMsg, session services.SessionServiceCacheInterface) ( 23 | interface{}, error, bool) { 24 | if cardMsg.Value == "1" { 25 | session.Clear(cardMsg.SessionId) 26 | newCard, _ := newSendCard( 27 | withHeader("️🆑 机器人提醒", larkcard.TemplateGrey), 28 | withMainMd("已删除此话题的上下文信息"), 29 | withNote("我们可以开始一个全新的话题,继续找我聊天吧"), 30 | ) 31 | //fmt.Printf("session: %v", newCard) 32 | return newCard, nil, true 33 | } 34 | if cardMsg.Value == "0" { 35 | newCard, _ := newSendCard( 36 | withHeader("️🆑 机器人提醒", larkcard.TemplateGreen), 37 | withMainMd("依旧保留此话题的上下文信息"), 38 | withNote("我们可以继续探讨这个话题,期待和您聊天。如果您有其他问题或者想要讨论的话题,请告诉我哦"), 39 | ) 40 | return newCard, nil, true 41 | } 42 | return nil, nil, false 43 | } 44 | -------------------------------------------------------------------------------- /docker-compose.yaml: -------------------------------------------------------------------------------- 1 | version: '3.3' 2 | services: 3 | feishu-chatgpt: 4 | container_name: Feishu-OpenAI-Stream-Chatbot 5 | build: 6 | context: . 7 | dockerfile: Dockerfile 8 | ports: 9 | - "9000:9000/tcp" 10 | volumes: 11 | # - ./code/config.yaml:/app/config.yaml:ro 12 | # 要注意,这里右边的容器内的路径,不是从根目录开始的,要参考 dockerfile 中的 WORKDIR 13 | - ./logs:/app/logs 14 | environment: 15 | ################ 以下配置建议和 config.example.yaml 里面的配置综合起来看 ################ 16 | # 日志配置, 默认不开启, 可以开启后查看日志 17 | - ENABLE_LOG=false 18 | - APP_ID=cli_axxx 19 | - APP_SECRET=xxx 20 | - APP_ENCRYPT_KEY=xxx 21 | - APP_VERIFICATION_TOKEN=xxx 22 | # 请确保和飞书应用管理平台中的设置一致 23 | - BOT_NAME=xxx 24 | # OpenAI API Key 支持负载均衡, 可以填写多个 Key 用逗号分隔 25 | - OPENAI_KEY=sk-xxx,sk-xxx,sk-xxx 26 | # 服务器配置 27 | - HTTP_PORT=9000 28 | - HTTPS_PORT=9001 29 | - USE_HTTPS=false 30 | - CERT_FILE=cert.pem 31 | - KEY_FILE=key.pem 32 | # OpenAI 地址, 一般不需要修改, 除非你有自己的反向代理 33 | - API_URL=https://api.openai.com 34 | # 代理设置, 例如 - HTTP_PROXY=http://127.0.0.1:7890, 默认代表不使用代理 35 | - HTTP_PROXY 36 | ## 访问控制 37 | # 是否启用访问控制。默认不启用。 38 | - ACCESS_CONTROL_ENABLE=false 39 | # 每个用户每天最多问多少个问题。默认为0. 
配置成为小于等于0表示不限制。 40 | - ACCESS_CONTROL_MAX_COUNT_PER_USER_PER_DAY=0 41 | # 访问OpenAi的 普通 Http请求的超时时间,单位秒,不配置的话默认为 550 秒 42 | - OPENAI_HTTP_CLIENT_TIMEOUT 43 | -------------------------------------------------------------------------------- /code/services/chatgpt/gpt3_test.go: -------------------------------------------------------------------------------- 1 | package chatgpt 2 | 3 | import ( 4 | "context" 5 | "fmt" 6 | "start-feishubot/initialization" 7 | "start-feishubot/services/openai" 8 | "testing" 9 | "time" 10 | ) 11 | 12 | func TestChatGPT_streamChat(t *testing.T) { 13 | // 初始化配置 14 | config := initialization.LoadConfig("../../config.yaml") 15 | 16 | // 准备测试用例 17 | testCases := []struct { 18 | msg []openai.Messages 19 | wantOutput string 20 | wantErr bool 21 | }{ 22 | { 23 | msg: []openai.Messages{ 24 | { 25 | Role: "system", 26 | Content: "从现在起你要化身职场语言大师,你需要用婉转的方式回复老板想你提出的问题,或像领导提出请求。", 27 | }, 28 | { 29 | Role: "user", 30 | Content: "领导,我想请假一天", 31 | }, 32 | }, 33 | wantOutput: "", 34 | wantErr: false, 35 | }, 36 | } 37 | 38 | // 执行测试用例 39 | for _, tc := range testCases { 40 | // 准备输入和输出 41 | responseStream := make(chan string) 42 | ctx := context.Background() 43 | c := &ChatGPT{config: config} 44 | 45 | // 启动一个协程来模拟流式聊天 46 | go func() { 47 | err := c.StreamChat(ctx, tc.msg, responseStream) 48 | if err != nil { 49 | t.Errorf("streamChat() error = %v, wantErr %v", err, tc.wantErr) 50 | } 51 | }() 52 | 53 | // 等待输出并检查是否符合预期 54 | select { 55 | case gotOutput := <-responseStream: 56 | fmt.Printf("gotOutput: %v\n", gotOutput) 57 | 58 | case <-time.After(5 * time.Second): 59 | t.Errorf("streamChat() timeout, expected output not received") 60 | } 61 | } 62 | } 63 | -------------------------------------------------------------------------------- /code/config.example.yaml: -------------------------------------------------------------------------------- 1 | # 是否启用日志。 2 | ENABLE_LOG: true 3 | # 飞书 4 | APP_ID: cli_axxx 5 | APP_SECRET: xxx 6 | APP_ENCRYPT_KEY: xxx 7 | APP_VERIFICATION_TOKEN: xxx 8 | # 请确保和飞书应用管理平台中的设置一致。这里建议直接用 Feishu-OpenAI-Stream-Chatbot 作为机器人名称,这样的话,如果你有多个bot就好区分 9 | BOT_NAME: xxx 10 | # openAI key 支持负载均衡 可以填写多个key 用逗号分隔 11 | OPENAI_KEY: sk-xxx,sk-xxx,sk-xxx 12 | # 服务器配置 13 | HTTP_PORT: 9000 14 | HTTPS_PORT: 9001 15 | USE_HTTPS: false 16 | CERT_FILE: cert.pem 17 | KEY_FILE: key.pem 18 | # openai 地址, 一般不需要修改, 除非你有自己的反向代理 19 | API_URL: https://api.openai.com 20 | # 代理设置, 例如 "http://127.0.0.1:7890", ""代表不使用代理 21 | HTTP_PROXY: "" 22 | # 访问OpenAi的 普通 Http请求的超时时间,单位秒,不配置的话默认为 550 秒 23 | OPENAI_HTTP_CLIENT_TIMEOUT: 24 | # openai 指定模型, 更多见 https://platform.openai.com/docs/models/model-endpoint-compatibility 中 /v1/chat/completions 25 | OPENAI_MODEL: gpt-3.5-turbo 26 | 27 | # AZURE OPENAI 28 | AZURE_ON: true # set to true to use Azure rather than OpenAI 29 | AZURE_API_VERSION: 2023-03-15-preview # 2023-03-15-preview or 2022-12-01 refer https://learn.microsoft.com/en-us/azure/cognitive-services/openai/reference#completions 30 | AZURE_RESOURCE_NAME: xxxx # you can find in endpoint url. Usually looks like https://{RESOURCE_NAME}.openai.azure.com 31 | AZURE_DEPLOYMENT_NAME: xxxx # usually looks like ...openai.azure.com/openai/deployments/{DEPLOYMENT_NAME}/chat/completions. 32 | AZURE_OPENAI_TOKEN: xxxx # Authentication key. We can use Azure Active Directory Authentication(TBD). 33 | 34 | ## 访问控制 35 | # 是否启用访问控制。默认不启用。 36 | ACCESS_CONTROL_ENABLE: false 37 | # 每个用户每天最多问多少个问题。默认为不限制. 
配置成为小于等于0表示不限制。 38 | ACCESS_CONTROL_MAX_COUNT_PER_USER_PER_DAY: 0 39 | -------------------------------------------------------------------------------- /code/services/accesscontrol/access_control.go: -------------------------------------------------------------------------------- 1 | package accesscontrol 2 | 3 | import ( 4 | "start-feishubot/initialization" 5 | "start-feishubot/utils" 6 | "sync" 7 | ) 8 | 9 | var accessCountMap = sync.Map{} 10 | var currentDateFlag = "" 11 | 12 | /* 13 | CheckAllowAccessThenIncrement If user has accessed more than 100 times according to accessCountMap, return false. 14 | Otherwise, return true and increase the access count by 1 15 | */ 16 | func CheckAllowAccessThenIncrement(userId *string) bool { 17 | 18 | // Begin a new day, clear the accessCountMap 19 | currentDateAsString := utils.GetCurrentDateAsString() 20 | if currentDateFlag != currentDateAsString { 21 | accessCountMap = sync.Map{} 22 | currentDateFlag = currentDateAsString 23 | } 24 | 25 | if CheckAllowAccess(userId) { 26 | accessedCount, ok := accessCountMap.Load(*userId) 27 | if !ok { 28 | accessCountMap.Store(*userId, 1) 29 | } else { 30 | accessCountMap.Store(*userId, accessedCount.(int)+1) 31 | } 32 | return true 33 | } else { 34 | return false 35 | } 36 | } 37 | 38 | func CheckAllowAccess(userId *string) bool { 39 | 40 | if initialization.GetConfig().AccessControlMaxCountPerUserPerDay <= 0 { 41 | return true 42 | } 43 | 44 | accessedCount, ok := accessCountMap.Load(*userId) 45 | 46 | if !ok { 47 | accessCountMap.Store(*userId, 0) 48 | return true 49 | } 50 | 51 | // If the user has accessed more than 100 times, return false 52 | if accessedCount.(int) >= initialization.GetConfig().AccessControlMaxCountPerUserPerDay { 53 | return false 54 | } 55 | 56 | // Otherwise, return true 57 | return true 58 | } 59 | 60 | func GetCurrentDateFlag() string { 61 | return currentDateFlag 62 | } 63 | 64 | func GetAccessCountMap() *sync.Map { 65 | return &accessCountMap 66 | } 67 | -------------------------------------------------------------------------------- /s.yaml: -------------------------------------------------------------------------------- 1 | edition: 1.0.0 2 | name: feishuBot-chatGpt 3 | access: "aliyun" # 秘钥别名 4 | 5 | vars: # 全局变量 6 | region: "ap-southeast-1" 7 | 8 | services: 9 | helloworld: 10 | component: fc 11 | actions: 12 | pre-deploy: 13 | - run: go mod tidy 14 | path: ./code 15 | - run: GO111MODULE=on GOOS=linux GOARCH=amd64 CGO_ENABLED=0 go build -o 16 | target/main main.go 17 | path: ./code 18 | # post-deploy: 19 | # - run: rm -rf target 20 | # path: ./code 21 | # 配置参考https://docs.serverless-devs.com/fc/yaml/readme 22 | props: # 组件的属性值 23 | region: ${vars.region} # 关于变量的使用方法,可以参考:https://www.serverless-devs.com/serverless-devs/yaml#变量赋值 24 | service: 25 | name: "feishubot" 26 | description: "Serverless Devs Website Service" 27 | function: 28 | name: "feishu-chatgpt" 29 | description: 'a simple feishubot by serverless devs' 30 | codeUri: './code' 31 | cAPort: 9000 32 | customRuntimeConfig: 33 | command: 34 | - ./target/main 35 | environmentVariables: {} 36 | handler: index.handler 37 | instanceConcurrency: 20 38 | instanceType: e1 39 | memorySize: 128 40 | runtime: custom 41 | timeout: 120 42 | internetAccess: true 43 | triggers: 44 | - name: httpTrigger 45 | type: http 46 | config: 47 | authType: anonymous 48 | methods: 49 | - GET 50 | - POST 51 | customDomains: 52 | - domainName: auto 53 | protocol: HTTP 54 | routeConfigs: 55 | - path: /* 56 | methods: 57 | - GET 58 | - POST 59 | 
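For reference, a minimal sketch of how the per-user daily quota in services/accesscontrol above might be consulted before answering a message, assuming the global config has already been loaded; handleTextMessage and the reply text are illustrative, not code from this project:

package main

import (
	"fmt"

	"start-feishubot/services/accesscontrol"
)

// handleTextMessage is a hypothetical entry point; the real message
// handlers live in code/handlers and are wired differently.
func handleTextMessage(userId, text string) string {
	// Checks ACCESS_CONTROL_MAX_COUNT_PER_USER_PER_DAY and, if allowed,
	// counts this request against today's quota.
	if !accesscontrol.CheckAllowAccessThenIncrement(&userId) {
		return "daily question limit reached, please try again tomorrow"
	}
	return "answering: " + text
}

func main() {
	fmt.Println(handleTextMessage("ou_demo_user", "hello"))
}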
-------------------------------------------------------------------------------- /code/initialization/roles_load.go: -------------------------------------------------------------------------------- 1 | package initialization 2 | 3 | import ( 4 | "errors" 5 | "github.com/duke-git/lancet/v2/slice" 6 | "github.com/duke-git/lancet/v2/validator" 7 | "gopkg.in/yaml.v2" 8 | "io/ioutil" 9 | "log" 10 | ) 11 | 12 | type Role struct { 13 | Title string `yaml:"title"` 14 | Content string `yaml:"content"` 15 | Tags []string `yaml:"tags"` 16 | } 17 | 18 | var RoleList *[]Role 19 | 20 | // InitRoleList 加载Prompt 21 | func InitRoleList() *[]Role { 22 | data, err := ioutil.ReadFile("role_list.yaml") 23 | if err != nil { 24 | log.Fatal(err) 25 | } 26 | 27 | err = yaml.Unmarshal(data, &RoleList) 28 | if err != nil { 29 | log.Fatal(err) 30 | } 31 | return RoleList 32 | } 33 | 34 | func GetRoleList() *[]Role { 35 | return RoleList 36 | } 37 | func GetAllUniqueTags() *[]string { 38 | tags := make([]string, 0) 39 | for _, role := range *RoleList { 40 | tags = append(tags, role.Tags...) 41 | } 42 | result := slice.Union(tags) 43 | return &result 44 | } 45 | 46 | func GetRoleByTitle(title string) *Role { 47 | for _, role := range *RoleList { 48 | if role.Title == title { 49 | return &role 50 | } 51 | } 52 | return nil 53 | } 54 | 55 | func GetTitleListByTag(tags string) *[]string { 56 | roles := make([]string, 0) 57 | //pp.Println(RoleList) 58 | for _, role := range *RoleList { 59 | for _, roleTag := range role.Tags { 60 | if roleTag == tags && !validator.IsEmptyString(role. 61 | Title) { 62 | roles = append(roles, role.Title) 63 | } 64 | } 65 | } 66 | return &roles 67 | } 68 | 69 | func GetFirstRoleContentByTitle(title string) (string, error) { 70 | for _, role := range *RoleList { 71 | if role.Title == title { 72 | return role.Content, nil 73 | } 74 | } 75 | return "", errors.New("role not found") 76 | } 77 | -------------------------------------------------------------------------------- /code/services/openai/audio.go: -------------------------------------------------------------------------------- 1 | package openai 2 | 3 | import ( 4 | "bytes" 5 | "fmt" 6 | "io" 7 | "mime/multipart" 8 | "os" 9 | ) 10 | 11 | type AudioToTextRequestBody struct { 12 | File string `json:"file"` 13 | Model string `json:"model"` 14 | ResponseFormat string `json:"response_format"` 15 | } 16 | 17 | type AudioToTextResponseBody struct { 18 | Text string `json:"text"` 19 | } 20 | 21 | func audioMultipartForm(request AudioToTextRequestBody, w *multipart.Writer) error { 22 | f, err := os.Open(request.File) 23 | if err != nil { 24 | return fmt.Errorf("opening audio file: %w", err) 25 | } 26 | 27 | fw, err := w.CreateFormFile("file", f.Name()) 28 | if err != nil { 29 | return fmt.Errorf("creating form file: %w", err) 30 | } 31 | 32 | if _, err = io.Copy(fw, f); err != nil { 33 | return fmt.Errorf("reading from opened audio file: %w", err) 34 | } 35 | 36 | fw, err = w.CreateFormField("model") 37 | if err != nil { 38 | return fmt.Errorf("creating form field: %w", err) 39 | } 40 | 41 | modelName := bytes.NewReader([]byte(request.Model)) 42 | if _, err = io.Copy(fw, modelName); err != nil { 43 | return fmt.Errorf("writing model name: %w", err) 44 | } 45 | w.Close() 46 | 47 | return nil 48 | } 49 | 50 | func (gpt *ChatGPT) AudioToText(audio string) (string, error) { 51 | requestBody := AudioToTextRequestBody{ 52 | File: audio, 53 | Model: "whisper-1", 54 | ResponseFormat: "text", 55 | } 56 | audioToTextResponseBody := &AudioToTextResponseBody{} 
57 | err := gpt.sendRequestWithBodyType(gpt.ApiUrl+"/v1/audio/transcriptions", 58 | "POST", formVoiceDataBody, requestBody, audioToTextResponseBody) 59 | //fmt.Println(audioToTextResponseBody) 60 | if err != nil { 61 | //fmt.Println(err) 62 | return "", err 63 | } 64 | 65 | return audioToTextResponseBody.Text, nil 66 | } 67 | -------------------------------------------------------------------------------- /code/services/openai/billing.go: -------------------------------------------------------------------------------- 1 | package openai 2 | 3 | import ( 4 | "fmt" 5 | "net/http" 6 | "time" 7 | ) 8 | 9 | //https://api.openai.com/dashboard/billing/credit_grants 10 | type Billing struct { 11 | Object string `json:"object"` 12 | TotalGranted float64 `json:"total_granted"` 13 | TotalUsed float64 `json:"total_used"` 14 | TotalAvailable float64 `json:"total_available"` 15 | Grants struct { 16 | Object string `json:"object"` 17 | Data []struct { 18 | Object string `json:"object"` 19 | ID string `json:"id"` 20 | GrantAmount float64 `json:"grant_amount"` 21 | UsedAmount float64 `json:"used_amount"` 22 | EffectiveAt float64 `json:"effective_at"` 23 | ExpiresAt float64 `json:"expires_at"` 24 | } `json:"data"` 25 | } `json:"grants"` 26 | } 27 | 28 | type BalanceResponse struct { 29 | TotalGranted float64 `json:"total_granted"` 30 | TotalUsed float64 `json:"total_used"` 31 | TotalAvailable float64 `json:"total_available"` 32 | EffectiveAt time.Time `json:"effective_at"` 33 | ExpiresAt time.Time `json:"expires_at"` 34 | } 35 | 36 | func (gpt *ChatGPT) GetBalance() (*BalanceResponse, error) { 37 | var data Billing 38 | err := gpt.sendRequestWithBodyType( 39 | gpt.ApiUrl+"/dashboard/billing/credit_grants", 40 | http.MethodGet, 41 | nilBody, 42 | nil, 43 | &data, 44 | ) 45 | if err != nil { 46 | return nil, fmt.Errorf("failed to get billing data: %v", err) 47 | } 48 | 49 | balance := &BalanceResponse{ 50 | TotalGranted: data.TotalGranted, 51 | TotalUsed: data.TotalUsed, 52 | TotalAvailable: data.TotalAvailable, 53 | ExpiresAt: time.Now(), 54 | EffectiveAt: time.Now(), 55 | } 56 | 57 | if len(data.Grants.Data) > 0 { 58 | balance.EffectiveAt = time.Unix(int64(data.Grants.Data[0].EffectiveAt), 0) 59 | balance.ExpiresAt = time.Unix(int64(data.Grants.Data[0].ExpiresAt), 0) 60 | } 61 | 62 | return balance, nil 63 | } 64 | -------------------------------------------------------------------------------- /code/handlers/init.go: -------------------------------------------------------------------------------- 1 | package handlers 2 | 3 | import ( 4 | "context" 5 | "start-feishubot/initialization" 6 | "start-feishubot/services/openai" 7 | 8 | larkcard "github.com/larksuite/oapi-sdk-go/v3/card" 9 | larkim "github.com/larksuite/oapi-sdk-go/v3/service/im/v1" 10 | ) 11 | 12 | type MessageHandlerInterface interface { 13 | msgReceivedHandler(ctx context.Context, event *larkim.P2MessageReceiveV1) error 14 | cardHandler(ctx context.Context, cardAction *larkcard.CardAction) (interface{}, error) 15 | } 16 | 17 | type HandlerType string 18 | 19 | const ( 20 | GroupHandler = "group" 21 | UserHandler = "personal" 22 | ) 23 | 24 | // handlers 所有消息类型类型的处理器 25 | var handlers MessageHandlerInterface 26 | 27 | func InitHandlers(gpt *openai.ChatGPT, config initialization.Config) { 28 | handlers = NewMessageHandler(gpt, config) 29 | } 30 | 31 | func Handler(ctx context.Context, event *larkim.P2MessageReceiveV1) error { 32 | return handlers.msgReceivedHandler(ctx, event) 33 | } 34 | 35 | func ReadHandler(ctx context.Context, event 
*larkim.P2MessageReadV1) error { 36 | _ = event.Event.Reader.ReaderId.OpenId 37 | //fmt.Printf("msg is read by : %v \n", *readerId) 38 | return nil 39 | } 40 | 41 | func CardHandler() func(ctx context.Context, 42 | cardAction *larkcard.CardAction) (interface{}, error) { 43 | return func(ctx context.Context, cardAction *larkcard.CardAction) (interface{}, error) { 44 | //handlerType := judgeCardType(cardAction) 45 | return handlers.cardHandler(ctx, cardAction) 46 | } 47 | } 48 | 49 | func judgeCardType(cardAction *larkcard.CardAction) HandlerType { 50 | actionValue := cardAction.Action.Value 51 | chatType := actionValue["chatType"] 52 | //fmt.Printf("chatType: %v", chatType) 53 | if chatType == "group" { 54 | return GroupHandler 55 | } 56 | if chatType == "personal" { 57 | return UserHandler 58 | } 59 | return "otherChat" 60 | } 61 | 62 | func judgeChatType(event *larkim.P2MessageReceiveV1) HandlerType { 63 | chatType := event.Event.Message.ChatType 64 | if *chatType == "group" { 65 | return GroupHandler 66 | } 67 | if *chatType == "p2p" { 68 | return UserHandler 69 | } 70 | return "otherChat" 71 | } 72 | -------------------------------------------------------------------------------- /code/initialization/gin.go: -------------------------------------------------------------------------------- 1 | package initialization 2 | 3 | import ( 4 | "crypto/tls" 5 | "crypto/x509" 6 | "fmt" 7 | "log" 8 | "net/http" 9 | "time" 10 | 11 | "github.com/gin-gonic/gin" 12 | ) 13 | 14 | func loadCertificate(config Config) (cert tls.Certificate, err error) { 15 | cert, err = tls.LoadX509KeyPair(config.CertFile, config.KeyFile) 16 | if err != nil { 17 | return cert, fmt.Errorf("failed to load certificate: %v", err) 18 | } 19 | 20 | // check certificate expiry 21 | if len(cert.Certificate) == 0 { 22 | return cert, fmt.Errorf("no certificates found in %s", config.CertFile) 23 | } 24 | parsedCert, err := x509.ParseCertificate(cert.Certificate[0]) 25 | if err != nil { 26 | return cert, fmt.Errorf("failed to parse certificate: %v", err) 27 | } 28 | cert.Leaf = parsedCert 29 | certExpiry := cert.Leaf.NotAfter 30 | if certExpiry.Before(time.Now()) { 31 | return cert, fmt.Errorf("certificate expired on %v", certExpiry) 32 | } 33 | 34 | return cert, nil 35 | } 36 | 37 | func startHTTPServer(config Config, r *gin.Engine) (err error) { 38 | log.Printf("http server started: http://localhost:%d/webhook/event\n", config.HttpPort) 39 | err = r.Run(fmt.Sprintf(":%d", config.HttpPort)) 40 | if err != nil { 41 | return fmt.Errorf("failed to start http server: %v", err) 42 | } 43 | return nil 44 | } 45 | func startHTTPSServer(config Config, r *gin.Engine) (err error) { 46 | cert, err := loadCertificate(config) 47 | if err != nil { 48 | return fmt.Errorf("failed to load certificate: %v", err) 49 | } 50 | server := &http.Server{ 51 | Addr: fmt.Sprintf(":%d", config.HttpsPort), 52 | Handler: r, 53 | TLSConfig: &tls.Config{ 54 | Certificates: []tls.Certificate{cert}, 55 | }, 56 | } 57 | fmt.Printf("https server started: https://localhost:%d/webhook/event\n", config.HttpsPort) 58 | err = server.ListenAndServeTLS("", "") 59 | if err != nil { 60 | return fmt.Errorf("failed to start https server: %v", err) 61 | } 62 | return nil 63 | } 64 | func StartServer(config Config, r *gin.Engine) (err error) { 65 | if config.UseHttps { 66 | err = startHTTPSServer(config, r) 67 | } else { 68 | err = startHTTPServer(config, r) 69 | } 70 | return err 71 | } 72 | -------------------------------------------------------------------------------- 
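A condensed usage sketch for StartServer above; the full wiring, including the Lark webhook routes, lives in code/main.go, and the comment about config loading is an assumption based on how main.go calls initialization.GetConfig():

package main

import (
	"log"

	"github.com/gin-gonic/gin"
	"start-feishubot/initialization"
)

func main() {
	// assumes the global config (config.yaml / environment variables)
	// has already been loaded by the initialization package
	config := initialization.GetConfig()

	r := gin.Default()
	r.GET("/ping", func(c *gin.Context) {
		c.JSON(200, gin.H{"message": "pong"})
	})

	// serves HTTP on HTTP_PORT, or HTTPS on HTTPS_PORT when USE_HTTPS is true
	if err := initialization.StartServer(*config, r); err != nil {
		log.Fatalf("failed to start server: %v", err)
	}
}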
/code/services/openai/gpt3.go: -------------------------------------------------------------------------------- 1 | package openai 2 | 3 | import ( 4 | "errors" 5 | ) 6 | 7 | const ( 8 | maxTokens = 2000 9 | temperature = 0.7 10 | ) 11 | 12 | type Messages struct { 13 | Role string `json:"role"` 14 | Content string `json:"content"` 15 | } 16 | 17 | // ChatGPTResponseBody response body 18 | type ChatGPTResponseBody struct { 19 | ID string `json:"id"` 20 | Object string `json:"object"` 21 | Created int `json:"created"` 22 | Model string `json:"model"` 23 | Choices []ChatGPTChoiceItem `json:"choices"` 24 | Usage map[string]interface{} `json:"usage"` 25 | } 26 | type ChatGPTChoiceItem struct { 27 | Message Messages `json:"message"` 28 | Index int `json:"index"` 29 | FinishReason string `json:"finish_reason"` 30 | } 31 | 32 | // ChatGPTRequestBody request body 33 | type ChatGPTRequestBody struct { 34 | Model string `json:"model"` 35 | Messages []Messages `json:"messages"` 36 | MaxTokens int `json:"max_tokens"` 37 | Temperature float32 `json:"temperature"` 38 | TopP int `json:"top_p"` 39 | FrequencyPenalty int `json:"frequency_penalty"` 40 | PresencePenalty int `json:"presence_penalty"` 41 | Stream bool `json:"stream" default:"false"` 42 | } 43 | 44 | func (gpt *ChatGPT) Completions(msg []Messages) (resp Messages, 45 | err error) { 46 | requestBody := ChatGPTRequestBody{ 47 | Model: gpt.ApiModel, 48 | Messages: msg, 49 | MaxTokens: maxTokens, 50 | Temperature: temperature, 51 | TopP: 1, 52 | FrequencyPenalty: 0, 53 | PresencePenalty: 0, 54 | } 55 | gptResponseBody := &ChatGPTResponseBody{} 56 | url := gpt.FullUrl("chat/completions") 57 | //fmt.Println(url) 58 | if url == "" { 59 | return resp, errors.New("无法获取openai请求地址") 60 | } 61 | err = gpt.sendRequestWithBodyType(url, "POST", jsonBody, requestBody, gptResponseBody) 62 | if err == nil && len(gptResponseBody.Choices) > 0 { 63 | resp = gptResponseBody.Choices[0].Message 64 | } else { 65 | resp = Messages{} 66 | err = errors.New("openai 请求失败") 67 | } 68 | return resp, err 69 | } 70 | -------------------------------------------------------------------------------- /code/handlers/card_role_action.go: -------------------------------------------------------------------------------- 1 | package handlers 2 | 3 | import ( 4 | "context" 5 | larkcard "github.com/larksuite/oapi-sdk-go/v3/card" 6 | "start-feishubot/initialization" 7 | "start-feishubot/services" 8 | "start-feishubot/services/openai" 9 | ) 10 | 11 | func NewRoleTagCardHandler(cardMsg CardMsg, 12 | m MessageHandler) CardHandlerFunc { 13 | return func(ctx context.Context, cardAction *larkcard.CardAction) (interface{}, error) { 14 | 15 | if cardMsg.Kind == RoleTagsChooseKind { 16 | newCard, err, done := CommonProcessRoleTag(cardMsg, cardAction, 17 | m.sessionCache) 18 | if done { 19 | return newCard, err 20 | } 21 | return nil, nil 22 | } 23 | return nil, ErrNextHandler 24 | } 25 | } 26 | 27 | func NewRoleCardHandler(cardMsg CardMsg, 28 | m MessageHandler) CardHandlerFunc { 29 | return func(ctx context.Context, cardAction *larkcard.CardAction) (interface{}, error) { 30 | 31 | if cardMsg.Kind == RoleChooseKind { 32 | newCard, err, done := CommonProcessRole(cardMsg, cardAction, 33 | m.sessionCache) 34 | if done { 35 | return newCard, err 36 | } 37 | return nil, nil 38 | } 39 | return nil, ErrNextHandler 40 | } 41 | } 42 | 43 | func CommonProcessRoleTag(msg CardMsg, cardAction *larkcard.CardAction, 44 | cache services.SessionServiceCacheInterface) (interface{}, 45 | error, bool) { 46 | option :=
cardAction.Action.Option 47 | //replyMsg(context.Background(), "已选择tag:"+option, 48 | // &msg.MsgId) 49 | roles := initialization.GetTitleListByTag(option) 50 | //fmt.Printf("roles: %s", roles) 51 | SendRoleListCard(context.Background(), &msg.SessionId, 52 | &msg.MsgId, option, *roles) 53 | return nil, nil, true 54 | } 55 | 56 | func CommonProcessRole(msg CardMsg, cardAction *larkcard.CardAction, 57 | cache services.SessionServiceCacheInterface) (interface{}, 58 | error, bool) { 59 | option := cardAction.Action.Option 60 | contentByTitle, err := initialization.GetFirstRoleContentByTitle(option) 61 | if err != nil { 62 | return nil, err, true 63 | } 64 | cache.Clear(msg.SessionId) 65 | systemMsg := append([]openai.Messages{}, openai.Messages{ 66 | Role: "system", Content: contentByTitle, 67 | }) 68 | cache.SetMsg(msg.SessionId, systemMsg) 69 | //pp.Println("systemMsg: ", systemMsg) 70 | sendSystemInstructionCard(context.Background(), &msg.SessionId, 71 | &msg.MsgId, contentByTitle) 72 | //replyMsg(context.Background(), "已选择角色:"+contentByTitle, 73 | // &msg.MsgId) 74 | return nil, nil, true 75 | } 76 | -------------------------------------------------------------------------------- /code/services/chatgpt/gpt3.go: -------------------------------------------------------------------------------- 1 | package chatgpt 2 | 3 | import ( 4 | "context" 5 | "errors" 6 | "fmt" 7 | "github.com/sashabaranov/go-openai" 8 | "io" 9 | "start-feishubot/initialization" 10 | customOpenai "start-feishubot/services/openai" 11 | ) 12 | 13 | type Messages struct { 14 | Role string `json:"role"` 15 | Content string `json:"content"` 16 | } 17 | 18 | type ChatGPT struct { 19 | config *initialization.Config 20 | } 21 | 22 | type Gpt3 interface { 23 | StreamChat() error 24 | StreamChatWithHistory() error 25 | } 26 | 27 | func NewGpt3(config *initialization.Config) *ChatGPT { 28 | return &ChatGPT{config: config} 29 | } 30 | 31 | func (c *ChatGPT) StreamChat(ctx context.Context, 32 | msg []customOpenai.Messages, 33 | responseStream chan string) error { 34 | //change msg type from Messages to openai.ChatCompletionMessage 35 | chatMsgs := make([]openai.ChatCompletionMessage, len(msg)) 36 | for i, m := range msg { 37 | chatMsgs[i] = openai.ChatCompletionMessage{ 38 | Role: m.Role, 39 | Content: m.Content, 40 | } 41 | } 42 | return c.StreamChatWithHistory(ctx, chatMsgs, 2000, 43 | responseStream) 44 | } 45 | 46 | func (c *ChatGPT) StreamChatWithHistory(ctx context.Context, msg []openai.ChatCompletionMessage, maxTokens int, 47 | responseStream chan string, 48 | ) error { 49 | config := openai.DefaultConfig(c.config.OpenaiApiKeys[0]) 50 | config.BaseURL = c.config.OpenaiApiUrl + "/v1" 51 | 52 | proxyClient, parseProxyError := customOpenai.GetProxyClient(c.config.HttpProxy) 53 | if parseProxyError != nil { 54 | return parseProxyError 55 | } 56 | config.HTTPClient = proxyClient 57 | 58 | client := openai.NewClientWithConfig(config) 59 | //pp.Printf("client: %v", client) 60 | req := openai.ChatCompletionRequest{ 61 | Model: c.config.OpenaiModel, 62 | Messages: msg, 63 | N: 1, 64 | Temperature: 0.7, 65 | MaxTokens: maxTokens, 66 | TopP: 1, 67 | //Moderation: true, 68 | //ModerationStop: true, 69 | } 70 | stream, err := client.CreateChatCompletionStream(ctx, req) 71 | if err != nil { 72 | return fmt.Errorf("CreateCompletionStream returned error: %v", err) 73 | } 74 | 75 | defer stream.Close() 76 | for { 77 | response, err := stream.Recv() 78 | if errors.Is(err, io.EOF) { 79 | //fmt.Println("Stream finished") 80 | return nil 81 | } 82
| if err != nil { 83 | fmt.Printf("Stream error: %v\n", err) 84 | return err 85 | } 86 | responseStream <- response.Choices[0].Delta.Content 87 | } 88 | return nil 89 | 90 | } 91 | -------------------------------------------------------------------------------- /code/utils/strings_test.go: -------------------------------------------------------------------------------- 1 | package utils 2 | 3 | import "testing" 4 | 5 | func TestEitherCutPrefix(t *testing.T) { 6 | type args struct { 7 | s string 8 | prefix []string 9 | } 10 | tests := []struct { 11 | name string 12 | args args 13 | want string 14 | want1 bool 15 | }{ 16 | { 17 | name: "Prefix match", 18 | args: args{ 19 | s: "/system bar", 20 | prefix: []string{"/system "}, 21 | }, 22 | want: "bar", 23 | want1: true, 24 | }, 25 | 26 | { 27 | name: "Prefix match", 28 | args: args{ 29 | s: "扮演 bar", 30 | prefix: []string{"扮演 "}, 31 | }, 32 | want: "bar", 33 | want1: true, 34 | }, 35 | } 36 | for _, tt := range tests { 37 | t.Run(tt.name, func(t *testing.T) { 38 | got, got1 := EitherCutPrefix(tt.args.s, tt.args.prefix...) 39 | if got != tt.want { 40 | t.Errorf("EitherCutPrefix() got = %v, want %v", got, tt.want) 41 | } 42 | if got1 != tt.want1 { 43 | t.Errorf("EitherCutPrefix() got1 = %v, want %v", got1, tt.want1) 44 | } 45 | }) 46 | } 47 | } 48 | 49 | func TestEitherTrimEqual(t *testing.T) { 50 | type args struct { 51 | s string 52 | prefix []string 53 | } 54 | tests := []struct { 55 | name string 56 | args args 57 | want string 58 | want1 bool 59 | }{ 60 | { 61 | name: "Prefix match", 62 | args: args{ 63 | s: "清除", 64 | prefix: []string{"清除"}, 65 | }, 66 | want: "", 67 | want1: true, 68 | }, 69 | { 70 | name: "Prefix match", 71 | args: args{ 72 | s: " /clear ", 73 | prefix: []string{"清除", "/clear"}, 74 | }, 75 | want: "", 76 | want1: true, 77 | }, 78 | { 79 | name: "Prefix match", 80 | args: args{ 81 | s: " 清除 ", 82 | prefix: []string{"清除", "/clear"}, 83 | }, 84 | want: "", 85 | want1: true, 86 | }, 87 | { 88 | name: "Prefix match", 89 | args: args{ 90 | s: " clear ", 91 | prefix: []string{"清除", "/clear"}, 92 | }, 93 | want: " clear ", 94 | want1: false, 95 | }, 96 | } 97 | 98 | for _, tt := range tests { 99 | t.Run(tt.name, func(t *testing.T) { 100 | got, got1 := EitherTrimEqual(tt.args.s, tt.args.prefix...) 
101 | if got != tt.want { 102 | t.Errorf("EitherTrimEqual() got = %v, want %v", got, tt.want) 103 | } 104 | if got1 != tt.want1 { 105 | t.Errorf("EitherTrimEqual() got1 = %v, want %v", got1, tt.want1) 106 | } 107 | }) 108 | } 109 | } 110 | -------------------------------------------------------------------------------- /code/services/loadbalancer/loadbalancer.go: -------------------------------------------------------------------------------- 1 | package loadbalancer 2 | 3 | import ( 4 | "fmt" 5 | "math/rand" 6 | "sync" 7 | "time" 8 | ) 9 | 10 | type API struct { 11 | Key string 12 | Times uint32 13 | Available bool 14 | } 15 | 16 | type LoadBalancer struct { 17 | apis []*API 18 | mu sync.RWMutex 19 | } 20 | 21 | func NewLoadBalancer(keys []string) *LoadBalancer { 22 | lb := &LoadBalancer{} 23 | for _, key := range keys { 24 | lb.apis = append(lb.apis, &API{Key: key}) 25 | } 26 | //SetAvailabilityForAll true 27 | lb.SetAvailabilityForAll(true) 28 | return lb 29 | } 30 | 31 | func (lb *LoadBalancer) GetAPI() *API { 32 | lb.mu.Lock() 33 | defer lb.mu.Unlock() 34 | 35 | var availableAPIs []*API 36 | for _, api := range lb.apis { 37 | if api.Available { 38 | availableAPIs = append(availableAPIs, api) 39 | } 40 | } 41 | if len(availableAPIs) == 0 { 42 | //randomly revive one 43 | fmt.Printf("No available API, revive one randomly\n") 44 | rand.Seed(time.Now().UnixNano()) 45 | index := rand.Intn(len(lb.apis)) 46 | lb.apis[index].Available = true 47 | return lb.apis[index] 48 | } 49 | 50 | selectedAPI := availableAPIs[0] 51 | minTimes := selectedAPI.Times 52 | for _, api := range availableAPIs { 53 | if api.Times < minTimes { 54 | selectedAPI = api 55 | minTimes = api.Times 56 | } 57 | } 58 | selectedAPI.Times++ 59 | //fmt.Printf("API Availability:\n") 60 | //for _, api := range lb.apis { 61 | // fmt.Printf("%s: %v\n", api.Key, api.Available) 62 | // fmt.Printf("%s: %d\n", api.Key, api.Times) 63 | //} 64 | 65 | return selectedAPI 66 | } 67 | func (lb *LoadBalancer) SetAvailability(key string, available bool) { 68 | lb.mu.Lock() 69 | defer lb.mu.Unlock() 70 | 71 | for _, api := range lb.apis { 72 | if api.Key == key { 73 | api.Available = available 74 | return 75 | } 76 | } 77 | } 78 | 79 | func (lb *LoadBalancer) RegisterAPI(key string) { 80 | lb.mu.Lock() 81 | defer lb.mu.Unlock() 82 | 83 | if lb.apis == nil { 84 | lb.apis = make([]*API, 0) 85 | } 86 | 87 | lb.apis = append(lb.apis, &API{Key: key}) 88 | } 89 | 90 | func (lb *LoadBalancer) SetAvailabilityForAll(available bool) { 91 | lb.mu.Lock() 92 | defer lb.mu.Unlock() 93 | 94 | for _, api := range lb.apis { 95 | api.Available = available 96 | } 97 | } 98 | 99 | func (lb *LoadBalancer) GetAPIs() []*API { 100 | lb.mu.RLock() 101 | defer lb.mu.RUnlock() 102 | 103 | apis := make([]*API, len(lb.apis)) 104 | copy(apis, lb.apis) 105 | return apis 106 | } 107 | -------------------------------------------------------------------------------- /code/handlers/common.go: -------------------------------------------------------------------------------- 1 | package handlers 2 | 3 | import ( 4 | "encoding/json" 5 | "fmt" 6 | "regexp" 7 | "strconv" 8 | "strings" 9 | ) 10 | 11 | // func sendCard 12 | func msgFilter(msg string) string { 13 | //replace the @mention (from '@' up to the next space) with '' 14 | regex := regexp.MustCompile(`@[^ ]*`) 15 | return regex.ReplaceAllString(msg, "") 16 | 17 | } 18 | func parseContent(content string) string { 19 | //"{\"text\":\"@_user_1 hahaha\"}", 20 | //only get text content hahaha 21 | var contentMap map[string]interface{} 22 | err := json.Unmarshal([]byte(content),
&contentMap) 23 | if err != nil { 24 | fmt.Println(err) 25 | } 26 | if contentMap["text"] == nil { 27 | return "" 28 | } 29 | text := contentMap["text"].(string) 30 | return msgFilter(text) 31 | } 32 | func processMessage(msg interface{}) (string, error) { 33 | msg = strings.TrimSpace(msg.(string)) 34 | msgB, err := json.Marshal(msg) 35 | if err != nil { 36 | return "", err 37 | } 38 | 39 | msgStr := string(msgB) 40 | 41 | if len(msgStr) >= 2 { 42 | msgStr = msgStr[1 : len(msgStr)-1] 43 | } 44 | return msgStr, nil 45 | } 46 | 47 | func processNewLine(msg string) string { 48 | return strings.Replace(msg, "\\n", ` 49 | `, -1) 50 | } 51 | 52 | func processQuote(msg string) string { 53 | return strings.Replace(msg, "\\\"", "\"", -1) 54 | } 55 | 56 | // 将字符中 \u003c 替换为 < 等等 57 | func processUnicode(msg string) string { 58 | regex := regexp.MustCompile(`\\u[0-9a-fA-F]{4}`) 59 | return regex.ReplaceAllStringFunc(msg, func(s string) string { 60 | r, _ := regexp.Compile(`\\u`) 61 | s = r.ReplaceAllString(s, "") 62 | i, _ := strconv.ParseInt(s, 16, 32) 63 | return string(rune(i)) 64 | }) 65 | } 66 | 67 | func cleanTextBlock(msg string) string { 68 | msg = processNewLine(msg) 69 | msg = processUnicode(msg) 70 | msg = processQuote(msg) 71 | return msg 72 | } 73 | 74 | func parseFileKey(content string) string { 75 | var contentMap map[string]interface{} 76 | err := json.Unmarshal([]byte(content), &contentMap) 77 | if err != nil { 78 | fmt.Println(err) 79 | return "" 80 | } 81 | if contentMap["file_key"] == nil { 82 | return "" 83 | } 84 | fileKey := contentMap["file_key"].(string) 85 | return fileKey 86 | } 87 | 88 | func parseImageKey(content string) string { 89 | var contentMap map[string]interface{} 90 | err := json.Unmarshal([]byte(content), &contentMap) 91 | if err != nil { 92 | fmt.Println(err) 93 | return "" 94 | } 95 | if contentMap["image_key"] == nil { 96 | return "" 97 | } 98 | imageKey := contentMap["image_key"].(string) 99 | return imageKey 100 | } 101 | -------------------------------------------------------------------------------- /code/go.mod: -------------------------------------------------------------------------------- 1 | module start-feishubot 2 | 3 | go 1.18 4 | 5 | require github.com/larksuite/oapi-sdk-go/v3 v3.0.14 6 | 7 | require ( 8 | github.com/duke-git/lancet/v2 v2.1.17 9 | github.com/gin-gonic/gin v1.8.2 10 | github.com/google/uuid v1.3.0 11 | github.com/k0kubun/pp/v3 v3.2.0 12 | github.com/larksuite/oapi-sdk-gin v1.0.0 13 | github.com/pandodao/tokenizer-go v0.2.0 14 | github.com/patrickmn/go-cache v2.1.0+incompatible 15 | github.com/pion/opus v0.0.0-20230123082803-1052c3e89e58 16 | github.com/sashabaranov/go-openai v1.7.0 17 | github.com/spf13/pflag v1.0.5 18 | github.com/spf13/viper v1.14.0 19 | gopkg.in/yaml.v2 v2.4.0 20 | ) 21 | 22 | require ( 23 | github.com/dlclark/regexp2 v1.8.1 // indirect 24 | github.com/dop251/goja v0.0.0-20230304130813-e2f543bf4b4c // indirect 25 | github.com/dop251/goja_nodejs v0.0.0-20230226152057-060fa99b809f // indirect 26 | github.com/fsnotify/fsnotify v1.6.0 // indirect 27 | github.com/gin-contrib/sse v0.1.0 // indirect 28 | github.com/go-playground/locales v0.14.1 // indirect 29 | github.com/go-playground/universal-translator v0.18.0 // indirect 30 | github.com/go-playground/validator/v10 v10.11.1 // indirect 31 | github.com/go-sourcemap/sourcemap v2.1.3+incompatible // indirect 32 | github.com/goccy/go-json v0.10.0 // indirect 33 | github.com/google/pprof v0.0.0-20230309165930-d61513b1440d // indirect 34 | github.com/hashicorp/hcl 
v1.0.0 // indirect 35 | github.com/json-iterator/go v1.1.12 // indirect 36 | github.com/leodido/go-urn v1.2.1 // indirect 37 | github.com/magiconair/properties v1.8.7 // indirect 38 | github.com/mattn/go-colorable v0.1.13 // indirect 39 | github.com/mattn/go-isatty v0.0.17 // indirect 40 | github.com/mitchellh/mapstructure v1.5.0 // indirect 41 | github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect 42 | github.com/modern-go/reflect2 v1.0.2 // indirect 43 | github.com/pelletier/go-toml v1.9.5 // indirect 44 | github.com/pelletier/go-toml/v2 v2.0.6 // indirect 45 | github.com/spf13/afero v1.9.3 // indirect 46 | github.com/spf13/cast v1.5.0 // indirect 47 | github.com/spf13/jwalterweatherman v1.1.0 // indirect 48 | github.com/subosito/gotenv v1.4.1 // indirect 49 | github.com/ugorji/go/codec v1.2.8 // indirect 50 | golang.org/x/crypto v0.5.0 // indirect 51 | golang.org/x/exp v0.0.0-20221208152030-732eee02a75a // indirect 52 | golang.org/x/net v0.5.0 // indirect 53 | golang.org/x/sys v0.5.0 // indirect 54 | golang.org/x/text v0.8.0 // indirect 55 | google.golang.org/protobuf v1.28.1 // indirect 56 | gopkg.in/ini.v1 v1.67.0 // indirect 57 | gopkg.in/natefinch/lumberjack.v2 v2.2.1 // indirect 58 | gopkg.in/yaml.v3 v3.0.1 // indirect 59 | ) 60 | -------------------------------------------------------------------------------- /code/main.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "context" 5 | "encoding/json" 6 | "fmt" 7 | larkcard "github.com/larksuite/oapi-sdk-go/v3/card" 8 | larkim "github.com/larksuite/oapi-sdk-go/v3/service/im/v1" 9 | "gopkg.in/natefinch/lumberjack.v2" 10 | "io" 11 | "log" 12 | "os" 13 | "start-feishubot/handlers" 14 | "start-feishubot/initialization" 15 | "start-feishubot/services/openai" 16 | "start-feishubot/utils" 17 | 18 | "github.com/gin-gonic/gin" 19 | "github.com/spf13/pflag" 20 | 21 | sdkginext "github.com/larksuite/oapi-sdk-gin" 22 | 23 | "github.com/larksuite/oapi-sdk-go/v3/event/dispatcher" 24 | ) 25 | 26 | func main() { 27 | initialization.InitRoleList() 28 | pflag.Parse() 29 | globalConfig := initialization.GetConfig() 30 | 31 | // 打印一下实际读取到的配置 32 | globalConfigPrettyString, _ := json.MarshalIndent(globalConfig, "", " ") 33 | log.Println(string(globalConfigPrettyString)) 34 | 35 | initialization.LoadLarkClient(*globalConfig) 36 | gpt := openai.NewChatGPT(*globalConfig) 37 | handlers.InitHandlers(gpt, *globalConfig) 38 | 39 | if globalConfig.EnableLog { 40 | logger := enableLog() 41 | defer utils.CloseLogger(logger) 42 | } 43 | 44 | eventHandler := dispatcher.NewEventDispatcher( 45 | globalConfig.FeishuAppVerificationToken, globalConfig.FeishuAppEncryptKey). 46 | OnP2MessageReceiveV1(handlers.Handler). 
47 | OnP2MessageReadV1(func(ctx context.Context, event *larkim.P2MessageReadV1) error { 48 | return handlers.ReadHandler(ctx, event) 49 | }) 50 | 51 | cardHandler := larkcard.NewCardActionHandler( 52 | globalConfig.FeishuAppVerificationToken, globalConfig.FeishuAppEncryptKey, 53 | handlers.CardHandler()) 54 | 55 | r := gin.Default() 56 | r.GET("/ping", func(c *gin.Context) { 57 | c.JSON(200, gin.H{ 58 | "message": "pong", 59 | }) 60 | }) 61 | r.POST("/webhook/event", 62 | sdkginext.NewEventHandlerFunc(eventHandler)) 63 | r.POST("/webhook/card", 64 | sdkginext.NewCardActionHandlerFunc( 65 | cardHandler)) 66 | 67 | err := initialization.StartServer(*globalConfig, r) 68 | if err != nil { 69 | log.Fatalf("failed to start server: %v", err) 70 | } 71 | 72 | } 73 | 74 | func enableLog() *lumberjack.Logger { 75 | // Set up the logger 76 | var logger *lumberjack.Logger 77 | 78 | logger = &lumberjack.Logger{ 79 | Filename: "logs/app.log", 80 | MaxSize: 100, // megabytes 81 | MaxAge: 365 * 10, // days 82 | } 83 | 84 | fmt.Printf("logger %T\n", logger) 85 | 86 | // Set up the logger to write to both file and console 87 | log.SetOutput(io.MultiWriter(logger, os.Stdout)) 88 | log.SetFlags(log.Ldate | log.Ltime) 89 | 90 | // Write some log messages 91 | log.Println("Starting application...") 92 | 93 | return logger 94 | } 95 | -------------------------------------------------------------------------------- /code/utils/audio/wav.go: -------------------------------------------------------------------------------- 1 | package audio 2 | 3 | import ( 4 | "encoding/binary" 5 | "io" 6 | ) 7 | 8 | type Encoder struct { 9 | Output io.WriteSeeker 10 | SampleRate int 11 | BitDepth int 12 | totalBytes uint32 13 | isHeaderWritten bool 14 | } 15 | 16 | func (e *Encoder) WriteHeader() error { 17 | if err := writeLe(e.Output, []byte("RIFF")); err != nil { 18 | return err 19 | } 20 | 21 | if err := writeLe(e.Output, uint32(0)); err != nil { // Placeholder for file size 22 | return err 23 | } 24 | 25 | if err := writeLe(e.Output, []byte("WAVE")); err != nil { 26 | return err 27 | } 28 | 29 | if err := writeLe(e.Output, []byte("fmt ")); err != nil { 30 | return err 31 | } 32 | if err := writeLe(e.Output, uint32(16)); err != nil { 33 | return err 34 | } 35 | 36 | if err := writeLe(e.Output, uint16(1)); err != nil { // Audio format: PCM 37 | return err 38 | } 39 | if err := writeLe(e.Output, uint16(1)); err != nil { // Number of channels: 1 (mono) 40 | return err 41 | } 42 | if err := writeLe(e.Output, uint32(e.SampleRate)); err != nil { 43 | return err 44 | } 45 | 46 | if err := writeLe(e.Output, uint32(e.SampleRate*e.BitDepth/8)); err != nil { 47 | return err 48 | } 49 | 50 | if err := writeLe(e.Output, uint16(e.BitDepth/8)); err != nil { 51 | return err 52 | } 53 | if err := writeLe(e.Output, uint16(e.BitDepth)); err != nil { 54 | return err 55 | } 56 | 57 | if err := writeLe(e.Output, []byte("data")); err != nil { 58 | return err 59 | } 60 | 61 | if err := writeLe(e.Output, uint32(0)); err != nil { //Placeholder for data size 62 | return err 63 | } 64 | e.isHeaderWritten = true 65 | return nil 66 | } 67 | 68 | func writeLe[T []byte | uint32 | uint16 | uint8](w io.Writer, data T) error { 69 | return binary.Write(w, binary.LittleEndian, data) 70 | } 71 | 72 | func (e *Encoder) Write(data []byte) error { 73 | if !e.isHeaderWritten { 74 | e.WriteHeader() 75 | } 76 | n, err := e.Output.Write(data) 77 | if err != nil { 78 | return err 79 | } 80 | e.totalBytes += uint32(n) 81 | return nil 82 | } 83 | 84 | func (e *Encoder) 
Close() error { 85 | if _, err := e.Output.Seek(4, io.SeekStart); err != nil { 86 | return err 87 | } 88 | if err := binary.Write(e.Output, binary.LittleEndian, uint32(36+e.totalBytes)); err != nil { 89 | return err 90 | } 91 | if _, err := e.Output.Seek(40, io.SeekStart); err != nil { 92 | return err 93 | } 94 | if err := binary.Write(e.Output, binary.LittleEndian, e.totalBytes); err != nil { 95 | return err 96 | } 97 | return nil 98 | } 99 | 100 | func NewEncoder(w io.WriteSeeker, sampleRate int, bitDepth int) *Encoder { 101 | return &Encoder{ 102 | SampleRate: sampleRate, 103 | Output: w, 104 | BitDepth: bitDepth, 105 | isHeaderWritten: false, 106 | } 107 | } 108 | -------------------------------------------------------------------------------- /code/handlers/card_pic_action.go: -------------------------------------------------------------------------------- 1 | package handlers 2 | 3 | import ( 4 | "context" 5 | larkcard "github.com/larksuite/oapi-sdk-go/v3/card" 6 | "start-feishubot/services" 7 | ) 8 | 9 | func NewPicResolutionHandler(cardMsg CardMsg, m MessageHandler) CardHandlerFunc { 10 | return func(ctx context.Context, cardAction *larkcard.CardAction) (interface{}, error) { 11 | if cardMsg.Kind == PicResolutionKind { 12 | CommonProcessPicResolution(cardMsg, cardAction, m.sessionCache) 13 | return nil, nil 14 | } 15 | return nil, ErrNextHandler 16 | } 17 | } 18 | 19 | func NewPicModeChangeHandler(cardMsg CardMsg, m MessageHandler) CardHandlerFunc { 20 | return func(ctx context.Context, cardAction *larkcard.CardAction) (interface{}, error) { 21 | if cardMsg.Kind == PicModeChangeKind { 22 | newCard, err, done := CommonProcessPicModeChange(cardMsg, m.sessionCache) 23 | if done { 24 | return newCard, err 25 | } 26 | return nil, nil 27 | } 28 | return nil, ErrNextHandler 29 | } 30 | } 31 | func NewPicTextMoreHandler(cardMsg CardMsg, m MessageHandler) CardHandlerFunc { 32 | return func(ctx context.Context, cardAction *larkcard.CardAction) (interface{}, error) { 33 | if cardMsg.Kind == PicTextMoreKind { 34 | go func() { 35 | m.CommonProcessPicMore(cardMsg) 36 | }() 37 | return nil, nil 38 | } 39 | return nil, ErrNextHandler 40 | } 41 | } 42 | 43 | func CommonProcessPicResolution(msg CardMsg, 44 | cardAction *larkcard.CardAction, 45 | cache services.SessionServiceCacheInterface) { 46 | option := cardAction.Action.Option 47 | //fmt.Println(larkcore.Prettify(msg)) 48 | cache.SetPicResolution(msg.SessionId, services.Resolution(option)) 49 | //send text 50 | replyMsg(context.Background(), "已更新图片分辨率为"+option, 51 | &msg.MsgId) 52 | } 53 | 54 | func (m MessageHandler) CommonProcessPicMore(msg CardMsg) { 55 | resolution := m.sessionCache.GetPicResolution(msg.SessionId) 56 | //fmt.Println("resolution: ", resolution) 57 | //fmt.Println("msg: ", msg) 58 | question := msg.Value.(string) 59 | bs64, _ := m.gpt.GenerateOneImage(question, resolution) 60 | replayImageCardByBase64(context.Background(), bs64, &msg.MsgId, 61 | &msg.SessionId, question) 62 | } 63 | 64 | func CommonProcessPicModeChange(cardMsg CardMsg, 65 | session services.SessionServiceCacheInterface) ( 66 | interface{}, error, bool) { 67 | if cardMsg.Value == "1" { 68 | 69 | sessionId := cardMsg.SessionId 70 | session.Clear(sessionId) 71 | session.SetMode(sessionId, 72 | services.ModePicCreate) 73 | session.SetPicResolution(sessionId, 74 | services.Resolution256) 75 | 76 | newCard, _ := 77 | newSendCard( 78 | withHeader("🖼️ 已进入图片创作模式", larkcard.TemplateBlue), 79 | withPicResolutionBtn(&sessionId), 80 | withNote("提醒:回复文本或图片,让AI生成相关的图片。")) 81 
| return newCard, nil, true 82 | } 83 | if cardMsg.Value == "0" { 84 | newCard, _ := newSendCard( 85 | withHeader("️🎒 机器人提醒", larkcard.TemplateGreen), 86 | withMainMd("依旧保留此话题的上下文信息"), 87 | withNote("我们可以继续探讨这个话题,期待和您聊天。如果您有其他问题或者想要讨论的话题,请告诉我哦"), 88 | ) 89 | return newCard, nil, true 90 | } 91 | return nil, nil, false 92 | } 93 | -------------------------------------------------------------------------------- /code/services/openai/gpt3_test.go: -------------------------------------------------------------------------------- 1 | package openai 2 | 3 | import ( 4 | "fmt" 5 | "start-feishubot/initialization" 6 | "testing" 7 | ) 8 | 9 | func TestCompletions(t *testing.T) { 10 | config := initialization.LoadConfig("../../config.yaml") 11 | 12 | msgs := []Messages{ 13 | {Role: "system", Content: "你是一个专业的翻译官,负责中英文翻译。"}, 14 | {Role: "user", Content: "翻译这段话: The assistant messages help store prior responses. They can also be written by a developer to help give examples of desired behavior."}, 15 | } 16 | 17 | gpt := NewChatGPT(*config) 18 | 19 | resp, err := gpt.Completions(msgs) 20 | if err != nil { 21 | t.Errorf("TestCompletions failed with error: %v", err) 22 | } 23 | 24 | fmt.Println(resp.Content, resp.Role) 25 | } 26 | 27 | func TestGenerateOneImage(t *testing.T) { 28 | config := initialization.LoadConfig("../../config.yaml") 29 | gpt := NewChatGPT(*config) 30 | prompt := "a red apple" 31 | size := "256x256" 32 | imageURL, err := gpt.GenerateOneImage(prompt, size) 33 | if err != nil { 34 | t.Errorf("TestGenerateOneImage failed with error: %v", err) 35 | } 36 | if imageURL == "" { 37 | t.Errorf("TestGenerateOneImage returned empty imageURL") 38 | } 39 | } 40 | 41 | func TestAudioToText(t *testing.T) { 42 | config := initialization.LoadConfig("../../config.yaml") 43 | gpt := NewChatGPT(*config) 44 | audio := "./test_file/test.wav" 45 | text, err := gpt.AudioToText(audio) 46 | if err != nil { 47 | t.Errorf("TestAudioToText failed with error: %v", err) 48 | } 49 | fmt.Printf("TestAudioToText returned text: %s \n", text) 50 | if text == "" { 51 | t.Errorf("TestAudioToText returned empty text") 52 | } 53 | 54 | } 55 | 56 | func TestVariateOneImage(t *testing.T) { 57 | config := initialization.LoadConfig("../../config.yaml") 58 | gpt := NewChatGPT(*config) 59 | image := "./test_file/img.png" 60 | size := "256x256" 61 | //compressionType, err := GetImageCompressionType(image) 62 | //if err != nil { 63 | // return 64 | //} 65 | //fmt.Println("compressionType: ", compressionType) 66 | ConvertToRGBA(image, image) 67 | err := VerifyPngs([]string{image}) 68 | if err != nil { 69 | t.Errorf("TestVariateOneImage failed with error: %v", err) 70 | return 71 | } 72 | 73 | imageBs64, err := gpt.GenerateOneImageVariation(image, size) 74 | if err != nil { 75 | t.Errorf("TestVariateOneImage failed with error: %v", err) 76 | } 77 | //fmt.Printf("TestVariateOneImage returned imageBs64: %s \n", imageBs64) 78 | if imageBs64 == "" { 79 | t.Errorf("TestVariateOneImage returned empty imageURL") 80 | } 81 | } 82 | 83 | func TestVariateOneImageWithJpg(t *testing.T) { 84 | config := initialization.LoadConfig("../../config.yaml") 85 | gpt := NewChatGPT(*config) 86 | image := "./test_file/test.jpg" 87 | size := "256x256" 88 | compressionType, err := GetImageCompressionType(image) 89 | if err != nil { 90 | return 91 | } 92 | fmt.Println("compressionType: ", compressionType) 93 | //ConvertJPGtoPNG(image) 94 | ConvertToRGBA(image, image) 95 | err = VerifyPngs([]string{image}) 96 | if err != nil { 97 | t.Errorf("TestVariateOneImage 
failed with error: %v", err) 98 | return 99 | } 100 | 101 | imageBs64, err := gpt.GenerateOneImageVariation(image, size) 102 | if err != nil { 103 | t.Errorf("TestVariateOneImage failed with error: %v", err) 104 | } 105 | fmt.Printf("TestVariateOneImage returned imageBs64: %s \n", imageBs64) 106 | if imageBs64 == "" { 107 | t.Errorf("TestVariateOneImage returned empty imageURL") 108 | } 109 | } 110 | 111 | // 余额接口已经被废弃 112 | func TestChatGPT_GetBalance(t *testing.T) { 113 | config := initialization.LoadConfig("../../config.yaml") 114 | gpt := NewChatGPT(*config) 115 | balance, err := gpt.GetBalance() 116 | if err != nil { 117 | t.Errorf("TestChatGPT_GetBalance failed with error: %v", err) 118 | } 119 | fmt.Println("balance: ", balance) 120 | } 121 | -------------------------------------------------------------------------------- /code/handlers/handler.go: -------------------------------------------------------------------------------- 1 | package handlers 2 | 3 | import ( 4 | "context" 5 | "fmt" 6 | "start-feishubot/initialization" 7 | "start-feishubot/services" 8 | "start-feishubot/services/chatgpt" 9 | "start-feishubot/services/openai" 10 | "strings" 11 | 12 | larkcard "github.com/larksuite/oapi-sdk-go/v3/card" 13 | 14 | larkim "github.com/larksuite/oapi-sdk-go/v3/service/im/v1" 15 | ) 16 | 17 | // 责任链 18 | func chain(data *ActionInfo, actions ...Action) bool { 19 | for _, v := range actions { 20 | if !v.Execute(data) { 21 | return false 22 | } 23 | } 24 | return true 25 | } 26 | 27 | type MessageHandler struct { 28 | sessionCache services.SessionServiceCacheInterface 29 | msgCache services.MsgCacheInterface 30 | gpt *openai.ChatGPT 31 | config initialization.Config 32 | } 33 | 34 | func (m MessageHandler) cardHandler(ctx context.Context, 35 | cardAction *larkcard.CardAction) (interface{}, error) { 36 | messageHandler := NewCardHandler(m) 37 | return messageHandler(ctx, cardAction) 38 | } 39 | 40 | func judgeMsgType(event *larkim.P2MessageReceiveV1) (string, error) { 41 | msgType := event.Event.Message.MessageType 42 | 43 | switch *msgType { 44 | case "text", "image", "audio": 45 | return *msgType, nil 46 | default: 47 | return "", fmt.Errorf("unknown message type: %v", *msgType) 48 | } 49 | 50 | } 51 | 52 | func (m MessageHandler) msgReceivedHandler(ctx context.Context, event *larkim.P2MessageReceiveV1) error { 53 | handlerType := judgeChatType(event) 54 | if handlerType == "otherChat" { 55 | fmt.Println("unknown chat type") 56 | return nil 57 | } 58 | //fmt.Println(larkcore.Prettify(event.Event.Message)) 59 | 60 | msgType, err := judgeMsgType(event) 61 | if err != nil { 62 | fmt.Printf("error getting message type: %v\n", err) 63 | return nil 64 | } 65 | 66 | content := event.Event.Message.Content 67 | msgId := event.Event.Message.MessageId 68 | rootId := event.Event.Message.RootId 69 | chatId := event.Event.Message.ChatId 70 | mention := event.Event.Message.Mentions 71 | 72 | sessionId := rootId 73 | if sessionId == nil || *sessionId == "" { 74 | sessionId = msgId 75 | } 76 | msgInfo := MsgInfo{ 77 | handlerType: handlerType, 78 | msgType: msgType, 79 | msgId: msgId, 80 | userId: *event.Event.Sender.SenderId.UserId, 81 | chatId: chatId, 82 | qParsed: strings.Trim(parseContent(*content), " "), 83 | fileKey: parseFileKey(*content), 84 | imageKey: parseImageKey(*content), 85 | sessionId: sessionId, 86 | mention: mention, 87 | } 88 | data := &ActionInfo{ 89 | ctx: &ctx, 90 | handler: &m, 91 | info: &msgInfo, 92 | } 93 | actions := []Action{ 94 | &ProcessedUniqueAction{}, //避免重复处理 95 | 
&ProcessMentionAction{}, //判断机器人是否应该被调用 96 | &EmptyAction{}, //空消息处理 97 | &ClearAction{}, //清除消息处理 98 | &RoleListAction{}, //角色列表处理 99 | &HelpAction{}, //帮助处理 100 | &RolePlayAction{}, //角色扮演处理 101 | &MessageAction{ 102 | chatgpt: chatgpt.NewGpt3(&m.config), 103 | }, //消息处理 104 | } 105 | 106 | chain(data, actions...) 107 | return nil 108 | } 109 | 110 | var _ MessageHandlerInterface = (*MessageHandler)(nil) 111 | 112 | func NewMessageHandler(gpt *openai.ChatGPT, 113 | config initialization.Config) MessageHandlerInterface { 114 | return &MessageHandler{ 115 | sessionCache: services.GetSessionCache(), 116 | msgCache: services.GetMsgCache(), 117 | gpt: gpt, 118 | config: config, 119 | } 120 | } 121 | 122 | func (m MessageHandler) judgeIfMentionMe(mention []*larkim. 123 | MentionEvent) bool { 124 | if len(mention) != 1 { 125 | return false 126 | } 127 | return *mention[0].Name == m.config.FeishuBotName 128 | } 129 | 130 | func AzureModeCheck(a *ActionInfo) bool { 131 | if a.handler.config.AzureOn { 132 | //sendMsg(*a.ctx, "Azure Openai 接口下,暂不支持此功能", a.info.chatId) 133 | return false 134 | } 135 | return true 136 | } 137 | -------------------------------------------------------------------------------- /code/handlers/event_common_action.go: -------------------------------------------------------------------------------- 1 | package handlers 2 | 3 | import ( 4 | "context" 5 | "fmt" 6 | larkim "github.com/larksuite/oapi-sdk-go/v3/service/im/v1" 7 | "start-feishubot/initialization" 8 | "start-feishubot/services/openai" 9 | "start-feishubot/utils" 10 | ) 11 | 12 | type MsgInfo struct { 13 | handlerType HandlerType 14 | msgType string 15 | msgId *string 16 | chatId *string 17 | userId string 18 | qParsed string 19 | fileKey string 20 | imageKey string 21 | sessionId *string 22 | mention []*larkim.MentionEvent 23 | } 24 | type ActionInfo struct { 25 | handler *MessageHandler 26 | ctx *context.Context 27 | info *MsgInfo 28 | } 29 | 30 | type Action interface { 31 | Execute(a *ActionInfo) bool 32 | } 33 | 34 | type ProcessedUniqueAction struct { //消息唯一性 35 | } 36 | 37 | func (*ProcessedUniqueAction) Execute(a *ActionInfo) bool { 38 | if a.handler.msgCache.IfProcessed(*a.info.msgId) { 39 | return false 40 | } 41 | a.handler.msgCache.TagProcessed(*a.info.msgId) 42 | return true 43 | } 44 | 45 | type ProcessMentionAction struct { //是否机器人应该处理 46 | } 47 | 48 | func (*ProcessMentionAction) Execute(a *ActionInfo) bool { 49 | // 私聊直接过 50 | if a.info.handlerType == UserHandler { 51 | return true 52 | } 53 | // 群聊判断是否提到机器人 54 | if a.info.handlerType == GroupHandler { 55 | if a.handler.judgeIfMentionMe(a.info.mention) { 56 | return true 57 | } 58 | return false 59 | } 60 | return false 61 | } 62 | 63 | type EmptyAction struct { /*空消息*/ 64 | } 65 | 66 | func (*EmptyAction) Execute(a *ActionInfo) bool { 67 | if len(a.info.qParsed) == 0 { 68 | sendMsg(*a.ctx, "🤖️:你想知道什么呢~", a.info.chatId) 69 | fmt.Println("msgId", *a.info.msgId, 70 | "message.text is empty") 71 | return false 72 | } 73 | return true 74 | } 75 | 76 | type ClearAction struct { /*清除消息*/ 77 | } 78 | 79 | func (*ClearAction) Execute(a *ActionInfo) bool { 80 | if _, foundClear := utils.EitherTrimEqual(a.info.qParsed, 81 | "/clear", "清除"); foundClear { 82 | sendClearCacheCheckCard(*a.ctx, a.info.sessionId, 83 | a.info.msgId) 84 | return false 85 | } 86 | return true 87 | } 88 | 89 | type RolePlayAction struct { /*角色扮演*/ 90 | } 91 | 92 | func (*RolePlayAction) Execute(a *ActionInfo) bool { 93 | if system, foundSystem := utils.EitherCutPrefix(a.info.qParsed, 
94 | "/system ", "角色扮演 "); foundSystem { 95 | a.handler.sessionCache.Clear(*a.info.sessionId) 96 | systemMsg := append([]openai.Messages{}, openai.Messages{ 97 | Role: "system", Content: system, 98 | }) 99 | a.handler.sessionCache.SetMsg(*a.info.sessionId, systemMsg) 100 | sendSystemInstructionCard(*a.ctx, a.info.sessionId, 101 | a.info.msgId, system) 102 | return false 103 | } 104 | return true 105 | } 106 | 107 | type HelpAction struct { /*帮助*/ 108 | } 109 | 110 | func (*HelpAction) Execute(a *ActionInfo) bool { 111 | if _, foundHelp := utils.EitherTrimEqual(a.info.qParsed, "/help", 112 | "帮助"); foundHelp { 113 | sendHelpCard(*a.ctx, a.info.sessionId, a.info.msgId) 114 | return false 115 | } 116 | return true 117 | } 118 | 119 | type BalanceAction struct { /*余额*/ 120 | } 121 | 122 | func (*BalanceAction) Execute(a *ActionInfo) bool { 123 | if _, foundBalance := utils.EitherTrimEqual(a.info.qParsed, 124 | "/balance", "余额"); foundBalance { 125 | balanceResp, err := a.handler.gpt.GetBalance() 126 | if err != nil { 127 | replyMsg(*a.ctx, "查询余额失败,请稍后再试", a.info.msgId) 128 | return false 129 | } 130 | sendBalanceCard(*a.ctx, a.info.sessionId, *balanceResp) 131 | return false 132 | } 133 | return true 134 | } 135 | 136 | type RoleListAction struct { /*角色列表*/ 137 | } 138 | 139 | func (*RoleListAction) Execute(a *ActionInfo) bool { 140 | if _, foundSystem := utils.EitherTrimEqual(a.info.qParsed, 141 | "/roles", "角色列表"); foundSystem { 142 | //a.handler.sessionCache.Clear(*a.info.sessionId) 143 | //systemMsg := append([]openai.Messages{}, openai.Messages{ 144 | // Role: "system", Content: system, 145 | //}) 146 | //a.handler.sessionCache.SetMsg(*a.info.sessionId, systemMsg) 147 | //sendSystemInstructionCard(*a.ctx, a.info.sessionId, 148 | // a.info.msgId, system) 149 | tags := initialization.GetAllUniqueTags() 150 | SendRoleTagsCard(*a.ctx, a.info.sessionId, a.info.msgId, *tags) 151 | return false 152 | } 153 | return true 154 | } 155 | -------------------------------------------------------------------------------- /code/services/sessionCache.go: -------------------------------------------------------------------------------- 1 | package services 2 | 3 | import ( 4 | "encoding/json" 5 | "start-feishubot/services/openai" 6 | "time" 7 | 8 | "github.com/patrickmn/go-cache" 9 | ) 10 | 11 | type SessionMode string 12 | type SessionService struct { 13 | cache *cache.Cache 14 | } 15 | type PicSetting struct { 16 | resolution Resolution 17 | } 18 | type Resolution string 19 | 20 | type SessionMeta struct { 21 | Mode SessionMode `json:"mode"` 22 | Msg []openai.Messages `json:"msg,omitempty"` 23 | PicSetting PicSetting `json:"pic_setting,omitempty"` 24 | } 25 | 26 | const ( 27 | Resolution256 Resolution = "256x256" 28 | Resolution512 Resolution = "512x512" 29 | Resolution1024 Resolution = "1024x1024" 30 | ) 31 | const ( 32 | ModePicCreate SessionMode = "pic_create" 33 | ModePicVary SessionMode = "pic_vary" 34 | ModeGPT SessionMode = "gpt" 35 | ) 36 | 37 | type SessionServiceCacheInterface interface { 38 | GetMsg(sessionId string) []openai.Messages 39 | SetMsg(sessionId string, msg []openai.Messages) 40 | SetMode(sessionId string, mode SessionMode) 41 | GetMode(sessionId string) SessionMode 42 | SetPicResolution(sessionId string, resolution Resolution) 43 | GetPicResolution(sessionId string) string 44 | Clear(sessionId string) 45 | } 46 | 47 | var sessionServices *SessionService 48 | 49 | func (s *SessionService) GetMode(sessionId string) SessionMode { 50 | // Get the session mode from the cache. 
51 | sessionContext, ok := s.cache.Get(sessionId) 52 | if !ok { 53 | return ModeGPT 54 | } 55 | sessionMeta := sessionContext.(*SessionMeta) 56 | return sessionMeta.Mode 57 | } 58 | 59 | func (s *SessionService) SetMode(sessionId string, mode SessionMode) { 60 | maxCacheTime := time.Hour * 12 61 | sessionContext, ok := s.cache.Get(sessionId) 62 | if !ok { 63 | sessionMeta := &SessionMeta{Mode: mode} 64 | s.cache.Set(sessionId, sessionMeta, maxCacheTime) 65 | return 66 | } 67 | sessionMeta := sessionContext.(*SessionMeta) 68 | sessionMeta.Mode = mode 69 | s.cache.Set(sessionId, sessionMeta, maxCacheTime) 70 | } 71 | 72 | func (s *SessionService) GetMsg(sessionId string) (msg []openai.Messages) { 73 | sessionContext, ok := s.cache.Get(sessionId) 74 | if !ok { 75 | return nil 76 | } 77 | sessionMeta := sessionContext.(*SessionMeta) 78 | return sessionMeta.Msg 79 | } 80 | 81 | func (s *SessionService) SetMsg(sessionId string, msg []openai.Messages) { 82 | maxLength := 4096 83 | maxCacheTime := time.Hour * 12 84 | 85 | //限制对话上下文长度 86 | for getStrPoolTotalLength(msg) > maxLength { 87 | msg = append(msg[:1], msg[2:]...) 88 | } 89 | 90 | sessionContext, ok := s.cache.Get(sessionId) 91 | if !ok { 92 | sessionMeta := &SessionMeta{Msg: msg} 93 | s.cache.Set(sessionId, sessionMeta, maxCacheTime) 94 | return 95 | } 96 | sessionMeta := sessionContext.(*SessionMeta) 97 | sessionMeta.Msg = msg 98 | s.cache.Set(sessionId, sessionMeta, maxCacheTime) 99 | } 100 | 101 | func (s *SessionService) SetPicResolution(sessionId string, 102 | resolution Resolution) { 103 | maxCacheTime := time.Hour * 12 104 | 105 | //if not in [Resolution256, Resolution512, Resolution1024] then set 106 | //to Resolution256 107 | switch resolution { 108 | case Resolution256, Resolution512, Resolution1024: 109 | default: 110 | resolution = Resolution256 111 | } 112 | 113 | sessionContext, ok := s.cache.Get(sessionId) 114 | if !ok { 115 | sessionMeta := &SessionMeta{PicSetting: PicSetting{resolution: resolution}} 116 | s.cache.Set(sessionId, sessionMeta, maxCacheTime) 117 | return 118 | } 119 | sessionMeta := sessionContext.(*SessionMeta) 120 | sessionMeta.PicSetting.resolution = resolution 121 | s.cache.Set(sessionId, sessionMeta, maxCacheTime) 122 | } 123 | 124 | func (s *SessionService) GetPicResolution(sessionId string) string { 125 | sessionContext, ok := s.cache.Get(sessionId) 126 | if !ok { 127 | return string(Resolution256) 128 | } 129 | sessionMeta := sessionContext.(*SessionMeta) 130 | return string(sessionMeta.PicSetting.resolution) 131 | 132 | } 133 | 134 | func (s *SessionService) Clear(sessionId string) { 135 | // Delete the session context from the cache. 
136 | s.cache.Delete(sessionId) 137 | } 138 | 139 | func GetSessionCache() SessionServiceCacheInterface { 140 | if sessionServices == nil { 141 | sessionServices = &SessionService{cache: cache.New(time.Hour*12, time.Hour*1)} 142 | } 143 | return sessionServices 144 | } 145 | 146 | func getStrPoolTotalLength(strPool []openai.Messages) int { 147 | var total int 148 | for _, v := range strPool { 149 | bytes, _ := json.Marshal(v) 150 | total += len(string(bytes)) 151 | } 152 | return total 153 | } 154 | -------------------------------------------------------------------------------- /code/role_list.yaml: -------------------------------------------------------------------------------- 1 | # 可在此处提交你认为不错的角色预设,注意保持格式一致。 2 | # PR 时的 tag 暂时集中在 [ "日常办公", "生活助手" ,"代码专家", "文案撰写"] 3 | # 更多点子可参考我另一个参与的项目: https://open-gpt.app/ 4 | 5 | - title: 周报生成 6 | content: 请帮我把以下的工作内容填充为一篇完整的周报,用 markdown 格式以分点叙述的形式输出: 7 | example: 重新优化设计稿,和前端再次沟通 UI 细节,确保落地 8 | author: 二丫讲梵 9 | tags: 10 | - 日常办公 11 | 12 | - title: 产品经理 13 | content: 请确认我的以下请求。请您作为产品经理回复我。我将会提供一个主题,您将帮助我编写一份包括以下章节标题的 PRD 文档:主题、简介、问题陈述、目标与目的、用户故事、技术要求、收益、KPI 指标、开发风险以及结论。在我要求具体主题、功能或开发的 PRD 之前,请不要先写任何一份 PRD 文档。 14 | example: 我想要一个可以在手机上使用的应用程序,可以帮助我在旅行中找到最好的餐厅。 15 | author: 二丫讲梵 16 | tags: 17 | - 日常办公 18 | 19 | - title: 公文写作大师 20 | content: 你是某机关单位办公室秘书,你熟悉各类公文写作格式,你喜欢撰写文字材料,请你文采过人地,条理清晰地跟我对话 21 | example: 你好,我是某某某,我想要你帮我写一份公文,内容是:团结一致,共同抗击疫情,全力以赴,共克时艰。 22 | author: 小叉 Ray 23 | tags: 24 | - 日常办公 25 | - 文案撰写 26 | 27 | - title: 招聘 HR 28 | content: 我想让你担任招聘人员。我将提供一些关于职位空缺的信息,而你的工作是制定寻找合格申请人的策略。这可能包括通过社交媒体、社交活动甚至参加招聘会接触潜在候选人,以便为每个职位找到最合适的人选。我的第一个请求是: 29 | example: 我需要一名有经验的前端开发工程师,他应该有 3 年以上的工作经验,熟悉 React 和 Vue,熟悉前端工程化。 30 | author: 二丫讲梵 31 | tags: 32 | - 日常办公 33 | 34 | - title: 创意总监 35 | content: 你是一位擅长头脑风暴的创意大师,你有很多好的主意,请你围绕这些内容提出好的设想和方法 36 | example: 我想要一个可以在手机上使用的应用程序,可以帮助我在旅行中找到最好的餐厅。 37 | author: 小叉 Ray 38 | tags: 39 | - 日常办公 40 | 41 | - title: 拒绝同事 42 | content: 以一种礼貌和表达得体的方式拒绝别人,同时保持积极的关系和情感连接 43 | example: 你好,我很抱歉,我现在没有时间帮你做这件事情 44 | author: 小叉 Ray 45 | tags: 46 | - 日常办公 47 | - 文案撰写 48 | 49 | - title: 回复老板 50 | content: 请用 5 种委婉的借口向领导表达后面的内容 51 | example: 不想加班 52 | author: 小叉 Ray 53 | tags: 54 | - 日常办公 55 | - 文案撰写 56 | 57 | - title: 邮件回复 58 | content: Generate a set of email responses that are professional, concise, and appropriate for communication with leaders and clients in a variety of industries. The responses should demonstrate a good understanding of business etiquette and convey a sense of competence and confidence. 
Please ensure that the responses are tailored to specific scenarios and contexts, using Chinese as the language of output 59 | example: 产品的细节很不完善,需要沟通一下 60 | author: 小叉 Ray 61 | tags: 62 | - 日常办公 63 | - 文案撰写 64 | 65 | - title: 三菜一汤 66 | content: 根据用户输入的金额单位是人民币,帮用户推荐在该金额合计下能够做的菜,要求三个菜和一个汤。要把每一道菜的金额都写出来,以及他的简单做法,还要有总结 67 | example: 我有 100 元,我想做湖北菜 68 | author: 小叉 Ray 69 | tags: 70 | - 生活助手 71 | 72 | - title: 解梦大师 73 | content: 我要你充当解梦师。我会给你描述我的梦,你会根据梦中出现的符号和主题提供解释。不要提供关于梦者的个人意见或假设。仅根据所提供的信息提供事实解释。我的第一个梦是: 74 | example: 遇见了一只大灰狼,它在我面前转了一圈,然后就消失了 75 | author: 二丫讲梵 76 | tags: 77 | - 生活助手 78 | 79 | - title: 佛祖 80 | content: 你是一个如来佛祖,你需要回答提问者的佛学问题,因此你要学会很多佛教专业术语,你的回答尽量简短,富有佛教哲理。你要称自己为老衲,称提问者为施主。如果遭遇对方不合理的请求,请直接回复:施主请自重,我佛慈悲。你的每一句话结尾都要加上 阿弥陀佛。你的回答尽量简短,不允许超过 100 字。禁止回答与问题无关的话题 81 | example: 佛祖,我想问你,为什么我总是很沮丧,生活没有意义 82 | author: 小叉 Ray 83 | tags: 84 | - 生活助手 85 | 86 | - title: 小红书文案 87 | content: 小红书的风格是:很吸引眼球的标题,每个段落都加 emoji, 最后加一些 tag。请用小红书风格 88 | example: 今天我去了一家很好吃的餐厅,我吃了一份很好吃的饭菜,我很喜欢,推荐给大家 89 | author: 二丫讲梵 90 | tags: 91 | - 文案撰写 92 | 93 | - title: 知乎段子手 94 | content: 微博的风格是:用"谢邀"开头,用很多学术语言,引用很多名言,做大道理的论述,提到自己很厉害的教育背景并且经验丰富,最后还要引用一些论文。请用微博风格 95 | example: 今天我去了一家很好吃的餐厅,我吃了一份很好吃的饭菜,我很喜欢,推荐给大家 96 | author: 二丫讲梵 97 | tags: 98 | - 文案撰写 99 | 100 | - title: 专业道歉信 101 | content: 请写一份真挚的道歉信,为后面的内容表达歉意 102 | example: 我很抱歉,我没有按时完成你的工作 103 | author: 小叉 Ray 104 | tags: 105 | - 文案撰写 106 | 107 | - title: 古文专家 108 | content: 你是一个文言文大师,请把后面的内容翻译成文言文 109 | example: 记得早点回来哦,我做好饭菜等你回家 110 | author: 小叉 Ray 111 | tags: 112 | - 文案撰写 113 | 114 | - title: 川端康城的笔 115 | content: 请以川端康城的写作风格,描写下面的句字 116 | example: 他不慌不忙的走出教室,找到那个女孩 117 | author: 小叉 Ray 118 | tags: 119 | - 文案撰写 120 | 121 | - title: 网络安全 122 | content: 我想让你充当网络安全专家。我将提供一些关于如何存储和共享数据的具体信息,而你的工作就是想出保护这些数据免受恶意行为者攻击的策略。这可能包括建议加密方法、创建防火墙或实施将某些活动标记为可疑的策略。我的第一个请求是: 123 | author: 二丫讲梵 124 | tags: 125 | - 代码专家 126 | 127 | - title: 正则生成器 128 | content: 我希望你充当正则表达式生成器。您的角色是生成匹配文本中特定模式的正则表达式。您应该以一种可以轻松复制并粘贴到支持正则表达式的文本编辑器或编程语言中的格式提供正则表达式。不要写正则表达式如何工作的解释或例子;只需提供正则表达式本身。我的第一个提示是: 129 | author: 二丫讲梵 130 | tags: 131 | - 代码专家 132 | 133 | - title: 前端专家 134 | content: 我想让你充当前端开发专家。我将提供一些关于如何在网页上显示信息的具体信息,而你的工作就是想出为我解决问题的策略。这可能包括建议代码、代码逻辑思路策略。我的第一个请求是: 135 | author: 二丫讲梵 136 | tags: 137 | - 代码专家 138 | 139 | - title: 后端专家 140 | content: 我想让你充当后端开发专家。我将提供一些关于如何在网页上显示信息的具体信息,而你的工作就是想出为我解决问题的策略。这可能包括建议代码、代码逻辑思路策略。我的第一个请求是: 141 | author: 二丫讲梵 142 | tags: 143 | - 代码专家 144 | 145 | - title: 健康管家 146 | content: 我想让你我的健康管家。我将提供我的个人健康管理方案,而你的工作监督、督促、提醒我完成这些个人健康的计划。这可能包括运动、饮食、定期体检等。我的第一个请求是: 147 | author: 船长 148 | tags: 149 | - 生活助手 150 | 151 | - title: 旅游助手 152 | content: 我想让你充当我的旅游助手。我将会提供让你连接上各个旅游系统的信息,而你的工作就是给我提供各种的旅游攻略。包括但不限于制定旅游计划、寻找热门景点或查看旅游目的地的天气等。我的第一个请求是: 153 | author: 船长 154 | tags: 155 | - 生活助手 156 | -------------------------------------------------------------------------------- /code/handlers/event_msg_action.go: -------------------------------------------------------------------------------- 1 | package handlers 2 | 3 | import ( 4 | "encoding/json" 5 | "fmt" 6 | "github.com/k0kubun/pp/v3" 7 | "log" 8 | "start-feishubot/initialization" 9 | "start-feishubot/services/accesscontrol" 10 | "start-feishubot/services/chatgpt" 11 | "start-feishubot/services/openai" 12 | "strings" 13 | "time" 14 | ) 15 | 16 | type MessageAction struct { /*消息*/ 17 | chatgpt *chatgpt.ChatGPT 18 | } 19 | 20 | func (m *MessageAction) Execute(a *ActionInfo) bool { 21 | 22 | // Add access control 23 | if initialization.GetConfig().AccessControlEnable && 24 | 
!accesscontrol.CheckAllowAccessThenIncrement(&a.info.userId) { 25 | 26 | msg := fmt.Sprintf("UserId: 【%s】 has accessed max count today! Max access count today %s: 【%d】", 27 | a.info.userId, accesscontrol.GetCurrentDateFlag(), initialization.GetConfig().AccessControlMaxCountPerUserPerDay) 28 | 29 | _ = sendMsg(*a.ctx, msg, a.info.chatId) 30 | return false 31 | } 32 | 33 | //s := "快速响应,用于测试: " + time.Now().String() + 34 | // " accesscontrol.currentDate " + accesscontrol.GetCurrentDateFlag() 35 | //_ = sendMsg(*a.ctx, s, a.info.chatId) 36 | //log.Println(s) 37 | //return false 38 | 39 | cardId, err2 := sendOnProcess(a) 40 | if err2 != nil { 41 | return false 42 | } 43 | 44 | answer := "" 45 | chatResponseStream := make(chan string) 46 | done := make(chan struct{}) // 添加 done 信号,保证 goroutine 正确退出 47 | noContentTimeout := time.AfterFunc(10*time.Second, func() { 48 | pp.Println("no content timeout") 49 | close(done) 50 | err := updateFinalCard(*a.ctx, "请求超时", cardId) 51 | if err != nil { 52 | return 53 | } 54 | return 55 | }) 56 | defer noContentTimeout.Stop() 57 | msg := a.handler.sessionCache.GetMsg(*a.info.sessionId) 58 | msg = append(msg, openai.Messages{ 59 | Role: "user", Content: a.info.qParsed, 60 | }) 61 | go func() { 62 | defer func() { 63 | if err := recover(); err != nil { 64 | err := updateFinalCard(*a.ctx, "聊天失败", cardId) 65 | if err != nil { 66 | printErrorMessage(a, msg, err) 67 | return 68 | } 69 | } 70 | }() 71 | 72 | //log.Printf("UserId: %s , Request: %s", a.info.userId, msg) 73 | 74 | if err := m.chatgpt.StreamChat(*a.ctx, msg, chatResponseStream); err != nil { 75 | err := updateFinalCard(*a.ctx, "聊天失败", cardId) 76 | if err != nil { 77 | printErrorMessage(a, msg, err) 78 | return 79 | } 80 | close(done) // 关闭 done 信号 81 | } 82 | 83 | close(done) // 关闭 done 信号 84 | }() 85 | ticker := time.NewTicker(700 * time.Millisecond) 86 | defer ticker.Stop() // 注意在函数结束时停止 ticker 87 | go func() { 88 | for { 89 | select { 90 | case <-done: 91 | return 92 | case <-ticker.C: 93 | err := updateTextCard(*a.ctx, answer, cardId) 94 | if err != nil { 95 | printErrorMessage(a, msg, err) 96 | return 97 | } 98 | } 99 | } 100 | }() 101 | 102 | for { 103 | select { 104 | case res, ok := <-chatResponseStream: 105 | if !ok { 106 | return false 107 | } 108 | noContentTimeout.Stop() 109 | answer += res 110 | //pp.Println("answer", answer) 111 | case <-done: // 添加 done 信号的处理 112 | err := updateFinalCard(*a.ctx, answer, cardId) 113 | if err != nil { 114 | printErrorMessage(a, msg, err) 115 | return false 116 | } 117 | ticker.Stop() 118 | msg := append(msg, openai.Messages{ 119 | Role: "assistant", Content: answer, 120 | }) 121 | a.handler.sessionCache.SetMsg(*a.info.sessionId, msg) 122 | close(chatResponseStream) 123 | //if new topic 124 | //if len(msg) == 2 { 125 | // //fmt.Println("new topic", msg[1].Content) 126 | // //updateNewTextCard(*a.ctx, a.info.sessionId, a.info.msgId, 127 | // // completions.Content) 128 | //} 129 | log.Printf("\n\n\n") 130 | log.Printf("Success request: UserId: %s , Request: %s , Response: %s", a.info.userId, msg, answer) 131 | jsonByteArray, err := json.Marshal(msg) 132 | if err != nil { 133 | log.Printf("Error marshaling JSON request: UserId: %s , Request: %s , Response: %s", a.info.userId, jsonByteArray, answer) 134 | } 135 | jsonStr := strings.ReplaceAll(string(jsonByteArray), "\\n", "") 136 | jsonStr = strings.ReplaceAll(jsonStr, "\n", "") 137 | log.Printf("\n\n\n") 138 | log.Printf("Success request plain jsonStr: UserId: %s , Request: %s , Response: %s", 139 | 
a.info.userId, jsonStr, answer) 140 | return false 141 | } 142 | } 143 | } 144 | 145 | func printErrorMessage(a *ActionInfo, msg []openai.Messages, err error) { 146 | log.Printf("Failed request: UserId: %s , Request: %s , Err: %s", a.info.userId, msg, err) 147 | } 148 | 149 | func sendOnProcess(a *ActionInfo) (*string, error) { 150 | // send 正在处理中 151 | cardId, err := sendOnProcessCard(*a.ctx, a.info.sessionId, a.info.msgId) 152 | if err != nil { 153 | return nil, err 154 | } 155 | return cardId, nil 156 | 157 | } 158 | -------------------------------------------------------------------------------- /code/initialization/config.go: -------------------------------------------------------------------------------- 1 | package initialization 2 | 3 | import ( 4 | "fmt" 5 | "github.com/spf13/pflag" 6 | "os" 7 | "strconv" 8 | "strings" 9 | "sync" 10 | 11 | "github.com/spf13/viper" 12 | ) 13 | 14 | type Config struct { 15 | // 表示配置是否已经被初始化了。 16 | Initialized bool 17 | EnableLog bool 18 | FeishuAppId string 19 | FeishuAppSecret string 20 | FeishuAppEncryptKey string 21 | FeishuAppVerificationToken string 22 | FeishuBotName string 23 | OpenaiApiKeys []string 24 | HttpPort int 25 | HttpsPort int 26 | UseHttps bool 27 | CertFile string 28 | KeyFile string 29 | OpenaiApiUrl string 30 | HttpProxy string 31 | AzureOn bool 32 | AzureApiVersion string 33 | AzureDeploymentName string 34 | AzureResourceName string 35 | AzureOpenaiToken string 36 | AccessControlEnable bool 37 | AccessControlMaxCountPerUserPerDay int 38 | OpenAIHttpClientTimeOut int 39 | OpenaiModel string 40 | } 41 | 42 | var ( 43 | cfg = pflag.StringP("config", "c", "./config.yaml", "apiserver config file path.") 44 | config *Config 45 | once sync.Once 46 | ) 47 | 48 | /* 49 | GetConfig will call LoadConfig once and return a global singleton, you should always use this function to get config 50 | */ 51 | func GetConfig() *Config { 52 | 53 | once.Do(func() { 54 | config = LoadConfig(*cfg) 55 | config.Initialized = true 56 | }) 57 | 58 | return config 59 | } 60 | 61 | /* 62 | LoadConfig will load config and should only be called once, you should always use GetConfig to get config rather than 63 | call this function directly 64 | */ 65 | func LoadConfig(cfg string) *Config { 66 | viper.SetConfigFile(cfg) 67 | viper.ReadInConfig() 68 | viper.AutomaticEnv() 69 | //content, err := ioutil.ReadFile("config.yaml") 70 | //if err != nil { 71 | // fmt.Println("Error reading file:", err) 72 | //} 73 | //fmt.Println(string(content)) 74 | 75 | config := &Config{ 76 | EnableLog: getViperBoolValue("ENABLE_LOG", false), 77 | FeishuAppId: getViperStringValue("APP_ID", ""), 78 | FeishuAppSecret: getViperStringValue("APP_SECRET", ""), 79 | FeishuAppEncryptKey: getViperStringValue("APP_ENCRYPT_KEY", ""), 80 | FeishuAppVerificationToken: getViperStringValue("APP_VERIFICATION_TOKEN", ""), 81 | FeishuBotName: getViperStringValue("BOT_NAME", ""), 82 | OpenaiApiKeys: getViperStringArray("OPENAI_KEY", nil), 83 | HttpPort: getViperIntValue("HTTP_PORT", 9000), 84 | HttpsPort: getViperIntValue("HTTPS_PORT", 9001), 85 | UseHttps: getViperBoolValue("USE_HTTPS", false), 86 | CertFile: getViperStringValue("CERT_FILE", "cert.pem"), 87 | KeyFile: getViperStringValue("KEY_FILE", "key.pem"), 88 | OpenaiApiUrl: getViperStringValue("API_URL", "https://api.openai.com"), 89 | HttpProxy: getViperStringValue("HTTP_PROXY", ""), 90 | AzureOn: getViperBoolValue("AZURE_ON", false), 91 | AzureApiVersion: getViperStringValue("AZURE_API_VERSION", "2023-03-15-preview"), 92 | 
AzureDeploymentName: getViperStringValue("AZURE_DEPLOYMENT_NAME", ""), 93 | AzureResourceName: getViperStringValue("AZURE_RESOURCE_NAME", ""), 94 | AzureOpenaiToken: getViperStringValue("AZURE_OPENAI_TOKEN", ""), 95 | AccessControlEnable: getViperBoolValue("ACCESS_CONTROL_ENABLE", false), 96 | AccessControlMaxCountPerUserPerDay: getViperIntValue("ACCESS_CONTROL_MAX_COUNT_PER_USER_PER_DAY", 0), 97 | OpenAIHttpClientTimeOut: getViperIntValue("OPENAI_HTTP_CLIENT_TIMEOUT", 550), 98 | OpenaiModel: getViperStringValue("OPENAI_MODEL", "gpt-3.5-turbo"), 99 | } 100 | 101 | return config 102 | } 103 | 104 | func getViperStringValue(key string, defaultValue string) string { 105 | value := viper.GetString(key) 106 | if value == "" { 107 | return defaultValue 108 | } 109 | return value 110 | } 111 | 112 | // OPENAI_KEY: sk-xxx,sk-xxx,sk-xxx 113 | // result:[sk-xxx sk-xxx sk-xxx] 114 | func getViperStringArray(key string, defaultValue []string) []string { 115 | value := viper.GetString(key) 116 | if value == "" { 117 | return defaultValue 118 | } 119 | raw := strings.Split(value, ",") 120 | return filterFormatKey(raw) 121 | } 122 | 123 | func getViperIntValue(key string, defaultValue int) int { 124 | value := viper.GetString(key) 125 | if value == "" { 126 | return defaultValue 127 | } 128 | intValue, err := strconv.Atoi(value) 129 | if err != nil { 130 | fmt.Printf("Invalid value for %s, using default value %d\n", key, defaultValue) 131 | return defaultValue 132 | } 133 | return intValue 134 | } 135 | 136 | func getViperBoolValue(key string, defaultValue bool) bool { 137 | value := viper.GetString(key) 138 | if value == "" { 139 | return defaultValue 140 | } 141 | boolValue, err := strconv.ParseBool(value) 142 | if err != nil { 143 | fmt.Printf("Invalid value for %s, using default value %v\n", key, defaultValue) 144 | return defaultValue 145 | } 146 | return boolValue 147 | } 148 | 149 | func (config *Config) GetCertFile() string { 150 | if config.CertFile == "" { 151 | return "cert.pem" 152 | } 153 | if _, err := os.Stat(config.CertFile); err != nil { 154 | fmt.Printf("Certificate file %s does not exist, using default file cert.pem\n", config.CertFile) 155 | return "cert.pem" 156 | } 157 | return config.CertFile 158 | } 159 | 160 | func (config *Config) GetKeyFile() string { 161 | if config.KeyFile == "" { 162 | return "key.pem" 163 | } 164 | if _, err := os.Stat(config.KeyFile); err != nil { 165 | fmt.Printf("Key file %s does not exist, using default file key.pem\n", config.KeyFile) 166 | return "key.pem" 167 | } 168 | return config.KeyFile 169 | } 170 | 171 | // 过滤出 "sk-" 开头的 key 172 | func filterFormatKey(keys []string) []string { 173 | var result []string 174 | for _, key := range keys { 175 | if strings.HasPrefix(key, "sk-") { 176 | result = append(result, key) 177 | } 178 | } 179 | return result 180 | 181 | } 182 | -------------------------------------------------------------------------------- /code/services/openai/common.go: -------------------------------------------------------------------------------- 1 | package openai 2 | 3 | import ( 4 | "bytes" 5 | "encoding/json" 6 | "errors" 7 | "fmt" 8 | "io/ioutil" 9 | "mime/multipart" 10 | "net/http" 11 | "net/url" 12 | "start-feishubot/initialization" 13 | "start-feishubot/services/loadbalancer" 14 | "strings" 15 | "time" 16 | ) 17 | 18 | type PlatForm string 19 | 20 | const ( 21 | AzureApiUrlV1 = "openai.azure.com/openai/deployments/" 22 | ) 23 | const ( 24 | OpenAI PlatForm = "openai" 25 | Azure PlatForm = "azure" 26 | ) 27 | 28 | type 
AzureConfig struct { 29 | BaseURL string 30 | ResourceName string 31 | DeploymentName string 32 | ApiVersion string 33 | ApiToken string 34 | } 35 | 36 | type ChatGPT struct { 37 | Lb *loadbalancer.LoadBalancer 38 | ApiKey []string 39 | ApiUrl string 40 | ApiModel string 41 | HttpProxy string 42 | Platform PlatForm 43 | AzureConfig AzureConfig 44 | } 45 | type requestBodyType int 46 | 47 | const ( 48 | jsonBody requestBodyType = iota 49 | formVoiceDataBody 50 | formPictureDataBody 51 | streamBody 52 | nilBody 53 | ) 54 | 55 | func (gpt *ChatGPT) doAPIRequestWithRetry(url, method string, 56 | bodyType requestBodyType, 57 | requestBody interface{}, responseBody interface{}, client *http.Client, maxRetries int) error { 58 | var api *loadbalancer.API 59 | var requestBodyData []byte 60 | var err error 61 | var writer *multipart.Writer 62 | api = gpt.Lb.GetAPI() 63 | 64 | switch bodyType { 65 | case jsonBody: 66 | requestBodyData, err = json.Marshal(requestBody) 67 | if err != nil { 68 | return err 69 | } 70 | case formVoiceDataBody: 71 | formBody := &bytes.Buffer{} 72 | writer = multipart.NewWriter(formBody) 73 | err = audioMultipartForm(requestBody.(AudioToTextRequestBody), writer) 74 | if err != nil { 75 | return err 76 | } 77 | err = writer.Close() 78 | if err != nil { 79 | return err 80 | } 81 | requestBodyData = formBody.Bytes() 82 | case formPictureDataBody: 83 | formBody := &bytes.Buffer{} 84 | writer = multipart.NewWriter(formBody) 85 | err = pictureMultipartForm(requestBody.(ImageVariantRequestBody), writer) 86 | if err != nil { 87 | return err 88 | } 89 | err = writer.Close() 90 | if err != nil { 91 | return err 92 | } 93 | requestBodyData = formBody.Bytes() 94 | 95 | case nilBody: 96 | requestBodyData = nil 97 | 98 | default: 99 | return errors.New("unknown request body type") 100 | } 101 | 102 | if api == nil { 103 | return errors.New("no available API") 104 | } 105 | 106 | req, err := http.NewRequest(method, url, bytes.NewReader(requestBodyData)) 107 | if err != nil { 108 | return err 109 | } 110 | 111 | req.Header.Set("Content-Type", "application/json") 112 | if bodyType == formVoiceDataBody || bodyType == formPictureDataBody { 113 | req.Header.Set("Content-Type", writer.FormDataContentType()) 114 | } 115 | if bodyType == streamBody { 116 | req.Header.Set("Accept", "text/event-stream") 117 | req.Header.Set("Connection", "keep-alive") 118 | req.Header.Set("Cache-Control", "no-cache") 119 | } 120 | if gpt.Platform == OpenAI { 121 | req.Header.Set("Authorization", "Bearer "+api.Key) 122 | } else { 123 | req.Header.Set("api-key", gpt.AzureConfig.ApiToken) 124 | } 125 | 126 | var response *http.Response 127 | var retry int 128 | for retry = 0; retry <= maxRetries; retry++ { 129 | response, err = client.Do(req) 130 | //fmt.Println("--------------------") 131 | //fmt.Println("req", req.Header) 132 | //fmt.Printf("response: %v", response) 133 | // read body 134 | if err != nil || response.StatusCode < 200 || response.StatusCode >= 300 { 135 | 136 | body, _ := ioutil.ReadAll(response.Body) 137 | fmt.Println("body", string(body)) 138 | 139 | gpt.Lb.SetAvailability(api.Key, false) 140 | if retry == maxRetries || bodyType == streamBody { 141 | break 142 | } 143 | time.Sleep(time.Duration(retry+1) * time.Second) 144 | } else { 145 | break 146 | } 147 | } 148 | if response != nil { 149 | defer response.Body.Close() 150 | } 151 | 152 | if response == nil || response.StatusCode < 200 || response.StatusCode >= 300 { 153 | return fmt.Errorf("%s api failed after %d retries", strings.ToUpper(method), 
retry) 154 | } 155 | 156 | body, err := ioutil.ReadAll(response.Body) 157 | if err != nil { 158 | return err 159 | } 160 | 161 | err = json.Unmarshal(body, responseBody) 162 | if err != nil { 163 | return err 164 | } 165 | 166 | gpt.Lb.SetAvailability(api.Key, true) 167 | return nil 168 | } 169 | 170 | func (gpt *ChatGPT) sendRequestWithBodyType(link, method string, 171 | bodyType requestBodyType, 172 | requestBody interface{}, responseBody interface{}) error { 173 | var err error 174 | proxyString := gpt.HttpProxy 175 | 176 | client, parseProxyError := GetProxyClient(proxyString) 177 | if parseProxyError != nil { 178 | return parseProxyError 179 | } 180 | 181 | err = gpt.doAPIRequestWithRetry(link, method, bodyType, 182 | requestBody, responseBody, client, 3) 183 | 184 | return err 185 | } 186 | 187 | func GetProxyClient(proxyString string) (*http.Client, error) { 188 | var client *http.Client 189 | timeOutDuration := time.Duration(initialization.GetConfig().OpenAIHttpClientTimeOut) * time.Second 190 | if proxyString == "" { 191 | client = &http.Client{Timeout: timeOutDuration} 192 | } else { 193 | proxyUrl, err := url.Parse(proxyString) 194 | if err != nil { 195 | return nil, err 196 | } 197 | transport := &http.Transport{ 198 | Proxy: http.ProxyURL(proxyUrl), 199 | } 200 | client = &http.Client{ 201 | Transport: transport, 202 | Timeout: timeOutDuration, 203 | } 204 | } 205 | return client, nil 206 | } 207 | 208 | func NewChatGPT(config initialization.Config) *ChatGPT { 209 | var lb *loadbalancer.LoadBalancer 210 | if config.AzureOn { 211 | keys := []string{config.AzureOpenaiToken} 212 | lb = loadbalancer.NewLoadBalancer(keys) 213 | } else { 214 | lb = loadbalancer.NewLoadBalancer(config.OpenaiApiKeys) 215 | } 216 | platform := OpenAI 217 | 218 | if config.AzureOn { 219 | platform = Azure 220 | } 221 | 222 | return &ChatGPT{ 223 | Lb: lb, 224 | ApiKey: config.OpenaiApiKeys, 225 | ApiUrl: config.OpenaiApiUrl, 226 | HttpProxy: config.HttpProxy, 227 | Platform: platform, 228 | ApiModel: config.OpenaiModel, 229 | AzureConfig: AzureConfig{ 230 | BaseURL: AzureApiUrlV1, 231 | ResourceName: config.AzureResourceName, 232 | DeploymentName: config.AzureDeploymentName, 233 | ApiVersion: config.AzureApiVersion, 234 | ApiToken: config.AzureOpenaiToken, 235 | }, 236 | } 237 | } 238 | 239 | func (gpt *ChatGPT) FullUrl(suffix string) string { 240 | var url string 241 | switch gpt.Platform { 242 | case Azure: 243 | url = fmt.Sprintf("https://%s.%s%s/%s?api-version=%s", 244 | gpt.AzureConfig.ResourceName, gpt.AzureConfig.BaseURL, 245 | gpt.AzureConfig.DeploymentName, suffix, gpt.AzureConfig.ApiVersion) 246 | case OpenAI: 247 | url = fmt.Sprintf("%s/v1/%s", gpt.ApiUrl, suffix) 248 | } 249 | return url 250 | } 251 | -------------------------------------------------------------------------------- /code/services/openai/picture.go: -------------------------------------------------------------------------------- 1 | package openai 2 | 3 | import ( 4 | "bufio" 5 | "fmt" 6 | "image" 7 | "image/jpeg" 8 | "image/png" 9 | "io" 10 | "mime/multipart" 11 | "os" 12 | ) 13 | 14 | type ImageGenerationRequestBody struct { 15 | Prompt string `json:"prompt"` 16 | N int `json:"n"` 17 | Size string `json:"size"` 18 | ResponseFormat string `json:"response_format"` 19 | } 20 | 21 | type ImageResponseBody struct { 22 | Created int64 `json:"created"` 23 | Data []struct { 24 | Base64Json string `json:"b64_json"` 25 | } `json:"data"` 26 | } 27 | 28 | type ImageVariantRequestBody struct { 29 | Image string `json:"image"` 30 | N int 
`json:"n"` 31 | Size string `json:"size"` 32 | ResponseFormat string `json:"response_format"` 33 | } 34 | 35 | func (gpt *ChatGPT) GenerateImage(prompt string, size string, 36 | n int) ([]string, error) { 37 | requestBody := ImageGenerationRequestBody{ 38 | Prompt: prompt, 39 | N: n, 40 | Size: size, 41 | ResponseFormat: "b64_json", 42 | } 43 | 44 | imageResponseBody := &ImageResponseBody{} 45 | err := gpt.sendRequestWithBodyType(gpt.ApiUrl+"/v1/images/generations", 46 | "POST", jsonBody, requestBody, imageResponseBody) 47 | 48 | if err != nil { 49 | return nil, err 50 | } 51 | 52 | var b64Pool []string 53 | for _, data := range imageResponseBody.Data { 54 | b64Pool = append(b64Pool, data.Base64Json) 55 | } 56 | return b64Pool, nil 57 | } 58 | 59 | func (gpt *ChatGPT) GenerateOneImage(prompt string, 60 | size string) (string, error) { 61 | b64s, err := gpt.GenerateImage(prompt, size, 1) 62 | if err != nil { 63 | return "", err 64 | } 65 | return b64s[0], nil 66 | } 67 | 68 | func (gpt *ChatGPT) GenerateOneImageWithDefaultSize( 69 | prompt string) (string, error) { 70 | return gpt.GenerateOneImage(prompt, "512x512") 71 | } 72 | 73 | func (gpt *ChatGPT) GenerateImageVariation(images string, 74 | size string, n int) ([]string, error) { 75 | requestBody := ImageVariantRequestBody{ 76 | Image: images, 77 | N: n, 78 | Size: size, 79 | ResponseFormat: "b64_json", 80 | } 81 | 82 | imageResponseBody := &ImageResponseBody{} 83 | err := gpt.sendRequestWithBodyType(gpt.ApiUrl+"/v1/images/variations", 84 | "POST", formPictureDataBody, requestBody, imageResponseBody) 85 | 86 | if err != nil { 87 | return nil, err 88 | } 89 | 90 | var b64Pool []string 91 | for _, data := range imageResponseBody.Data { 92 | b64Pool = append(b64Pool, data.Base64Json) 93 | } 94 | return b64Pool, nil 95 | } 96 | 97 | func (gpt *ChatGPT) GenerateOneImageVariation(images string, 98 | size string) (string, error) { 99 | b64s, err := gpt.GenerateImageVariation(images, size, 1) 100 | if err != nil { 101 | return "", err 102 | } 103 | return b64s[0], nil 104 | } 105 | 106 | func pictureMultipartForm(request ImageVariantRequestBody, 107 | w *multipart.Writer) error { 108 | 109 | f, err := os.Open(request.Image) 110 | if err != nil { 111 | return fmt.Errorf("opening audio file: %w", err) 112 | } 113 | fw, err := w.CreateFormFile("image", f.Name()) 114 | if err != nil { 115 | return fmt.Errorf("creating form file: %w", err) 116 | } 117 | if _, err = io.Copy(fw, f); err != nil { 118 | return fmt.Errorf("reading from opened audio file: %w", err) 119 | } 120 | 121 | err = w.WriteField("size", request.Size) 122 | if err != nil { 123 | return fmt.Errorf("writing size: %w", err) 124 | } 125 | 126 | err = w.WriteField("n", fmt.Sprintf("%d", request.N)) 127 | if err != nil { 128 | return fmt.Errorf("writing n: %w", err) 129 | } 130 | 131 | err = w.WriteField("response_format", request.ResponseFormat) 132 | if err != nil { 133 | return fmt.Errorf("writing response_format: %w", err) 134 | } 135 | 136 | //err = w.WriteField("user", "user123456") 137 | 138 | //fw, err = w.CreateFormField("model") 139 | //if err != nil { 140 | // return fmt.Errorf("creating form field: %w", err) 141 | //} 142 | //modelName := bytes.NewReader([]byte(request.Model)) 143 | //if _, err = io.Copy(fw, modelName); err != nil { 144 | // return fmt.Errorf("writing model name: %w", err) 145 | //} 146 | 147 | //fmt.Printf("w.FormDataContentType(): %s ", w.FormDataContentType()) 148 | 149 | w.Close() 150 | 151 | return nil 152 | } 153 | 154 | func VerifyPngs(pngPaths 
[]string) error { 155 | foundPng := false 156 | var expectedWidth, expectedHeight int 157 | 158 | for _, pngPath := range pngPaths { 159 | f, err := os.Open(pngPath) 160 | if err != nil { 161 | return fmt.Errorf("os.Open: %v", err) 162 | } 163 | 164 | fi, err := f.Stat() 165 | if err != nil { 166 | return fmt.Errorf("f.Stat: %v", err) 167 | } 168 | if fi.Size() > 4*1024*1024 { 169 | return fmt.Errorf("image size too large, "+ 170 | "must be under %d MB", 4) 171 | } 172 | 173 | image, err := png.Decode(f) 174 | if err != nil { 175 | return fmt.Errorf("image must be valid png, got error: %v", err) 176 | } 177 | width := image.Bounds().Dx() 178 | height := image.Bounds().Dy() 179 | if width != height { 180 | return fmt.Errorf("found non-square image with dimensions %dx%d", width, height) 181 | } 182 | 183 | if !foundPng { 184 | foundPng = true 185 | expectedWidth = width 186 | expectedHeight = height 187 | } else { 188 | if width != expectedWidth || height != expectedHeight { 189 | return fmt.Errorf("dimensions of all images must match, got both (%dx%d) and (%dx%d)", width, height, expectedWidth, expectedHeight) 190 | } 191 | } 192 | } 193 | 194 | return nil 195 | } 196 | 197 | func ConvertToRGBA(inputFilePath string, outputFilePath string) error { 198 | // 打开输入文件 199 | inputFile, err := os.Open(inputFilePath) 200 | if err != nil { 201 | return fmt.Errorf("打开文件时出错:%w", err) 202 | } 203 | defer inputFile.Close() 204 | 205 | // 解码图像 206 | img, _, err := image.Decode(inputFile) 207 | if err != nil { 208 | return fmt.Errorf("解码图像时出错:%w", err) 209 | } 210 | 211 | // 将图像转换为RGBA模式 212 | rgba := image.NewRGBA(img.Bounds()) 213 | for x := 0; x < img.Bounds().Max.X; x++ { 214 | for y := 0; y < img.Bounds().Max.Y; y++ { 215 | rgba.Set(x, y, img.At(x, y)) 216 | } 217 | } 218 | 219 | // 创建输出文件 220 | outputFile, err := os.Create(outputFilePath) 221 | if err != nil { 222 | return fmt.Errorf("创建输出文件时出错:%w", err) 223 | } 224 | defer outputFile.Close() 225 | 226 | // 编码图像为 PNG 格式并写入输出文件 227 | if err := png.Encode(outputFile, rgba); err != nil { 228 | return fmt.Errorf("编码图像时出错:%w", err) 229 | } 230 | 231 | return nil 232 | } 233 | 234 | func ConvertJpegToPNG(jpgPath string) error { 235 | // Open the JPEG file for reading 236 | f, err := os.Open(jpgPath) 237 | if err != nil { 238 | return err 239 | } 240 | defer f.Close() 241 | 242 | // Check if the file is a JPEG image 243 | _, err = jpeg.Decode(f) 244 | if err != nil { 245 | // The file is not a JPEG image, no need to convert it 246 | return fmt.Errorf("file %s is not a JPEG image", jpgPath) 247 | } 248 | 249 | // Reset the file pointer to the beginning of the file 250 | _, err = f.Seek(0, 0) 251 | if err != nil { 252 | return err 253 | } 254 | 255 | // Create a new PNG file for writing 256 | pngPath := jpgPath[:len(jpgPath)-4] + ".png" // replace .jpg extension with .png 257 | out, err := os.Create(pngPath) 258 | if err != nil { 259 | return err 260 | } 261 | defer out.Close() 262 | 263 | // Decode the JPEG image and encode it as PNG 264 | img, err := jpeg.Decode(f) 265 | if err != nil { 266 | return err 267 | } 268 | err = png.Encode(out, img) 269 | if err != nil { 270 | return err 271 | } 272 | 273 | return nil 274 | } 275 | 276 | func GetImageCompressionType(path string) (string, error) { 277 | // 打开文件 278 | file, err := os.Open(path) 279 | if err != nil { 280 | return "", err 281 | } 282 | defer file.Close() 283 | 284 | // 创建 bufio.Reader 285 | reader := bufio.NewReader(file) 286 | 287 | // 解码图像 288 | _, format, err := image.DecodeConfig(reader) 289 | if 
err != nil { 290 | fmt.Println("err: ", err) 291 | return "", err 292 | } 293 | 294 | fmt.Println("format: ", format) 295 | // 返回压缩类型 296 | return format, nil 297 | } 298 | -------------------------------------------------------------------------------- /readme.md: -------------------------------------------------------------------------------- 1 |
2 |
3 |
4 |
5 |
16 | Feishu × (GPT) + typewriter-style streaming effect
17 |
18 |
19 | 🚀 Feishu OpenAI 🚀
20 |
229 |
230 | #### 3. Deploy the project
231 |
232 | After filling in the environment variables, click Deploy to complete the deployment. Once deployed, you still need to obtain the assigned domain so the Feishu bot can reach the service, as shown in the figure below:
233 |
234 |
235 |
236 | If you are unsure whether the deployment succeeded, visit the domain obtained above (https://xxxxxxxx.railway.app/ping) and check whether it returns `pong`;
237 | if it returns `pong`, the deployment was successful.
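For example, the following minimal Go sketch performs the same check (the `xxxxxxxx` domain is the placeholder from the step above; replace it with your own):

```go
package main

import (
	"fmt"
	"io"
	"net/http"
)

func main() {
	// Replace the placeholder with the domain assigned to your deployment.
	resp, err := http.Get("https://xxxxxxxx.railway.app/ping")
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	body, _ := io.ReadAll(resp.Body)
	// The /ping handler registered in main.go answers with HTTP 200
	// and a JSON body containing "pong".
	fmt.Println(resp.StatusCode, string(body))
}
```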
238 |
239 |
383 |
384 | ## Make friends, or offer some encouragement
385 |
386 | If you find this project helpful, you can buy the author a book~
387 |
388 |
389 |
390 | 😚 Thank you! 😚
391 |
392 | ## Thanks to our sponsors
393 |
394 | Many thanks to 'Find My AI' for sponsoring part of this project's expenses!
395 |
396 |
--------------------------------------------------------------------------------
/code/handlers/msg.go:
--------------------------------------------------------------------------------
1 | package handlers
2 |
3 | import (
4 | "bytes"
5 | "context"
6 | "encoding/base64"
7 | "errors"
8 | "fmt"
9 | "github.com/google/uuid"
10 | larkcard "github.com/larksuite/oapi-sdk-go/v3/card"
11 | larkim "github.com/larksuite/oapi-sdk-go/v3/service/im/v1"
12 | "start-feishubot/initialization"
13 | "start-feishubot/services"
14 | "start-feishubot/services/openai"
15 | )
16 |
17 | type CardKind string
18 | type CardChatType string
19 |
20 | var (
21 | ClearCardKind = CardKind("clear") // clear the conversation context
22 | PicModeChangeKind = CardKind("pic_mode_change") // toggle picture-creation mode
23 | PicResolutionKind = CardKind("pic_resolution") // adjust picture resolution
24 | PicTextMoreKind = CardKind("pic_text_more") // regenerate pictures from the same text
25 | PicVarMoreKind = CardKind("pic_var_more") // generate more picture variations
26 | RoleTagsChooseKind = CardKind("role_tags_choose") // choose a tag for the built-in roles
27 | RoleChooseKind = CardKind("role_choose") // choose a built-in role
28 | )
29 |
30 | var (
31 | GroupChatType = CardChatType("group")
32 | UserChatType = CardChatType("personal")
33 | )
34 |
35 | type CardMsg struct {
36 | Kind CardKind
37 | ChatType CardChatType
38 | Value interface{}
39 | SessionId string
40 | MsgId string
41 | }
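// Illustrative sketch (assumed values, not taken from the original source):
// a CardMsg is the decoded payload that a card button carries back to the
// bot. A "clear context" confirmation in a private chat could decode to
// something like:
//
//	CardMsg{
//		Kind:      ClearCardKind,
//		ChatType:  UserChatType,
//		Value:     "1",      // confirmation flag, matching the "1"/"0" pattern checked in card_pic_action.go
//		SessionId: "om_xxx", // placeholder session id
//		MsgId:     "om_yyy", // placeholder message id
//	}
//
// The concrete keys and values are whatever the button builders below put
// into their value maps; treat this literal purely as an example.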
42 |
43 | type MenuOption struct {
44 | value string
45 | label string
46 | }
47 |
48 | func replyCard(ctx context.Context,
49 | msgId *string,
50 | cardContent string,
51 | ) error {
52 | client := initialization.GetLarkClient()
53 | resp, err := client.Im.Message.Reply(ctx, larkim.NewReplyMessageReqBuilder().
54 | MessageId(*msgId).
55 | Body(larkim.NewReplyMessageReqBodyBuilder().
56 | MsgType(larkim.MsgTypeInteractive).
57 | Uuid(uuid.New().String()).
58 | Content(cardContent).
59 | Build()).
60 | Build())
61 |
62 | // handle the request error
63 | if err != nil {
64 | fmt.Println(err)
65 | return err
66 | }
67 |
68 | // handle server-side errors
69 | if !resp.Success() {
70 | fmt.Println(resp.Code, resp.Msg, resp.RequestId())
71 | return errors.New(resp.Msg)
72 | }
73 | return nil
74 | }
75 |
76 | func replyCardWithBackId(ctx context.Context,
77 | msgId *string,
78 | cardContent string,
79 | ) (*string, error) {
80 | client := initialization.GetLarkClient()
81 | resp, err := client.Im.Message.Reply(ctx, larkim.NewReplyMessageReqBuilder().
82 | MessageId(*msgId).
83 | Body(larkim.NewReplyMessageReqBodyBuilder().
84 | MsgType(larkim.MsgTypeInteractive).
85 | Uuid(uuid.New().String()).
86 | Content(cardContent).
87 | Build()).
88 | Build())
89 |
90 | // handle the request error
91 | if err != nil {
92 | fmt.Println(err)
93 | return nil, err
94 | }
95 |
96 | // handle server-side errors
97 | if !resp.Success() {
98 | fmt.Println(resp.Code, resp.Msg, resp.RequestId())
99 | return nil, errors.New(resp.Msg)
100 | }
101 |
102 | //ctx = context.WithValue(ctx, "SendMsgId", *resp.Data.MessageId)
103 | //SendMsgId := ctx.Value("SendMsgId")
104 | //pp.Println(SendMsgId)
105 | return resp.Data.MessageId, nil
106 | }
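// Note: unlike replyCard above, replyCardWithBackId hands back the id of the
// card message it just created. The streaming flow in event_msg_action.go
// appears to rely on this: it keeps a cardId from the initial "processing"
// card and then repeatedly patches that same card (updateTextCard /
// updateFinalCard) as the answer streams in.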
107 |
108 | func newSendCard(header *larkcard.MessageCardHeader, elements ...larkcard.MessageCardElement) (string, error) {
109 | config := larkcard.NewMessageCardConfig().
110 | WideScreenMode(false).
111 | EnableForward(true).
112 | UpdateMulti(true).
113 | Build()
114 | var aElementPool []larkcard.MessageCardElement
115 | for _, element := range elements {
116 | aElementPool = append(aElementPool, element)
117 | }
118 | // card message body
119 | cardContent, err := larkcard.NewMessageCard().
120 | Config(config).
121 | Header(header).
122 | Elements(
123 | aElementPool,
124 | ).
125 | String()
126 | return cardContent, err
127 | }
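// Usage sketch, mirroring CommonProcessPicModeChange in card_pic_action.go:
// a card is assembled from a header plus any number of elements, and the
// resulting JSON string is either returned to Lark or sent via replyCard.
//
//	cardContent, err := newSendCard(
//		withHeader("🖼️ 已进入图片创作模式", larkcard.TemplateBlue),
//		withNote("提醒:回复文本或图片,让AI生成相关的图片。"))
//	if err == nil {
//		// msgId is a string holding the id of the message being replied to
//		_ = replyCard(context.Background(), &msgId, cardContent)
//	}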
128 | func newSendCardWithOutHeader(
129 | elements ...larkcard.MessageCardElement) (string, error) {
130 | config := larkcard.NewMessageCardConfig().
131 | WideScreenMode(false).
132 | EnableForward(true).
133 | UpdateMulti(true).
134 | Build()
135 | var aElementPool []larkcard.MessageCardElement
136 | for _, element := range elements {
137 | aElementPool = append(aElementPool, element)
138 | }
139 | // card message body
140 | cardContent, err := larkcard.NewMessageCard().
141 | Config(config).
142 | Elements(
143 | aElementPool,
144 | ).
145 | String()
146 | return cardContent, err
147 | }
148 |
149 | func newSimpleSendCard(
150 | elements ...larkcard.MessageCardElement) (string,
151 | error) {
152 | config := larkcard.NewMessageCardConfig().
153 | WideScreenMode(false).
154 | EnableForward(true).
155 | UpdateMulti(false).
156 | Build()
157 | var aElementPool []larkcard.MessageCardElement
158 | for _, element := range elements {
159 | aElementPool = append(aElementPool, element)
160 | }
161 | // card message body
162 | cardContent, err := larkcard.NewMessageCard().
163 | Config(config).
164 | Elements(
165 | aElementPool,
166 | ).
167 | String()
168 | return cardContent, err
169 | }
170 |
171 | // withSplitLine builds a horizontal divider element
172 | func withSplitLine() larkcard.MessageCardElement {
173 | splitLine := larkcard.NewMessageCardHr().
174 | Build()
175 | return splitLine
176 | }
177 |
178 | // withHeader builds the card header (with a default title when none is given)
179 | func withHeader(title string, color string) *larkcard.
180 | MessageCardHeader {
181 | if title == "" {
182 | title = "🤖️机器人提醒"
183 | }
184 | header := larkcard.NewMessageCardHeader().
185 | Template(color).
186 | Title(larkcard.NewMessageCardPlainText().
187 | Content(title).
188 | Build()).
189 | Build()
190 | return header
191 | }
192 |
193 | // withNote builds a plain-text footnote element
194 | func withNote(note string) larkcard.MessageCardElement {
195 | noteElement := larkcard.NewMessageCardNote().
196 | Elements([]larkcard.MessageCardNoteElement{larkcard.NewMessageCardPlainText().
197 | Content(note).
198 | Build()}).
199 | Build()
200 | return noteElement
201 | }
202 |
203 | // withMainMd builds a markdown card body
204 | func withMainMd(msg string) larkcard.MessageCardElement {
205 | msg, err := processMessage(msg)
206 | if err != nil {
207 | return nil
208 | }
209 | msg = processNewLine(msg)
210 | mainElement := larkcard.NewMessageCardDiv().
211 | Fields([]*larkcard.MessageCardField{larkcard.NewMessageCardField().
212 | Text(larkcard.NewMessageCardLarkMd().
213 | Content(msg).
214 | Build()).
215 | IsShort(true).
216 | Build()}).
217 | Build()
218 | return mainElement
219 | }
220 |
221 | // withMainText builds a plain-text card body
222 | func withMainText(msg string) larkcard.MessageCardElement {
223 | msg, err := processMessage(msg)
224 | if err != nil {
225 | return nil
226 | }
227 | msg = cleanTextBlock(msg)
228 | mainElement := larkcard.NewMessageCardDiv().
229 | Fields([]*larkcard.MessageCardField{larkcard.NewMessageCardField().
230 | Text(larkcard.NewMessageCardPlainText().
231 | Content(msg).
232 | Build()).
233 | IsShort(false).
234 | Build()}).
235 | Build()
236 | return mainElement
237 | }
238 |
239 | func withImageDiv(imageKey string) larkcard.MessageCardElement {
240 | imageElement := larkcard.NewMessageCardImage().
241 | ImgKey(imageKey).
242 | Alt(larkcard.NewMessageCardPlainText().Content("").
243 | Build()).
244 | Preview(true).
245 | Mode(larkcard.MessageCardImageModelCropCenter).
246 | CompactWidth(true).
247 | Build()
248 | return imageElement
249 | }
250 |
251 | // withMdAndExtraBtn builds a markdown body with an extra button
252 | func withMdAndExtraBtn(msg string, btn *larkcard.
253 | MessageCardEmbedButton) larkcard.MessageCardElement {
254 | msg, err := processMessage(msg)
255 | if err != nil {
256 | return nil
257 | }
258 | msg = processNewLine(msg)
259 | mainElement := larkcard.NewMessageCardDiv().
260 | Fields(
261 | []*larkcard.MessageCardField{
262 | larkcard.NewMessageCardField().
263 | Text(larkcard.NewMessageCardLarkMd().
264 | Content(msg).
265 | Build()).
266 | IsShort(true).
267 | Build()}).
268 | Extra(btn).
269 | Build()
270 | return mainElement
271 | }
272 |
273 | func newBtn(content string, value map[string]interface{},
274 | typename larkcard.MessageCardButtonType) *larkcard.
275 | MessageCardEmbedButton {
276 | btn := larkcard.NewMessageCardEmbedButton().
277 | Type(typename).
278 | Value(value).
279 | Text(larkcard.NewMessageCardPlainText().
280 | Content(content).
281 | Build())
282 | return btn
283 | }
284 |
285 | func newMenu(
286 | placeHolder string,
287 | value map[string]interface{},
288 | options ...MenuOption,
289 | ) *larkcard.
290 | MessageCardEmbedSelectMenuStatic {
291 | var aOptionPool []*larkcard.MessageCardEmbedSelectOption
292 | for _, option := range options {
293 | aOption := larkcard.NewMessageCardEmbedSelectOption().
294 | Value(option.value).
295 | Text(larkcard.NewMessageCardPlainText().
296 | Content(option.label).
297 | Build())
298 | aOptionPool = append(aOptionPool, aOption)
299 |
300 | }
301 | btn := larkcard.NewMessageCardEmbedSelectMenuStatic().
302 | MessageCardEmbedSelectMenuStatic(larkcard.NewMessageCardEmbedSelectMenuBase().
303 | Options(aOptionPool).
304 | Placeholder(larkcard.NewMessageCardPlainText().
305 | Content(placeHolder).
306 | Build()).
307 | Value(value).
308 | Build()).
309 | Build()
310 | return btn
311 | }
312 |
313 | // withClearDoubleCheckBtn builds the confirm/cancel buttons for clearing the session
314 | func withClearDoubleCheckBtn(sessionID *string) larkcard.MessageCardElement {
315 | confirmBtn := newBtn("确认清除", map[string]interface{}{
316 | "value": "1",
317 | "kind": ClearCardKind,
318 | "chatType": UserChatType,
319 | "sessionId": *sessionID,
320 | }, larkcard.MessageCardButtonTypeDanger,
321 | )
322 | cancelBtn := newBtn("我再想想", map[string]interface{}{
323 | "value": "0",
324 | "kind": ClearCardKind,
325 | "sessionId": *sessionID,
326 | "chatType": UserChatType,
327 | },
328 | larkcard.MessageCardButtonTypeDefault)
329 |
330 | actions := larkcard.NewMessageCardAction().
331 | Actions([]larkcard.MessageCardActionElement{confirmBtn, cancelBtn}).
332 | Layout(larkcard.MessageCardActionLayoutBisected.Ptr()).
333 | Build()
334 |
335 | return actions
336 | }
337 |
338 | func withPicModeDoubleCheckBtn(sessionID *string) larkcard.
339 | MessageCardElement {
340 | confirmBtn := newBtn("切换模式", map[string]interface{}{
341 | "value": "1",
342 | "kind": PicModeChangeKind,
343 | "chatType": UserChatType,
344 | "sessionId": *sessionID,
345 | }, larkcard.MessageCardButtonTypeDanger,
346 | )
347 | cancelBtn := newBtn("我再想想", map[string]interface{}{
348 | "value": "0",
349 | "kind": PicModeChangeKind,
350 | "sessionId": *sessionID,
351 | "chatType": UserChatType,
352 | },
353 | larkcard.MessageCardButtonTypeDefault)
354 |
355 | actions := larkcard.NewMessageCardAction().
356 | Actions([]larkcard.MessageCardActionElement{confirmBtn, cancelBtn}).
357 | Layout(larkcard.MessageCardActionLayoutBisected.Ptr()).
358 | Build()
359 |
360 | return actions
361 | }
362 |
363 | func withOneBtn(btn *larkcard.MessageCardEmbedButton) larkcard.
364 | MessageCardElement {
365 | actions := larkcard.NewMessageCardAction().
366 | Actions([]larkcard.MessageCardActionElement{btn}).
367 | Layout(larkcard.MessageCardActionLayoutFlow.Ptr()).
368 | Build()
369 | return actions
370 | }
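// Editorial sketch, not part of the original file: the recurring pattern in the
// helpers above is "build elements, wrap them into a card, reply". A hypothetical
// one-button card, reusing only identifiers defined in this file:
func sendOneButtonCardSketch(ctx context.Context, sessionId *string, msgId *string) error {
	btn := newBtn("再来一张", map[string]interface{}{
		"value":     "1",
		"kind":      PicTextMoreKind,
		"chatType":  UserChatType,
		"sessionId": *sessionId,
		"msgId":     *msgId,
	}, larkcard.MessageCardButtonTypePrimary)
	card, err := newSendCardWithOutHeader(withOneBtn(btn))
	if err != nil {
		return err
	}
	return replyCard(ctx, msgId, card)
}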
371 |
372 | // withPicResolutionBtn builds the picture-resolution selection menu
373 |
374 | func withPicResolutionBtn(sessionID *string) larkcard.
375 | MessageCardElement {
376 | cancelMenu := newMenu("默认分辨率",
377 | map[string]interface{}{
378 | "value": "0",
379 | "kind": PicResolutionKind,
380 | "sessionId": *sessionID,
381 | "msgId": *sessionID,
382 | },
383 | MenuOption{
384 | label: "256x256",
385 | value: string(services.Resolution256),
386 | },
387 | MenuOption{
388 | label: "512x512",
389 | value: string(services.Resolution512),
390 | },
391 | MenuOption{
392 | label: "1024x1024",
393 | value: string(services.Resolution1024),
394 | },
395 | )
396 |
397 | actions := larkcard.NewMessageCardAction().
398 | Actions([]larkcard.MessageCardActionElement{cancelMenu}).
399 | Layout(larkcard.MessageCardActionLayoutFlow.Ptr()).
400 | Build()
401 | return actions
402 | }
403 | func withRoleTagsBtn(sessionID *string, tags ...string) larkcard.
404 | MessageCardElement {
405 | var menuOptions []MenuOption
406 |
407 | for _, tag := range tags {
408 | menuOptions = append(menuOptions, MenuOption{
409 | label: tag,
410 | value: tag,
411 | })
412 | }
413 | cancelMenu := newMenu("选择角色分类",
414 | map[string]interface{}{
415 | "value": "0",
416 | "kind": RoleTagsChooseKind,
417 | "sessionId": *sessionID,
418 | "msgId": *sessionID,
419 | },
420 | menuOptions...,
421 | )
422 |
423 | actions := larkcard.NewMessageCardAction().
424 | Actions([]larkcard.MessageCardActionElement{cancelMenu}).
425 | Layout(larkcard.MessageCardActionLayoutFlow.Ptr()).
426 | Build()
427 | return actions
428 | }
429 |
430 | func withRoleBtn(sessionID *string, titles ...string) larkcard.
431 | MessageCardElement {
432 | var menuOptions []MenuOption
433 |
434 | for _, tag := range titles {
435 | menuOptions = append(menuOptions, MenuOption{
436 | label: tag,
437 | value: tag,
438 | })
439 | }
440 | cancelMenu := newMenu("查看内置角色",
441 | map[string]interface{}{
442 | "value": "0",
443 | "kind": RoleChooseKind,
444 | "sessionId": *sessionID,
445 | "msgId": *sessionID,
446 | },
447 | menuOptions...,
448 | )
449 |
450 | actions := larkcard.NewMessageCardAction().
451 | Actions([]larkcard.MessageCardActionElement{cancelMenu}).
452 | Layout(larkcard.MessageCardActionLayoutFlow.Ptr()).
453 | Build()
454 | return actions
455 | }
456 |
457 | func replyMsg(ctx context.Context, msg string, msgId *string) error {
458 | msg, err := processMessage(msg)
459 | if err != nil {
460 | return err
461 | }
462 | client := initialization.GetLarkClient()
463 | content := larkim.NewTextMsgBuilder().
464 | Text(msg).
465 | Build()
466 |
467 | resp, err := client.Im.Message.Reply(ctx, larkim.NewReplyMessageReqBuilder().
468 | MessageId(*msgId).
469 | Body(larkim.NewReplyMessageReqBodyBuilder().
470 | MsgType(larkim.MsgTypeText).
471 | Uuid(uuid.New().String()).
472 | Content(content).
473 | Build()).
474 | Build())
475 |
476 | // handle request error
477 | if err != nil {
478 | fmt.Println(err)
479 | return err
480 | }
481 |
482 | // handle server-side error
483 | if !resp.Success() {
484 | fmt.Println(resp.Code, resp.Msg, resp.RequestId())
485 | return errors.New(resp.Msg)
486 | }
487 | return nil
488 | }
489 |
490 | func uploadImage(base64Str string) (*string, error) {
491 | imageBytes, err := base64.StdEncoding.DecodeString(base64Str)
492 | if err != nil {
493 | fmt.Println(err)
494 | return nil, err
495 | }
496 | client := initialization.GetLarkClient()
497 | resp, err := client.Im.Image.Create(context.Background(),
498 | larkim.NewCreateImageReqBuilder().
499 | Body(larkim.NewCreateImageReqBodyBuilder().
500 | ImageType(larkim.ImageTypeMessage).
501 | Image(bytes.NewReader(imageBytes)).
502 | Build()).
503 | Build())
504 |
505 | // handle request error
506 | if err != nil {
507 | fmt.Println(err)
508 | return nil, err
509 | }
510 |
511 | // handle server-side error
512 | if !resp.Success() {
513 | fmt.Println(resp.Code, resp.Msg, resp.RequestId())
514 | return nil, errors.New(resp.Msg)
515 | }
516 | return resp.Data.ImageKey, nil
517 | }
518 | func replyImage(ctx context.Context, ImageKey *string,
519 | msgId *string) error {
520 | //fmt.Println("sendMsg", ImageKey, msgId)
521 |
522 | msgImage := larkim.MessageImage{ImageKey: *ImageKey}
523 | content, err := msgImage.String()
524 | if err != nil {
525 | fmt.Println(err)
526 | return err
527 | }
528 | client := initialization.GetLarkClient()
529 |
530 | resp, err := client.Im.Message.Reply(ctx, larkim.NewReplyMessageReqBuilder().
531 | MessageId(*msgId).
532 | Body(larkim.NewReplyMessageReqBodyBuilder().
533 | MsgType(larkim.MsgTypeImage).
534 | Uuid(uuid.New().String()).
535 | Content(content).
536 | Build()).
537 | Build())
538 |
539 | // handle request error
540 | if err != nil {
541 | fmt.Println(err)
542 | return err
543 | }
544 |
545 | // handle server-side error
546 | if !resp.Success() {
547 | fmt.Println(resp.Code, resp.Msg, resp.RequestId())
548 | return errors.New(resp.Msg)
549 | }
550 | return nil
551 |
552 | }
553 |
554 | func replayImageCardByBase64(ctx context.Context, base64Str string,
555 | msgId *string, sessionId *string, question string) error {
556 | imageKey, err := uploadImage(base64Str)
557 | if err != nil {
558 | return err
559 | }
560 | //example := "img_v2_041b28e3-5680-48c2-9af2-497ace79333g"
561 | //imageKey := &example
562 | //fmt.Println("imageKey", *imageKey)
563 | err = sendImageCard(ctx, *imageKey, msgId, sessionId, question)
564 | if err != nil {
565 | return err
566 | }
567 | return nil
568 | }
569 |
570 | func sendMsg(ctx context.Context, msg string, chatId *string) error {
571 | //fmt.Println("sendMsg", msg, chatId)
572 | msg, err := processMessage(msg)
573 | if err != nil {
574 | return err
575 | }
576 | client := initialization.GetLarkClient()
577 | content := larkim.NewTextMsgBuilder().
578 | Text(msg).
579 | Build()
580 |
581 | //fmt.Println("content", content)
582 |
583 | resp, err := client.Im.Message.Create(ctx, larkim.NewCreateMessageReqBuilder().
584 | ReceiveIdType(larkim.ReceiveIdTypeChatId).
585 | Body(larkim.NewCreateMessageReqBodyBuilder().
586 | MsgType(larkim.MsgTypeText).
587 | ReceiveId(*chatId).
588 | Content(content).
589 | Build()).
590 | Build())
591 |
592 | // handle request error
593 | if err != nil {
594 | fmt.Println(err)
595 | return err
596 | }
597 |
598 | // handle server-side error
599 | if !resp.Success() {
600 | fmt.Println(resp.Code, resp.Msg, resp.RequestId())
601 | return errors.New(resp.Msg)
602 | }
603 | return nil
604 | }
605 |
606 | func PatchCard(ctx context.Context, msgId *string,
607 | cardContent string) error {
608 | //fmt.Println("sendMsg", msg, chatId)
609 | client := initialization.GetLarkClient()
610 | //content := larkim.NewTextMsgBuilder().
611 | // Text(msg).
612 | // Build()
613 |
614 | //fmt.Println("content", content)
615 |
616 | resp, err := client.Im.Message.Patch(ctx, larkim.NewPatchMessageReqBuilder().
617 | MessageId(*msgId).
618 | Body(larkim.NewPatchMessageReqBodyBuilder().
619 | Content(cardContent).
620 | Build()).
621 | Build())
622 |
623 | // handle request error
624 | if err != nil {
625 | fmt.Println(err)
626 | return err
627 | }
628 |
629 | // handle server-side error
630 | if !resp.Success() {
631 | fmt.Println(resp.Code, resp.Msg, resp.RequestId())
632 | return errors.New(resp.Msg)
633 | }
634 | return nil
635 | }
636 |
637 | func sendClearCacheCheckCard(ctx context.Context,
638 | sessionId *string, msgId *string) {
639 | newCard, _ := newSendCard(
640 | withHeader("🆑 机器人提醒", larkcard.TemplateBlue),
641 | withMainMd("您确定要清除对话上下文吗?"),
642 | withNote("请注意,这将开始一个全新的对话,您将无法利用之前话题的历史信息"),
643 | withClearDoubleCheckBtn(sessionId))
644 | replyCard(ctx, msgId, newCard)
645 | }
646 |
647 | func sendSystemInstructionCard(ctx context.Context,
648 | sessionId *string, msgId *string, content string) {
649 | newCard, _ := newSendCard(
650 | withHeader("🥷 已进入角色扮演模式", larkcard.TemplateIndigo),
651 | withMainText(content),
652 | withNote("请注意,这将开始一个全新的对话,您将无法利用之前话题的历史信息"))
653 | replyCard(ctx, msgId, newCard)
654 | }
655 |
656 | func sendOnProcessCard(ctx context.Context,
657 | sessionId *string, msgId *string) (*string, error) {
658 | newCard, _ := newSendCardWithOutHeader(
659 | withNote("正在思考,请稍等..."))
660 | id, err := replyCardWithBackId(ctx, msgId, newCard)
661 | if err != nil {
662 | return nil, err
663 | }
664 | return id, nil
665 | }
666 |
667 | func updateTextCard(ctx context.Context, msg string,
668 | msgId *string) error {
669 | newCard, _ := newSendCardWithOutHeader(
670 | withMainText(msg),
671 | withNote("正在生成,请稍等..."))
672 | err := PatchCard(ctx, msgId, newCard)
673 | if err != nil {
674 | return err
675 | }
676 | return nil
677 | }
678 | func updateFinalCard(
679 | ctx context.Context,
680 | msg string,
681 | msgId *string,
682 | ) error {
683 | newCard, _ := newSendCardWithOutHeader(
684 | withMainText(msg))
685 | err := PatchCard(ctx, msgId, newCard)
686 | if err != nil {
687 | return err
688 | }
689 | return nil
690 | }
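// Editorial sketch, not part of the original file: how the streaming "typewriter"
// flow is expected to compose the helpers above: send a placeholder card, patch it
// as partial answers arrive, then drop the "generating" note with the final text.
// The answerStream channel is a hypothetical source of partial answers.
func streamAnswerSketch(ctx context.Context, sessionId *string, msgId *string, answerStream <-chan string) error {
	cardId, err := sendOnProcessCard(ctx, sessionId, msgId)
	if err != nil {
		return err
	}
	var full string
	for partial := range answerStream {
		full += partial
		// Each patch rewrites the whole card body with the text received so far.
		if err := updateTextCard(ctx, full, cardId); err != nil {
			return err
		}
	}
	return updateFinalCard(ctx, full, cardId)
}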
691 |
692 | func sendHelpCard(ctx context.Context,
693 | sessionId *string, msgId *string) {
694 | newCard, _ := newSendCard(
695 | withHeader("🎒需要帮助吗?", larkcard.TemplateBlue),
696 | withMainMd("**我是具备打字机效果的聊天机器人!**"),
697 | withSplitLine(),
698 | withMdAndExtraBtn(
699 | "** 🆑 清除话题上下文**\n文本回复 *清除* 或 */clear*",
700 | newBtn("立刻清除", map[string]interface{}{
701 | "value": "1",
702 | "kind": ClearCardKind,
703 | "chatType": UserChatType,
704 | "sessionId": *sessionId,
705 | }, larkcard.MessageCardButtonTypeDanger)),
706 | withMainMd("🛖 **内置角色列表** \n"+" 文本回复 *角色列表* 或 */roles*"),
707 | withMainMd("🥷 **角色扮演模式**\n文本回复*角色扮演* 或 */system*+空格+角色信息"),
708 | withSplitLine(),
709 | withMainMd("🎒 **需要更多帮助**\n文本回复 *帮助* 或 */help*"),
710 | )
711 | replyCard(ctx, msgId, newCard)
712 | }
713 |
714 | func sendImageCard(ctx context.Context, imageKey string,
715 | msgId *string, sessionId *string, question string) error {
716 | newCard, _ := newSimpleSendCard(
717 | withImageDiv(imageKey),
718 | withSplitLine(),
719 | // "one more" button
720 | withOneBtn(newBtn("再来一张", map[string]interface{}{
721 | "value": question,
722 | "kind": PicTextMoreKind,
723 | "chatType": UserChatType,
724 | "msgId": *msgId,
725 | "sessionId": *sessionId,
726 | }, larkcard.MessageCardButtonTypePrimary)),
727 | )
728 | replyCard(ctx, msgId, newCard)
729 | return nil
730 | }
731 |
732 | func sendBalanceCard(ctx context.Context, msgId *string,
733 | balance openai.BalanceResponse) {
734 | newCard, _ := newSendCard(
735 | withHeader("🎰️ 余额查询", larkcard.TemplateBlue),
736 | withMainMd(fmt.Sprintf("总额度: %.2f$", balance.TotalGranted)),
737 | withMainMd(fmt.Sprintf("已用额度: %.2f$", balance.TotalUsed)),
738 | withMainMd(fmt.Sprintf("可用额度: %.2f$",
739 | balance.TotalAvailable)),
740 | withNote(fmt.Sprintf("有效期: %s - %s",
741 | balance.EffectiveAt.Format("2006-01-02 15:04:05"),
742 | balance.ExpiresAt.Format("2006-01-02 15:04:05"))),
743 | )
744 | replyCard(ctx, msgId, newCard)
745 | }
746 |
747 | func SendRoleTagsCard(ctx context.Context,
748 | sessionId *string, msgId *string, roleTags []string) {
749 | newCard, _ := newSendCard(
750 | withHeader("🛖 请选择角色类别", larkcard.TemplateIndigo),
751 | withRoleTagsBtn(sessionId, roleTags...),
752 | withNote("提醒:选择角色所属分类,以便我们为您推荐更多相关角色。"))
753 | replyCard(ctx, msgId, newCard)
754 | }
755 |
756 | func SendRoleListCard(ctx context.Context,
757 | sessionId *string, msgId *string, roleTag string, roleList []string) {
758 | newCard, _ := newSendCard(
759 | withHeader("🛖 角色列表"+" - "+roleTag, larkcard.TemplateIndigo),
760 | withRoleBtn(sessionId, roleList...),
761 | withNote("提醒:选择内置场景,快速进入角色扮演模式。"))
762 | replyCard(ctx, msgId, newCard)
763 | }
764 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | GNU GENERAL PUBLIC LICENSE
2 | Version 3, 29 June 2007
3 |
4 | Copyright (C) 2007 Free Software Foundation, Inc.