├── .github └── workflows │ └── docker-image.yml ├── .gitignore ├── LICENSE ├── Makefile ├── README.md ├── cmd ├── cobra │ └── ctor.go └── iocgo │ ├── annotation │ ├── cobra.go │ ├── del.go │ ├── get.go │ ├── post.go │ └── put.go │ └── main.go ├── core ├── cache │ └── memory.go ├── common │ ├── agent │ │ └── agent.go │ ├── basic.go │ ├── exec.go │ ├── gin.go │ ├── http.go │ ├── inited │ │ └── initialized.go │ ├── poll.go │ ├── toolcall │ │ ├── complete.go │ │ ├── message.go │ │ └── tpl.go │ ├── vars │ │ └── const.go │ └── wasm │ │ └── load.go ├── gin │ ├── initializer.go │ ├── inter │ │ ├── adapter.go │ │ └── matcher.go │ ├── model │ │ ├── keyv.go │ │ └── v1.go │ ├── response │ │ ├── chunk.go │ │ ├── matcher.go │ │ ├── message.go │ │ └── token.go │ └── v1.go ├── logger │ └── log.go ├── scan │ └── export.go └── tokenizer │ ├── elem.go │ ├── lexer.go │ └── parser.go ├── deploy ├── Dockerfile └── Dockerfile-BL ├── go.mod ├── go.sum ├── main.go ├── relay ├── 3rd ├── alloc │ ├── bing │ │ ├── bing.go │ │ └── ctor.go │ ├── coze │ │ ├── coze.go │ │ ├── ctor.go │ │ └── websdk.go │ ├── grok │ │ ├── ctor.go │ │ └── grok.go │ └── you │ │ ├── ctor.go │ │ └── you.go ├── hf │ ├── adapter.go │ ├── ctor.go │ ├── fetch.go │ └── model.go ├── llm │ ├── bing │ │ ├── adapter.go │ │ ├── ctor.go │ │ ├── message.go │ │ └── toolcall.go │ ├── blackbox │ │ ├── adapter.go │ │ ├── ctor.go │ │ ├── fetch.go │ │ ├── message.go │ │ └── toolcall.go │ ├── coze │ │ ├── adapter.go │ │ ├── ctor.go │ │ ├── message.go │ │ └── toolcall.go │ ├── cursor │ │ ├── adapter.go │ │ ├── ctor.go │ │ ├── fetch.go │ │ ├── message.go │ │ ├── message.pb.go │ │ ├── message.proto │ │ └── toolcall.go │ ├── deepseek │ │ ├── adapter.go │ │ ├── ctor.go │ │ ├── fetch.go │ │ ├── message.go │ │ ├── sha3_wasm_bg.wasm │ │ └── toolcall.go │ ├── grok │ │ ├── adapter.go │ │ ├── ctor.go │ │ ├── fetch.go │ │ ├── message.go │ │ └── toolcall.go │ ├── lmsys │ │ ├── adapter.go │ │ ├── ctor.go │ │ ├── fetch.go │ │ ├── message.go │ │ └── toolcall.go │ ├── qodo │ │ ├── adapter.go │ │ ├── ctor.go │ │ ├── fetch.go │ │ ├── message.go │ │ └── toolcall.go │ ├── v1 │ │ ├── adapter.go │ │ ├── ctor.go │ │ ├── fetch.go │ │ ├── message.go │ │ └── toolcall.go │ ├── windsurf │ │ ├── adapter.go │ │ ├── ctor.go │ │ ├── fetch.go │ │ ├── message.go │ │ ├── message.pb.go │ │ ├── message.proto │ │ └── toolcall.go │ └── you │ │ ├── adapter.go │ │ ├── ctor.go │ │ ├── message.go │ │ └── toolcall.go ├── pg │ ├── adapter.go │ └── ctor.go └── scan │ ├── 3rd.go │ ├── 3rd_e.go │ └── export.go └── wire └── container.go /.github/workflows/docker-image.yml: -------------------------------------------------------------------------------- 1 | name: Build ChatGPT-Adatper images and copy image to docker hub 2 | on: 3 | workflow_dispatch: 4 | push: 5 | branches: 6 | - main 7 | paths: 8 | - '*' 9 | tags: 10 | - 'v*' 11 | jobs: 12 | build-docker-images: 13 | runs-on: ubuntu-22.04 14 | steps: 15 | - name: Checkout 16 | uses: actions/checkout@v3 17 | with: 18 | fetch-depth: 1 19 | - name: Install Dependencies 20 | run: | 21 | sudo apt update && sudo apt install -y golang 22 | - name: Set up QEMU (optional) 23 | uses: docker/setup-qemu-action@v2 24 | - name: Set up Docker Buildx 25 | uses: docker/setup-buildx-action@v2 26 | with: 27 | driver-opts: network=host 28 | - name: Cache Docker layers 29 | uses: actions/cache@v4 30 | with: 31 | path: /tmp/.buildx-cache 32 | key: ${{ runner.os }}-buildx-${{ github.sha }} 33 | restore-keys: | 34 | ${{ runner.os }}-buildx- 35 | - name: Login to GitHub Container 
Registry 36 | uses: docker/login-action@v2 37 | with: 38 | registry: ghcr.io 39 | username: ${{ github.repository_owner }} 40 | password: ${{ secrets.GOLBAL }} 41 | - name: Set DOCKER_REPO_TAGGED based on branch or tag 42 | run: | 43 | if [[ "${{ github.ref_name }}" == "main" ]]; then 44 | echo "DOCKER_REPO_TAGGED=ghcr.io/${{ github.repository_owner }}/chatgpt-adapter:latest" >> $GITHUB_ENV 45 | echo "DOCKER_REPO_TAGGED_ARM=ghcr.io/${{ github.repository_owner }}/chatgpt-adapter-arm64:latest" >> $GITHUB_ENV 46 | else 47 | echo "DOCKER_REPO_TAGGED=ghcr.io/${{ github.repository_owner }}/chatgpt-adapter:${{ github.ref_name }}" >> $GITHUB_ENV 48 | echo "DOCKER_REPO_TAGGED_ARM=ghcr.io/${{ github.repository_owner }}/chatgpt-adapter-arm64:${{ github.ref_name }}" >> $GITHUB_ENV 49 | fi 50 | - name: Build and publish image for main branch or tag push event 51 | env: 52 | DOCKER_REPO_TAGGED: ${{ env.DOCKER_REPO_TAGGED }} 53 | DOCKER_REPO_TAGGED_ARM: ${{ env.DOCKER_REPO_TAGGED_ARM }} 54 | run: | 55 | docker buildx build \ 56 | -f deploy/Dockerfile \ 57 | --platform linux/amd64 \ 58 | --label "org.opencontainers.image.source=https://github.com/${{ github.repository_owner }}/chatgpt-adapter" \ 59 | --label "org.opencontainers.image.description=chatgpt-adapter image" \ 60 | --push \ 61 | --cache-from=type=local,src=/tmp/.buildx-cache \ 62 | --cache-to=type=local,dest=/tmp/.buildx-cache \ 63 | -t ${DOCKER_REPO_TAGGED} \ 64 | . 65 | sed -i 's/build-linux/build-linux-arm64/g' deploy/Dockerfile 66 | sed -i 's/bin\/linux\/server/bin\/linux\/server-arm64/g' deploy/Dockerfile 67 | sed -i 's/alpine:3\.19\.0/arm64v8\/alpine:3\.19\.1/g' deploy/Dockerfile 68 | docker buildx build \ 69 | -f deploy/Dockerfile \ 70 | --platform linux/arm64/v8 \ 71 | --label "org.opencontainers.image.source=https://github.com/${{ github.repository_owner }}/chatgpt-adapter" \ 72 | --label "org.opencontainers.image.description=chatgpt-adapter image" \ 73 | --push \ 74 | --cache-from=type=local,src=/tmp/.buildx-cache \ 75 | --cache-to=type=local,dest=/tmp/.buildx-cache \ 76 | -t ${DOCKER_REPO_TAGGED_ARM} \ 77 | . 
78 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # If you prefer the allow list template instead of the deny list, see community template: 2 | # https://github.com/github/gitignore/blob/main/community/Golang/Go.AllowList.gitignore 3 | # 4 | # Binaries for programs and plugins 5 | *.exe 6 | *.exe~ 7 | *.dll 8 | *.so 9 | *.dylib 10 | 11 | # Test binary, built with `go test -c` 12 | *.test 13 | 14 | # Output of the go coverage tool, specifically when used with LiteIDE 15 | *.out 16 | .env 17 | /bin/ 18 | /log/ 19 | /images/ 20 | /tmp/ 21 | /plugins/ 22 | node_modules/ 23 | 24 | # Dependency directories (remove the comment below to include it) 25 | # vendor/ 26 | 27 | # Go workspace file 28 | go.work 29 | cookies_test.go 30 | internal/gin.handler/3rd.go 31 | cookies.txt 32 | config.yaml 33 | package-lock.json 34 | CHANGELOG.md 35 | 36 | # osx system 37 | .DS_Store -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | TARGET_EXEC := server 2 | ENV := CGO_ENABLED=0 3 | 4 | .PHONY: all changelog clean install build 5 | 6 | all: clean install build-linux build-linux-arm64 build-osx build-win 7 | 8 | changelog: 9 | conventional-changelog -p angular -o CHANGELOG.md -w -r 0 10 | 11 | clean: 12 | go clean -cache 13 | 14 | install: clean 15 | go install -ldflags="-s -w" -trimpath ./cmd/iocgo 16 | 17 | build-linux: 18 | ${ENV} GOARCH=amd64 GOOS=linux go build -toolexec iocgo $(argv) -ldflags="-s -w" -o bin/linux/${TARGET_EXEC} -trimpath main.go 19 | 20 | build-linux-arm64: 21 | ${ENV} GOARCH=arm64 GOOS=linux go build -toolexec iocgo $(argv) -ldflags="-s -w" -o bin/linux/${TARGET_EXEC}-arm64 -trimpath main.go 22 | 23 | build-osx: 24 | ${ENV} GOARCH=amd64 GOOS=darwin go build -toolexec iocgo $(argv) -ldflags="-s -w" -o bin/osx/${TARGET_EXEC} -trimpath main.go 25 | 26 | build-win: 27 | ${ENV} GOARCH=amd64 GOOS=windows go build -toolexec iocgo $(argv) -ldflags="-s -w" -o bin/windows/${TARGET_EXEC}.exe -trimpath main.go -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 |

ChatGPT Adapter

2 |
3 | This service adapts the reverse-engineered chat interfaces of several AI products — openai-api, coze, deepseek, cursor, windsurf, blackbox, you, grok, and bing image generation — into an OpenAI API compatible server. 4 |
5 | 6 | ![image](https://github.com/user-attachments/assets/93be2041-8ebc-466a-9fd4-939f4f9082f2) 7 | 8 | 具体配置请 » [查阅文档](https://bincooo.github.io/chatgpt-adapter) » 9 | 10 | 支持高速流式输出、支持多轮对话,与ChatGPT接口完全兼容。 11 | 12 | 使用本项目,可享用以下内容转v1接口: 13 | 14 | - [字节coze国际版](https://www.coze.com) 15 | - [new bing copilot](https://copilot.microsoft.com) 16 | - [cursor editor](https://www.cursor.com) 17 | - [windsurf editor](https://codeium.com) 18 | - [qodo](https://www.qodo.ai) 19 | - [deepseek](https://www.deepseek.com) 20 | - [Chatbot Arena LMSYS](https://lmarena.ai) 21 | - [you](https://you.com) 22 | - [grok](https://grok.com) 23 | - [huggingface 绘图](https://huggingface.io) 24 | 25 | ![image](https://github.com/user-attachments/assets/1edf6efe-028c-4ec8-a7c4-a3c4fc2753d8) 26 | 27 | ### 执行前置 28 | 29 | 安装中间编译工具 30 | ```shell 31 | go install ./cmd/iocgo 32 | 33 | # or 34 | 35 | make install 36 | ``` 37 | ### 使用 38 | 39 | 40 | 正常指令附加 41 | ```shell 42 | # ----- go build ------ # 43 | # 原指令 # 44 | go build ./main.go 45 | 46 | # 附加指令 # 47 | go build -toolexec iocgo ./main.go 48 | 49 | 50 | # ----- go run ------ # 51 | # 原指令 # 52 | go run ./main.go 53 | 54 | # 附加指令 # 55 | go run -toolexec iocgo ./main.go 56 | ``` 57 | 58 | 其它`go`指令同理 59 | 60 | 61 | ### 运行本项目三部曲 (linux / macos / window) 62 | 63 | ```shell 64 | make install 65 | 66 | make build 67 | 68 | ./bin/[os]/server[.exe] -h 69 | ``` 70 | 71 | ### Docker 启动 72 | 1. docker 命令: 73 | ```shell 74 | docker run -p 8080:8080 -v ./config.yaml:/app/config.yaml ghcr.io/bincooo/chatgpt-adapter:latest 75 | ``` 76 | 77 | 2. huggingface: [Duplicate this Space](https://huggingface.co/spaces/wIK5Ez2o/DEMO/tree/main?duplicate=true) 78 | 79 | ### systemctl 自启动配置 80 | ```adapter.service 81 | [Unit] 82 | Description=ChatGPT adapter 83 | After=network.target 84 | 85 | [Service] 86 | Type=simple 87 | WorkingDirectory=/your_work_dir 88 | ExecStart=/your_app --port 7860 89 | Restart=on-failure 90 | 91 | [Install] 92 | WantedBy=multi-user.target 93 | ``` 94 | 95 | ### 其它 ... 96 | 看到有不少朋友似乎对逆向爬虫十分感兴趣,那我这里就浅谈一下个人的一点小经验吧 97 | 98 | - 爬虫逆向之 [ja3 指纹篇](https://github.com/bincooo/chatgpt-adapter/discussions/106) 99 | - 爬虫逆向之 [new bing copilot篇](https://github.com/bincooo/chatgpt-adapter/discussions/105) 100 | - 爬虫逆向之 [cursor & windsurf (protobuf+gzip)篇](https://github.com/bincooo/chatgpt-adapter/discussions/107) 101 | 102 | ## 特别声明 103 | > 本仓库发布的程序代码及其中涉及的任何解锁和解密分析脚本,仅用于测试和学习研究,禁止用于商业用途,不能保证其合法性,准确性,完整性和有效性,请根据情况自行判断。 104 | > 105 | > 本项目内所有资源文件,禁止任何公众号、自媒体进行任何形式的转载、发布。 106 | > 107 | > 本人对任何脚本/代码/访问资源问题概不负责,包括但不限于由任何脚本错误导致的任何损失或损害。 108 | > 109 | > 间接使用脚本/代码/访问资源的任何用户,包括但不限于建立VPS或在某些行为违反国家/地区法律或相关法规的情况下进行传播, 本人对于由此引起的任何隐私泄漏或其他后果概不负责。 110 | > 111 | > 请勿将本仓库的任何内容用于商业或非法目的,否则后果自负。 112 | > 113 | > 如果任何单位或个人认为该项目的脚本/代码/访问资源可能涉嫌侵犯其权利,则应及时通知并提供身份证明,所有权证明,我们将在收到认证文件后删除相关脚本。 114 | > 115 | > 任何以任何方式查看此项目的人或直接或间接使用该项目的任何脚本的使用者都应仔细阅读此声明。本人保留随时更改或补充此免责声明的权利。一旦使用并复制了任何相关脚本或Script项目的规则,则视为您已接受此免责声明。 116 | > 117 | > 您必须在下载后的24小时内从计算机或手机中完全删除以上内容. 118 | > 119 | > 您使用或者复制了本仓库且本人制作的任何脚本/代码,则视为 已接受 此声明,请仔细阅读! 
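Supplementing the systemctl section above: once the unit file is created (assuming it is saved as `/etc/systemd/system/adapter.service`), it can be enabled with the standard systemd commands — a minimal sketch:
```shell
# reload unit definitions, then enable and start the service immediately
sudo systemctl daemon-reload
sudo systemctl enable --now adapter.service

# check that the adapter is running
systemctl status adapter.service
```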
-------------------------------------------------------------------------------- /cmd/cobra/ctor.go: -------------------------------------------------------------------------------- 1 | package cobra 2 | 3 | import ( 4 | "chatgpt-adapter/core/common/inited" 5 | "chatgpt-adapter/core/gin/inter" 6 | "chatgpt-adapter/core/logger" 7 | "github.com/gin-gonic/gin" 8 | "github.com/iocgo/sdk" 9 | "github.com/iocgo/sdk/cobra" 10 | "github.com/iocgo/sdk/env" 11 | "github.com/sirupsen/logrus" 12 | "os" 13 | "strings" 14 | ) 15 | 16 | type RootCommand struct { 17 | container *sdk.Container 18 | engine *gin.Engine 19 | env *env.Environment 20 | 21 | Port int `cobra:"port" short:"p" usage:"服务端口 port"` 22 | LogLevel string `cobra:"log" short:"L" usage:"日志级别: trace|debug|info|warn|error"` 23 | LogPath string `cobra:"log-path" usage:"日志路径 log path"` 24 | Proxied string `cobra:"proxies" short:"P" usage:"本地代理 proxies"` 25 | MView bool `cobra:"models" short:"M" usage:"展示模型列表"` 26 | } 27 | 28 | // @Cobra(name="cobra" 29 | // 30 | // version = "v3.0.0-beta" 31 | // use = "ChatGPT-Adapter" 32 | // short = "GPT接口适配器" 33 | // long = "GPT接口适配器。统一适配接口规范,集成了bing、claude-2,gemini...\n项目地址: https://github.com/bincooo/chatgpt-adapter" 34 | // run = "Run" 35 | // 36 | // ) 37 | func New(container *sdk.Container, engine *gin.Engine, config string) (rc cobra.ICobra, err error) { 38 | environment, err := sdk.InvokeBean[*env.Environment](container, "") 39 | if err != nil { 40 | return 41 | } 42 | 43 | rc = cobra.ICobraWrapper(&RootCommand{ 44 | container: container, 45 | engine: engine, 46 | env: environment, 47 | 48 | Port: 8080, 49 | LogLevel: "info", 50 | LogPath: "log", 51 | }, config) 52 | return 53 | } 54 | 55 | func (rc *RootCommand) Run(cmd *cobra.Command, args []string) { 56 | if rc.env.GetBool("server.debug") { 57 | println(rc.container.HealthLogger()) 58 | } 59 | 60 | if rc.MView { 61 | println("模型可用列表:") 62 | slice := sdk.ListInvokeAs[inter.Adapter](rc.container) 63 | for _, i := range slice { 64 | for _, mod := range i.Models() { 65 | println("- " + mod.Id) 66 | } 67 | } 68 | return 69 | } 70 | 71 | // init 72 | logger.InitLogger( 73 | rc.LogPath, 74 | LogLevel(rc.LogLevel), 75 | ) 76 | Initialized(rc) 77 | inited.Initialized(rc.env) 78 | 79 | // gin 80 | addr := ":" + rc.env.GetString("server.port") 81 | println("Listening and serving HTTP on 0.0.0.0" + addr) 82 | if err := rc.engine.Run(addr); err != nil { 83 | panic(err) 84 | } 85 | } 86 | 87 | func Initialized(rc *RootCommand) { 88 | if rc.env.GetInt("server.port") == 0 { 89 | rc.env.Set("server.port", rc.Port) 90 | } 91 | if rc.Proxied != "" { 92 | rc.env.Set("server.proxied", rc.Proxied) 93 | } 94 | 95 | if rc.env.GetString("server.password") == "" { 96 | for _, item := range os.Environ() { 97 | if len(item) > 9 && item[:9] == "PASSWORD=" { 98 | rc.env.Set("server.password", item[9:]) 99 | break 100 | } 101 | } 102 | } 103 | 104 | initFile(rc.env) 105 | } 106 | 107 | func LogLevel(lv string) logrus.Level { 108 | switch lv { 109 | case "trace": 110 | return logrus.TraceLevel 111 | case "debug": 112 | return logrus.DebugLevel 113 | case "warn": 114 | return logrus.WarnLevel 115 | case "error": 116 | return logrus.ErrorLevel 117 | default: 118 | return logrus.InfoLevel 119 | } 120 | } 121 | 122 | func initFile(env *env.Environment) { 123 | _, err := os.Stat("config.yaml") 124 | if !os.IsNotExist(err) { 125 | return 126 | } 127 | 128 | content := "browser-less:\n enabled: {enabled}\n port: {port}\n disabled-gpu: {gpu}\n headless: {headless}\n reversal: 
${reversal}" 129 | content = strings.Replace(content, "{enabled}", env.GetString("browser-less.enabled"), 1) 130 | content = strings.Replace(content, "{port}", env.GetString("browser-less.port"), 1) 131 | content = strings.Replace(content, "{gpu}", env.GetString("browser-less.disabled-gpu"), 1) 132 | content = strings.Replace(content, "{headless}", env.GetString("browser-less.headless"), 1) 133 | content = strings.Replace(content, "{reversal}", env.GetString("browser-less.reversal"), 1) 134 | err = os.WriteFile("config.yaml", []byte(content), 0644) 135 | if err != nil { 136 | logger.Fatal(err) 137 | } 138 | } 139 | -------------------------------------------------------------------------------- /cmd/iocgo/annotation/cobra.go: -------------------------------------------------------------------------------- 1 | package annotation 2 | 3 | import ( 4 | "encoding/json" 5 | "fmt" 6 | "github.com/iocgo/sdk/gen/annotation" 7 | "github.com/iocgo/sdk/stream" 8 | "go/ast" 9 | ) 10 | 11 | type Cobra struct { 12 | *annotation.Anon 13 | 14 | N string `annotation:"name=name,default=" json:"-"` 15 | Qualifier string `annotation:"name=qualifier,default=" json:"-"` 16 | 17 | Use string `annotation:"name=use,default="` 18 | Short string `annotation:"name=short,default="` 19 | Long string `annotation:"name=long,default="` 20 | Version string `annotation:"name=version,default="` 21 | Example string `annotation:"name=example,default="` 22 | 23 | Run string `annotation:"name=run,default="` 24 | } 25 | 26 | var _ annotation.M = (*Cobra)(nil) 27 | 28 | func (g Cobra) As() annotation.M { 29 | config, _ := json.Marshal(g) 30 | return annotation.Inject{ 31 | N: g.N, 32 | IsLazy: true, 33 | Singleton: true, 34 | Qualifier: g.Qualifier, 35 | Config: string(config), 36 | } 37 | } 38 | 39 | func (g Cobra) Match(node ast.Node) (err error) { 40 | if err = g.As().Match(node); err != nil { 41 | return 42 | } 43 | 44 | fd := node.(*ast.FuncDecl) 45 | if stream.OfSlice(fd.Type.Params.List).Filter(isStringField).One() == nil { 46 | err = fmt.Errorf(`'@Cobra' annotation requires a receive parameter of type 'string'`) 47 | return 48 | } 49 | 50 | if stream.OfSlice(fd.Type.Results.List).Filter(isCobraField).One() == nil { 51 | err = fmt.Errorf(`'@Cobra' annotation requires a receive returns of type 'cobra.ICobra'`) 52 | return 53 | } 54 | return 55 | } 56 | 57 | func isCobraField(field *ast.Field) bool { 58 | switch expr := field.Type.(type) { 59 | case *ast.Ident: 60 | if expr.Name == "ICobra" { 61 | return true 62 | } 63 | case *ast.StarExpr: 64 | selectorExpr := expr.X.(*ast.SelectorExpr) 65 | if selectorExpr.Sel.Name == "ICobra" { 66 | return true 67 | } 68 | case *ast.SelectorExpr: 69 | if expr.Sel.Name == "ICobra" { 70 | return true 71 | } 72 | } 73 | return false 74 | } 75 | 76 | func isStringField(field *ast.Field) bool { 77 | switch expr := field.Type.(type) { 78 | case *ast.Ident: 79 | if expr.Name == "string" { 80 | return true 81 | } 82 | } 83 | return false 84 | } 85 | -------------------------------------------------------------------------------- /cmd/iocgo/annotation/del.go: -------------------------------------------------------------------------------- 1 | package annotation 2 | 3 | import ( 4 | "fmt" 5 | "github.com/iocgo/sdk/gen/annotation" 6 | "go/ast" 7 | ) 8 | 9 | type DEL struct { 10 | *annotation.Anon 11 | Path string `annotation:"name=path,default=/"` 12 | } 13 | 14 | var _ annotation.M = (*DEL)(nil) 15 | 16 | func (g DEL) Match(node ast.Node) (err error) { 17 | if _, ok := node.(*ast.FuncDecl); !ok { 18 
| err = fmt.Errorf(`"@DEL" annotation is only allowed to be defined on the method`) 19 | return 20 | } 21 | 22 | if err = g.As().Match(node); err != nil { 23 | return 24 | } 25 | return 26 | } 27 | 28 | func (g DEL) As() annotation.M { 29 | return annotation.Router{ 30 | Method: "DELETE", 31 | Path: g.Path, 32 | } 33 | } 34 | -------------------------------------------------------------------------------- /cmd/iocgo/annotation/get.go: -------------------------------------------------------------------------------- 1 | package annotation 2 | 3 | import ( 4 | "fmt" 5 | "github.com/iocgo/sdk/gen/annotation" 6 | "go/ast" 7 | ) 8 | 9 | type GET struct { 10 | *annotation.Anon 11 | Path string `annotation:"name=path,default=/"` 12 | } 13 | 14 | var _ annotation.M = (*GET)(nil) 15 | 16 | func (g GET) Match(node ast.Node) (err error) { 17 | if _, ok := node.(*ast.FuncDecl); !ok { 18 | err = fmt.Errorf(`"@GET" annotation is only allowed to be defined on the method`) 19 | return 20 | } 21 | 22 | if err = g.As().Match(node); err != nil { 23 | return 24 | } 25 | return 26 | } 27 | 28 | func (g GET) As() annotation.M { 29 | return annotation.Router{ 30 | Method: "GET", 31 | Path: g.Path, 32 | } 33 | } 34 | -------------------------------------------------------------------------------- /cmd/iocgo/annotation/post.go: -------------------------------------------------------------------------------- 1 | package annotation 2 | 3 | import ( 4 | "fmt" 5 | "github.com/iocgo/sdk/gen/annotation" 6 | "go/ast" 7 | ) 8 | 9 | type POST struct { 10 | *annotation.Anon 11 | Path string `annotation:"name=path,default=/"` 12 | } 13 | 14 | var _ annotation.M = (*POST)(nil) 15 | 16 | func (g POST) Match(node ast.Node) (err error) { 17 | if _, ok := node.(*ast.FuncDecl); !ok { 18 | err = fmt.Errorf(`"@POST" annotation is only allowed to be defined on the method`) 19 | return 20 | } 21 | 22 | if err = g.As().Match(node); err != nil { 23 | return 24 | } 25 | return 26 | } 27 | 28 | func (g POST) As() annotation.M { 29 | return annotation.Router{ 30 | Method: "POST", 31 | Path: g.Path, 32 | } 33 | } 34 | -------------------------------------------------------------------------------- /cmd/iocgo/annotation/put.go: -------------------------------------------------------------------------------- 1 | package annotation 2 | 3 | import ( 4 | "fmt" 5 | "github.com/iocgo/sdk/gen/annotation" 6 | "go/ast" 7 | ) 8 | 9 | type PUT struct { 10 | *annotation.Anon 11 | Path string `annotation:"name=path,default=/"` 12 | } 13 | 14 | var _ annotation.M = (*PUT)(nil) 15 | 16 | func (g PUT) Match(node ast.Node) (err error) { 17 | if _, ok := node.(*ast.FuncDecl); !ok { 18 | err = fmt.Errorf(`"@PUT" annotation is only allowed to be defined on the method`) 19 | return 20 | } 21 | 22 | if err = g.As().Match(node); err != nil { 23 | return 24 | } 25 | return 26 | } 27 | 28 | func (g PUT) As() annotation.M { 29 | return annotation.Router{ 30 | Method: "PUT", 31 | Path: g.Path, 32 | } 33 | } 34 | -------------------------------------------------------------------------------- /cmd/iocgo/main.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "chatgpt-adapter/cmd/iocgo/annotation" 5 | "github.com/iocgo/sdk/gen" 6 | "github.com/iocgo/sdk/gen/tool" 7 | ) 8 | 9 | func init() { 10 | // gin 11 | gen.Alias[annotation.GET]() 12 | gen.Alias[annotation.PUT]() 13 | gen.Alias[annotation.DEL]() 14 | gen.Alias[annotation.POST]() 15 | 16 | // cobra 17 | gen.Alias[annotation.Cobra]() 18 | } 19 | 20 
| func main() { 21 | tool.Process() 22 | } 23 | -------------------------------------------------------------------------------- /core/cache/memory.go: -------------------------------------------------------------------------------- 1 | package cache 2 | 3 | import ( 4 | "context" 5 | "strings" 6 | "time" 7 | 8 | "chatgpt-adapter/core/common/inited" 9 | "chatgpt-adapter/core/gin/model" 10 | "github.com/eko/gocache/lib/v4/cache" 11 | "github.com/eko/gocache/lib/v4/store" 12 | "github.com/iocgo/sdk/env" 13 | 14 | gocacheStore "github.com/eko/gocache/store/go_cache/v4" 15 | gocache "github.com/patrickmn/go-cache" 16 | ) 17 | 18 | type Manager[T any] struct { 19 | cache *cache.Cache[T] 20 | } 21 | 22 | var ( 23 | toolTasksCacheManager *Manager[[]model.Keyv[string]] 24 | windsurfCacheManager *Manager[string] 25 | bingCacheManager *Manager[string] 26 | cursorCacheManager *Manager[string] 27 | qodoCacheManager *Manager[string] 28 | zedCacheManager *Manager[string] 29 | ) 30 | 31 | func init() { 32 | inited.AddInitialized(func(_ *env.Environment) { 33 | client := gocache.New(5*time.Minute, 5*time.Minute) 34 | toolTasksCacheManager = &Manager[[]model.Keyv[string]]{ 35 | cache.New[[]model.Keyv[string]](gocacheStore.NewGoCache(client)), 36 | } 37 | 38 | client = gocache.New(5*time.Minute, 5*time.Minute) 39 | windsurfCacheManager = &Manager[string]{ 40 | cache.New[string](gocacheStore.NewGoCache(client)), 41 | } 42 | 43 | client = gocache.New(5*time.Minute, 5*time.Minute) 44 | bingCacheManager = &Manager[string]{ 45 | cache.New[string](gocacheStore.NewGoCache(client)), 46 | } 47 | 48 | client = gocache.New(5*time.Minute, 5*time.Minute) 49 | cursorCacheManager = &Manager[string]{ 50 | cache.New[string](gocacheStore.NewGoCache(client)), 51 | } 52 | 53 | client = gocache.New(5*time.Minute, 5*time.Minute) 54 | qodoCacheManager = &Manager[string]{ 55 | cache.New[string](gocacheStore.NewGoCache(client)), 56 | } 57 | 58 | client = gocache.New(5*time.Minute, 5*time.Minute) 59 | zedCacheManager = &Manager[string]{ 60 | cache.New[string](gocacheStore.NewGoCache(client)), 61 | } 62 | }) 63 | } 64 | 65 | func ToolTasksCacheManager() *Manager[[]model.Keyv[string]] { 66 | return toolTasksCacheManager 67 | } 68 | 69 | func WindsurfCacheManager() *Manager[string] { 70 | return windsurfCacheManager 71 | } 72 | 73 | func BingCacheManager() *Manager[string] { 74 | return bingCacheManager 75 | } 76 | 77 | func CursorCacheManager() *Manager[string] { 78 | return cursorCacheManager 79 | } 80 | 81 | func QodoCacheManager() *Manager[string] { 82 | return qodoCacheManager 83 | } 84 | 85 | func ZedCacheManager() *Manager[string] { 86 | return zedCacheManager 87 | } 88 | 89 | func (cacheManager *Manager[T]) SetValue(key string, value T) error { 90 | return cacheManager.SetWithExpiration(key, value, 120*time.Second) 91 | } 92 | 93 | func (cacheManager *Manager[T]) SetWithExpiration(key string, value T, expir time.Duration) error { 94 | timeout, cancel := context.WithTimeout(context.Background(), 30*time.Second) 95 | defer cancel() 96 | return cacheManager.cache.Set(timeout, key, value, store.WithExpiration(expir)) 97 | } 98 | 99 | func (cacheManager *Manager[T]) GetValue(key string) (value T, err error) { 100 | timeout, cancel := context.WithTimeout(context.Background(), 30*time.Second) 101 | defer cancel() 102 | 103 | const errorMessage = "value not found" 104 | value, err = cacheManager.cache.Get(timeout, key) 105 | if err != nil && strings.Contains(err.Error(), errorMessage) { 106 | err = nil 107 | return 108 | } 109 | return 
110 | } 111 | 112 | func (cacheManager *Manager[T]) Delete(key string) error { 113 | timeout, cancel := context.WithTimeout(context.Background(), 30*time.Second) 114 | defer cancel() 115 | return cacheManager.cache.Delete(timeout, key) 116 | } 117 | -------------------------------------------------------------------------------- /core/common/basic.go: -------------------------------------------------------------------------------- 1 | package common 2 | 3 | import ( 4 | "crypto/sha1" 5 | "encoding/hex" 6 | "io" 7 | "math/rand" 8 | "time" 9 | "unsafe" 10 | 11 | "chatgpt-adapter/core/logger" 12 | ) 13 | 14 | type ref struct { 15 | rtype unsafe.Pointer 16 | data unsafe.Pointer 17 | } 18 | 19 | func IsNIL(obj interface{}) bool { 20 | return obj == nil || unpackEFace(obj).data == nil 21 | } 22 | 23 | func Hex(n int) string { 24 | r := rand.New(rand.NewSource(time.Now().UnixNano())) 25 | var runes = []rune("abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890") 26 | bytes := make([]rune, n) 27 | for i := range bytes { 28 | bytes[i] = runes[r.Intn(len(runes))] 29 | } 30 | return string(bytes) 31 | } 32 | 33 | func RandInt(n int) string { 34 | r := rand.New(rand.NewSource(time.Now().UnixNano())) 35 | var runes = []rune("1234567890") 36 | bytes := make([]rune, n) 37 | for i := range bytes { 38 | bytes[i] = runes[r.Intn(len(runes))] 39 | } 40 | return string(bytes) 41 | } 42 | 43 | func CalcHex(str string) string { 44 | h := sha1.New() 45 | if _, err := io.WriteString(h, str); err != nil { 46 | logger.Error(err) 47 | return "-1" 48 | } 49 | return hex.EncodeToString(h.Sum(nil)) 50 | } 51 | 52 | func isSlice(o interface{}) (ok bool) { 53 | _, ok = o.([]interface{}) 54 | return 55 | } 56 | 57 | func unpackEFace(obj interface{}) *ref { 58 | return (*ref)(unsafe.Pointer(&obj)) 59 | } 60 | 61 | func ips(ips ...string) func() []string { 62 | return func() []string { return ips } 63 | } 64 | -------------------------------------------------------------------------------- /core/common/exec.go: -------------------------------------------------------------------------------- 1 | package common 2 | 3 | import ( 4 | "chatgpt-adapter/core/logger" 5 | "github.com/iocgo/sdk/env" 6 | "io" 7 | "os" 8 | "os/exec" 9 | "runtime" 10 | "time" 11 | ) 12 | 13 | var cmd *exec.Cmd 14 | 15 | func Exec(port, proxies string, stdout io.Writer, stderr io.Writer) { 16 | app := appPath() 17 | 18 | if !fileExists(app) { 19 | logger.Fatalf("executable file not exists: %s", app) 20 | return 21 | } 22 | 23 | args := []string{app, "--port", port} 24 | if proxies != "" { 25 | args = append(args, "--proxies", proxies) 26 | } 27 | 28 | cmd = exec.Command(app, args...) 
29 | if stdout == nil { 30 | stdout = os.Stdout 31 | } 32 | cmd.Stdout = stdout 33 | 34 | if stderr == nil { 35 | stderr = os.Stderr 36 | } 37 | cmd.Stderr = stderr 38 | 39 | go func() { 40 | if err := cmd.Run(); err != nil { 41 | logger.Fatalf("executable file error: %v", err) 42 | return 43 | } 44 | }() 45 | 46 | time.Sleep(5 * time.Second) 47 | logger.Info("helper exec running ...") 48 | } 49 | 50 | func appPath() string { 51 | app := "bin/" 52 | switch runtime.GOOS { 53 | case "linux": 54 | // 可惜了,arm过不了验证 55 | if runtime.GOARCH == "arm" || runtime.GOARCH == "arm64" { 56 | app += "linux/helper-arm64" 57 | } else { 58 | app += "linux/helper" 59 | } 60 | case "darwin": 61 | app += "osx/helper" 62 | case "windows": 63 | app += "windows/helper.exe" 64 | default: 65 | logger.Fatalf("Unsupported platform: %s", runtime.GOOS) 66 | } 67 | return app 68 | } 69 | 70 | func Exit(_ *env.Environment) { 71 | if cmd == nil { 72 | return 73 | } 74 | _ = cmd.Process.Kill() 75 | } 76 | 77 | func fileExists(path string) bool { 78 | _, err := os.Stat(path) 79 | return err == nil || os.IsExist(err) 80 | } 81 | -------------------------------------------------------------------------------- /core/common/gin.go: -------------------------------------------------------------------------------- 1 | package common 2 | 3 | import ( 4 | "chatgpt-adapter/core/common/vars" 5 | "chatgpt-adapter/core/gin/inter" 6 | "chatgpt-adapter/core/gin/model" 7 | "github.com/gin-gonic/gin" 8 | ) 9 | 10 | func GetGinCompletion(ctx *gin.Context) (value model.Completion) { 11 | value, _ = GetGinValue[model.Completion](ctx, vars.GinCompletion) 12 | return 13 | } 14 | 15 | func GetGinEmbedding(ctx *gin.Context) (value model.Embed) { 16 | value, _ = GetGinValue[model.Embed](ctx, vars.GinEmbedding) 17 | return 18 | } 19 | 20 | func GetGinGeneration(ctx *gin.Context) (value model.Generation) { 21 | value, _ = GetGinValue[model.Generation](ctx, vars.GinGeneration) 22 | return 23 | } 24 | 25 | func GetGinMatchers(ctx *gin.Context) (values []inter.Matcher) { 26 | values, _ = GetGinValues[inter.Matcher](ctx, vars.GinMatchers) 27 | return 28 | } 29 | 30 | func GetGinCompletionUsage(ctx *gin.Context) map[string]interface{} { 31 | obj, exists := ctx.Get(vars.GinCompletionUsage) 32 | if exists { 33 | return obj.(map[string]interface{}) 34 | } 35 | return nil 36 | } 37 | 38 | func GetGinToolValue(ctx *gin.Context) model.Keyv[interface{}] { 39 | tool, ok := GetGinValue[model.Keyv[interface{}]](ctx, vars.GinTool) 40 | if !ok { 41 | tool = model.Keyv[interface{}]{ 42 | "id": "-1", 43 | "enabled": false, 44 | "tasks": false, 45 | } 46 | } 47 | return tool 48 | } 49 | 50 | func IsGinCozeWebsdk(ctx *gin.Context) bool { 51 | return ctx.GetBool(vars.GinCozeWebsdk) 52 | } 53 | 54 | func GetGinValue[T any](ctx *gin.Context, key string) (t T, ok bool) { 55 | value, exists := ctx.Get(key) 56 | if !exists { 57 | return 58 | } 59 | 60 | t, ok = value.(T) 61 | return 62 | } 63 | 64 | func GetGinValues[T any](ctx *gin.Context, key string) ([]T, bool) { 65 | value, exists := ctx.Get(key) 66 | if !exists { 67 | return nil, false 68 | } 69 | 70 | t, ok := value.([]T) 71 | return t, ok 72 | } 73 | 74 | // func GetGinContext(ctx *gin.Context) context.Context { 75 | // var key = "__context__" 76 | // { 77 | // value, exists := GetGinValue[context.Context](ctx, key) 78 | // if exists { 79 | // return value 80 | // } 81 | // } 82 | // 83 | // reqCtx := ctx.Request.Context() 84 | // connTimeout := gin2.Config.GetInt("server-conn.connTimeout") 85 | // if connTimeout > 0 { 
86 | // timeout, cancelFunc := context.WithTimeout(reqCtx, time.Duration(connTimeout)*time.Second) 87 | // ctx.Set(key, timeout) 88 | // ctx.Set(vars.GinCancelFunc, cancelFunc) 89 | // return timeout 90 | // } 91 | // return reqCtx 92 | // } 93 | 94 | // func GetGinIdleConnectOption(ctx *gin.Context) *emit.ConnectOption { 95 | // key := "__IdleConnectOption__" 96 | // { 97 | // value, exists := GetGinValue[*emit.ConnectOption](ctx, key) 98 | // if exists { 99 | // return value 100 | // } 101 | // } 102 | // 103 | // option := GetIdleConnectOption() 104 | // ctx.Set(key, option) 105 | // return option 106 | // } 107 | -------------------------------------------------------------------------------- /core/common/inited/initialized.go: -------------------------------------------------------------------------------- 1 | package inited 2 | 3 | import ( 4 | "github.com/iocgo/sdk/env" 5 | "os" 6 | "os/signal" 7 | "syscall" 8 | ) 9 | 10 | var ( 11 | inits = make([]func(env *env.Environment), 0) 12 | exits = make([]func(env *env.Environment), 0) 13 | ) 14 | 15 | func AddInitialized(apply func(env *env.Environment)) { inits = append(inits, apply) } 16 | func AddExited(apply func(env *env.Environment)) { exits = append(exits, apply) } 17 | func Initialized(env *env.Environment) { 18 | for _, apply := range inits { 19 | apply(env) 20 | } 21 | 22 | osSignal := make(chan os.Signal, 1) 23 | signal.Notify(osSignal, syscall.SIGINT, syscall.SIGTERM, syscall.SIGKILL) 24 | go func(ch chan os.Signal) { 25 | <-ch 26 | for _, apply := range exits { 27 | apply(env) 28 | } 29 | os.Exit(0) 30 | }(osSignal) 31 | } 32 | -------------------------------------------------------------------------------- /core/common/poll.go: -------------------------------------------------------------------------------- 1 | package common 2 | 3 | import ( 4 | "context" 5 | "encoding/json" 6 | "errors" 7 | "fmt" 8 | "reflect" 9 | "time" 10 | 11 | "chatgpt-adapter/core/logger" 12 | "github.com/iocgo/sdk/lock" 13 | ) 14 | 15 | const ( 16 | waitTimeout = 10 * time.Second 17 | ) 18 | 19 | type state struct { 20 | t time.Time 21 | s byte 22 | } 23 | 24 | type PollContainer[T interface{}] struct { 25 | name string 26 | pos int 27 | slice []T 28 | markers map[interface{}]*state 29 | mu *lock.ExpireLock // mark 30 | cmu *lock.ExpireLock // delete 31 | Condition func(T, ...interface{}) bool 32 | } 33 | 34 | // resetTime 用于复位状态:0 就绪状态,1 使用状态,2 异常状态 35 | func NewPollContainer[T interface{}](name string, slice []T, resetTime time.Duration) *PollContainer[T] { 36 | container := PollContainer[T]{ 37 | name: name, 38 | slice: slice, 39 | markers: make(map[interface{}]*state), 40 | 41 | mu: lock.NewExpireLock(true), 42 | cmu: lock.NewExpireLock(true), 43 | } 44 | 45 | if resetTime > 0 { 46 | go timer(&container, resetTime) 47 | } 48 | return &container 49 | } 50 | 51 | // 定时复位状态 0 就绪状态,1 使用状态,2 异常状态 52 | func timer[T interface{}](container *PollContainer[T], resetTime time.Duration) { 53 | s10 := 10 * time.Second 54 | s20 := 20 * time.Second 55 | for { 56 | if len(container.slice) == 0 { 57 | time.Sleep(s10) 58 | continue 59 | } 60 | 61 | timeout, cancel := context.WithTimeout(context.Background(), s20) 62 | if !container.mu.Lock(timeout) { 63 | cancel() 64 | time.Sleep(s10) 65 | logger.Errorf("[%s] PollContainer 获取锁失败", container.name) 66 | continue 67 | } 68 | cancel() 69 | 70 | for _, value := range container.slice { 71 | var obj interface{} = value 72 | if s, ok := obj.(string); ok { 73 | obj = s 74 | } else { 75 | data, _ := json.Marshal(obj) 76 | 
obj = string(data) 77 | } 78 | 79 | marker, ok := container.markers[obj] 80 | if !ok { 81 | continue 82 | } 83 | 84 | if marker.s == 0 || marker.s == 1 { // 0 就绪状态, 1 使用中 85 | continue 86 | } 87 | 88 | // 2 异常冷却中 89 | if time.Now().Add(-resetTime).After(marker.t) { 90 | marker.s = 0 91 | logger.Infof("[%s] PollContainer 冷却完毕: %v", container.name, obj) 92 | } 93 | } 94 | container.mu.Unlock() 95 | time.Sleep(s10) 96 | } 97 | } 98 | 99 | func (container *PollContainer[T]) Poll(argv ...interface{}) (T, error) { 100 | var zero T 101 | if container == nil || len(container.slice) == 0 { 102 | return zero, errors.New("no elements in slice") 103 | } 104 | 105 | if container.Condition == nil { 106 | return zero, errors.New("condition is nil") 107 | } 108 | 109 | timeout, cancel := context.WithTimeout(context.Background(), waitTimeout) 110 | defer cancel() 111 | 112 | if !container.cmu.Lock(timeout) { 113 | return zero, errors.New("lock timeout") 114 | } 115 | defer container.cmu.Unlock() 116 | 117 | pos := container.pos 118 | sliceL := len(container.slice) 119 | if pos >= sliceL { 120 | container.pos = 0 121 | pos = 0 122 | } 123 | 124 | for index := 0; index < sliceL; index++ { 125 | curr := pos + index 126 | if curr >= sliceL { 127 | curr = curr - sliceL 128 | } 129 | 130 | value := container.slice[curr] 131 | if container.Condition(value, argv...) { 132 | container.pos = curr + 1 133 | err := container.MarkTo(value, 1) 134 | if err != nil { 135 | return zero, err 136 | } 137 | return value, nil 138 | } 139 | } 140 | 141 | return zero, fmt.Errorf("not roll result") 142 | } 143 | 144 | func (container *PollContainer[T]) Remove(value T) (err error) { 145 | if container.Len() == 0 { 146 | return 147 | } 148 | 149 | timeout, cancel := context.WithTimeout(context.Background(), waitTimeout) 150 | defer cancel() 151 | 152 | if !container.cmu.Lock(timeout) { 153 | return errors.New("lock timeout") 154 | } 155 | defer container.cmu.Unlock() 156 | 157 | for idx := 0; idx < len(container.slice); idx++ { 158 | if reflect.DeepEqual(container.slice[idx], value) { 159 | container.slice = append(container.slice[:idx], container.slice[idx+1:]...) 
160 | break 161 | } 162 | } 163 | return 164 | } 165 | 166 | func (container *PollContainer[T]) Add(value T) { 167 | container.slice = append(container.slice, value) 168 | } 169 | 170 | // 标记: 0 就绪状态,1 使用状态,2 异常状态 171 | func (container *PollContainer[T]) MarkTo(key interface{}, value byte) error { 172 | if s, ok := key.(string); ok { 173 | key = s 174 | } else { 175 | data, _ := json.Marshal(key) 176 | key = string(data) 177 | } 178 | 179 | timeout, cancel := context.WithTimeout(context.Background(), 10*time.Second) 180 | defer cancel() 181 | 182 | if container.mu.Lock(timeout) { 183 | defer container.mu.Unlock() 184 | container.markers[key] = &state{ 185 | t: time.Now(), 186 | s: value, 187 | } 188 | if value == 1 { 189 | logger.Infof("[%s] 索引 [%d] 设置状态值:%d", container.name, container.pos, value) 190 | } else { 191 | logger.Infof("[%s] 设置状态值:%d", container.name, value) 192 | } 193 | } else { 194 | return context.DeadlineExceeded 195 | } 196 | return nil 197 | } 198 | 199 | func (container *PollContainer[T]) Marked(key interface{}) (byte, error) { 200 | if s, ok := key.(string); ok { 201 | key = s 202 | } else { 203 | data, _ := json.Marshal(key) 204 | key = string(data) 205 | } 206 | 207 | timeout, cancel := context.WithTimeout(context.Background(), 10*time.Second) 208 | defer cancel() 209 | 210 | if container.mu.Lock(timeout) { 211 | defer container.mu.Unlock() 212 | marker, ok := container.markers[key] 213 | if !ok { 214 | return 0, nil 215 | } 216 | return marker.s, nil 217 | } else { 218 | return 0, context.DeadlineExceeded 219 | } 220 | } 221 | 222 | func (container *PollContainer[T]) Len() int { 223 | return len(container.slice) 224 | } 225 | -------------------------------------------------------------------------------- /core/common/toolcall/message.go: -------------------------------------------------------------------------------- 1 | package toolcall 2 | 3 | import ( 4 | "chatgpt-adapter/core/gin/model" 5 | ) 6 | 7 | func ExtractToolMessages(completion *model.Completion) (toolMessages []model.Keyv[interface{}]) { 8 | for i := len(completion.Messages) - 1; i >= 0; i-- { 9 | message := completion.Messages[i] 10 | if message.Is("role", "tool") || (message.Is("role", "assistant") && message.Has("tool_calls")) { 11 | toolMessages = append(toolMessages, message) 12 | continue 13 | } 14 | 15 | completion.Messages = completion.Messages[:i+1] 16 | break 17 | } 18 | return 19 | } 20 | -------------------------------------------------------------------------------- /core/common/toolcall/tpl.go: -------------------------------------------------------------------------------- 1 | package toolcall 2 | 3 | import ( 4 | "bytes" 5 | "text/template" 6 | ) 7 | 8 | type Builder struct { 9 | instance *template.Template 10 | 11 | ctx map[string]interface{} 12 | funcM template.FuncMap 13 | } 14 | 15 | func newBuilder(name string) *Builder { 16 | instance := template.New(name) 17 | context := make(map[string]interface{}) 18 | funcMap := template.FuncMap{} 19 | return &Builder{ 20 | instance, 21 | context, 22 | funcMap, 23 | } 24 | } 25 | 26 | func (bdr *Builder) Vars(key string, value interface{}) *Builder { bdr.ctx[key] = value; return bdr } 27 | func (bdr *Builder) Func(key string, fun interface{}) *Builder { bdr.funcM[key] = fun; return bdr } 28 | func (bdr *Builder) String(template string) (result string, err error) { 29 | bdr.instance.Funcs(bdr.funcM) 30 | t, err := bdr.instance.Parse(template) 31 | if err != nil { 32 | return 33 | } 34 | 35 | var buffer bytes.Buffer 36 | if err = 
t.Execute(&buffer, bdr.ctx); err != nil { 37 | return 38 | } 39 | 40 | result = buffer.String() 41 | return 42 | } 43 | -------------------------------------------------------------------------------- /core/common/vars/const.go: -------------------------------------------------------------------------------- 1 | package vars 2 | 3 | var ( 4 | GinCompletion = "__completion__" 5 | GinGeneration = "__generation__" 6 | GinEmbedding = "__embedding__" 7 | GinMatchers = "__matchers__" 8 | GinCompletionUsage = "__completion-usage__" 9 | GinDebugger = "__debug__" 10 | GinEcho = "__echo__" 11 | GinTool = "__tool__" 12 | GinClose = "__close__" 13 | GinCharSequences = "__char_sequences__" 14 | GinCozeWebsdk = "__coze_websdk__" 15 | GinCancelFunc = "__cancelFunc__" 16 | GinClaudeMessages = "__claude_messages__" 17 | GinThinkReason = "__think_reason__" 18 | ) 19 | -------------------------------------------------------------------------------- /core/common/wasm/load.go: -------------------------------------------------------------------------------- 1 | package wasm 2 | 3 | import ( 4 | "github.com/wasmerio/wasmer-go/wasmer" 5 | "os" 6 | ) 7 | 8 | type Instance *wasmer.Instance 9 | type NativeFunction wasmer.NativeFunction 10 | 11 | func New(path string) (instance Instance, err error) { 12 | wasmBytes, err := os.ReadFile(path) 13 | if err != nil { 14 | return 15 | } 16 | engine := wasmer.NewEngine() 17 | store := wasmer.NewStore(engine) 18 | 19 | module, err := wasmer.NewModule(store, wasmBytes) 20 | if err != nil { 21 | return 22 | } 23 | 24 | instance, err = wasmer.NewInstance(module, wasmer.NewImportObject()) 25 | return 26 | } 27 | -------------------------------------------------------------------------------- /core/gin/initializer.go: -------------------------------------------------------------------------------- 1 | package gin 2 | 3 | import ( 4 | "chatgpt-adapter/core/logger" 5 | "github.com/gin-gonic/gin" 6 | "github.com/google/uuid" 7 | "github.com/iocgo/sdk" 8 | "github.com/iocgo/sdk/env" 9 | "github.com/iocgo/sdk/router" 10 | "net/http" 11 | "net/http/httputil" 12 | "strings" 13 | ) 14 | 15 | var ( 16 | debug bool 17 | ) 18 | 19 | // @Inject(lazy="false", name="ginInitializer") 20 | func Initialized(env *env.Environment) sdk.Initializer { 21 | debug = env.GetBool("server.debug") 22 | return sdk.InitializedWrapper(0, func(container *sdk.Container) (err error) { 23 | sdk.ProvideTransient(container, sdk.NameOf[*gin.Engine](), func() (engine *gin.Engine, err error) { 24 | if !debug { 25 | gin.SetMode(gin.ReleaseMode) 26 | } 27 | 28 | engine = gin.Default() 29 | { 30 | engine.Use(gin.Recovery()) 31 | engine.Use(cros) 32 | engine.Use(token) 33 | } 34 | engine.Static("/file/", "tmp") 35 | beans := sdk.ListInvokeAs[router.Router](container) 36 | for _, route := range beans { 37 | route.Routers(engine) 38 | } 39 | 40 | return 41 | }) 42 | return 43 | }) 44 | } 45 | 46 | func token(gtx *gin.Context) { 47 | str := gtx.Request.Header.Get("X-Api-Key") 48 | if str == "" { 49 | str = strings.TrimPrefix(gtx.Request.Header.Get("Authorization"), "Bearer ") 50 | } 51 | gtx.Set("token", str) 52 | } 53 | 54 | func cros(gtx *gin.Context) { 55 | method := gtx.Request.Method 56 | gtx.Writer.Header().Set("Access-Control-Allow-Origin", "*") 57 | gtx.Header("Access-Control-Allow-Origin", "*") // 设置允许访问所有域 58 | gtx.Header("Access-Control-Allow-Methods", "POST, GET, OPTIONS, PUT, DELETE,UPDATE") 59 | gtx.Header("Access-Control-Allow-Headers", "*") 60 | gtx.Header("Access-Control-Expose-Headers", "*") 61 | 
gtx.Header("Access-Control-Max-Age", "172800") 62 | gtx.Header("Access-Control-Allow-Credentials", "false") 63 | //gtx.Set("content-type", "application/json") 64 | 65 | if method == "OPTIONS" { 66 | gtx.Status(http.StatusOK) 67 | return 68 | } 69 | 70 | if gtx.Request.RequestURI == "/" || 71 | gtx.Request.RequestURI == "/favicon.ico" || 72 | strings.Contains(gtx.Request.URL.Path, "/v1/models") || 73 | strings.HasPrefix(gtx.Request.URL.Path, "/file/") { 74 | // 处理请求 75 | gtx.Next() 76 | return 77 | } 78 | 79 | uid := uuid.NewString() 80 | // 请求打印 81 | data, _ := httputil.DumpRequest(gtx.Request, debug) 82 | logger.Infof("------ START REQUEST %s ---------", uid) 83 | println(string(data)) 84 | 85 | // 处理请求 86 | gtx.Next() 87 | 88 | // 结束处理 89 | logger.Infof("------ END REQUEST %s ---------", uid) 90 | } 91 | -------------------------------------------------------------------------------- /core/gin/inter/adapter.go: -------------------------------------------------------------------------------- 1 | package inter 2 | 3 | import ( 4 | "chatgpt-adapter/core/gin/model" 5 | "github.com/gin-gonic/gin" 6 | ) 7 | 8 | type Adapter interface { 9 | Match(ctx *gin.Context, model string) (bool, error) 10 | Models() []model.Model 11 | Completion(ctx *gin.Context) error 12 | Generation(ctx *gin.Context) error 13 | Embedding(ctx *gin.Context) error 14 | ToolChoice(ctx *gin.Context) (bool, error) 15 | HandleMessages(ctx *gin.Context, completion model.Completion) (messages []model.Keyv[interface{}], err error) 16 | } 17 | 18 | type BaseAdapter struct{} 19 | 20 | func (BaseAdapter) Models() (slice []model.Model) { return } 21 | func (BaseAdapter) Completion(*gin.Context) (err error) { return } 22 | func (BaseAdapter) Generation(*gin.Context) (err error) { return } 23 | func (BaseAdapter) Embedding(*gin.Context) (err error) { return } 24 | func (BaseAdapter) ToolChoice(*gin.Context) (ok bool, err error) { return } 25 | func (BaseAdapter) HandleMessages(ctx *gin.Context, completion model.Completion) (messages []model.Keyv[interface{}], err error) { 26 | messages = completion.Messages 27 | return 28 | } 29 | -------------------------------------------------------------------------------- /core/gin/inter/matcher.go: -------------------------------------------------------------------------------- 1 | package inter 2 | 3 | // 匹配器接口 4 | type Matcher interface { 5 | Match(content string, over bool) (state int, result string) 6 | } 7 | -------------------------------------------------------------------------------- /core/gin/model/keyv.go: -------------------------------------------------------------------------------- 1 | package model 2 | 3 | import ( 4 | "encoding/json" 5 | "maps" 6 | "reflect" 7 | "strings" 8 | ) 9 | 10 | type Keyv[V any] map[string]V 11 | 12 | func (kv Keyv[V]) Set(key string, value V) { kv[key] = value } 13 | func (kv Keyv[V]) Get(key string) (V, bool) { value, ok := kv[key]; return value, ok } 14 | func (kv Keyv[V]) Has(key string) bool { _, ok := kv.Get(key); return ok } 15 | func (kv Keyv[V]) String() string { bytes, _ := json.Marshal(kv); return string(bytes) } 16 | func (kv Keyv[V]) Clone() Keyv[V] { return maps.Clone(kv) } 17 | 18 | func (kv Keyv[V]) GetKeyv(key string) (value Keyv[interface{}]) { 19 | if val, ok := kv[key]; ok { 20 | var v interface{} = val 21 | if n, o := v.(map[string]interface{}); o { 22 | value = n 23 | } 24 | } 25 | return 26 | } 27 | 28 | func (kv Keyv[V]) GetSlice(key string) (values []interface{}) { 29 | if value, ok := kv[key]; ok { 30 | var v interface{} = value 
31 | values, ok = v.([]interface{}) 32 | } 33 | return 34 | } 35 | 36 | func (kv Keyv[V]) GetString(key string) (value string) { 37 | if val, ok := kv[key]; ok { 38 | var v interface{} = val 39 | value, ok = v.(string) 40 | } 41 | return 42 | } 43 | 44 | func (kv Keyv[V]) GetInt(key string) (value int) { 45 | if val, ok := kv[key]; ok { 46 | var v interface{} = val 47 | value, ok = v.(int) 48 | } 49 | return 50 | } 51 | 52 | func (kv Keyv[V]) Is(key string, value V) (out bool) { 53 | if !kv.Has(key) { 54 | return 55 | } 56 | 57 | v, _ := kv.Get(key) 58 | return reflect.DeepEqual(v, value) 59 | } 60 | 61 | func (kv Keyv[V]) In(key string, values ...V) (out bool) { 62 | if !kv.Has(key) { 63 | return 64 | } 65 | 66 | v, _ := kv.Get(key) 67 | for _, value := range values { 68 | if reflect.DeepEqual(v, value) { 69 | return true 70 | } 71 | } 72 | return 73 | } 74 | 75 | func (kv Keyv[V]) IsString(key string) bool { 76 | if value, ok := kv[key]; ok { 77 | var v interface{} = value 78 | if _, ok = v.(string); ok { 79 | return true 80 | } 81 | } 82 | return false 83 | } 84 | 85 | func (kv Keyv[V]) IsSlice(key string) bool { 86 | if value, ok := kv[key]; ok { 87 | var v interface{} = value 88 | if _, ok = v.([]interface{}); ok { 89 | return true 90 | } 91 | } 92 | return false 93 | } 94 | 95 | func (kv Keyv[V]) IsE(key string) bool { 96 | value, ok := kv.Get(key) 97 | if ok { 98 | var v interface{} = value 99 | if str, o := v.(string); o { 100 | return strings.TrimSpace(str) == "" 101 | } 102 | } 103 | return true 104 | } 105 | -------------------------------------------------------------------------------- /core/gin/model/v1.go: -------------------------------------------------------------------------------- 1 | package model 2 | 3 | type Model struct { 4 | Id string `json:"id"` 5 | Object string `json:"object"` 6 | Created int `json:"created"` 7 | By string `json:"owned_by"` 8 | } 9 | 10 | type Completion struct { 11 | System string `json:"system,omitempty"` 12 | Messages []Keyv[interface{}] `json:"messages"` 13 | Tools []Keyv[interface{}] `json:"tools,omitempty"` 14 | Model string `json:"model,omitempty"` 15 | MaxTokens int `json:"max_tokens"` 16 | StopSequences []string `json:"stop,omitempty"` 17 | Temperature float32 `json:"temperature"` 18 | TopK int `json:"top_k,omitempty"` 19 | TopP float32 `json:"top_p,omitempty"` 20 | Stream bool `json:"stream,omitempty"` 21 | ToolChoice interface{} `json:"tool_choice,omitempty"` 22 | } 23 | 24 | type Generation struct { 25 | Model string `json:"model"` 26 | Message string `json:"prompt"` 27 | N int `json:"n"` 28 | Size string `json:"size"` 29 | Style string `json:"style"` 30 | Quality string `json:"quality"` 31 | } 32 | 33 | type Embed struct { 34 | Input interface{} `json:"input"` 35 | Model string `json:"model"` 36 | EncodingFormat string `json:"encoding_format,omitempty"` 37 | Dimensions int `json:"dimensions,omitempty"` 38 | User string `json:"user,omitempty"` 39 | } 40 | 41 | type Response struct { 42 | Id string `json:"id"` 43 | Object string `json:"object"` 44 | Created int64 `json:"created"` 45 | Model string `json:"model"` 46 | Choices []Choice `json:"choices"` 47 | Error *struct { 48 | Message string `json:"message"` 49 | Type string `json:"type"` 50 | } `json:"error,omitempty"` 51 | Usage map[string]interface{} `json:"usage,omitempty"` 52 | } 53 | 54 | type Choice struct { 55 | Index int `json:"index"` 56 | Message *struct { 57 | Role string `json:"role,omitempty"` 58 | Content string `json:"content,omitempty"` 59 | ReasoningContent string 
`json:"reasoning_content,omitempty"` 60 | 61 | ToolCalls []Keyv[interface{}] `json:"tool_calls,omitempty"` 62 | } `json:"message,omitempty"` 63 | Delta *struct { 64 | Type string `json:"type,omitempty"` 65 | Role string `json:"role,omitempty"` 66 | Content string `json:"content,omitempty"` 67 | ReasoningContent string `json:"reasoning_content,omitempty"` 68 | 69 | ToolCalls []Keyv[interface{}] `json:"tool_calls,omitempty"` 70 | } `json:"delta,omitempty"` 71 | FinishReason *string `json:"finish_reason"` 72 | } 73 | -------------------------------------------------------------------------------- /core/gin/response/message.go: -------------------------------------------------------------------------------- 1 | package response 2 | 3 | import ( 4 | "chatgpt-adapter/core/gin/model" 5 | "fmt" 6 | "strings" 7 | 8 | "chatgpt-adapter/core/common" 9 | "github.com/bincooo/coze-api" 10 | "github.com/gin-gonic/gin" 11 | _ "github.com/iocgo/sdk" 12 | "github.com/iocgo/sdk/env" 13 | ) 14 | 15 | const ( 16 | END = "<|end|>\n\n" 17 | ) 18 | 19 | func defaultRole(role string) string { return fmt.Sprintf("<|%s|>\n", role) } 20 | func gptRole(role string) string { return fmt.Sprintf("<|start|>%s\n", role) } 21 | func deepseekRole(role string) string { return fmt.Sprintf("<%s>\n", role) } 22 | func deepseekEnd(role string) string { return fmt.Sprintf("\n\n\n", role) } 23 | 24 | func claudeRole(role string) string { 25 | sep := env.Env.GetString("separator.claude") 26 | if sep == "" { 27 | sep = "\n" 28 | } 29 | return fmt.Sprintf("\n%s\n%s: ", sep, role) 30 | } 31 | 32 | func bingRole(role string) string { 33 | switch role { 34 | case "user": 35 | return "Q: " 36 | case "assistant": 37 | return "A: " 38 | default: 39 | return "Ins: \n" 40 | } 41 | } 42 | 43 | func ConvertRole(ctx *gin.Context, role string) (newRole, end string) { 44 | completion := common.GetGinCompletion(ctx) 45 | if IsClaude(ctx, completion.Model) { 46 | switch role { 47 | case "user": 48 | newRole = claudeRole("Human") 49 | case "assistant": 50 | newRole = claudeRole("Assistant") 51 | default: 52 | newRole = claudeRole("SYSTEM") 53 | } 54 | return 55 | } 56 | 57 | if IsBing(completion.Model) { 58 | newRole = bingRole(role) 59 | return 60 | } 61 | 62 | end = END 63 | if IsGPT(completion.Model) { 64 | switch role { 65 | case "user", "assistant": 66 | newRole = gptRole(role) 67 | default: 68 | newRole = gptRole("system") 69 | } 70 | return 71 | } 72 | 73 | if IsDeepseek(completion.Model) { 74 | newRole = deepseekRole(role) 75 | end = deepseekEnd(role) 76 | return 77 | } 78 | 79 | newRole = defaultRole(role) 80 | return 81 | } 82 | 83 | func IsBing(mod string) bool { 84 | return mod == "bing" 85 | } 86 | 87 | func IsGPT(model string) bool { 88 | model = strings.ToLower(model) 89 | return strings.Contains(model, "openai") || strings.Contains(model, "gpt") 90 | } 91 | 92 | func IsDeepseek(model string) bool { 93 | return strings.Contains(model, "deepseek") 94 | } 95 | 96 | func IsClaude(ctx *gin.Context, model string) bool { 97 | key := "__is-claude__" 98 | if ctx.GetBool(key) { 99 | return true 100 | } 101 | 102 | if model == "coze/websdk" || common.IsGinCozeWebsdk(ctx) { 103 | model = env.Env.GetString("coze.websdk.model") 104 | return model == coze.ModelClaude35Sonnet_200k || model == coze.ModelClaude3Haiku_200k 105 | } 106 | 107 | isc := strings.Contains(strings.ToLower(model), "claude") 108 | if isc { 109 | ctx.Set(key, true) 110 | return true 111 | } 112 | 113 | if strings.HasPrefix(model, "coze/") { 114 | values := strings.Split(model[5:], 
"-") 115 | if len(values) > 3 && "w" == values[3] && 116 | (strings.Contains(ctx.GetString("token"), "[claude=true]") || values[1] == "claude") { 117 | ctx.Set(key, true) 118 | return true 119 | } 120 | return false 121 | } 122 | 123 | return isc 124 | } 125 | 126 | func ConvertToText(keyv interface{}) (s string) { 127 | var kv model.Keyv[interface{}] 128 | kv, ok := keyv.(map[string]interface{}) 129 | if !ok || !kv.Is("type", "text") { 130 | return 131 | } 132 | return kv.GetString("text") 133 | } 134 | -------------------------------------------------------------------------------- /core/gin/response/token.go: -------------------------------------------------------------------------------- 1 | package response 2 | 3 | import ( 4 | "chatgpt-adapter/core/logger" 5 | encoder "github.com/samber/go-gpt-3-encoder" 6 | ) 7 | 8 | func CalcTokens(content string) int { 9 | resolver, err := encoder.NewEncoder() 10 | if err != nil { 11 | logger.Error(err) 12 | return 0 13 | } 14 | result, err := resolver.Encode(content) 15 | if err != nil { 16 | logger.Error(err) 17 | return 0 18 | } 19 | return len(result) 20 | } 21 | 22 | func CalcUsageTokens(content string, previousTokens int) map[string]interface{} { 23 | tokens := CalcTokens(content) 24 | return map[string]interface{}{ 25 | "completion_tokens": tokens, 26 | "prompt_tokens": previousTokens, 27 | "total_tokens": previousTokens + tokens, 28 | } 29 | } 30 | -------------------------------------------------------------------------------- /core/gin/v1.go: -------------------------------------------------------------------------------- 1 | package gin 2 | 3 | import ( 4 | "chatgpt-adapter/core/common/toolcall" 5 | "chatgpt-adapter/core/common/vars" 6 | "chatgpt-adapter/core/gin/inter" 7 | "chatgpt-adapter/core/gin/model" 8 | "chatgpt-adapter/core/gin/response" 9 | "chatgpt-adapter/core/logger" 10 | "fmt" 11 | "github.com/gin-gonic/gin" 12 | "github.com/iocgo/sdk" 13 | "time" 14 | ) 15 | 16 | const ginTokens = "__tokens__" 17 | 18 | // @Router() 19 | type Handler struct{ extensions []inter.Adapter } 20 | 21 | // @Inject() 22 | func New(container *sdk.Container) *Handler { 23 | extensions := sdk.ListInvokeAs[inter.Adapter](container) 24 | return &Handler{extensions} 25 | } 26 | 27 | // @GET(path = "/") 28 | func (h *Handler) index(gtx *gin.Context) { 29 | gtx.Writer.WriteString("
success ~
") 30 | } 31 | 32 | // @POST(path = " 33 | // 34 | // v1/chat/completions, 35 | // v1/object/completions, 36 | // proxies/v1/chat/completions 37 | // 38 | // ") 39 | func (h *Handler) completions(gtx *gin.Context) { 40 | var completion model.Completion 41 | if err := gtx.BindJSON(&completion); err != nil { 42 | logger.Error(err) 43 | response.Error(gtx, -1, err) 44 | return 45 | } 46 | 47 | gtx.Set(vars.GinCompletion, completion) 48 | logger.Infof("curr model: %s", completion.Model) 49 | if !response.MessageValidator(gtx) { 50 | return 51 | } 52 | 53 | for _, extension := range h.extensions { 54 | ok, err := extension.Match(gtx, completion.Model) 55 | if err != nil { 56 | response.Error(gtx, -1, err) 57 | return 58 | } 59 | if !ok { 60 | continue 61 | } 62 | 63 | gtx.Set(vars.GinMatchers, response.NewMatchers(gtx, func(t byte, str string) { 64 | if completion.Stream && t == 0 { 65 | response.SSEResponse(gtx, "matcher", str, time.Now().Unix()) 66 | } 67 | if completion.Stream && t == 1 { 68 | response.ReasonSSEResponse(gtx, "matcher", "", str, time.Now().Unix()) 69 | } 70 | })) 71 | 72 | messages, err := extension.HandleMessages(gtx, completion) 73 | if err != nil { 74 | logger.Error("Error handling messages: ", err) 75 | response.Error(gtx, 500, err) 76 | return 77 | } 78 | 79 | if gtx.GetInt(ginTokens) == 0 { 80 | calcTokens(gtx, messages) 81 | } 82 | 83 | completion.Messages = messages 84 | gtx.Set(vars.GinCompletion, completion) 85 | 86 | if toolcall.NeedExec(gtx) { 87 | if ok, err = extension.ToolChoice(gtx); err != nil { 88 | response.Error(gtx, -1, err) 89 | return 90 | } 91 | if ok { 92 | return 93 | } 94 | } 95 | 96 | if err = extension.Completion(gtx); err != nil { 97 | response.Error(gtx, -1, err) 98 | } 99 | return 100 | } 101 | response.Error(gtx, -1, fmt.Sprintf("model '%s' is not not yet supported", completion.Model)) 102 | } 103 | 104 | func calcTokens(gtx *gin.Context, messages []model.Keyv[interface{}]) { 105 | tokens := 0 106 | for _, message := range messages { 107 | if !message.IsString("content") { 108 | continue 109 | } 110 | value := message.GetString("content") 111 | tokens += response.CalcTokens(value) 112 | } 113 | gtx.Set(ginTokens, tokens) 114 | } 115 | 116 | // @POST(path = " 117 | // 118 | // /v1/embeddings, 119 | // proxies/v1/embeddings 120 | // 121 | // ") 122 | func (h *Handler) embeddings(gtx *gin.Context) { 123 | var embed model.Embed 124 | if err := gtx.BindJSON(&embed); err != nil { 125 | logger.Error(err) 126 | response.Error(gtx, -1, err) 127 | return 128 | } 129 | 130 | gtx.Set(vars.GinEmbedding, embed) 131 | logger.Infof("curr model: %s", embed.Model) 132 | for _, extension := range h.extensions { 133 | ok, err := extension.Match(gtx, embed.Model) 134 | if err != nil { 135 | response.Error(gtx, -1, err) 136 | return 137 | } 138 | if ok { 139 | if err = extension.Embedding(gtx); err != nil { 140 | response.Error(gtx, -1, err) 141 | } 142 | return 143 | } 144 | } 145 | response.Error(gtx, -1, fmt.Sprintf("model '%s' is not not yet supported", embed.Model)) 146 | } 147 | 148 | // @POST(path = " 149 | // 150 | // v1/images/generations, 151 | // v1/object/generations, 152 | // proxies/v1/images/generations 153 | // 154 | // ") 155 | func (h *Handler) generations(gtx *gin.Context) { 156 | var generation model.Generation 157 | if err := gtx.BindJSON(&generation); err != nil { 158 | response.Error(gtx, 500, err) 159 | return 160 | } 161 | 162 | gtx.Set(vars.GinGeneration, generation) 163 | for _, extension := range h.extensions { 164 | ok, err := 
extension.Match(gtx, generation.Model) 165 | if err != nil { 166 | response.Error(gtx, 500, err) 167 | return 168 | } 169 | if ok { 170 | if err = extension.Generation(gtx); err != nil { 171 | response.Error(gtx, -1, err) 172 | } 173 | return 174 | } 175 | } 176 | response.Error(gtx, -1, fmt.Sprintf("model '%s' is not not yet supported", generation.Model)) 177 | } 178 | 179 | // @GET(path = " 180 | // 181 | // v1/models, 182 | // proxies/v1/models 183 | // 184 | // ") 185 | func (h *Handler) models(gtx *gin.Context) { 186 | models := make([]model.Model, 0) 187 | for _, extension := range h.extensions { 188 | models = append(models, extension.Models()...) 189 | } 190 | gtx.JSON(200, gin.H{ 191 | "object": "list", 192 | "data": models, 193 | }) 194 | } 195 | -------------------------------------------------------------------------------- /core/logger/log.go: -------------------------------------------------------------------------------- 1 | package logger 2 | 3 | import ( 4 | nested "github.com/antonfisher/nested-logrus-formatter" 5 | rotatelogs "github.com/lestrrat-go/file-rotatelogs" 6 | "github.com/sirupsen/logrus" 7 | 8 | "io" 9 | "os" 10 | "path" 11 | "path/filepath" 12 | "runtime" 13 | "strconv" 14 | "strings" 15 | "time" 16 | ) 17 | 18 | func InitLogger(basePath string, level logrus.Level) { 19 | logrus.SetLevel(level) 20 | if len(basePath) == 0 { 21 | basePath = "log" 22 | } 23 | 24 | writer, err := rotatelogs.New( 25 | filepath.Join(basePath, "background-%Y-%m-%d.log"), 26 | // 日志最大保存时间 27 | rotatelogs.WithMaxAge(7*24*time.Hour), 28 | // //设置日志切割时间间隔(1天)(隔多久分割一次) 29 | rotatelogs.WithRotationTime(24*time.Hour), 30 | ) 31 | if err != nil { 32 | Fatal(err) 33 | } 34 | 35 | writers := []io.Writer{writer, os.Stdout} 36 | logrus.SetOutput(io.MultiWriter(writers...)) 37 | logrus.SetFormatter(&nested.Formatter{ 38 | HideKeys: true, 39 | TimestampFormat: "2006-01-02 15:04:05", 40 | CallerFirst: true, 41 | NoColors: true, 42 | CustomCallerFormatter: CustomCallerFormatter, 43 | }) 44 | logrus.SetReportCaller(true) 45 | } 46 | 47 | func CustomCallerFormatter(frame *runtime.Frame) string { 48 | trimPackage := func(pkg string) string { 49 | if pkg == "" { 50 | return pkg 51 | } 52 | slice := strings.Split(pkg, "/") 53 | length := len(slice) 54 | if length <= 2 { 55 | return pkg 56 | } 57 | return slice[length-2] + "/" + slice[length-1] 58 | } 59 | 60 | trimL := func(prefix string) string { 61 | if prefix == "" { 62 | return prefix 63 | } 64 | if strings.HasPrefix(prefix, "/") { 65 | return prefix[1:] 66 | } 67 | return prefix 68 | } 69 | 70 | // 尝试获取上层栈 71 | pcs := make([]uintptr, 10) 72 | depth := runtime.Callers(10, pcs) 73 | frames := runtime.CallersFrames(pcs[:depth]) 74 | for f, next := frames.Next(); next; f, next = frames.Next() { 75 | if f.PC == frame.PC { 76 | if f, next = frames.Next(); next { 77 | frame = &f 78 | break 79 | } 80 | } 81 | } 82 | 83 | main := strings.HasPrefix(frame.Function, "main.") 84 | slice := strings.Split(frame.File, trimPackage(path.Dir(frame.Function))) 85 | if !main && len(slice) > 1 { 86 | return " <" + path.Dir(frame.Function) + "> " + trimL(slice[1]) + ":" + strconv.Itoa(frame.Line) + " |" 87 | } 88 | 89 | root := path.Dir(frame.Function) 90 | if main { 91 | root = "main" 92 | } 93 | 94 | file := frame.File 95 | return " <" + root + "> " + file + ":" + strconv.Itoa(frame.Line) + " |" 96 | } 97 | 98 | func Trace(args ...interface{}) { 99 | logrus.Trace(args...) 
100 | } 101 | 102 | func Tracef(format string, args ...interface{}) { 103 | logrus.Tracef(format, args...) 104 | } 105 | 106 | func Debug(args ...interface{}) { 107 | logrus.Debug(args...) 108 | } 109 | 110 | func Debugf(format string, args ...interface{}) { 111 | logrus.Debugf(format, args...) 112 | } 113 | 114 | func Info(args ...interface{}) { 115 | logrus.Info(args...) 116 | } 117 | 118 | func Infof(format string, args ...interface{}) { 119 | logrus.Infof(format, args...) 120 | } 121 | 122 | func Warn(args ...interface{}) { 123 | logrus.Warn(args...) 124 | } 125 | 126 | func Warnf(format string, args ...interface{}) { 127 | logrus.Warnf(format, args...) 128 | } 129 | 130 | func Error(args ...interface{}) { 131 | logrus.Error(args...) 132 | } 133 | 134 | func Errorf(format string, args ...interface{}) { 135 | logrus.Errorf(format, args...) 136 | } 137 | 138 | func Fatal(args ...interface{}) { 139 | logrus.Fatal(args...) 140 | } 141 | 142 | func Fatalf(format string, args ...interface{}) { 143 | logrus.Fatalf(format, args...) 144 | } 145 | -------------------------------------------------------------------------------- /core/scan/export.go: -------------------------------------------------------------------------------- 1 | // This package is provided only for the iocgo tool; the Injects errors reported here can be ignored, the code is generated during compilation 2 | 3 | package scan 4 | 5 | import ( 6 | "github.com/iocgo/sdk" 7 | 8 | "chatgpt-adapter/cmd/cobra" 9 | "chatgpt-adapter/core/gin" 10 | ) 11 | 12 | func Injects(container *sdk.Container) (err error) { 13 | err = cobra.Injects(container) 14 | if err != nil { 15 | return 16 | } 17 | 18 | err = gin.Injects(container) 19 | if err != nil { 20 | return 21 | } 22 | 23 | return 24 | } 25 | -------------------------------------------------------------------------------- /core/tokenizer/elem.go: -------------------------------------------------------------------------------- 1 | package tokenizer 2 | 3 | import ( 4 | "fmt" 5 | "strconv" 6 | ) 7 | 8 | const ( 9 | Str Kind = iota 10 | Ident 11 | ) 12 | 13 | type Kind uint8 14 | 15 | type Elem interface { 16 | Kind() Kind 17 | Expr() string 18 | Content() string 19 | 20 | Str(key string) (string, bool) 21 | Int(key string) (int64, bool) 22 | Boolean(key string) (bool, bool) 23 | 24 | String() string 25 | } 26 | 27 | type strElem struct { 28 | kind Kind 29 | content string 30 | } 31 | 32 | type nodeElem struct { 33 | strElem 34 | 35 | count int 36 | 37 | expr string 38 | attributes map[string]string 39 | } 40 | 41 | var ( 42 | _ Elem = (*strElem)(nil) 43 | _ Elem = (*nodeElem)(nil) 44 | ) 45 | 46 | func (s strElem) Kind() Kind { return s.kind } 47 | func (s strElem) Content() string { return s.content } 48 | func (s strElem) String() string { return s.content } 49 | func (s strElem) Expr() string { panic("implement me") } 50 | func (s strElem) Str(string) (string, bool) { panic("implement me") } 51 | func (s strElem) Int(string) (int64, bool) { panic("implement me") } 52 | func (s strElem) Boolean(string) (bool, bool) { panic("implement me") } 53 | 54 | func (s nodeElem) Expr() string { return s.expr } 55 | func (s nodeElem) String() string { 56 | attr := "" 57 | for k, v := range s.attributes { 58 | if attr == "" { 59 | attr += " " 60 | } 61 | attr += k + "=" + v 62 | } 63 | if s.content == "" { 64 | return fmt.Sprintf("<%s%s />", s.expr, attr) 65 | } 66 | return fmt.Sprintf("<%s%s>%s</%s>", s.expr, attr, s.content, s.expr) 67 | } 68 | 69 | func (s nodeElem) Str(key string) (string, bool) { 70 | value, ok := s.attributes[key] 71 | if !ok { 72 | return "", false 73 | } 74 | if len(value)
< 2 || value[0] != '"' || value[len(value)-1] != '"' { 75 | return value, true 76 | } 77 | return value[1 : len(value)-1], true 78 | } 79 | 80 | func (s nodeElem) Int(key string) (int64, bool) { 81 | value, ok := s.attributes[key] 82 | if !ok { 83 | return 0, false 84 | } 85 | i, err := strconv.ParseInt(value, 10, 32) 86 | if err != nil { 87 | return i, false 88 | } 89 | return i, true 90 | } 91 | 92 | func (s nodeElem) Boolean(key string) (bool, bool) { 93 | value, ok := s.attributes[key] 94 | if !ok { 95 | return false, false 96 | } 97 | if value == "" { 98 | return true, true 99 | } 100 | i, err := strconv.ParseBool(value) 101 | if err != nil { 102 | return i, false 103 | } 104 | return i, true 105 | } 106 | -------------------------------------------------------------------------------- /core/tokenizer/lexer.go: -------------------------------------------------------------------------------- 1 | package tokenizer 2 | 3 | var ( 4 | EOF = "EOF" 5 | STR = "str" 6 | IDENT = "ident" 7 | SLASH = "sL" 8 | LT = "Lt" 9 | RT = "Rt" 10 | ) 11 | 12 | type token struct { 13 | kind string 14 | literal string 15 | pos int 16 | } 17 | 18 | type Lexer struct { 19 | // 20 | input []rune 21 | pos int 22 | ch rune 23 | } 24 | 25 | func (lex *Lexer) nextToken() *token { 26 | lex.readChar() 27 | switch lex.ch { 28 | case 0: 29 | return lex.newToken(EOF, "") 30 | case '\\': 31 | // 32 | case '<': 33 | return lex.newToken(LT, "<") 34 | case '>': 35 | return lex.newToken(RT, ">") 36 | case '/': 37 | return lex.newToken(SLASH, "/") 38 | } 39 | 40 | lit := lex.readIdentifier() 41 | if lit != "" { 42 | return lex.newToken(IDENT, lit) 43 | } 44 | 45 | lit = lex.readString() 46 | return lex.newToken(STR, lit) 47 | } 48 | 49 | func (lex *Lexer) position(pos int) { 50 | if pos < 0 { 51 | pos = 0 52 | } 53 | if i := len(lex.input); pos >= i { 54 | pos = i - 1 55 | } 56 | lex.pos = pos 57 | lex.ch = lex.input[pos] 58 | } 59 | 60 | func (lex *Lexer) newToken(kind string, literal string) *token { 61 | return &token{kind, literal, lex.pos} 62 | } 63 | 64 | func (lex *Lexer) readChar() { 65 | lex.pos++ 66 | if lex.pos >= len(lex.input) { 67 | lex.ch = 0 68 | return 69 | } 70 | 71 | lex.ch = lex.input[lex.pos] 72 | } 73 | 74 | func (lex *Lexer) peekChar() rune { 75 | if lex.pos+1 >= len(lex.input) { 76 | return 0 77 | } 78 | return lex.input[lex.pos+1] 79 | } 80 | 81 | func (lex *Lexer) readIdentifier() string { 82 | pos := lex.pos 83 | if pos == 0 { 84 | return "" 85 | } 86 | 87 | if pos > 0 { 88 | // 89 | if lex.input[pos-1] == '<' { 90 | goto label 91 | } 92 | } 93 | if pos > 1 { 94 | // 95 | if lex.input[pos-1] == '/' && lex.input[pos-2] == '<' { 96 | goto label 97 | } 98 | } 99 | return "" 100 | label: 101 | 102 | for { 103 | char := lex.ch 104 | if (char >= 'a' && char <= 'z') || (char >= 'A' && char <= 'Z') || (char >= '0' && char <= '9') || char == '@' || char == '_' { 105 | lex.readChar() 106 | continue 107 | } 108 | // , 109 | if lex.ch != ' ' && lex.ch != '/' && lex.ch != '>' { 110 | lex.pos = pos 111 | lex.ch = lex.input[pos] 112 | return "" 113 | } 114 | 115 | lex.position(lex.pos - 1) 116 | return string(lex.input[pos : lex.pos+1]) 117 | } 118 | } 119 | 120 | func (lex *Lexer) readString() string { 121 | pos := lex.pos 122 | for { 123 | switch lex.peekChar() { 124 | case '\\': 125 | lex.readChar() 126 | if ch := lex.peekChar(); ch == '\\' || ch == '>' { 127 | lex.readChar() // ignore next char 128 | } 129 | case 0, '<', '>', '/': 130 | return string(lex.input[pos : lex.pos+1]) 131 | default: 132 | 
lex.readChar() 133 | } 134 | } 135 | } 136 | -------------------------------------------------------------------------------- /deploy/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM golang:1.23-alpine AS builder 2 | 3 | WORKDIR /app 4 | RUN apk add git make && git clone https://github.com/bincooo/chatgpt-adapter.git . 5 | RUN make install 6 | RUN make build-linux 7 | 8 | FROM alpine:3.19.0 9 | WORKDIR /app 10 | COPY --from=builder /app/bin/linux/server ./server 11 | RUN echo -e 'server:\n port: 8080' > ./config.yaml 12 | RUN chmod +x server 13 | 14 | ENV ARG "--port 8080" 15 | CMD ["./server ${ARG}"] 16 | ENTRYPOINT ["sh", "-c"] -------------------------------------------------------------------------------- /deploy/Dockerfile-BL: -------------------------------------------------------------------------------- 1 | FROM golang:1.23-alpine AS builder 2 | 3 | WORKDIR /app 4 | RUN apk add git make && git clone https://github.com/bincooo/chatgpt-adapter.git . 5 | RUN make install 6 | RUN make build-linux 7 | 8 | FROM ubuntu:latest 9 | 10 | WORKDIR /app 11 | COPY --from=builder /app/bin/linux/server ./server 12 | 13 | RUN apt update \ 14 | && apt-get install -y curl unzip wget gnupg2 15 | 16 | # 下载过盾文件 17 | RUN curl -JLO https://raw.githubusercontent.com/bincooo/chatgpt-adapter/refs/heads/hel/bin.zip 18 | RUN echo -e 'server:\n port: 8080' > ./config.yaml 19 | 20 | # Install google 21 | RUN wget -q -O - https://dl.google.com/linux/linux_signing_key.pub | apt-key add - \ 22 | && echo "deb [arch=amd64] http://dl.google.com/linux/chrome/deb/ stable main" >> /etc/apt/sources.list.d/google-chrome.list \ 23 | && apt-get update \ 24 | && apt-get install -y google-chrome-stable 25 | 26 | # Install Edge 27 | #RUN wget -q -O - https://packages.microsoft.com/keys/microsoft.asc | gpg --dearmor | tee /etc/apt/trusted.gpg.d/microsoft.gpg >/dev/null \ 28 | # && echo "deb https://packages.microsoft.com/repos/edge stable main" >> /etc/apt/sources.list.d/microsoft-edge.list \ 29 | # && apt-get update -qqy \ 30 | # && apt-get -qqy --no-install-recommends install microsoft-edge-stable 31 | 32 | RUN unzip bin.zip \ 33 | && chmod +x server \ 34 | && chmod +x bin/linux/helper 35 | 36 | ENV ARG "--port 8080" 37 | CMD ["./server ${ARG}"] 38 | ENTRYPOINT ["sh", "-c"] 39 | -------------------------------------------------------------------------------- /go.mod: -------------------------------------------------------------------------------- 1 | module chatgpt-adapter 2 | 3 | go 1.23.3 4 | 5 | require ( 6 | github.com/antonfisher/nested-logrus-formatter v1.3.1 7 | github.com/bincooo/coze-api v1.0.2-0.20250118010946-7c4f3c5e25ea 8 | github.com/bincooo/edge-api v1.0.4-0.20250211074233-37fe84649a9b 9 | github.com/bincooo/emit.io v1.0.1-0.20250327152715-789fc5920a10 10 | github.com/bincooo/you.com v0.0.0-20250205070606-666b6847729b 11 | github.com/bogdanfinn/tls-client v1.8.0 12 | github.com/dlclark/regexp2 v1.11.4 13 | github.com/eko/gocache/lib/v4 v4.1.6 14 | github.com/eko/gocache/store/go_cache/v4 v4.2.2 15 | github.com/gabriel-vasile/mimetype v1.4.3 16 | github.com/gin-gonic/gin v1.10.0 17 | github.com/golang-jwt/jwt/v5 v5.2.2 18 | github.com/golang/protobuf v1.5.4 19 | github.com/google/uuid v1.6.0 20 | github.com/iocgo/sdk v0.0.0-20241203133330-43dcedf3291e 21 | github.com/lestrrat-go/file-rotatelogs v2.4.0+incompatible 22 | github.com/patrickmn/go-cache v2.1.0+incompatible 23 | github.com/samber/go-gpt-3-encoder v0.3.1 24 | github.com/sirupsen/logrus v1.9.3 25 | 
github.com/wasmerio/wasmer-go v1.0.5-0.20250109124841-f09913d8a0be 26 | google.golang.org/protobuf v1.36.0 27 | ) 28 | 29 | //github.com/iocgo/sdk v0.0.0-20241129021727-ca323c08f298 => ../sdk 30 | //github.com/bincooo/edge-api v1.0.4-0.20250107025218-74fbeaa104b8 => ../edge-api 31 | replace github.com/samber/do/v2 v2.0.0-beta.7 => github.com/iocgo/do/v2 v2.0.0-patch.0.20241204032939-7bbcadbc5f38 32 | 33 | require ( 34 | github.com/RomiChan/websocket v1.4.3-0.20220227141055-9b2c6168c9c5 // indirect 35 | github.com/andybalholm/brotli v1.1.1 // indirect 36 | github.com/beorn7/perks v1.0.1 // indirect 37 | github.com/bincooo/go-annotation v0.0.0-20241210101123-2fc3053d2f16 // indirect 38 | github.com/bogdanfinn/fhttp v0.5.36 // indirect 39 | github.com/bogdanfinn/utls v1.6.5 // indirect 40 | github.com/bytedance/sonic v1.11.6 // indirect 41 | github.com/bytedance/sonic/loader v0.1.1 // indirect 42 | github.com/cespare/xxhash/v2 v2.2.0 // indirect 43 | github.com/cloudflare/circl v1.5.0 // indirect 44 | github.com/cloudwego/base64x v0.1.4 // indirect 45 | github.com/cloudwego/iasm v0.2.0 // indirect 46 | github.com/fsnotify/fsnotify v1.7.0 // indirect 47 | github.com/gin-contrib/sse v0.1.0 // indirect 48 | github.com/gingfrederik/docx v0.0.1 // indirect 49 | github.com/go-playground/locales v0.14.1 // indirect 50 | github.com/go-playground/universal-translator v0.18.1 // indirect 51 | github.com/go-playground/validator/v10 v10.20.0 // indirect 52 | github.com/goccy/go-json v0.10.2 // indirect 53 | github.com/golang/mock v1.6.0 // indirect 54 | github.com/hashicorp/errwrap v1.1.0 // indirect 55 | github.com/hashicorp/go-multierror v1.1.1 // indirect 56 | github.com/hashicorp/hcl v1.0.0 // indirect 57 | github.com/inconshreveable/mousetrap v1.1.0 // indirect 58 | github.com/jonboulle/clockwork v0.4.0 // indirect 59 | github.com/json-iterator/go v1.1.12 // indirect 60 | github.com/klauspost/compress v1.17.11 // indirect 61 | github.com/klauspost/cpuid/v2 v2.2.7 // indirect 62 | github.com/leodido/go-urn v1.4.0 // indirect 63 | github.com/lestrrat-go/strftime v1.1.0 // indirect 64 | github.com/magiconair/properties v1.8.7 // indirect 65 | github.com/mattn/go-isatty v0.0.20 // indirect 66 | github.com/mitchellh/mapstructure v1.5.0 // indirect 67 | github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect 68 | github.com/modern-go/reflect2 v1.0.2 // indirect 69 | github.com/pelletier/go-toml/v2 v2.2.2 // indirect 70 | github.com/pkg/errors v0.9.1 // indirect 71 | github.com/prometheus/client_golang v1.19.1 // indirect 72 | github.com/prometheus/client_model v0.5.0 // indirect 73 | github.com/prometheus/common v0.48.0 // indirect 74 | github.com/prometheus/procfs v0.12.0 // indirect 75 | github.com/quic-go/quic-go v0.48.1 // indirect 76 | github.com/sagikazarmark/locafero v0.4.0 // indirect 77 | github.com/sagikazarmark/slog-shim v0.1.0 // indirect 78 | github.com/samber/do/v2 v2.0.0-beta.7 // indirect 79 | github.com/samber/go-type-to-string v1.6.1 // indirect 80 | github.com/samber/lo v1.37.0 // indirect 81 | github.com/sourcegraph/conc v0.3.0 // indirect 82 | github.com/spf13/afero v1.11.0 // indirect 83 | github.com/spf13/cast v1.6.0 // indirect 84 | github.com/spf13/cobra v1.8.1 // indirect 85 | github.com/spf13/pflag v1.0.5 // indirect 86 | github.com/spf13/viper v1.19.0 // indirect 87 | github.com/subosito/gotenv v1.6.0 // indirect 88 | github.com/tam7t/hpkp v0.0.0-20160821193359-2b70b4024ed5 // indirect 89 | github.com/tidwall/gjson v1.18.0 // indirect 90 | 
github.com/tidwall/match v1.1.1 // indirect 91 | github.com/tidwall/pretty v1.2.0 // indirect 92 | github.com/twitchyliquid64/golang-asm v0.15.1 // indirect 93 | github.com/ugorji/go/codec v1.2.12 // indirect 94 | go.uber.org/atomic v1.9.0 // indirect 95 | go.uber.org/multierr v1.9.0 // indirect 96 | golang.org/x/arch v0.8.0 // indirect 97 | golang.org/x/crypto v0.36.0 // indirect 98 | golang.org/x/exp v0.0.0-20240719175910-8a7402abbf56 // indirect 99 | golang.org/x/mod v0.21.0 // indirect 100 | golang.org/x/net v0.37.0 // indirect 101 | golang.org/x/sync v0.12.0 // indirect 102 | golang.org/x/sys v0.31.0 // indirect 103 | golang.org/x/text v0.23.0 // indirect 104 | golang.org/x/tools v0.25.0 // indirect 105 | gopkg.in/ini.v1 v1.67.0 // indirect 106 | gopkg.in/yaml.v3 v3.0.1 // indirect 107 | ) 108 | -------------------------------------------------------------------------------- /main.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "chatgpt-adapter/wire" 5 | "github.com/iocgo/sdk" 6 | "github.com/iocgo/sdk/errors" 7 | ) 8 | 9 | func main() { 10 | ctx := errors.New(nil) 11 | { 12 | if err := errors.Try1(ctx, func() (c *sdk.Container, err error) { 13 | c = sdk.NewContainer() 14 | err = wire.Injects(c) 15 | return 16 | }).Run(); err != nil { 17 | panic(err) 18 | } 19 | } 20 | } 21 | -------------------------------------------------------------------------------- /relay/3rd: -------------------------------------------------------------------------------- 1 | /Users/bincooo/code-workspace/golang/20241203/3rd -------------------------------------------------------------------------------- /relay/alloc/bing/bing.go: -------------------------------------------------------------------------------- 1 | package bing 2 | 3 | import ( 4 | "time" 5 | 6 | "chatgpt-adapter/core/common" 7 | "chatgpt-adapter/core/common/inited" 8 | "chatgpt-adapter/core/common/vars" 9 | "chatgpt-adapter/core/gin/response" 10 | "chatgpt-adapter/core/logger" 11 | "github.com/gin-gonic/gin" 12 | "github.com/iocgo/sdk/env" 13 | "github.com/iocgo/sdk/proxy" 14 | "github.com/iocgo/sdk/stream" 15 | ) 16 | 17 | var ( 18 | cookiesContainer *common.PollContainer[map[string]string] 19 | ) 20 | 21 | func init() { 22 | inited.AddInitialized(func(env *env.Environment) { 23 | cookies, ok := env.Get("bing.cookies").([]interface{}) 24 | if !ok { 25 | return 26 | } 27 | slice := stream.Map(stream.OfSlice(cookies), func(t interface{}) (obj map[string]string) { 28 | m, o := t.(map[string]interface{}) 29 | if !o { 30 | return 31 | } 32 | return map[string]string{ 33 | "scopeId": m["scopeid"].(string), 34 | "idToken": m["idtoken"].(string), 35 | "cookie": m["cookie"].(string), 36 | } 37 | }).ToSlice() 38 | 39 | cookiesContainer = common.NewPollContainer[map[string]string]("bing", slice, 6*time.Hour) 40 | cookiesContainer.Condition = condition 41 | }) 42 | } 43 | 44 | func InvocationHandler(ctx *proxy.Context) { 45 | var ( 46 | gtx = ctx.In[0].(*gin.Context) 47 | echo = gtx.GetBool(vars.GinEcho) 48 | ) 49 | 50 | if echo || ctx.Method != "Completion" && ctx.Method != "ToolChoice" { 51 | ctx.Do() 52 | return 53 | } 54 | 55 | logger.Infof("execute static proxy [relay/llm/bing.api]: func %s(...)", ctx.Method) 56 | 57 | if cookiesContainer.Len() == 0 { 58 | response.Error(gtx, -1, "empty cookies") 59 | return 60 | } 61 | 62 | cookie, err := cookiesContainer.Poll() 63 | if err != nil { 64 | logger.Error(err) 65 | response.Error(gtx, -1, err) 66 | return 67 | } 68 | defer 
resetMarked(cookie) 69 | gtx.Set("token", cookie) 70 | 71 | // 72 | ctx.Do() 73 | 74 | // 75 | if ctx.Method == "Completion" { 76 | err = elseOf[error](ctx.Out[0]) 77 | } 78 | if ctx.Method == "ToolChoice" { 79 | err = elseOf[error](ctx.Out[1]) 80 | } 81 | 82 | if err != nil { 83 | logger.Error(err) 84 | return 85 | } 86 | } 87 | 88 | func condition(cookie map[string]string, argv ...interface{}) bool { 89 | marker, err := cookiesContainer.Marked(cookie) 90 | if err != nil { 91 | logger.Error(err) 92 | return false 93 | } 94 | return marker == 0 95 | } 96 | 97 | func resetMarked(cookie map[string]string) { 98 | marker, err := cookiesContainer.Marked(cookie) 99 | if err != nil { 100 | logger.Error(err) 101 | return 102 | } 103 | 104 | if marker != 1 { 105 | return 106 | } 107 | 108 | err = cookiesContainer.MarkTo(cookie, 0) 109 | if err != nil { 110 | logger.Error(err) 111 | } 112 | } 113 | 114 | func elseOf[T any](obj any) (zero T) { 115 | if obj == nil { 116 | return 117 | } 118 | return obj.(T) 119 | } 120 | -------------------------------------------------------------------------------- /relay/alloc/bing/ctor.go: -------------------------------------------------------------------------------- 1 | package bing 2 | 3 | import ( 4 | "github.com/iocgo/sdk/proxy" 5 | 6 | _ "chatgpt-adapter/core/gin/inter" 7 | _ "chatgpt-adapter/core/gin/model" 8 | _ "github.com/gin-gonic/gin" 9 | _ "reflect" 10 | ) 11 | 12 | // @Proxy( 13 | // 14 | // target = "chatgpt-adapter/core/gin/inter.Adapter", 15 | // scan = "chatgpt-adapter/relay/llm/bing.api", 16 | // igm = "!(Completion|ToolChoice)" 17 | // 18 | // ) 19 | func Proxy(ctx *proxy.Context) { InvocationHandler(ctx) } 20 | -------------------------------------------------------------------------------- /relay/alloc/coze/coze.go: -------------------------------------------------------------------------------- 1 | package coze 2 | 3 | import ( 4 | "errors" 5 | "fmt" 6 | "strconv" 7 | "strings" 8 | "time" 9 | 10 | "chatgpt-adapter/core/common" 11 | "chatgpt-adapter/core/common/inited" 12 | "chatgpt-adapter/core/common/vars" 13 | "chatgpt-adapter/core/gin/model" 14 | "chatgpt-adapter/core/gin/response" 15 | "chatgpt-adapter/core/logger" 16 | "github.com/bincooo/coze-api" 17 | "github.com/bincooo/emit.io" 18 | "github.com/gin-gonic/gin" 19 | "github.com/iocgo/sdk/env" 20 | "github.com/iocgo/sdk/proxy" 21 | ) 22 | 23 | type account struct { 24 | Cookies string `mapstructure:"-" json:"-"` 25 | 26 | E string `mapstructure:"email" json:"email"` 27 | P string `mapstructure:"password" json:"password"` 28 | V string `mapstructure:"validate" json:"validate"` 29 | } 30 | 31 | var ( 32 | cookiesContainer *common.PollContainer[*account] 33 | ) 34 | 35 | func init() { 36 | inited.AddInitialized(func(env *env.Environment) { 37 | var values []*account 38 | err := env.UnmarshalKey("coze.websdk.accounts", &values) 39 | if err != nil { 40 | panic(err) 41 | } 42 | if len(values) == 0 { 43 | return 44 | } 45 | 46 | if !env.GetBool("browser-less.enabled") && env.GetString("browser-less.reversal") == "" { 47 | panic("don't used browser-less, please setting `browser-less.enabled` or `browser-less.reversal`") 48 | } 49 | 50 | cookiesContainer = common.NewPollContainer("coze", make([]*account, 0), 60*time.Second) // 报错进入60秒冷却 51 | cookiesContainer.Condition = condition(env.GetString("server.proxied")) 52 | run(env, values...) 
53 | }) 54 | } 55 | 56 | func InvocationHandler(ctx *proxy.Context) { 57 | var ( 58 | context = ctx.In[0].(*gin.Context) 59 | completion = common.GetGinCompletion(context) 60 | proxied = env.Env.GetString("server.proxied") 61 | echo = context.GetBool(vars.GinEcho) 62 | ) 63 | 64 | if echo || ctx.Method != "Completion" && ctx.Method != "ToolChoice" { 65 | ctx.Do() 66 | return 67 | } 68 | 69 | logger.Infof("execute static proxy [relay/llm/coze.api]: func %s(...)", ctx.Method) 70 | 71 | var ( 72 | err error 73 | meta *account 74 | 75 | cookies string 76 | ) 77 | 78 | if isSdk(context, completion.Model) { 79 | meta, err = cookiesContainer.Poll() 80 | if err != nil { 81 | logger.Error(err) 82 | response.Error(context, -1, err) 83 | return 84 | } 85 | 86 | defer resetMarked(meta) 87 | cookies = meta.Cookies 88 | logger.Infof("roll now Cookies: %s", cookies) 89 | 90 | completion.Model, err = sdkModel(context, proxied, cookies) 91 | if err != nil { 92 | logger.Error(err) 93 | response.Error(context, -1, err) 94 | return 95 | } 96 | context.Set(vars.GinCompletion, completion) 97 | } 98 | 99 | values := strings.Split(completion.Model[5:], "-") 100 | if isOwner(completion.Model) && len(values) > 2 { 101 | var scene int 102 | if scene, err = strconv.Atoi(values[2]); err != nil { 103 | logger.Error(err) 104 | response.Error(context, -1, err) 105 | return 106 | } 107 | 108 | co, msToken := extCookie(cookies) 109 | options := coze.NewDefaultOptions(values[0], values[1], scene, true, proxied) 110 | chat := coze.New(co, msToken, options) 111 | chat.Session(common.HTTPClient) 112 | emitErr := draftBot(context, "", chat, completion) 113 | if emitErr != nil { 114 | response.Error(context, emitErr.Code, emitErr.Err) 115 | return 116 | } 117 | } 118 | 119 | context.Set("token", cookies) 120 | 121 | ctx.Do() 122 | 123 | if ctx.Method == "Completion" { 124 | err = elseOf[error](ctx.Out[0]) 125 | } 126 | if ctx.Method == "ToolChoice" { 127 | err = elseOf[error](ctx.Out[1]) 128 | } 129 | 130 | if err != nil { 131 | if meta != nil { 132 | _ = cookiesContainer.MarkTo(meta, 2) 133 | logger.Infof("coze websdk[%s] 进入冷却状态", meta.E) 134 | } 135 | return 136 | } 137 | } 138 | 139 | func isSdk(ctx *gin.Context, model string) bool { 140 | if common.IsGinCozeWebsdk(ctx) { 141 | return true 142 | } 143 | if model == "coze/websdk" { 144 | ctx.Set(vars.GinCozeWebsdk, true) 145 | return true 146 | } 147 | return false 148 | } 149 | 150 | func sdkModel(ctx *gin.Context, proxies string, cookie string) (model string, err error) { 151 | options := coze.NewDefaultOptions("xxx", "xxx", 1000, false, proxies) 152 | co, msToken := extCookie(cookie) 153 | chat := coze.New(co, msToken, options) 154 | chat.Session(common.HTTPClient) 155 | bots, err := chat.QueryBots(ctx) 156 | if err != nil { 157 | return "", err 158 | } 159 | 160 | botId := "" 161 | botn := bot 162 | if botn == "" { 163 | botn = "custom-128k" 164 | } 165 | 166 | for _, value := range bots { 167 | info := value.(map[string]interface{}) 168 | if info["name"] == botn { 169 | botId = info["id"].(string) 170 | break 171 | } 172 | } 173 | 174 | if botId == "" { 175 | return "", errors.New(botn + " bot not found") 176 | } 177 | 178 | space, _ := chat.GetSpace(ctx) 179 | return "coze/" + botId + "-" + space + "-1000-w", nil 180 | } 181 | 182 | // return true 终止 183 | func draftBot(ctx *gin.Context, systemMessage string, chat coze.Chat, completion model.Completion) (emitErr *emit.Error) { 184 | value, err := chat.BotInfo(ctx.Request.Context()) 185 | if err != nil { 186 | 
logger.Error(err) 187 | return &emit.Error{Code: -1, Err: err} 188 | } 189 | 190 | botId := customBotId(completion.Model) 191 | if err = chat.DraftBot(ctx.Request.Context(), coze.DraftInfo{ 192 | Model: value["model"].(string), 193 | TopP: completion.TopP, 194 | Temperature: completion.Temperature, 195 | MaxTokens: completion.MaxTokens, 196 | FrequencyPenalty: 0, 197 | PresencePenalty: 0, 198 | ResponseFormat: 0, 199 | }, systemMessage); err != nil { 200 | logger.Error(fmt.Errorf("全局配置修改失败[%s]:%v", botId, err)) 201 | return &emit.Error{Code: -1, Err: err} 202 | } 203 | return 204 | } 205 | 206 | func extCookie(co string) (cookie, msToken string) { 207 | cookie = co 208 | index := strings.Index(cookie, "[msToken=") 209 | if index > -1 { 210 | end := strings.Index(cookie[index:], "]") 211 | if end > -1 { 212 | msToken = cookie[index+6 : index+end] 213 | cookie = cookie[:index] + cookie[index+end+1:] 214 | } 215 | } 216 | return 217 | } 218 | 219 | func customBotId(model string) string { 220 | if strings.HasPrefix(model, "coze/") { 221 | values := strings.Split(model[5:], "-") 222 | return values[0] 223 | } 224 | return "" 225 | } 226 | 227 | func isOwner(model string) bool { return strings.HasSuffix(model, "-o") } 228 | func elseOf[T any](obj any) (zero T) { 229 | if obj == nil { 230 | return 231 | } 232 | return obj.(T) 233 | } 234 | -------------------------------------------------------------------------------- /relay/alloc/coze/ctor.go: -------------------------------------------------------------------------------- 1 | package coze 2 | 3 | import ( 4 | "github.com/iocgo/sdk/proxy" 5 | 6 | _ "chatgpt-adapter/core/gin/inter" 7 | _ "chatgpt-adapter/core/gin/model" 8 | _ "github.com/gin-gonic/gin" 9 | _ "reflect" 10 | ) 11 | 12 | // @Proxy( 13 | // 14 | // target = "chatgpt-adapter/core/gin/inter.Adapter", 15 | // scan = "chatgpt-adapter/relay/llm/coze.api", 16 | // igm = "!(Completion|ToolChoice)" 17 | // 18 | // ) 19 | func Proxy(ctx *proxy.Context) { InvocationHandler(ctx) } 20 | -------------------------------------------------------------------------------- /relay/alloc/grok/ctor.go: -------------------------------------------------------------------------------- 1 | package bing 2 | 3 | import ( 4 | "github.com/iocgo/sdk/proxy" 5 | 6 | _ "chatgpt-adapter/core/gin/inter" 7 | _ "chatgpt-adapter/core/gin/model" 8 | _ "github.com/gin-gonic/gin" 9 | _ "reflect" 10 | ) 11 | 12 | // @Proxy( 13 | // 14 | // target = "chatgpt-adapter/core/gin/inter.Adapter", 15 | // scan = "chatgpt-adapter/relay/llm/grok.api", 16 | // igm = "!(Completion|ToolChoice)" 17 | // 18 | // ) 19 | func Proxy(ctx *proxy.Context) { InvocationHandler(ctx) } 20 | -------------------------------------------------------------------------------- /relay/alloc/grok/grok.go: -------------------------------------------------------------------------------- 1 | package bing 2 | 3 | import ( 4 | "errors" 5 | "net/http" 6 | "strings" 7 | "sync" 8 | "time" 9 | 10 | "chatgpt-adapter/core/common" 11 | "chatgpt-adapter/core/common/inited" 12 | "chatgpt-adapter/core/common/vars" 13 | "chatgpt-adapter/core/gin/response" 14 | "chatgpt-adapter/core/logger" 15 | "github.com/bincooo/emit.io" 16 | "github.com/gin-gonic/gin" 17 | "github.com/google/uuid" 18 | "github.com/iocgo/sdk/env" 19 | "github.com/iocgo/sdk/proxy" 20 | ) 21 | 22 | var ( 23 | cookiesContainer *common.PollContainer[string] 24 | userAgent = "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/18.1.1 Safari/605.1.15" 25 | lang 
string 26 | clearance string 27 | 28 | mu sync.Mutex 29 | ) 30 | 31 | func init() { 32 | inited.AddInitialized(func(env *env.Environment) { 33 | cookies := env.GetStringSlice("grok.cookies") 34 | cookiesContainer = common.NewPollContainer[string]("grok", cookies, time.Hour) 35 | cookiesContainer.Condition = condition 36 | }) 37 | } 38 | 39 | func InvocationHandler(ctx *proxy.Context) { 40 | var ( 41 | gtx = ctx.In[0].(*gin.Context) 42 | echo = gtx.GetBool(vars.GinEcho) 43 | ) 44 | 45 | if echo || ctx.Method != "Completion" && ctx.Method != "ToolChoice" { 46 | ctx.Do() 47 | return 48 | } 49 | 50 | logger.Infof("execute static proxy [relay/llm/grok.api]: func %s(...)", ctx.Method) 51 | 52 | if cookiesContainer.Len() == 0 { 53 | response.Error(gtx, -1, "empty cookies") 54 | return 55 | } 56 | 57 | cookie, err := cookiesContainer.Poll(gtx) 58 | if err != nil { 59 | logger.Error(err) 60 | response.Error(gtx, -1, err) 61 | return 62 | } 63 | defer resetMarked(cookie) 64 | gtx.Set("token", cookie) 65 | 66 | // 67 | ctx.Do() 68 | 69 | // 70 | if ctx.Method == "Completion" { 71 | err = elseOf[error](ctx.Out[0]) 72 | } 73 | if ctx.Method == "ToolChoice" { 74 | err = elseOf[error](ctx.Out[1]) 75 | } 76 | 77 | if err != nil { 78 | logger.Error(err) 79 | return 80 | } 81 | } 82 | 83 | func condition(cookie string, argv ...interface{}) (ok bool) { 84 | marker, err := cookiesContainer.Marked(cookie) 85 | if err != nil { 86 | logger.Error(err) 87 | return false 88 | } 89 | 90 | ok = marker == 0 91 | if !ok { 92 | return 93 | } 94 | 95 | ctx := argv[0].(*gin.Context) 96 | completion := common.GetGinCompletion(ctx) 97 | r, err := emit.ClientBuilder(common.HTTPClient). 98 | Context(ctx.Request.Context()). 99 | POST("https://grok.com/rest/rate-limits"). 100 | //Header("accept-language", "en-US,en;q=0.9"). 101 | Header("origin", "https://grok.com"). 102 | Header("referer", "https://grok.com/"). 103 | Header("baggage", "sentry-environment=production,sentry-release="+common.Hex(21)+",sentry-public_key="+strings.ReplaceAll(uuid.NewString(), "-", "")+",sentry-trace_id="+strings.ReplaceAll(uuid.NewString(), "-", "")+",sentry-replay_id="+strings.ReplaceAll(uuid.NewString(), "-", "")+",sentry-sample_rate=1,sentry-sampled=true"). 104 | Header("sentry-trace", strings.ReplaceAll(uuid.NewString(), "-", "")+"-"+common.Hex(16)+"-1"). 105 | Header("user-agent", userAgent). 106 | Header("accept-language", lang). 107 | Header("cookie", emit.MergeCookies(cookie, clearance)). 108 | JSONHeader(). 109 | Body(map[string]interface{}{ 110 | "requestKind": "DEFAULT", 111 | "modelName": completion.Model, 112 | }). 
113 | DoC(emit.Status(http.StatusOK), emit.IsJSON) 114 | if err != nil { 115 | var busErr emit.Error 116 | if errors.As(err, &busErr) && busErr.Code == 403 { 117 | _ = hookCloudflare(env.Env) 118 | ctx.Set("clearance", clearance) 119 | ctx.Set("userAgent", userAgent) 120 | ctx.Set("lang", lang) 121 | } 122 | logger.Error(err) 123 | return false 124 | } 125 | 126 | defer r.Body.Close() 127 | obj, err := emit.ToMap(r) 128 | if err != nil { 129 | logger.Error(err) 130 | return false 131 | } 132 | 133 | count := obj["remainingQueries"].(float64) 134 | ok = count > 0 135 | if !ok { 136 | _ = cookiesContainer.MarkTo(cookie, 2) 137 | } 138 | return 139 | } 140 | 141 | func hookCloudflare(env *env.Environment) error { 142 | baseUrl := env.GetString("browser-less.reversal") 143 | if !env.GetBool("browser-less.enabled") && baseUrl == "" { 144 | return errors.New("trying cloudflare failed, please setting `browser-less.enabled` or `browser-less.reversal`") 145 | } 146 | 147 | logger.Info("trying cloudflare ...") 148 | 149 | mu.Lock() 150 | defer mu.Unlock() 151 | 152 | if baseUrl == "" { 153 | baseUrl = "http://127.0.0.1:" + env.GetString("browser-less.port") 154 | } 155 | 156 | r, err := emit.ClientBuilder(common.HTTPClient). 157 | GET(baseUrl+"/v0/clearance"). 158 | Header("x-website", "https://grok.com"). 159 | DoC(emit.Status(http.StatusOK), emit.IsJSON) 160 | if err != nil { 161 | logger.Error(err) 162 | if emit.IsJSON(r) == nil { 163 | logger.Error(emit.TextResponse(r)) 164 | } 165 | return err 166 | } 167 | 168 | defer r.Body.Close() 169 | obj, err := emit.ToMap(r) 170 | if err != nil { 171 | logger.Error(err) 172 | return err 173 | } 174 | 175 | data := obj["data"].(map[string]interface{}) 176 | clearance = data["cookie"].(string) 177 | userAgent = data["userAgent"].(string) 178 | lang = data["lang"].(string) 179 | return nil 180 | } 181 | 182 | //func cleanCloudflare() { 183 | // mu.Lock() 184 | // clearance = "" 185 | // mu.Unlock() 186 | //} 187 | 188 | func resetMarked(cookie string) { 189 | marker, err := cookiesContainer.Marked(cookie) 190 | if err != nil { 191 | logger.Error(err) 192 | return 193 | } 194 | 195 | if marker != 1 { 196 | return 197 | } 198 | 199 | err = cookiesContainer.MarkTo(cookie, 0) 200 | if err != nil { 201 | logger.Error(err) 202 | } 203 | } 204 | 205 | func elseOf[T any](obj any) (zero T) { 206 | if obj == nil { 207 | return 208 | } 209 | return obj.(T) 210 | } 211 | -------------------------------------------------------------------------------- /relay/alloc/you/ctor.go: -------------------------------------------------------------------------------- 1 | package you 2 | 3 | import ( 4 | "github.com/iocgo/sdk/proxy" 5 | 6 | _ "chatgpt-adapter/core/gin/inter" 7 | _ "chatgpt-adapter/core/gin/model" 8 | _ "github.com/gin-gonic/gin" 9 | _ "reflect" 10 | ) 11 | 12 | // @Proxy( 13 | // 14 | // target = "chatgpt-adapter/core/gin/inter.Adapter", 15 | // scan = "chatgpt-adapter/relay/llm/you.api", 16 | // igm = "!(Completion|ToolChoice)" 17 | // 18 | // ) 19 | func Proxy(ctx *proxy.Context) { InvocationHandler(ctx) } 20 | -------------------------------------------------------------------------------- /relay/hf/ctor.go: -------------------------------------------------------------------------------- 1 | package hf 2 | 3 | import ( 4 | "chatgpt-adapter/core/gin/inter" 5 | "github.com/iocgo/sdk/env" 6 | 7 | _ "github.com/iocgo/sdk" 8 | ) 9 | 10 | // @Inject(name = "hf-adapter") 11 | func New(env *env.Environment) inter.Adapter { return &api{env: env} } 12 | 
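A minimal wiring sketch may help at this point, assuming the iocgo-generated wire.Injects: constructors annotated with @Inject, such as the hf-adapter ctor above, are registered into the container, and the gin Handler then collects every inter.Adapter via sdk.ListInvokeAs (see main.go and core/gin/v1.go in this dump). The helper name listAdapters below is hypothetical; all imports and calls are the ones already used elsewhere in the repository.

package main

import (
	"chatgpt-adapter/core/gin/inter"
	"chatgpt-adapter/wire"

	"github.com/iocgo/sdk"
)

// listAdapters resolves every registered adapter (hf-adapter, bing-adapter, ...)
// the same way core/gin.New does when it builds the HTTP handler.
func listAdapters() []inter.Adapter {
	container := sdk.NewContainer()
	if err := wire.Injects(container); err != nil { // wiring generated at build time by iocgo
		panic(err)
	}
	return sdk.ListInvokeAs[inter.Adapter](container)
}

func main() {
	for _, adapter := range listAdapters() {
		for _, mod := range adapter.Models() {
			println(mod.Id) // each adapter advertises the model ids it serves
		}
	}
}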
-------------------------------------------------------------------------------- /relay/hf/model.go: -------------------------------------------------------------------------------- 1 | package hf 2 | 3 | var ( 4 | SD_MODELS = []string{ 5 | "3Guofeng3_v34.safetensors [50f420de]", 6 | "absolutereality_V16.safetensors [37db0fc3]", 7 | "absolutereality_v181.safetensors [3d9d4d2b]", 8 | "amIReal_V41.safetensors [0a8a2e61]", 9 | "analog-diffusion-1.0.ckpt [9ca13f02]", 10 | "aniverse_v30.safetensors [579e6f85]", 11 | "anythingv3_0-pruned.ckpt [2700c435]", 12 | "anything-v4.5-pruned.ckpt [65745d25]", 13 | "anythingV5_PrtRE.safetensors [893e49b9]", 14 | "AOM3A3_orangemixs.safetensors [9600da17]", 15 | "blazing_drive_v10g.safetensors [ca1c1eab]", 16 | "breakdomain_I2428.safetensors [43cc7d2f]", 17 | "breakdomain_M2150.safetensors [15f7afca]", 18 | "cetusMix_Version35.safetensors [de2f2560]", 19 | "childrensStories_v13D.safetensors [9dfaabcb]", 20 | "childrensStories_v1SemiReal.safetensors [a1c56dbb]", 21 | "childrensStories_v1ToonAnime.safetensors [2ec7b88b]", 22 | "Counterfeit_v30.safetensors [9e2a8f19]", 23 | "cuteyukimixAdorable_midchapter3.safetensors [04bdffe6]", 24 | "cyberrealistic_v33.safetensors [82b0d085]", 25 | "dalcefo_v4.safetensors [425952fe]", 26 | "deliberate_v2.safetensors [10ec4b29]", 27 | "deliberate_v3.safetensors [afd9d2d4]", 28 | "dreamlike-anime-1.0.safetensors [4520e090]", 29 | "dreamlike-diffusion-1.0.safetensors [5c9fd6e0]", 30 | "dreamlike-photoreal-2.0.safetensors [fdcf65e7]", 31 | "dreamshaper_6BakedVae.safetensors [114c8abb]", 32 | "dreamshaper_7.safetensors [5cf5ae06]", 33 | "dreamshaper_8.safetensors [9d40847d]", 34 | "edgeOfRealism_eorV20.safetensors [3ed5de15]", 35 | "EimisAnimeDiffusion_V1.ckpt [4f828a15]", 36 | "elldreths-vivid-mix.safetensors [342d9d26]", 37 | "epicphotogasm_xPlusPlus.safetensors [1a8f6d35]", 38 | "epicrealism_naturalSinRC1VAE.safetensors [90a4c676]", 39 | "epicrealism_pureEvolutionV3.safetensors [42c8440c]", 40 | "ICantBelieveItsNotPhotography_seco.safetensors [4e7a3dfd]", 41 | "indigoFurryMix_v75Hybrid.safetensors [91208cbb]", 42 | "juggernaut_aftermath.safetensors [5e20c455]", 43 | "lofi_v4.safetensors [ccc204d6]", 44 | "lyriel_v16.safetensors [68fceea2]", 45 | "majicmixRealistic_v4.safetensors [29d0de58]", 46 | "mechamix_v10.safetensors [ee685731]", 47 | "meinamix_meinaV9.safetensors [2ec66ab0]", 48 | "meinamix_meinaV11.safetensors [b56ce717]", 49 | "neverendingDream_v122.safetensors [f964ceeb]", 50 | "openjourney_V4.ckpt [ca2f377f]", 51 | "pastelMixStylizedAnime_pruned_fp16.safetensors [793a26e8]", 52 | "portraitplus_V1.0.safetensors [1400e684]", 53 | "protogenx34.safetensors [5896f8d5]", 54 | "Realistic_Vision_V1.4-pruned-fp16.safetensors [8d21810b]", 55 | "Realistic_Vision_V2.0.safetensors [79587710]", 56 | "Realistic_Vision_V4.0.safetensors [29a7afaa]", 57 | "Realistic_Vision_V5.0.safetensors [614d1063]", 58 | "Realistic_Vision_V5.1.safetensors [a0f13c83]", 59 | "redshift_diffusion-V10.safetensors [1400e684]", 60 | "revAnimated_v122.safetensors [3f4fefd9]", 61 | "rundiffusionFX25D_v10.safetensors [cd12b0ee]", 62 | "rundiffusionFX_v10.safetensors [cd4e694d]", 63 | "sdv1_4.ckpt [7460a6fa]", 64 | "v1-5-pruned-emaonly.safetensors [d7049739]", 65 | "v1-5-inpainting.safetensors [21c7ab71]", 66 | "shoninsBeautiful_v10.safetensors [25d8c546]", 67 | "theallys-mix-ii-churned.safetensors [5d9225a4]", 68 | "timeless-1.0.ckpt [7c4971d4]", 69 | "toonyou_beta6.safetensors [980f6b15]", 70 | } 71 | 72 | SD_SAMPLES = []string{ 73 | "DPM++ 2M Karras", 74 | 
"DPM++ SDE Karras", 75 | "DPM++ 2M SDE Exponential", 76 | "DPM++ 2M SDE Karras", 77 | "Euler a", 78 | "Euler", 79 | "LMS", 80 | "Heun", 81 | "DPM2", 82 | "DPM2 a", 83 | "DPM++ 2S a", 84 | "DPM++ 2M", 85 | "DPM++ SDE", 86 | "DPM++ 2M SDE", 87 | "DPM++ 2M SDE Heun", 88 | "DPM++ 2M SDE Heun Karras", 89 | "DPM++ 2M SDE Heun Exponential", 90 | "DPM++ 3M SDE", 91 | "DPM++ 3M SDE Karras", 92 | "DPM++ 3M SDE Exponential", 93 | "DPM fast", 94 | "DPM adaptive", 95 | "LMS Karras", 96 | "DPM2 Karras", 97 | "DPM2 a Karras", 98 | "DPM++ 2S a Karras", 99 | "Restart", 100 | "DDIM", 101 | "PLMS", 102 | "UniPC", 103 | } 104 | ) 105 | 106 | var ( 107 | XL_MODELS = []string{ 108 | "animagineXLV3_v30.safetensors [75f2f05b]", 109 | "devlishphotorealism_sdxl15.safetensors [77cba69f]", 110 | "dreamshaperXL10_alpha2.safetensors [c8afe2ef]", 111 | "dynavisionXL_0411.safetensors [c39cc051]", 112 | "juggernautXL_v45.safetensors [e75f5471]", 113 | "realismEngineSDXL_v10.safetensors [af771c3f]", 114 | "realvisxlV40.safetensors [f7fdcb51]", 115 | "sd_xl_base_1.0.safetensors [be9edd61]", 116 | "sd_xl_base_1.0_inpainting_0.1.safetensors [5679a81a]", 117 | "turbovisionXL_v431.safetensors [78890989]", 118 | } 119 | 120 | XL_SAMPLES = []string{ 121 | "DPM++ 2M Karras", 122 | "DPM++ SDE Karras", 123 | "DPM++ 2M SDE Exponential", 124 | "DPM++ 2M SDE Karras", 125 | "Euler a", 126 | "Euler", 127 | "LMS", 128 | "Heun", 129 | "DPM2", 130 | "DPM2 a", 131 | "DPM++ 2S a", 132 | "DPM++ 2M", 133 | "DPM++ SDE", 134 | "DPM++ 2M SDE", 135 | "DPM++ 2M SDE Heun", 136 | "DPM++ 2M SDE Heun Karras", 137 | "DPM++ 2M SDE Heun Exponential", 138 | "DPM++ 3M SDE", 139 | "DPM++ 3M SDE Karras", 140 | "DPM++ 3M SDE Exponential", 141 | "DPM fast", 142 | "DPM adaptive", 143 | "LMS Karras", 144 | "DPM2 Karras", 145 | "DPM2 a Karras", 146 | "DPM++ 2S a Karras", 147 | "Restart", 148 | "DDIM", 149 | "PLMS", 150 | "UniPC", 151 | } 152 | ) 153 | 154 | var ( 155 | GOOGLE_MODELS = []string{ 156 | "(No style)", 157 | "Cinematic", 158 | "Photographic", 159 | "Anime", 160 | "Manga", 161 | "Digital Art", 162 | "Pixel art", 163 | "Fantasy art", 164 | "Neonpunk", 165 | "3D Model", 166 | } 167 | ) 168 | 169 | var ( 170 | DALLE4K_MODELS = []string{ 171 | "3840 x 2160", 172 | "2560 x 1440", 173 | "Photo", 174 | "Cinematic", 175 | "Anime", 176 | "3D Model", 177 | "(No style)", 178 | } 179 | ) 180 | 181 | var ( 182 | ANIMAGINE_XL31_MODELS = []string{ 183 | "(None)", 184 | "Cinematic", 185 | "Photographic", 186 | "Anime", 187 | "Manga", 188 | "Digital Art", 189 | "Pixel art", 190 | "Fantasy art", 191 | "Neonpunk", 192 | "3D Model", 193 | } 194 | 195 | ANIMAGINE_XL31_SAMPLES = []string{ 196 | "DPM++ 2M Karras", 197 | "DPM++ SDE Karras", 198 | "DPM++ 2M SDE Karras", 199 | "Euler", 200 | "Euler a", 201 | "DDIM", 202 | } 203 | ) 204 | 205 | // let value = ""; $0.querySelectorAll('li').forEach((it) => { value += `"${it.getAttribute('aria-label')}",\n` });console.log(value) 206 | var ( 207 | ANIMAGINE_XL40_MODELS = []string{ 208 | "(None)", 209 | "Anim4gine", 210 | "Painting", 211 | "Pixel art", 212 | "1980s", 213 | "1990s", 214 | "2000s", 215 | "Toon", 216 | "Lineart", 217 | "Art Nouveau", 218 | "Western Comics", 219 | "3D", 220 | "Realistic", 221 | "Neonpunk", 222 | } 223 | 224 | ANIMAGINE_XL40_SAMPLES = []string{ 225 | "DPM++ 2M Karras", 226 | "DPM++ SDE Karras", 227 | "DPM++ 2M SDE Karras", 228 | "Euler", 229 | "Euler a", 230 | "DDIM", 231 | } 232 | ) 233 | -------------------------------------------------------------------------------- /relay/llm/bing/ctor.go: 
-------------------------------------------------------------------------------- 1 | package bing 2 | 3 | import ( 4 | "chatgpt-adapter/core/gin/inter" 5 | "github.com/iocgo/sdk/env" 6 | 7 | _ "github.com/iocgo/sdk" 8 | ) 9 | 10 | // @Inject(name = "bing-adapter") 11 | func New(env *env.Environment) inter.Adapter { 12 | return &api{env: env} 13 | } 14 | -------------------------------------------------------------------------------- /relay/llm/bing/message.go: -------------------------------------------------------------------------------- 1 | package bing 2 | 3 | import ( 4 | "chatgpt-adapter/core/gin/model" 5 | "encoding/json" 6 | "errors" 7 | "fmt" 8 | "github.com/bincooo/emit.io" 9 | "github.com/iocgo/sdk/env" 10 | "net/http" 11 | "sync" 12 | "time" 13 | 14 | "chatgpt-adapter/core/common" 15 | "chatgpt-adapter/core/common/vars" 16 | "chatgpt-adapter/core/gin/response" 17 | "chatgpt-adapter/core/logger" 18 | "github.com/gin-gonic/gin" 19 | ) 20 | 21 | const ( 22 | ginTokens = "__tokens__" 23 | ) 24 | 25 | func waitMessage(message chan []byte, cancel func(str string) bool) (content string, err error) { 26 | for { 27 | chunk, ok := <-message 28 | if !ok { 29 | break 30 | } 31 | 32 | magic := chunk[0] 33 | chunk = chunk[1:] 34 | if magic == 1 { 35 | err = fmt.Errorf("%s", chunk) 36 | break 37 | } 38 | 39 | var msg model.Keyv[interface{}] 40 | err = json.Unmarshal(chunk, &msg) 41 | if err != nil { 42 | logger.Error(err) 43 | continue 44 | } 45 | 46 | if !msg.Is("event", "appendText") { 47 | continue 48 | } 49 | 50 | raw := msg.GetString("text") 51 | logger.Debug("----- raw -----") 52 | logger.Debug(raw) 53 | content += raw 54 | if cancel != nil && cancel(content) { 55 | return content, nil 56 | } 57 | } 58 | return 59 | } 60 | 61 | func waitResponse(ctx *gin.Context, message chan []byte, sse bool) (content string) { 62 | created := time.Now().Unix() 63 | logger.Infof("waitResponse ...") 64 | tokens := ctx.GetInt(ginTokens) 65 | onceExec := sync.OnceFunc(func() { 66 | if !sse { 67 | ctx.Writer.WriteHeader(http.StatusOK) 68 | } 69 | }) 70 | 71 | var ( 72 | matchers = common.GetGinMatchers(ctx) 73 | ) 74 | 75 | for { 76 | 77 | chunk, ok := <-message 78 | if !ok { 79 | raw := response.ExecMatchers(matchers, "", true) 80 | if raw != "" && sse { 81 | response.SSEResponse(ctx, Model, raw, created) 82 | } 83 | content += raw 84 | break 85 | } 86 | 87 | magic := chunk[0] 88 | chunk = chunk[1:] 89 | if magic == 1 { 90 | asError(ctx, string(chunk)) 91 | break 92 | } 93 | 94 | var msg model.Keyv[interface{}] 95 | err := json.Unmarshal(chunk, &msg) 96 | if err != nil { 97 | logger.Error(err) 98 | continue 99 | } 100 | 101 | raw := "" 102 | if msg.Is("event", "appendText") { 103 | raw = msg.GetString("text") 104 | } 105 | if msg.Is("event", "imageGenerated") { 106 | raw = fmt.Sprintf("![image](%s)", msg.GetString("url")) 107 | } 108 | if msg.Is("event", "replaceText") { 109 | raw = msg.GetString("text") 110 | } 111 | if len(raw) == 0 { 112 | continue 113 | } 114 | 115 | logger.Debug("----- raw -----") 116 | logger.Debug(raw) 117 | onceExec() 118 | 119 | raw = response.ExecMatchers(matchers, raw, false) 120 | if len(raw) == 0 { 121 | continue 122 | } 123 | 124 | if raw == response.EOF { 125 | break 126 | } 127 | 128 | if sse { 129 | response.SSEResponse(ctx, Model, raw, created) 130 | } 131 | content += raw 132 | } 133 | 134 | if content == "" && response.NotSSEHeader(ctx) { 135 | return 136 | } 137 | ctx.Set(vars.GinCompletionUsage, response.CalcUsageTokens(content, tokens)) 138 | if !sse { 139 | 
response.Response(ctx, Model, content) 140 | } else { 141 | response.SSEResponse(ctx, Model, "[DONE]", created) 142 | } 143 | return 144 | } 145 | 146 | func asError(ctx *gin.Context, msg interface{}) { 147 | if msg == nil || msg == "" { 148 | return 149 | } 150 | logger.Error(msg) 151 | if response.NotSSEHeader(ctx) { 152 | response.Error(ctx, -1, msg) 153 | } 154 | return 155 | } 156 | 157 | func hookCloudflare() (challenge string, err error) { 158 | baseUrl := env.Env.GetString("browser-less.reversal") 159 | if !env.Env.GetBool("browser-less.enabled") && baseUrl == "" { 160 | return "", errors.New("trying cloudflare failed, please setting `browser-less.enabled` or `browser-less.reversal`") 161 | } 162 | 163 | logger.Info("trying cloudflare ...") 164 | if baseUrl == "" { 165 | baseUrl = "http://127.0.0.1:" + env.Env.GetString("browser-less.port") 166 | } 167 | 168 | r, err := emit.ClientBuilder(common.HTTPClient). 169 | GET(baseUrl+"/v0/turnstile"). 170 | Header("sitekey", "0x4AAAAAAAg146IpY3lPNWte"). 171 | Header("website", "https://copilot.microsoft.com"). 172 | DoC(emit.Status(http.StatusOK), emit.IsJSON) 173 | if err != nil { 174 | logger.Error(err) 175 | if emit.IsJSON(r) == nil { 176 | logger.Error(emit.TextResponse(r)) 177 | } 178 | return 179 | } 180 | 181 | defer r.Body.Close() 182 | obj, err := emit.ToMap(r) 183 | if err != nil { 184 | logger.Error(err) 185 | return 186 | } 187 | 188 | if data, ok := obj["data"].(string); ok { 189 | challenge = data 190 | return 191 | } 192 | 193 | msg := "challenge failed" 194 | if data, ok := obj["msg"].(string); ok { 195 | msg = data 196 | } 197 | err = errors.New(msg) 198 | return 199 | } 200 | -------------------------------------------------------------------------------- /relay/llm/bing/toolcall.go: -------------------------------------------------------------------------------- 1 | package bing 2 | 3 | import ( 4 | "chatgpt-adapter/core/common" 5 | "chatgpt-adapter/core/common/toolcall" 6 | "chatgpt-adapter/core/common/vars" 7 | "chatgpt-adapter/core/gin/model" 8 | "chatgpt-adapter/core/gin/response" 9 | "chatgpt-adapter/core/logger" 10 | "context" 11 | "errors" 12 | "github.com/bincooo/edge-api" 13 | "github.com/bincooo/emit.io" 14 | "github.com/gin-gonic/gin" 15 | "github.com/iocgo/sdk/env" 16 | "time" 17 | ) 18 | 19 | func toolChoice(ctx *gin.Context, completion model.Completion) bool { 20 | logger.Info("completeTools ...") 21 | echo := ctx.GetBool(vars.GinEcho) 22 | cookie, _ := common.GetGinValue[map[string]string](ctx, "token") 23 | proxied := env.Env.GetBool("bing.proxied") 24 | 25 | exec, err := toolcall.ToolChoice(ctx, completion, func(message string) (string, error) { 26 | message += "\n\nAi:" 27 | if echo { 28 | logger.Infof("toolCall message: \n%s", message) 29 | return "", nil 30 | } 31 | 32 | newTok := false 33 | refresh: 34 | timeout, cancel := context.WithTimeout(ctx.Request.Context(), 10*time.Second) 35 | defer cancel() 36 | accessToken, err := genToken(timeout, cookie, proxied, newTok) 37 | if err != nil { 38 | return "", err 39 | } 40 | 41 | timeout, cancel = context.WithTimeout(ctx.Request.Context(), 10*time.Second) 42 | defer cancel() 43 | conversationId, err := edge.CreateConversation(elseOf(proxied, common.HTTPClient, common.NopHTTPClient), timeout, accessToken) 44 | if err != nil { 45 | var hErr emit.Error 46 | if errors.As(err, &hErr) && hErr.Code == 401 && !newTok { 47 | newTok = true 48 | goto refresh 49 | } 50 | return "", err 51 | } 52 | 53 | timeout, cancel = context.WithTimeout(context.TODO(), 
10*time.Second) 54 | defer cancel() 55 | defer edge.DeleteConversation(elseOf(proxied, common.HTTPClient, common.NopHTTPClient), timeout, conversationId, accessToken) 56 | 57 | challenge := "" 58 | label: 59 | buffer, err := edge.Chat(elseOf(proxied, common.HTTPClient, common.NopHTTPClient), 60 | ctx.Request.Context(), 61 | accessToken, 62 | conversationId, 63 | challenge, "", message, "", 64 | elseOf[byte](completion.Model == Model, 0, 1)) 65 | if err != nil { 66 | if challenge == "" && err.Error() == "challenge" { 67 | challenge, err = hookCloudflare() 68 | if err != nil { 69 | return "", err 70 | } 71 | goto label 72 | } 73 | return "", err 74 | } 75 | 76 | return waitMessage(buffer, toolcall.Cancel) 77 | }) 78 | 79 | if err != nil { 80 | logger.Error(err) 81 | response.Error(ctx, -1, err) 82 | return true 83 | } 84 | 85 | return exec 86 | } 87 | -------------------------------------------------------------------------------- /relay/llm/blackbox/adapter.go: -------------------------------------------------------------------------------- 1 | package blackbox 2 | 3 | import ( 4 | "chatgpt-adapter/core/common" 5 | "chatgpt-adapter/core/gin/inter" 6 | "chatgpt-adapter/core/gin/model" 7 | "chatgpt-adapter/core/gin/response" 8 | "chatgpt-adapter/core/logger" 9 | "github.com/gin-gonic/gin" 10 | "github.com/iocgo/sdk/env" 11 | ) 12 | 13 | var ( 14 | Model = "blackbox" 15 | ) 16 | 17 | type api struct { 18 | inter.BaseAdapter 19 | 20 | env *env.Environment 21 | } 22 | 23 | func (api *api) Match(ctx *gin.Context, model string) (ok bool, err error) { 24 | if len(model) <= 9 || Model+"/" != model[:9] { 25 | return 26 | } 27 | 28 | slice := api.env.GetStringSlice("blackbox.model") 29 | for _, mod := range append(slice, []string{ 30 | "GPT-4o", 31 | "Gemini-PRO", 32 | "Claude-Sonnet-3.5", 33 | "Claude-Sonnet-3.7", 34 | "DeepSeek-V3", 35 | "DeepSeek-R1", 36 | }...) { 37 | if model[9:] == mod { 38 | ok = true 39 | return 40 | } 41 | } 42 | return 43 | } 44 | 45 | func (api *api) Models() (slice []model.Model) { 46 | s := api.env.GetStringSlice("blackbox.model") 47 | for _, mod := range append(s, []string{ 48 | "GPT-4o", 49 | "Gemini-PRO", 50 | "Claude-Sonnet-3.5", 51 | "Claude-Sonnet-3.7", 52 | "DeepSeek-V3", 53 | "DeepSeek-R1", 54 | }...) 
{ 55 | slice = append(slice, model.Model{ 56 | Id: Model + "/" + mod, 57 | Object: "model", 58 | Created: 1686935002, 59 | By: Model + "-adapter", 60 | }) 61 | } 62 | return 63 | } 64 | 65 | func (api *api) ToolChoice(ctx *gin.Context) (ok bool, err error) { 66 | var ( 67 | cookie = ctx.GetString("token") 68 | proxied = api.env.GetString("server.proxied") 69 | completion = common.GetGinCompletion(ctx) 70 | ) 71 | 72 | if toolChoice(ctx, api.env, cookie, proxied, completion) { 73 | ok = true 74 | } 75 | return 76 | } 77 | 78 | func (api *api) Completion(ctx *gin.Context) (err error) { 79 | var ( 80 | cookie = ctx.GetString("token") 81 | proxied = api.env.GetString("server.proxied") 82 | completion = common.GetGinCompletion(ctx) 83 | ) 84 | 85 | request := convertRequest(ctx, api.env, completion) 86 | r, err := fetch(ctx.Request.Context(), proxied, cookie, request) 87 | if err != nil { 88 | logger.Error(err) 89 | return 90 | } 91 | 92 | content := waitResponse(ctx, r, completion.Stream) 93 | if content == "" && response.NotResponse(ctx) { 94 | response.Error(ctx, -1, "EMPTY RESPONSE") 95 | } 96 | return 97 | } 98 | -------------------------------------------------------------------------------- /relay/llm/blackbox/ctor.go: -------------------------------------------------------------------------------- 1 | package blackbox 2 | 3 | import ( 4 | "chatgpt-adapter/core/gin/inter" 5 | "github.com/iocgo/sdk/env" 6 | 7 | _ "github.com/iocgo/sdk" 8 | ) 9 | 10 | // @Inject(name = "blackbox-adapter") 11 | func New(env *env.Environment) inter.Adapter { 12 | return &api{env: env} 13 | } 14 | -------------------------------------------------------------------------------- /relay/llm/blackbox/fetch.go: -------------------------------------------------------------------------------- 1 | package blackbox 2 | 3 | import ( 4 | "chatgpt-adapter/core/common" 5 | "chatgpt-adapter/core/gin/model" 6 | "chatgpt-adapter/core/gin/response" 7 | "context" 8 | "github.com/bincooo/emit.io" 9 | "github.com/gin-gonic/gin" 10 | "github.com/iocgo/sdk/env" 11 | "net/http" 12 | ) 13 | 14 | const ( 15 | userAgent = "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/18.1.1 Safari/605.1.15" 16 | ) 17 | 18 | type blackboxRequest struct { 19 | Messages []model.Keyv[interface{}] `json:"messages"` 20 | 21 | Id string `json:"id"` 22 | PreviewToken interface{} `json:"previewToken"` 23 | UserId interface{} `json:"userId"` 24 | CodeModelMode bool `json:"codeModelMode"` 25 | AgentMode struct { 26 | } `json:"agentMode"` 27 | TrendingAgentMode struct { 28 | } `json:"trendingAgentMode"` 29 | IsMicMode bool `json:"isMicMode"` 30 | MaxTokens int `json:"maxTokens"` 31 | PlaygroundTopP interface{} `json:"playgroundTopP"` 32 | PlaygroundTemperature interface{} `json:"playgroundTemperature"` 33 | IsChromeExt bool `json:"isChromeExt"` 34 | GithubToken string `json:"githubToken"` 35 | ClickedAnswer2 bool `json:"clickedAnswer2"` 36 | ClickedAnswer3 bool `json:"clickedAnswer3"` 37 | ClickedForceWebSearch bool `json:"clickedForceWebSearch"` 38 | VisitFromDelta bool `json:"visitFromDelta"` 39 | MobileClient bool `json:"mobileClient"` 40 | UserSelectedModel string `json:"userSelectedModel"` 41 | Validated string `json:"validated"` 42 | ImageGenerationMode bool `json:"imageGenerationMode"` 43 | WebSearchModePrompt bool `json:"webSearchModePrompt"` 44 | } 45 | 46 | func fetch(ctx context.Context, proxied, cookie string, request blackboxRequest) (response *http.Response, err error) { 47 | response, err = 
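// Build the chat call against the public blackbox.ai endpoint: a JSON POST carrying
// browser-like origin/referer/user-agent headers plus the caller's cookie, accepted by
// DoC only as an HTTP 200 text response before it is handed to the stream readers.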
emit.ClientBuilder(common.HTTPClient). 48 | Context(ctx). 49 | Proxies(proxied). 50 | POST("https://www.blackbox.ai/api/chat"). 51 | JSONHeader(). 52 | Header("accept-language", "en-US,en;q=0.9"). 53 | Header("origin", "https://www.blackbox.ai"). 54 | Header("referer", "https://www.blackbox.ai/"). 55 | Header("user-agent", userAgent). 56 | Header("cookie", cookie). 57 | Body(request). 58 | DoC(emit.Status(http.StatusOK), emit.IsTEXT) 59 | return 60 | } 61 | 62 | func convertRequest(ctx *gin.Context, env *env.Environment, completion model.Completion) (request blackboxRequest) { 63 | request.Messages = completion.Messages 64 | specialized := ctx.GetBool("specialized") 65 | if specialized && response.IsClaude(ctx, completion.Model) { 66 | request.Messages = completion.Messages[:1] 67 | request.Messages[0].Set("role", "user") 68 | } 69 | 70 | id := request.Messages[0].GetString("id") 71 | if id == "" { 72 | id = common.Hex(7) 73 | request.Messages[0].Set("id", id) 74 | } 75 | 76 | request.Id = id 77 | request.CodeModelMode = true 78 | request.MaxTokens = completion.MaxTokens 79 | request.PlaygroundTopP = completion.TopP 80 | request.PlaygroundTemperature = completion.Temperature 81 | request.UserSelectedModel = completion.Model[9:] 82 | request.Validated = env.GetString("blackbox.token") 83 | request.AgentMode = struct{}{} 84 | request.TrendingAgentMode = struct{}{} 85 | return 86 | } 87 | -------------------------------------------------------------------------------- /relay/llm/blackbox/message.go: -------------------------------------------------------------------------------- 1 | package blackbox 2 | 3 | import ( 4 | "bufio" 5 | "io" 6 | "net/http" 7 | "sync" 8 | "time" 9 | 10 | "chatgpt-adapter/core/common" 11 | "chatgpt-adapter/core/common/vars" 12 | "chatgpt-adapter/core/gin/response" 13 | "chatgpt-adapter/core/logger" 14 | "github.com/gin-gonic/gin" 15 | ) 16 | 17 | const ( 18 | ginTokens = "__tokens__" 19 | ) 20 | 21 | func waitMessage(r *http.Response, cancel func(str string) bool) (content string, err error) { 22 | defer r.Body.Close() 23 | reader := bufio.NewReader(r.Body) 24 | var char rune 25 | for { 26 | char, _, err = reader.ReadRune() 27 | if err == io.EOF { 28 | break 29 | } 30 | 31 | if err != nil { 32 | return 33 | } 34 | 35 | raw := string(char) 36 | logger.Debug("----- raw -----") 37 | logger.Debug(raw) 38 | content += raw 39 | if cancel != nil && cancel(content) { 40 | return content, nil 41 | } 42 | } 43 | return 44 | } 45 | 46 | func waitResponse(ctx *gin.Context, r *http.Response, sse bool) (content string) { 47 | created := time.Now().Unix() 48 | logger.Infof("waitResponse ...") 49 | tokens := ctx.GetInt(ginTokens) 50 | onceExec := sync.OnceFunc(func() { 51 | if !sse { 52 | ctx.Writer.WriteHeader(http.StatusOK) 53 | } 54 | }) 55 | 56 | var ( 57 | matchers = common.GetGinMatchers(ctx) 58 | ) 59 | 60 | defer r.Body.Close() 61 | reader := bufio.NewReader(r.Body) 62 | for { 63 | char, _, err := reader.ReadRune() 64 | if err == io.EOF { 65 | raw := response.ExecMatchers(matchers, "", true) 66 | if raw != "" && sse { 67 | response.SSEResponse(ctx, Model, raw, created) 68 | } 69 | content += raw 70 | break 71 | } 72 | 73 | if asError(ctx, err) { 74 | return 75 | } 76 | 77 | raw := string(char) 78 | logger.Debug("----- raw -----") 79 | logger.Debug(raw) 80 | onceExec() 81 | 82 | raw = response.ExecMatchers(matchers, raw, false) 83 | if len(raw) == 0 { 84 | continue 85 | } 86 | 87 | if raw == response.EOF { 88 | break 89 | } 90 | 91 | if sse { 92 | response.SSEResponse(ctx, 
Model, raw, created) 93 | } 94 | content += raw 95 | } 96 | 97 | if content == "" && response.NotSSEHeader(ctx) { 98 | return 99 | } 100 | ctx.Set(vars.GinCompletionUsage, response.CalcUsageTokens(content, tokens)) 101 | if !sse { 102 | response.Response(ctx, Model, content) 103 | } else { 104 | response.SSEResponse(ctx, Model, "[DONE]", created) 105 | } 106 | return 107 | } 108 | 109 | func asError(ctx *gin.Context, err error) (ok bool) { 110 | if err == nil { 111 | return 112 | } 113 | 114 | logger.Error(err) 115 | if response.NotSSEHeader(ctx) { 116 | response.Error(ctx, -1, err) 117 | } 118 | ok = true 119 | return 120 | } 121 | -------------------------------------------------------------------------------- /relay/llm/blackbox/toolcall.go: -------------------------------------------------------------------------------- 1 | package blackbox 2 | 3 | import ( 4 | "chatgpt-adapter/core/common/toolcall" 5 | "chatgpt-adapter/core/common/vars" 6 | "chatgpt-adapter/core/gin/model" 7 | "chatgpt-adapter/core/gin/response" 8 | "chatgpt-adapter/core/logger" 9 | "github.com/gin-gonic/gin" 10 | "github.com/iocgo/sdk/env" 11 | ) 12 | 13 | func toolChoice(ctx *gin.Context, env *env.Environment, proxies, cookie string, completion model.Completion) bool { 14 | logger.Info("completeTools ...") 15 | echo := ctx.GetBool(vars.GinEcho) 16 | 17 | exec, err := toolcall.ToolChoice(ctx, completion, func(message string) (string, error) { 18 | if echo { 19 | logger.Infof("toolCall message: \n%s", message) 20 | return "", nil 21 | } 22 | completion.Messages = []model.Keyv[interface{}]{ 23 | { 24 | "role": "user", 25 | "content": message, 26 | }, 27 | } 28 | r, err := fetch(ctx.Request.Context(), proxies, cookie, convertRequest(ctx, env, completion)) 29 | if err != nil { 30 | return "", err 31 | } 32 | 33 | return waitMessage(r, toolcall.Cancel) 34 | }) 35 | 36 | if err != nil { 37 | logger.Error(err) 38 | response.Error(ctx, -1, err) 39 | return true 40 | } 41 | 42 | return exec 43 | } 44 | -------------------------------------------------------------------------------- /relay/llm/coze/adapter.go: -------------------------------------------------------------------------------- 1 | package coze 2 | 3 | import ( 4 | "fmt" 5 | "net/http" 6 | "strconv" 7 | "strings" 8 | "time" 9 | 10 | "chatgpt-adapter/core/common" 11 | "chatgpt-adapter/core/gin/inter" 12 | "chatgpt-adapter/core/gin/model" 13 | "chatgpt-adapter/core/gin/response" 14 | "chatgpt-adapter/core/logger" 15 | "github.com/bincooo/coze-api" 16 | "github.com/gin-gonic/gin" 17 | "github.com/iocgo/sdk/env" 18 | "github.com/iocgo/sdk/stream" 19 | ) 20 | 21 | var ( 22 | Model = "coze" 23 | ) 24 | 25 | type api struct { 26 | inter.BaseAdapter 27 | 28 | env *env.Environment 29 | } 30 | 31 | func (api *api) Match(ctx *gin.Context, model string) (ok bool, err error) { 32 | if Model == model { 33 | ok = true 34 | return 35 | } 36 | 37 | var token = ctx.GetString("token") 38 | if model == "coze/websdk" { 39 | password := api.env.GetString("server.password") 40 | if password != "" && password != token { 41 | err = response.UnauthorizedError 42 | return 43 | } 44 | ok = true 45 | return 46 | } 47 | 48 | if strings.HasPrefix(model, "coze/") { 49 | // coze/botId-version-scene 50 | values := strings.Split(model[5:], "-") 51 | if len(values) > 2 { 52 | _, err = strconv.Atoi(values[2]) 53 | logger.Warn(err) 54 | ok = err == nil 55 | return 56 | } 57 | } 58 | 59 | // 检查绘图 60 | if model == "dall-e-3" { 61 | if strings.Contains(token, "msToken=") || strings.Contains(token, 
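// 检查绘图 ("check image generation"): this dall-e-3 route only matches when the caller's
// token carries an msToken= or sessionid= cookie fragment.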
"sessionid=") { 62 | ok = true 63 | return 64 | } 65 | } 66 | return 67 | } 68 | 69 | func (*api) Models() []model.Model { 70 | return []model.Model{ 71 | { 72 | Id: Model, 73 | Object: "model", 74 | Created: 1686935002, 75 | By: Model + "-adapter", 76 | }, 77 | { 78 | Id: "coze/websdk", 79 | Object: "model", 80 | Created: 1686935002, 81 | By: Model + "-adapter", 82 | }, 83 | } 84 | } 85 | 86 | func (api *api) ToolChoice(ctx *gin.Context) (ok bool, err error) { 87 | var ( 88 | cookie = ctx.GetString("token") 89 | proxied = api.env.GetString("server.proxied") 90 | completion = common.GetGinCompletion(ctx) 91 | ) 92 | 93 | if toolChoice(ctx, cookie, proxied, completion) { 94 | ok = true 95 | } 96 | return 97 | } 98 | 99 | func (api *api) Completion(ctx *gin.Context) (err error) { 100 | var ( 101 | cookie = ctx.GetString("token") 102 | proxied = api.env.GetString("server.proxied") 103 | completion = common.GetGinCompletion(ctx) 104 | ) 105 | 106 | newMessages, err := mergeMessages(ctx) 107 | if err != nil { 108 | logger.Error(err) 109 | response.Error(ctx, -1, err) 110 | err = nil 111 | return 112 | } 113 | 114 | options, mode, err := newOptions(proxied, completion.Model) 115 | if err != nil { 116 | response.Error(ctx, -1, err) 117 | return 118 | } 119 | 120 | co, msToken := extCookie(cookie) 121 | chat := coze.New(co, msToken, options) 122 | chat.Session(common.HTTPClient) 123 | 124 | query := "" 125 | if mode == 'w' { 126 | query = newMessages[len(newMessages)-1].Content 127 | chat.WebSdk(chat.TransferMessages(newMessages[:len(newMessages)-1])) 128 | } else { 129 | query = strings.Join(stream.Map( 130 | stream.OfSlice(newMessages), func(t coze.Message) string { return t.Content }).ToSlice(), "\n\n") 131 | } 132 | 133 | chatResponse, err := chat.Reply(ctx.Request.Context(), coze.Text, query) 134 | if err != nil { 135 | logger.Error(err) 136 | return 137 | } 138 | 139 | content := waitResponse(ctx, chatResponse, completion.Stream) 140 | if content == "" && response.NotResponse(ctx) { 141 | response.Error(ctx, -1, "EMPTY RESPONSE") 142 | } 143 | return 144 | } 145 | 146 | func (api *api) Generation(ctx *gin.Context) (err error) { 147 | var ( 148 | cookie = ctx.GetString("token") 149 | proxied = api.env.GetString("server.proxied") 150 | generation = common.GetGinGeneration(ctx) 151 | ) 152 | 153 | options, _, err := newOptions(proxied, generation.Model) 154 | if err != nil { 155 | return 156 | } 157 | 158 | co, msToken := extCookie(cookie) 159 | chat := coze.New(co, msToken, options) 160 | chat.Session(common.HTTPClient) 161 | 162 | image, err := chat.Images(ctx.Request.Context(), generation.Message) 163 | if err != nil { 164 | return 165 | } 166 | 167 | ctx.JSON(http.StatusOK, gin.H{ 168 | "created": time.Now().Unix(), 169 | "styles:": make([]string, 0), 170 | "data": []map[string]string{ 171 | {"url": image}, 172 | }, 173 | }) 174 | 175 | return 176 | } 177 | 178 | func newOptions(proxies string, model string) (options coze.Options, mode byte, err error) { 179 | if model == "coze/websdk" { 180 | mode = 'w' 181 | options = coze.NewDefaultOptions("xxx", "xxx", 1000, false, proxies) 182 | return 183 | } 184 | 185 | if strings.HasPrefix(model, "coze/") { 186 | var scene int 187 | values := strings.Split(model[5:], "-") 188 | if scene, err = strconv.Atoi(values[2]); err == nil { 189 | isO := isOwner(model) 190 | if isO { 191 | mode = 'o' 192 | } else if isWebSdk(model) { 193 | mode = 'w' 194 | } 195 | options = coze.NewDefaultOptions(values[0], values[1], scene, isO, proxies) 196 | 
logger.Infof("using custom coze options: botId = %s, version = %s, scene = %d, mode = %c", values[0], values[1], scene, mode) 197 | return 198 | } 199 | } 200 | 201 | err = fmt.Errorf("coze model '%s' is incorrect", model) 202 | return 203 | } 204 | 205 | func extCookie(co string) (cookie, msToken string) { 206 | cookie = co 207 | index := strings.Index(cookie, "[msToken=") 208 | if index > -1 { 209 | end := strings.Index(cookie[index:], "]") 210 | if end > -1 { 211 | msToken = cookie[index+6 : index+end] 212 | cookie = cookie[:index] + cookie[index+end+1:] 213 | } 214 | } 215 | return 216 | } 217 | 218 | func isOwner(model string) bool { 219 | return strings.HasSuffix(model, "-o") 220 | } 221 | 222 | func isWebSdk(model string) bool { 223 | return strings.HasSuffix(model, "-w") 224 | } 225 | -------------------------------------------------------------------------------- /relay/llm/coze/ctor.go: -------------------------------------------------------------------------------- 1 | package coze 2 | 3 | import ( 4 | "chatgpt-adapter/core/gin/inter" 5 | "github.com/iocgo/sdk/env" 6 | 7 | _ "github.com/iocgo/sdk" 8 | ) 9 | 10 | // @Inject(name = "coze-adapter") 11 | func New(env *env.Environment) inter.Adapter { 12 | return &api{env: env} 13 | } 14 | -------------------------------------------------------------------------------- /relay/llm/coze/message.go: -------------------------------------------------------------------------------- 1 | package coze 2 | 3 | import ( 4 | "errors" 5 | "net/http" 6 | "strings" 7 | "sync" 8 | "time" 9 | 10 | "chatgpt-adapter/core/common" 11 | "chatgpt-adapter/core/common/vars" 12 | "chatgpt-adapter/core/gin/response" 13 | "chatgpt-adapter/core/logger" 14 | "github.com/bincooo/coze-api" 15 | "github.com/gin-gonic/gin" 16 | ) 17 | 18 | const ( 19 | ginTokens = "__tokens__" 20 | ) 21 | 22 | func waitMessage(chatResponse chan string, cancel func(str string) bool) (content string, err error) { 23 | 24 | for { 25 | message, ok := <-chatResponse 26 | if !ok { 27 | break 28 | } 29 | 30 | if strings.HasPrefix(message, "error: ") { 31 | return "", errors.New(strings.TrimPrefix(message, "error: ")) 32 | } 33 | 34 | message = strings.TrimPrefix(message, "text: ") 35 | logger.Debug("----- raw -----") 36 | logger.Debug(message) 37 | if len(message) > 0 { 38 | content += message 39 | if cancel != nil && cancel(content) { 40 | return content, nil 41 | } 42 | } 43 | } 44 | 45 | return content, nil 46 | } 47 | 48 | func waitResponse(ctx *gin.Context, chatResponse chan string, sse bool) (content string) { 49 | created := time.Now().Unix() 50 | logger.Infof("waitResponse ...") 51 | tokens := ctx.GetInt(ginTokens) 52 | onceExec := sync.OnceFunc(func() { 53 | if !sse { 54 | ctx.Writer.WriteHeader(http.StatusOK) 55 | } 56 | }) 57 | 58 | var ( 59 | matchers = common.GetGinMatchers(ctx) 60 | ) 61 | 62 | for { 63 | raw, ok := <-chatResponse 64 | if !ok { 65 | raw = response.ExecMatchers(matchers, "", true) 66 | if raw != "" && sse { 67 | response.SSEResponse(ctx, Model, raw, created) 68 | } 69 | content += raw 70 | break 71 | } 72 | 73 | if strings.HasPrefix(raw, "error: ") { 74 | err := strings.TrimPrefix(raw, "error: ") 75 | logger.Error(err) 76 | if response.NotSSEHeader(ctx) { 77 | response.Error(ctx, -1, err) 78 | } 79 | return 80 | } 81 | 82 | raw = strings.TrimPrefix(raw, "text: ") 83 | contentL := len(raw) 84 | if contentL <= 0 { 85 | continue 86 | } 87 | 88 | logger.Debug("----- raw -----") 89 | logger.Debug(raw) 90 | onceExec() 91 | 92 | raw = response.ExecMatchers(matchers, 
raw, false) 93 | if len(raw) == 0 { 94 | continue 95 | } 96 | 97 | if raw == response.EOF { 98 | break 99 | } 100 | 101 | if sse { 102 | response.SSEResponse(ctx, Model, raw, created) 103 | } 104 | content += raw 105 | } 106 | 107 | if content == "" && response.NotSSEHeader(ctx) { 108 | return 109 | } 110 | 111 | ctx.Set(vars.GinCompletionUsage, response.CalcUsageTokens(content, tokens)) 112 | if !sse { 113 | response.Response(ctx, Model, content) 114 | } else { 115 | response.SSEResponse(ctx, Model, "[DONE]", created) 116 | } 117 | return 118 | } 119 | 120 | func mergeMessages(ctx *gin.Context) (newMessages []coze.Message, err error) { 121 | var ( 122 | completion = common.GetGinCompletion(ctx) 123 | messages = completion.Messages 124 | isC = response.IsClaude(ctx, completion.Model) 125 | ) 126 | 127 | tokens := 0 128 | defer func() { ctx.Set(ginTokens, tokens) }() 129 | 130 | messageL := len(messages) 131 | if isC && messageL == 1 { 132 | message := messages[0].GetString("content") 133 | newMessages = append(newMessages, coze.Message{ 134 | Role: "user", 135 | Content: message, 136 | }) 137 | tokens += response.CalcTokens(message) 138 | return 139 | } 140 | 141 | var ( 142 | pos = 0 143 | contents []string 144 | ) 145 | for { 146 | if pos > messageL-1 { 147 | break 148 | } 149 | 150 | message := messages[pos] 151 | //if pos == 0 && message.Is("role", "system") { 152 | // newMessages = append(newMessages, coze.Message{ 153 | // Role: "system", 154 | // Content: message.GetString("content"), 155 | // }) 156 | // pos++ 157 | // continue 158 | //} 159 | 160 | convertRole, trun := response.ConvertRole(ctx, message.GetString("role")) 161 | contents = append(contents, convertRole+message.GetString("content")+trun) 162 | pos++ 163 | } 164 | 165 | message := strings.Join(contents, "") 166 | tokens += response.CalcTokens(message) 167 | newMessages = append(newMessages, coze.Message{ 168 | Role: "user", 169 | Content: message, 170 | }) 171 | return 172 | } 173 | -------------------------------------------------------------------------------- /relay/llm/coze/toolcall.go: -------------------------------------------------------------------------------- 1 | package coze 2 | 3 | import ( 4 | "net/http" 5 | "strings" 6 | 7 | "chatgpt-adapter/core/common" 8 | "chatgpt-adapter/core/common/toolcall" 9 | "chatgpt-adapter/core/gin/model" 10 | "chatgpt-adapter/core/gin/response" 11 | "chatgpt-adapter/core/logger" 12 | "github.com/bincooo/coze-api" 13 | "github.com/gin-gonic/gin" 14 | ) 15 | 16 | func toolChoice(ctx *gin.Context, cookie, proxies string, completion model.Completion) bool { 17 | logger.Info("completeTools ...") 18 | exec, err := toolcall.ToolChoice(ctx, completion, func(message string) (string, error) { 19 | message = strings.TrimSpace(message) 20 | system := "" 21 | if strings.HasPrefix(message, "<|system|>") { 22 | index := strings.Index(message, "<|end|>") 23 | system = message[:index+7] 24 | message = strings.TrimSpace(message[index+7:]) 25 | } 26 | 27 | var pMessages []coze.Message 28 | if system != "" { 29 | pMessages = append(pMessages, coze.Message{ 30 | Role: "system", 31 | Content: system, 32 | }) 33 | } 34 | 35 | pMessages = append(pMessages, coze.Message{ 36 | Role: "user", 37 | Content: message, 38 | }) 39 | 40 | co, msToken := extCookie(cookie) 41 | options, mode, err := newOptions(proxies, completion.Model) 42 | if err != nil { 43 | return "", err 44 | } 45 | 46 | chat := coze.New(co, msToken, options) 47 | chat.Session(common.HTTPClient) 48 | 49 | query := "" 50 | if mode == 'w' { 
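// Web-SDK mode ('w'): only the final prompt message becomes the query, while everything
// before it is replayed through chat.WebSdk as prior conversation history; the non-websdk
// branch instead flattens all messages into one query with coze.MergeMessages.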
51 | query = pMessages[len(pMessages)-1].Content 52 | chat.WebSdk(chat.TransferMessages(pMessages[:len(pMessages)-1])) 53 | } else { 54 | query = coze.MergeMessages(pMessages) 55 | } 56 | 57 | chatResponse, err := chat.Reply(ctx.Request.Context(), coze.Text, query) 58 | if err != nil { 59 | return "", err 60 | } 61 | 62 | return waitMessage(chatResponse, toolcall.Cancel) 63 | }) 64 | 65 | if err != nil { 66 | errMessage := err.Error() 67 | if strings.Contains(errMessage, "Login verification is invalid") { 68 | logger.Error(err) 69 | response.Error(ctx, http.StatusUnauthorized, errMessage) 70 | return true 71 | } 72 | 73 | logger.Error(err) 74 | response.Error(ctx, -1, errMessage) 75 | return true 76 | } 77 | 78 | return exec 79 | } 80 | -------------------------------------------------------------------------------- /relay/llm/cursor/adapter.go: -------------------------------------------------------------------------------- 1 | package cursor 2 | 3 | import ( 4 | "chatgpt-adapter/core/common" 5 | "chatgpt-adapter/core/gin/inter" 6 | "chatgpt-adapter/core/gin/model" 7 | "chatgpt-adapter/core/gin/response" 8 | "chatgpt-adapter/core/logger" 9 | "github.com/gin-gonic/gin" 10 | "github.com/iocgo/sdk/env" 11 | "net/url" 12 | "strings" 13 | ) 14 | 15 | var ( 16 | Model = "cursor" 17 | ) 18 | 19 | type api struct { 20 | inter.BaseAdapter 21 | 22 | env *env.Environment 23 | } 24 | 25 | func (api *api) Match(ctx *gin.Context, model string) (ok bool, err error) { 26 | if len(model) <= 7 || Model+"/" != model[:7] { 27 | return 28 | } 29 | slice := api.env.GetStringSlice("cursor.model") 30 | for _, mod := range append(slice, []string{ 31 | "claude-3.5-sonnet", 32 | "gpt-4.1", 33 | "gpt-4o", 34 | "claude-3-opus", 35 | "cursor-fast", 36 | "cursor-small", 37 | "gpt-3.5-turbo", 38 | "gpt-4-turbo-2024-04-09", 39 | "gpt-4o-128k", 40 | "gemini-1.5-flash-500k", 41 | "claude-3-haiku-200k", 42 | "claude-3-5-sonnet-200k", 43 | "claude-3-5-sonnet-20241022", 44 | "claude-3.7-sonnet", 45 | "claude-3.7-sonnet-max", 46 | "claude-3.7-sonnet-thinking", 47 | "claude-3.7-sonnet-thinking-max", 48 | "gpt-4o-mini", 49 | "o1-mini", 50 | "o1-preview", 51 | "o1", 52 | "claude-3.5-haiku", 53 | "gemini-exp-1206", 54 | "gemini-2.0-flash-thinking-exp", 55 | "gemini-2.0-flash-exp", 56 | "deepseek-v3", 57 | "deepseek-r1", 58 | "o3-mini", 59 | }...) { 60 | if model[7:] == mod { 61 | ok = true 62 | return 63 | } 64 | } 65 | return 66 | } 67 | 68 | func (api *api) Models() (slice []model.Model) { 69 | for _, mod := range append(api.env.GetStringSlice("cursor.model"), []string{ 70 | "claude-3.5-sonnet", 71 | "gpt-4", 72 | "gpt-4o", 73 | "claude-3-opus", 74 | "cursor-fast", 75 | "cursor-small", 76 | "gpt-3.5-turbo", 77 | "gpt-4-turbo-2024-04-09", 78 | "gpt-4o-128k", 79 | "gemini-1.5-flash-500k", 80 | "claude-3-haiku-200k", 81 | "claude-3-5-sonnet-200k", 82 | "claude-3-5-sonnet-20241022", 83 | "claude-3.7-sonnet", 84 | "claude-3.7-sonnet-max", 85 | "claude-3.7-sonnet-thinking", 86 | "claude-3.7-sonnet-thinking-max", 87 | "gpt-4o-mini", 88 | "o1-mini", 89 | "o1-preview", 90 | "o1", 91 | "claude-3.5-haiku", 92 | "gemini-exp-1206", 93 | "gemini-2.0-flash-thinking-exp", 94 | "gemini-2.0-flash-exp", 95 | "deepseek-v3", 96 | "deepseek-r1", 97 | "o3-mini", 98 | }...) 
{ 99 | slice = append(slice, model.Model{ 100 | Id: Model + "/" + mod, 101 | Object: "model", 102 | Created: 1686935002, 103 | By: Model + "-adapter", 104 | }) 105 | } 106 | return 107 | } 108 | 109 | func (api *api) ToolChoice(ctx *gin.Context) (ok bool, err error) { 110 | var ( 111 | cookie = ctx.GetString("token") 112 | completion = common.GetGinCompletion(ctx) 113 | ) 114 | 115 | if toolChoice(ctx, api.env, cookie, completion) { 116 | ok = true 117 | } 118 | return 119 | } 120 | 121 | func (api *api) Completion(ctx *gin.Context) (err error) { 122 | var ( 123 | cookie = ctx.GetString("token") 124 | completion = common.GetGinCompletion(ctx) 125 | ) 126 | 127 | cookie, err = url.QueryUnescape(cookie) 128 | if err != nil { 129 | return 130 | } 131 | 132 | if strings.Contains(cookie, "::") { 133 | cookie = strings.Split(cookie, "::")[1] 134 | } 135 | 136 | buffer, err := convertRequest(completion) 137 | if err != nil { 138 | return 139 | } 140 | 141 | r, err := fetch(ctx, api.env, cookie, buffer) 142 | if err != nil { 143 | logger.Error(err) 144 | return 145 | } 146 | 147 | content := waitResponse(ctx, r, completion.Stream) 148 | if content == "" && response.NotResponse(ctx) { 149 | response.Error(ctx, -1, "EMPTY RESPONSE") 150 | } 151 | return 152 | } 153 | -------------------------------------------------------------------------------- /relay/llm/cursor/ctor.go: -------------------------------------------------------------------------------- 1 | package cursor 2 | 3 | import ( 4 | "chatgpt-adapter/core/gin/inter" 5 | "github.com/iocgo/sdk/env" 6 | 7 | _ "github.com/iocgo/sdk" 8 | ) 9 | 10 | // @Inject(name = "cursor-adapter") 11 | func New(env *env.Environment) inter.Adapter { 12 | return &api{env: env} 13 | } 14 | -------------------------------------------------------------------------------- /relay/llm/cursor/message.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | package cursor; 3 | option go_package = "./cursor"; 4 | 5 | message BidiAppend { 6 | 7 | message SessionKey { 8 | string value = 1; 9 | } 10 | 11 | string chunk = 1; 12 | SessionKey sessionKey = 2; 13 | } 14 | 15 | message StreamUnified { 16 | string value = 1; 17 | } 18 | 19 | message ChatMessage { 20 | 21 | message Content { 22 | message Message { 23 | string value = 1; 24 | uint32 unknown_field2 = 2; 25 | string uid = 13; 26 | uint32 unknown_field29 = 29; 27 | uint32 role = 47; 28 | optional string empty51 = 51; 29 | } 30 | 31 | message Model { 32 | string value = 1; 33 | optional string empty4 = 4; 34 | } 35 | 36 | message Unknown_field15 { 37 | message Unknown_field6 { 38 | optional string empty1 = 1; 39 | optional string empty2 = 2; 40 | } 41 | 42 | optional string empty3 = 3; 43 | Unknown_field6 unknown_field6 = 6; 44 | uint32 unknown_field8 = 8; 45 | uint32 unknown_field9 = 9; 46 | } 47 | 48 | message Info { 49 | string os = 1; 50 | string arch = 2; 51 | string version = 3; 52 | string bash = 4; 53 | string date = 5; 54 | } 55 | 56 | message Unknown_field30 { 57 | string uuid = 1; 58 | uint32 unknown_field3 = 3; 59 | } 60 | 61 | repeated Message messages = 1; 62 | uint32 unknown_field2 = 2; 63 | optional string empty3 = 3; 64 | uint32 unknown_field4 = 4; 65 | Model model = 5; 66 | Unknown_field15 unknown_field15 = 15; 67 | uint32 unknown_field19 = 19; 68 | string uid = 23; 69 | Info info = 26; 70 | uint32 unknown_field27 = 27; 71 | optional string empty29 = 29; 72 | Unknown_field30 unknown_field30 = 30; 73 | optional uint32 unknown_field35 = 35; 74 | 
optional uint32 unknown_field38 = 38; 75 | uint32 unknown_field46 = 46; // 2 76 | optional string empty47 = 47; 77 | optional uint32 unknown_field48 = 48; 78 | optional uint32 unknown_field49 = 49; 79 | optional uint32 unknown_field51 = 51; 80 | optional uint32 unknown_field53 = 53; 81 | string agent = 54; 82 | } 83 | 84 | Content content = 1; 85 | } 86 | 87 | message ResMessage { 88 | message Msg { 89 | string value = 1; 90 | } 91 | 92 | Msg msg = 2; 93 | } -------------------------------------------------------------------------------- /relay/llm/cursor/toolcall.go: -------------------------------------------------------------------------------- 1 | package cursor 2 | 3 | import ( 4 | "chatgpt-adapter/core/common/toolcall" 5 | "chatgpt-adapter/core/common/vars" 6 | "chatgpt-adapter/core/gin/model" 7 | "chatgpt-adapter/core/gin/response" 8 | "chatgpt-adapter/core/logger" 9 | "github.com/gin-gonic/gin" 10 | "github.com/iocgo/sdk/env" 11 | ) 12 | 13 | func toolChoice(ctx *gin.Context, env *env.Environment, cookie string, completion model.Completion) bool { 14 | logger.Info("completeTools ...") 15 | echo := ctx.GetBool(vars.GinEcho) 16 | 17 | exec, err := toolcall.ToolChoice(ctx, completion, func(message string) (string, error) { 18 | if echo { 19 | logger.Infof("toolCall message: \n%s", message) 20 | return "", nil 21 | } 22 | 23 | completion.Messages = []model.Keyv[interface{}]{ 24 | { 25 | "role": "user", 26 | "content": message, 27 | }, 28 | } 29 | messageBuffer, err := convertRequest(completion) 30 | if err != nil { 31 | return "", err 32 | } 33 | 34 | r, err := fetch(ctx, env, cookie, messageBuffer) 35 | if err != nil { 36 | return "", err 37 | } 38 | 39 | return waitMessage(r, toolcall.Cancel) 40 | }) 41 | 42 | if err != nil { 43 | logger.Error(err) 44 | response.Error(ctx, -1, err) 45 | return true 46 | } 47 | 48 | return exec 49 | } 50 | -------------------------------------------------------------------------------- /relay/llm/deepseek/adapter.go: -------------------------------------------------------------------------------- 1 | package deepseek 2 | 3 | import ( 4 | "chatgpt-adapter/core/common" 5 | "chatgpt-adapter/core/gin/inter" 6 | "chatgpt-adapter/core/gin/model" 7 | "chatgpt-adapter/core/gin/response" 8 | "chatgpt-adapter/core/logger" 9 | "github.com/gin-gonic/gin" 10 | "github.com/iocgo/sdk/env" 11 | ) 12 | 13 | var ( 14 | Model = "deepseek" 15 | ) 16 | 17 | type api struct { 18 | inter.BaseAdapter 19 | 20 | env *env.Environment 21 | } 22 | 23 | func (api *api) Match(ctx *gin.Context, model string) (ok bool, err error) { 24 | if len(model) <= 9 || Model+"-" != model[:9] { 25 | return 26 | } 27 | 28 | ok = model[9:] == "chat" || model[9:] == "reasoner" 29 | return 30 | } 31 | 32 | func (api *api) Models() (slice []model.Model) { 33 | slice = append(slice, 34 | model.Model{ 35 | Id: Model + "-chat", 36 | Object: "model", 37 | Created: 1686935002, 38 | By: Model + "-adapter", 39 | }, model.Model{ 40 | Id: Model + "-reasoner", 41 | Object: "model", 42 | Created: 1686935002, 43 | By: Model + "-adapter", 44 | }) 45 | return 46 | } 47 | 48 | func (api *api) ToolChoice(ctx *gin.Context) (ok bool, err error) { 49 | var ( 50 | cookie = ctx.GetString("token") 51 | proxied = api.env.GetString("server.proxied") 52 | completion = common.GetGinCompletion(ctx) 53 | ) 54 | 55 | if toolChoice(ctx, api.env, cookie, proxied, completion) { 56 | ok = true 57 | } 58 | return 59 | } 60 | 61 | func (api *api) Completion(ctx *gin.Context) (err error) { 62 | var ( 63 | cookie = 
ctx.GetString("token") 64 | proxied = api.env.GetString("server.proxied") 65 | completion = common.GetGinCompletion(ctx) 66 | ) 67 | 68 | request, err := convertRequest(ctx, api.env, completion) 69 | if err != nil { 70 | logger.Error(err) 71 | return 72 | } 73 | 74 | r, err := fetch(ctx.Request.Context(), proxied, cookie, request) 75 | if err != nil { 76 | logger.Error(err) 77 | return 78 | } 79 | 80 | defer deleteSession(ctx, api.env, request.ChatSessionId) 81 | content := waitResponse(ctx, r, completion.Stream) 82 | if content == "" && response.NotResponse(ctx) { 83 | response.Error(ctx, -1, "EMPTY RESPONSE") 84 | } 85 | return 86 | } 87 | -------------------------------------------------------------------------------- /relay/llm/deepseek/ctor.go: -------------------------------------------------------------------------------- 1 | package deepseek 2 | 3 | import ( 4 | "chatgpt-adapter/core/gin/inter" 5 | "github.com/iocgo/sdk/env" 6 | 7 | _ "github.com/iocgo/sdk" 8 | ) 9 | 10 | // @Inject(name = "deepseek-adapter") 11 | func New(env *env.Environment) inter.Adapter { 12 | return &api{env: env} 13 | } 14 | -------------------------------------------------------------------------------- /relay/llm/deepseek/message.go: -------------------------------------------------------------------------------- 1 | package deepseek 2 | 3 | import ( 4 | "bufio" 5 | "bytes" 6 | "chatgpt-adapter/core/gin/model" 7 | "encoding/json" 8 | "github.com/iocgo/sdk/env" 9 | "io" 10 | "net/http" 11 | "sync" 12 | "time" 13 | 14 | "chatgpt-adapter/core/common" 15 | "chatgpt-adapter/core/common/vars" 16 | "chatgpt-adapter/core/gin/response" 17 | "chatgpt-adapter/core/logger" 18 | "github.com/gin-gonic/gin" 19 | ) 20 | 21 | const ( 22 | ginTokens = "__tokens__" 23 | ) 24 | 25 | func waitMessage(r *http.Response, cancel func(str string) bool) (content string, err error) { 26 | defer r.Body.Close() 27 | reader := bufio.NewReader(r.Body) 28 | var dataBytes []byte 29 | for { 30 | dataBytes, _, err = reader.ReadLine() 31 | if err == io.EOF { 32 | break 33 | } 34 | 35 | if err != nil { 36 | return 37 | } 38 | 39 | var res model.Response 40 | if bytes.HasPrefix(dataBytes, []byte("data: ")) { 41 | dataBytes = dataBytes[6:] 42 | } 43 | if len(dataBytes) == 0 { 44 | continue 45 | } 46 | 47 | err = json.Unmarshal(dataBytes, &res) 48 | if err != nil { 49 | logger.Warn(err) 50 | continue 51 | } 52 | 53 | if len(res.Choices) == 0 { 54 | continue 55 | } 56 | 57 | if res.Choices[0].FinishReason != nil && *res.Choices[0].FinishReason == "stop" { 58 | break 59 | } 60 | 61 | delta := res.Choices[0].Delta 62 | if delta.Type == "thinking" { 63 | continue 64 | } 65 | 66 | raw := delta.Content 67 | logger.Debug("----- raw -----") 68 | logger.Debug(raw) 69 | content += raw 70 | if cancel != nil && cancel(content) { 71 | return content, nil 72 | } 73 | } 74 | return 75 | } 76 | 77 | func waitResponse(ctx *gin.Context, r *http.Response, sse bool) (content string) { 78 | created := time.Now().Unix() 79 | logger.Infof("waitResponse ...") 80 | tokens := ctx.GetInt(ginTokens) 81 | thinkReason := env.Env.GetBool("server.think_reason") 82 | reasoningContent := "" 83 | 84 | onceExec := sync.OnceFunc(func() { 85 | if !sse { 86 | ctx.Writer.WriteHeader(http.StatusOK) 87 | } 88 | }) 89 | 90 | var ( 91 | matchers = common.GetGinMatchers(ctx) 92 | ) 93 | 94 | defer r.Body.Close() 95 | reader := bufio.NewReader(r.Body) 96 | think := 0 97 | for { 98 | dataBytes, _, err := reader.ReadLine() 99 | if err == io.EOF { 100 | raw := response.ExecMatchers(matchers, 
"", true) 101 | if raw != "" && sse { 102 | response.SSEResponse(ctx, Model, raw, created) 103 | } 104 | content += raw 105 | break 106 | } 107 | 108 | if asError(ctx, err) { 109 | return 110 | } 111 | 112 | var res model.Response 113 | if bytes.HasPrefix(dataBytes, []byte("data: ")) { 114 | dataBytes = dataBytes[6:] 115 | } 116 | if len(dataBytes) == 0 { 117 | continue 118 | } 119 | 120 | err = json.Unmarshal(dataBytes, &res) 121 | if err != nil { 122 | logger.Warn(err) 123 | continue 124 | } 125 | 126 | if len(res.Choices) == 0 { 127 | continue 128 | } 129 | 130 | if res.Choices[0].FinishReason != nil && *res.Choices[0].FinishReason == "stop" { 131 | break 132 | } 133 | 134 | delta := res.Choices[0].Delta 135 | if delta.Type == "thinking" { 136 | if thinkReason { 137 | delta.ReasoningContent = delta.Content 138 | reasoningContent += delta.Content 139 | delta.Content = "" 140 | think = 1 141 | } else if think == 0 { 142 | think = 1 143 | delta.Content = "\n" + delta.Content 144 | } 145 | } else { 146 | if thinkReason { 147 | think = 2 148 | } else if think == 1 { 149 | think = 2 150 | delta.Content = "\n\n" + delta.Content 151 | } 152 | } 153 | 154 | raw := delta.Content 155 | if thinkReason && think == 1 { 156 | logger.Debug("----- think raw -----") 157 | logger.Debug(delta.ReasoningContent) 158 | goto label 159 | } 160 | 161 | logger.Debug("----- raw -----") 162 | logger.Debug(raw) 163 | onceExec() 164 | 165 | raw = response.ExecMatchers(matchers, raw, false) 166 | if len(raw) == 0 { 167 | continue 168 | } 169 | 170 | label: 171 | if raw == response.EOF { 172 | break 173 | } 174 | 175 | if sse { 176 | response.ReasonSSEResponse(ctx, Model, raw, delta.ReasoningContent, created) 177 | } 178 | content += raw 179 | } 180 | 181 | if content == "" && response.NotSSEHeader(ctx) { 182 | return 183 | } 184 | ctx.Set(vars.GinCompletionUsage, response.CalcUsageTokens(reasoningContent+content, tokens)) 185 | if !sse { 186 | response.ReasonResponse(ctx, Model, content, reasoningContent) 187 | } else { 188 | response.SSEResponse(ctx, Model, "[DONE]", created) 189 | } 190 | return 191 | } 192 | 193 | func asError(ctx *gin.Context, err error) (ok bool) { 194 | if err == nil { 195 | return 196 | } 197 | 198 | logger.Error(err) 199 | if response.NotSSEHeader(ctx) { 200 | response.Error(ctx, -1, err) 201 | } 202 | ok = true 203 | return 204 | } 205 | -------------------------------------------------------------------------------- /relay/llm/deepseek/sha3_wasm_bg.wasm: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bincooo/chatgpt-adapter/30fffc3c2756b805e4b7ea306d4c4233d90fde0f/relay/llm/deepseek/sha3_wasm_bg.wasm -------------------------------------------------------------------------------- /relay/llm/deepseek/toolcall.go: -------------------------------------------------------------------------------- 1 | package deepseek 2 | 3 | import ( 4 | "chatgpt-adapter/core/common/toolcall" 5 | "chatgpt-adapter/core/common/vars" 6 | "chatgpt-adapter/core/gin/model" 7 | "chatgpt-adapter/core/gin/response" 8 | "chatgpt-adapter/core/logger" 9 | "github.com/gin-gonic/gin" 10 | "github.com/iocgo/sdk/env" 11 | ) 12 | 13 | func toolChoice(ctx *gin.Context, env *env.Environment, proxies, cookie string, completion model.Completion) bool { 14 | logger.Info("completeTools ...") 15 | echo := ctx.GetBool(vars.GinEcho) 16 | 17 | exec, err := toolcall.ToolChoice(ctx, completion, func(message string) (string, error) { 18 | if echo { 19 | logger.Infof("toolCall 
message: \n%s", message) 20 | return "", nil 21 | } 22 | completion.Messages = []model.Keyv[interface{}]{ 23 | { 24 | "role": "user", 25 | "content": message, 26 | }, 27 | } 28 | 29 | request, err := convertRequest(ctx, env, completion) 30 | if err != nil { 31 | return "", err 32 | } 33 | 34 | r, err := fetch(ctx.Request.Context(), proxies, cookie, request) 35 | if err != nil { 36 | return "", err 37 | } 38 | 39 | defer deleteSession(ctx, env, request.ChatSessionId) 40 | return waitMessage(r, toolcall.Cancel) 41 | }) 42 | 43 | if err != nil { 44 | logger.Error(err) 45 | response.Error(ctx, -1, err) 46 | return true 47 | } 48 | 49 | return exec 50 | } 51 | -------------------------------------------------------------------------------- /relay/llm/grok/adapter.go: -------------------------------------------------------------------------------- 1 | package grok 2 | 3 | import ( 4 | "chatgpt-adapter/core/common" 5 | "chatgpt-adapter/core/gin/inter" 6 | "chatgpt-adapter/core/gin/model" 7 | "chatgpt-adapter/core/gin/response" 8 | "chatgpt-adapter/core/logger" 9 | "github.com/gin-gonic/gin" 10 | "github.com/iocgo/sdk/env" 11 | ) 12 | 13 | var ( 14 | Model = "grok" 15 | ) 16 | 17 | type api struct { 18 | inter.BaseAdapter 19 | 20 | env *env.Environment 21 | } 22 | 23 | func (api *api) Match(ctx *gin.Context, model string) (ok bool, err error) { 24 | if len(model) <= 4 { 25 | return 26 | } 27 | 28 | ok = Model+"-2" == model || Model+"-3" == model 29 | return 30 | } 31 | 32 | func (api *api) Models() (slice []model.Model) { 33 | slice = append(slice, 34 | model.Model{ 35 | Id: Model + "-2", 36 | Object: "model", 37 | Created: 1686935002, 38 | By: Model + "-adapter", 39 | }, model.Model{ 40 | Id: Model + "-3", 41 | Object: "model", 42 | Created: 1686935002, 43 | By: Model + "-adapter", 44 | }) 45 | return 46 | } 47 | 48 | func (api *api) ToolChoice(ctx *gin.Context) (ok bool, err error) { 49 | var ( 50 | cookie = ctx.GetString("token") 51 | proxied = api.env.GetString("server.proxied") 52 | completion = common.GetGinCompletion(ctx) 53 | ) 54 | 55 | if toolChoice(ctx, api.env, cookie, proxied, completion) { 56 | ok = true 57 | } 58 | return 59 | } 60 | 61 | func (api *api) Completion(ctx *gin.Context) (err error) { 62 | var ( 63 | cookie = ctx.GetString("token") 64 | proxied = api.env.GetString("server.proxied") 65 | completion = common.GetGinCompletion(ctx) 66 | ) 67 | 68 | request, err := convertRequest(ctx, api.env, completion) 69 | if err != nil { 70 | logger.Error(err) 71 | return 72 | } 73 | 74 | r, err := fetch(ctx, proxied, cookie, request) 75 | if err != nil { 76 | logger.Error(err) 77 | return 78 | } 79 | 80 | content := waitResponse(ctx, r, completion.Stream) 81 | if content == "" && response.NotResponse(ctx) { 82 | response.Error(ctx, -1, "EMPTY RESPONSE") 83 | } 84 | return 85 | } 86 | -------------------------------------------------------------------------------- /relay/llm/grok/ctor.go: -------------------------------------------------------------------------------- 1 | package grok 2 | 3 | import ( 4 | "chatgpt-adapter/core/gin/inter" 5 | "github.com/iocgo/sdk/env" 6 | 7 | _ "github.com/iocgo/sdk" 8 | ) 9 | 10 | // @Inject(name = "grok-adapter") 11 | func New(env *env.Environment) inter.Adapter { 12 | return &api{env: env} 13 | } 14 | -------------------------------------------------------------------------------- /relay/llm/grok/fetch.go: -------------------------------------------------------------------------------- 1 | package grok 2 | 3 | import ( 4 | "bytes" 5 | "net/http" 6 | 
"strings" 7 | 8 | "chatgpt-adapter/core/common" 9 | "chatgpt-adapter/core/gin/model" 10 | "chatgpt-adapter/core/gin/response" 11 | "github.com/bincooo/emit.io" 12 | "github.com/gin-gonic/gin" 13 | "github.com/google/uuid" 14 | "github.com/iocgo/sdk/env" 15 | ) 16 | 17 | var ( 18 | userAgent = "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/18.1.1 Safari/605.1.15" 19 | ) 20 | 21 | type grokRequest struct { 22 | Temporary bool `json:"temporary"` 23 | ModelName string `json:"modelName"` 24 | Message string `json:"message"` 25 | FileAttachments []interface{} `json:"fileAttachments"` 26 | ImageAttachments []interface{} `json:"imageAttachments"` 27 | DisableSearch bool `json:"disableSearch"` 28 | EnableImageGeneration bool `json:"enableImageGeneration"` 29 | ReturnImageBytes bool `json:"returnImageBytes"` 30 | ReturnRawGrokInXaiRequest bool `json:"returnRawGrokInXaiRequest"` 31 | EnableImageStreaming bool `json:"enableImageStreaming"` 32 | ImageGenerationCount int `json:"imageGenerationCount"` 33 | ForceConcise bool `json:"forceConcise"` 34 | ToolOverrides struct{} `json:"toolOverrides"` 35 | EnableSideBySide bool `json:"enableSideBySide"` 36 | IsPreset bool `json:"isPreset"` 37 | SendFinalMetadata bool `json:"sendFinalMetadata"` 38 | CustomInstructions string `json:"customInstructions"` 39 | DeepsearchPreset string `json:"deepsearchPreset"` 40 | IsReasoning bool `json:"isReasoning"` 41 | } 42 | 43 | func fetch(ctx *gin.Context, proxied, cookie string, request grokRequest) (response *http.Response, err error) { 44 | ua := ctx.GetString("userAgent") 45 | lang := ctx.GetString("lang") 46 | response, err = emit.ClientBuilder(common.HTTPClient). 47 | Context(ctx.Request.Context()). 48 | Proxies(proxied). 49 | POST("https://grok.com/rest/app-chat/conversations/new"). 50 | JSONHeader(). 51 | Header("origin", "https://grok.com"). 52 | Header("referer", "https://grok.com/"). 53 | Header("baggage", "sentry-environment=production,sentry-release="+common.Hex(21)+",sentry-public_key="+strings.ReplaceAll(uuid.NewString(), "-", "")+",sentry-trace_id="+strings.ReplaceAll(uuid.NewString(), "-", "")+",sentry-replay_id="+strings.ReplaceAll(uuid.NewString(), "-", "")+",sentry-sample_rate=1,sentry-sampled=true"). 54 | Header("sentry-trace", strings.ReplaceAll(uuid.NewString(), "-", "")+"-"+common.Hex(16)+"-1"). 55 | Header("accept-language", elseOf(lang == "", "en-US,en;q=0.9", lang)). 56 | Header("user-agent", elseOf(ua == "", userAgent, ua)). 57 | Header("cookie", emit.MergeCookies(cookie, ctx.GetString("clearance"))). 58 | Body(request). 
59 | DoC(emit.Status(http.StatusOK), emit.IsJSON) 60 | return 61 | } 62 | 63 | func convertRequest(ctx *gin.Context, env *env.Environment, completion model.Completion) (request grokRequest, err error) { 64 | contentBuffer := new(bytes.Buffer) 65 | customInstructions := "" 66 | 67 | if len(completion.Messages) == 1 { 68 | contentBuffer.WriteString(completion.Messages[0].GetString("content")) 69 | goto label 70 | } 71 | 72 | for idx, message := range completion.Messages { 73 | if idx == 0 && message.Is("role", "system") { 74 | customInstructions = message.GetString("content") 75 | continue 76 | } 77 | role, end := response.ConvertRole(ctx, message.GetString("role")) 78 | contentBuffer.WriteString(role) 79 | contentBuffer.WriteString(message.GetString("content")) 80 | contentBuffer.WriteString(end) 81 | } 82 | 83 | label: 84 | request = grokRequest{ 85 | Temporary: true, 86 | ModelName: completion.Model, 87 | FileAttachments: make([]interface{}, 0), 88 | ImageAttachments: make([]interface{}, 0), 89 | EnableImageGeneration: true, 90 | ReturnImageBytes: false, 91 | ReturnRawGrokInXaiRequest: false, 92 | EnableImageStreaming: true, 93 | ImageGenerationCount: 1, 94 | ForceConcise: false, 95 | ToolOverrides: struct{}{}, 96 | EnableSideBySide: true, 97 | IsPreset: false, 98 | SendFinalMetadata: true, 99 | DeepsearchPreset: "", 100 | CustomInstructions: customInstructions, 101 | Message: contentBuffer.String(), 102 | DisableSearch: env.GetBool("grok.disable_search"), 103 | IsReasoning: env.GetBool("grok.think_reason"), 104 | } 105 | return 106 | } 107 | 108 | func elseOf[T any](condition bool, t1, t2 T) T { 109 | if condition { 110 | return t1 111 | } 112 | return t2 113 | } 114 | -------------------------------------------------------------------------------- /relay/llm/grok/message.go: -------------------------------------------------------------------------------- 1 | package grok 2 | 3 | import ( 4 | "bufio" 5 | "encoding/json" 6 | "io" 7 | "net/http" 8 | "sync" 9 | "time" 10 | 11 | "chatgpt-adapter/core/common" 12 | "chatgpt-adapter/core/common/vars" 13 | "chatgpt-adapter/core/gin/response" 14 | "chatgpt-adapter/core/logger" 15 | "github.com/gin-gonic/gin" 16 | "github.com/iocgo/sdk/env" 17 | ) 18 | 19 | const ( 20 | ginTokens = "__tokens__" 21 | ) 22 | 23 | type grokResponse struct { 24 | Result struct { 25 | Response struct { 26 | Token string `json:"token"` 27 | IsThinking bool `json:"isThinking"` 28 | IsSoftStop bool `json:"isSoftStop"` 29 | ResponseId string `json:"responseId"` 30 | Title *struct { 31 | NewTitle string `json:"newTitle"` 32 | } `json:"title,omitempty"` 33 | ModelResponse *map[string]interface{} `json:"modelResponse,omitempty"` 34 | } `json:"response"` 35 | } `json:"result"` 36 | } 37 | 38 | func waitMessage(r *http.Response, cancel func(str string) bool) (content string, err error) { 39 | defer r.Body.Close() 40 | reader := bufio.NewReader(r.Body) 41 | var dataBytes []byte 42 | for { 43 | dataBytes, _, err = reader.ReadLine() 44 | if err == io.EOF { 45 | break 46 | } 47 | 48 | if err != nil { 49 | return 50 | } 51 | 52 | var res grokResponse 53 | if len(dataBytes) == 0 { 54 | continue 55 | } 56 | 57 | err = json.Unmarshal(dataBytes, &res) 58 | if err != nil { 59 | logger.Warn(err) 60 | continue 61 | } 62 | 63 | if res.Result.Response.IsSoftStop { 64 | break 65 | } 66 | 67 | delta := res.Result.Response 68 | if delta.IsThinking { 69 | continue 70 | } 71 | 72 | raw := delta.Token 73 | logger.Debug("----- raw -----") 74 | logger.Debug(raw) 75 | content += raw 76 | if cancel != 
nil && cancel(content) { 77 | return content, nil 78 | } 79 | } 80 | return 81 | } 82 | 83 | func waitResponse(ctx *gin.Context, r *http.Response, sse bool) (content string) { 84 | created := time.Now().Unix() 85 | logger.Infof("waitResponse ...") 86 | tokens := ctx.GetInt(ginTokens) 87 | thinkReason := env.Env.GetBool("server.think_reason") 88 | reasoningContent := "" 89 | 90 | onceExec := sync.OnceFunc(func() { 91 | if !sse { 92 | ctx.Writer.WriteHeader(http.StatusOK) 93 | } 94 | }) 95 | 96 | var ( 97 | matchers = common.GetGinMatchers(ctx) 98 | ) 99 | 100 | defer r.Body.Close() 101 | reader := bufio.NewReader(r.Body) 102 | think := 0 103 | for { 104 | dataBytes, _, err := reader.ReadLine() 105 | if err == io.EOF { 106 | raw := response.ExecMatchers(matchers, "", true) 107 | if raw != "" && sse { 108 | response.SSEResponse(ctx, Model, raw, created) 109 | } 110 | content += raw 111 | break 112 | } 113 | 114 | if asError(ctx, err) { 115 | return 116 | } 117 | 118 | var res grokResponse 119 | if len(dataBytes) == 0 { 120 | continue 121 | } 122 | 123 | err = json.Unmarshal(dataBytes, &res) 124 | if err != nil { 125 | logger.Warn(err) 126 | continue 127 | } 128 | 129 | if res.Result.Response.IsSoftStop { 130 | break 131 | } 132 | 133 | delta := res.Result.Response 134 | reasonContent := "" 135 | if delta.IsThinking { 136 | if thinkReason { 137 | reasonContent = delta.Token 138 | reasoningContent += delta.Token 139 | delta.Token = "" 140 | think = 1 141 | } else if think == 0 { 142 | think = 1 143 | delta.Token = "\n" + delta.Token 144 | } 145 | } else { 146 | if thinkReason { 147 | think = 2 148 | } else if think == 1 { 149 | think = 2 150 | delta.Token = "\n\n" + delta.Token 151 | } 152 | } 153 | 154 | raw := delta.Token 155 | if thinkReason && think == 1 { 156 | logger.Debug("----- think raw -----") 157 | logger.Debug(reasonContent) 158 | goto label 159 | } 160 | 161 | logger.Debug("----- raw -----") 162 | logger.Debug(raw) 163 | onceExec() 164 | 165 | raw = response.ExecMatchers(matchers, raw, false) 166 | if len(raw) == 0 { 167 | continue 168 | } 169 | 170 | label: 171 | if raw == response.EOF { 172 | break 173 | } 174 | 175 | if sse { 176 | response.ReasonSSEResponse(ctx, Model, raw, reasonContent, created) 177 | } 178 | content += raw 179 | } 180 | 181 | if content == "" && response.NotSSEHeader(ctx) { 182 | return 183 | } 184 | ctx.Set(vars.GinCompletionUsage, response.CalcUsageTokens(reasoningContent+content, tokens)) 185 | if !sse { 186 | response.ReasonResponse(ctx, Model, content, reasoningContent) 187 | } else { 188 | response.SSEResponse(ctx, Model, "[DONE]", created) 189 | } 190 | return 191 | } 192 | 193 | func asError(ctx *gin.Context, err error) (ok bool) { 194 | if err == nil { 195 | return 196 | } 197 | 198 | logger.Error(err) 199 | if response.NotSSEHeader(ctx) { 200 | response.Error(ctx, -1, err) 201 | } 202 | ok = true 203 | return 204 | } 205 | -------------------------------------------------------------------------------- /relay/llm/grok/toolcall.go: -------------------------------------------------------------------------------- 1 | package grok 2 | 3 | import ( 4 | "chatgpt-adapter/core/common/toolcall" 5 | "chatgpt-adapter/core/common/vars" 6 | "chatgpt-adapter/core/gin/model" 7 | "chatgpt-adapter/core/gin/response" 8 | "chatgpt-adapter/core/logger" 9 | "github.com/gin-gonic/gin" 10 | "github.com/iocgo/sdk/env" 11 | ) 12 | 13 | func toolChoice(ctx *gin.Context, env *env.Environment, proxies, cookie string, completion model.Completion) bool { 14 | 
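// Same tool-choice flow as the other adapters: toolcall.ToolChoice renders the tool prompt
// and calls back with the final message; the callback wraps it into a single user turn,
// opens a fresh grok conversation via fetch, and streams the reply through waitMessage until
// toolcall.Cancel signals completion. In echo mode (vars.GinEcho) the rendered prompt is
// only logged and nothing is sent upstream.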
logger.Info("completeTools ...") 15 | echo := ctx.GetBool(vars.GinEcho) 16 | 17 | exec, err := toolcall.ToolChoice(ctx, completion, func(message string) (string, error) { 18 | if echo { 19 | logger.Infof("toolCall message: \n%s", message) 20 | return "", nil 21 | } 22 | completion.Messages = []model.Keyv[interface{}]{ 23 | { 24 | "role": "user", 25 | "content": message, 26 | }, 27 | } 28 | 29 | request, err := convertRequest(ctx, env, completion) 30 | if err != nil { 31 | return "", err 32 | } 33 | 34 | r, err := fetch(ctx, proxies, cookie, request) 35 | if err != nil { 36 | return "", err 37 | } 38 | 39 | return waitMessage(r, toolcall.Cancel) 40 | }) 41 | 42 | if err != nil { 43 | logger.Error(err) 44 | response.Error(ctx, -1, err) 45 | return true 46 | } 47 | 48 | return exec 49 | } 50 | -------------------------------------------------------------------------------- /relay/llm/lmsys/adapter.go: -------------------------------------------------------------------------------- 1 | package lmsys 2 | 3 | import ( 4 | "chatgpt-adapter/core/common" 5 | "chatgpt-adapter/core/gin/inter" 6 | "chatgpt-adapter/core/gin/model" 7 | "chatgpt-adapter/core/gin/response" 8 | "chatgpt-adapter/core/logger" 9 | "github.com/gin-gonic/gin" 10 | "github.com/iocgo/sdk/env" 11 | ) 12 | 13 | var ( 14 | Model = "lmsys" 15 | 16 | /* 17 | // lmsys 模型导出代码 18 | const lis = $0.querySelectorAll('li') 19 | let result = '' 20 | for (let index = 0, len = lis.length; index < len; index ++) { 21 | result += `"${lis[index].getAttribute('aria-label')}",\n` 22 | } 23 | console.log(`[${result}]`) 24 | */ 25 | modelSlice = []string{ 26 | "chatgpt-4o-latest-20241120", 27 | "gemini-exp-1121", 28 | "gemini-exp-1114", 29 | "chatgpt-4o-latest-20240903", 30 | "gpt-4o-mini-2024-07-18", 31 | "gpt-4o-2024-08-06", 32 | "gpt-4o-2024-05-13", 33 | "claude-3-5-sonnet-20241022", 34 | "claude-3-5-sonnet-20240620", 35 | "grok-2-2024-08-13", 36 | "grok-2-mini-2024-08-13", 37 | "gemini-1.5-pro-002", 38 | "gemini-1.5-flash-002", 39 | "gemini-1.5-flash-8b-001", 40 | "gemini-1.5-pro-001", 41 | "gemini-1.5-flash-001", 42 | "llama-3.1-nemotron-70b-instruct", 43 | "llama-3.1-nemotron-51b-instruct", 44 | "llama-3.2-vision-90b-instruct", 45 | "llama-3.2-vision-11b-instruct", 46 | "llama-3.1-405b-instruct-bf16", 47 | "llama-3.1-405b-instruct-fp8", 48 | "llama-3.1-70b-instruct", 49 | "llama-3.1-8b-instruct", 50 | "llama-3.2-3b-instruct", 51 | "llama-3.2-1b-instruct", 52 | "hunyuan-standard-256k", 53 | "mistral-large-2411", 54 | "pixtral-large-2411", 55 | "mistral-large-2407", 56 | "yi-lightning", 57 | "yi-vision", 58 | "glm-4-plus", 59 | "molmo-72b-0924", 60 | "molmo-7b-d-0924", 61 | "im-also-a-good-gpt2-chatbot", 62 | "im-a-good-gpt2-chatbot", 63 | "jamba-1.5-large", 64 | "jamba-1.5-mini", 65 | "gemma-2-27b-it", 66 | "gemma-2-9b-it", 67 | "gemma-2-2b-it", 68 | "eureka-chatbot", 69 | "claude-3-haiku-20240307", 70 | "claude-3-sonnet-20240229", 71 | "claude-3-opus-20240229", 72 | "deepseek-v2.5", 73 | "nemotron-4-340b", 74 | "llama-3-70b-instruct", 75 | "llama-3-8b-instruct", 76 | "athene-v2-chat", 77 | "qwen2.5-coder-32b-instruct", 78 | "qwen2.5-72b-instruct", 79 | "qwen-max-0919", 80 | "qwen-plus-0828", 81 | "qwen-vl-max-0809", 82 | "gpt-3.5-turbo-0125", 83 | "phi-3-mini-4k-instruct-june-2024", 84 | "reka-core-20240904", 85 | "reka-flash-20240904", 86 | "c4ai-aya-expanse-32b", 87 | "command-r-plus-08-2024", 88 | "command-r-08-2024", 89 | "codestral-2405", 90 | "mixtral-8x22b-instruct-v0.1", 91 | "f1-mini-preview", 92 | "mixtral-8x7b-instruct-v0.1", 93 | 
"pixtral-12b-2409", 94 | "ministral-8b-2410", 95 | "internvl2-26b", 96 | "qwen2-vl-7b-instruct", 97 | "internvl2-4b", 98 | } 99 | ) 100 | 101 | type api struct { 102 | inter.BaseAdapter 103 | 104 | env *env.Environment 105 | } 106 | 107 | func (api *api) Match(ctx *gin.Context, model string) (ok bool, err error) { 108 | token := ctx.GetString("token") 109 | if len(model) <= 6 || model[:6] != Model+"/" { 110 | return 111 | } 112 | 113 | slice := api.env.GetStringSlice("lmsys.model") 114 | for _, mod := range append(slice, modelSlice...) { 115 | if model[6:] != mod { 116 | continue 117 | } 118 | 119 | password := api.env.GetString("server.password") 120 | if password != "" && password != token { 121 | err = response.UnauthorizedError 122 | return 123 | } 124 | 125 | ok = true 126 | } 127 | return 128 | } 129 | 130 | func (api *api) Models() (result []model.Model) { 131 | slice := api.env.GetStringSlice("lmsys.model") 132 | for _, mod := range append(slice, modelSlice...) { 133 | result = append(result, model.Model{ 134 | Id: "lmsys/" + mod, 135 | Object: "model", 136 | Created: 1686935002, 137 | By: "lmsys-adapter", 138 | }) 139 | } 140 | return 141 | } 142 | 143 | func (api *api) ToolChoice(ctx *gin.Context) (ok bool, err error) { 144 | var ( 145 | proxied = api.env.GetString("server.proxied") 146 | completion = common.GetGinCompletion(ctx) 147 | ) 148 | 149 | if toolChoice(ctx, api.env, proxied, completion) { 150 | ok = true 151 | } 152 | return 153 | } 154 | 155 | func (api *api) Completion(ctx *gin.Context) (err error) { 156 | var ( 157 | proxied = api.env.GetString("server.proxied") 158 | completion = common.GetGinCompletion(ctx) 159 | ) 160 | 161 | completion.Model = completion.Model[6:] 162 | newMessages, err := mergeMessages(ctx, completion) 163 | if err != nil { 164 | response.Error(ctx, -1, err) 165 | return 166 | } 167 | ctx.Set(ginTokens, response.CalcTokens(newMessages)) 168 | ch, err := fetch(ctx.Request.Context(), api.env, proxied, newMessages, 169 | options{ 170 | model: completion.Model, 171 | temperature: completion.Temperature, 172 | topP: completion.TopP, 173 | maxTokens: completion.MaxTokens, 174 | }) 175 | if err != nil { 176 | logger.Error(err) 177 | return 178 | } 179 | 180 | content := waitResponse(ctx, ch, completion.Stream) 181 | if content == "" && response.NotResponse(ctx) { 182 | response.Error(ctx, -1, "EMPTY RESPONSE") 183 | } 184 | return 185 | } 186 | -------------------------------------------------------------------------------- /relay/llm/lmsys/ctor.go: -------------------------------------------------------------------------------- 1 | package lmsys 2 | 3 | import ( 4 | "chatgpt-adapter/core/gin/inter" 5 | "github.com/iocgo/sdk/env" 6 | 7 | _ "github.com/iocgo/sdk" 8 | ) 9 | 10 | // @Inject(name = "lmsys-adapter") 11 | func New(env *env.Environment) inter.Adapter { 12 | return &api{env: env} 13 | } 14 | -------------------------------------------------------------------------------- /relay/llm/lmsys/message.go: -------------------------------------------------------------------------------- 1 | package lmsys 2 | 3 | import ( 4 | "chatgpt-adapter/core/common" 5 | "chatgpt-adapter/core/common/vars" 6 | "chatgpt-adapter/core/gin/model" 7 | "chatgpt-adapter/core/gin/response" 8 | "chatgpt-adapter/core/logger" 9 | "errors" 10 | "github.com/gin-gonic/gin" 11 | "net/http" 12 | "strings" 13 | "sync" 14 | "time" 15 | ) 16 | 17 | const ginTokens = "__tokens__" 18 | 19 | func waitMessage(chatResponse chan string, cancel func(str string) bool) (content string, err 
error) { 20 | 21 | for { 22 | message, ok := <-chatResponse 23 | if !ok { 24 | break 25 | } 26 | 27 | if strings.HasPrefix(message, "error: ") { 28 | return "", errors.New(strings.TrimPrefix(message, "error: ")) 29 | } 30 | 31 | message = strings.TrimPrefix(message, "text: ") 32 | logger.Debug("----- raw -----") 33 | logger.Debug(message) 34 | if len(message) > 0 { 35 | content += message 36 | if cancel != nil && cancel(content) { 37 | return content, nil 38 | } 39 | } 40 | } 41 | 42 | return content, nil 43 | } 44 | 45 | func waitResponse(ctx *gin.Context, chatResponse chan string, sse bool) (content string) { 46 | created := time.Now().Unix() 47 | logger.Info("waitResponse ...") 48 | tokens := ctx.GetInt(ginTokens) 49 | matchers := common.GetGinMatchers(ctx) 50 | onceExec := sync.OnceFunc(func() { 51 | if !sse { 52 | ctx.Writer.WriteHeader(http.StatusOK) 53 | } 54 | }) 55 | 56 | for { 57 | raw, ok := <-chatResponse 58 | if !ok { 59 | raw = response.ExecMatchers(matchers, "", true) 60 | if raw != "" && sse { 61 | response.SSEResponse(ctx, Model, raw, created) 62 | } 63 | content += raw 64 | break 65 | } 66 | 67 | if strings.HasPrefix(raw, "error: ") { 68 | err := strings.TrimPrefix(raw, "error: ") 69 | logger.Error(err) 70 | if response.NotSSEHeader(ctx) { 71 | logger.Error(err) 72 | response.Error(ctx, -1, err) 73 | } 74 | return 75 | } 76 | 77 | raw = strings.TrimPrefix(raw, "text: ") 78 | contentL := len(raw) 79 | if contentL <= 0 { 80 | continue 81 | } 82 | 83 | logger.Debug("----- raw -----") 84 | logger.Debug(raw) 85 | onceExec() 86 | 87 | raw = response.ExecMatchers(matchers, raw, false) 88 | if len(raw) == 0 { 89 | continue 90 | } 91 | 92 | if raw == response.EOF { 93 | break 94 | } 95 | 96 | if sse && len(raw) > 0 { 97 | response.SSEResponse(ctx, Model, raw, created) 98 | } 99 | content += raw 100 | } 101 | 102 | if content == "" && response.NotSSEHeader(ctx) { 103 | return 104 | } 105 | 106 | ctx.Set(vars.GinCompletionUsage, response.CalcUsageTokens(content, tokens)) 107 | if !sse { 108 | response.Response(ctx, Model, content) 109 | } else { 110 | response.SSEResponse(ctx, Model, "[DONE]", created) 111 | } 112 | return 113 | } 114 | 115 | func mergeMessages(ctx *gin.Context, completion model.Completion) (newMessages string, err error) { 116 | var ( 117 | messages = completion.Messages 118 | specialized = ctx.GetBool("specialized") 119 | isC = response.IsClaude(ctx, completion.Model) 120 | ) 121 | 122 | messageL := len(messages) 123 | if specialized && isC && messageL == 1 { 124 | newMessages = messages[0].GetString("content") 125 | return 126 | } 127 | 128 | var ( 129 | pos = 0 130 | contents []string 131 | ) 132 | 133 | for { 134 | if pos > messageL-1 { 135 | break 136 | } 137 | 138 | message := messages[pos] 139 | role, end := response.ConvertRole(ctx, message.GetString("role")) 140 | contents = append(contents, role+message.GetString("content")+end) 141 | pos++ 142 | } 143 | 144 | newMessages = strings.Join(contents, "") 145 | if strings.HasSuffix(newMessages, "<|end|>\n\n") { 146 | newMessages = newMessages[:len(newMessages)-9] 147 | } 148 | return 149 | } 150 | -------------------------------------------------------------------------------- /relay/llm/lmsys/toolcall.go: -------------------------------------------------------------------------------- 1 | package lmsys 2 | 3 | import ( 4 | "chatgpt-adapter/core/common/toolcall" 5 | "chatgpt-adapter/core/common/vars" 6 | "chatgpt-adapter/core/gin/model" 7 | "chatgpt-adapter/core/gin/response" 8 | "chatgpt-adapter/core/logger" 
9 | "github.com/gin-gonic/gin" 10 | "github.com/iocgo/sdk/env" 11 | ) 12 | 13 | func toolChoice(ctx *gin.Context, env *env.Environment, proxies string, completion model.Completion) bool { 14 | logger.Info("completeTools ...") 15 | echo := ctx.GetBool(vars.GinEcho) 16 | 17 | exec, err := toolcall.ToolChoice(ctx, completion, func(message string) (string, error) { 18 | if echo { 19 | logger.Infof("toolCall message: \n%s", message) 20 | return "", nil 21 | } 22 | 23 | ch, err := fetch(ctx.Request.Context(), env, proxies, message, 24 | options{ 25 | model: completion.Model, 26 | temperature: completion.Temperature, 27 | topP: completion.TopP, 28 | maxTokens: completion.MaxTokens, 29 | }) 30 | if err != nil { 31 | return "", err 32 | } 33 | 34 | return waitMessage(ch, toolcall.Cancel) 35 | }) 36 | 37 | if err != nil { 38 | logger.Error(err) 39 | response.Error(ctx, -1, err) 40 | return true 41 | } 42 | 43 | return exec 44 | } 45 | -------------------------------------------------------------------------------- /relay/llm/qodo/adapter.go: -------------------------------------------------------------------------------- 1 | package qodo 2 | 3 | import ( 4 | "chatgpt-adapter/core/common" 5 | "chatgpt-adapter/core/gin/inter" 6 | "chatgpt-adapter/core/gin/model" 7 | "chatgpt-adapter/core/gin/response" 8 | "chatgpt-adapter/core/logger" 9 | "github.com/gin-gonic/gin" 10 | "github.com/iocgo/sdk/env" 11 | ) 12 | 13 | var ( 14 | Model = "qodo" 15 | ) 16 | 17 | type api struct { 18 | inter.BaseAdapter 19 | 20 | env *env.Environment 21 | } 22 | 23 | func (api *api) Match(ctx *gin.Context, model string) (ok bool, err error) { 24 | if len(model) <= 5 { 25 | return 26 | } 27 | 28 | slice := api.env.GetStringSlice("qodo.model") 29 | for _, mod := range append(slice, []string{ 30 | "claude-3-5-sonnet", 31 | "claude-3-7-sonnet", 32 | "gpt-4o", 33 | "o1", 34 | "o3-mini", 35 | "o3-mini-high", 36 | "gemini-2.0-flash", 37 | "deepseek-r1", 38 | "deepseek-r1-full", 39 | }...) { 40 | if model[5:] == mod { 41 | ok = true 42 | return 43 | } 44 | } 45 | return 46 | } 47 | 48 | func (api *api) Models() (slice []model.Model) { 49 | for _, mod := range append(api.env.GetStringSlice("qodo.model"), []string{ 50 | "claude-3-5-sonnet", 51 | "claude-3-7-sonnet", 52 | "gpt-4o", 53 | "o1", 54 | "o3-mini", 55 | "o3-mini-high", 56 | "gemini-2.0-flash", 57 | "deepseek-r1", 58 | "deepseek-r1-full", 59 | }...) 
{ 60 | slice = append(slice, model.Model{ 61 | Id: Model + "/" + mod, 62 | Object: "model", 63 | Created: 1686935002, 64 | By: Model + "-adapter", 65 | }) 66 | } 67 | return 68 | } 69 | 70 | func (api *api) ToolChoice(ctx *gin.Context) (ok bool, err error) { 71 | var ( 72 | cookie = ctx.GetString("token") 73 | proxied = api.env.GetString("server.proxied") 74 | completion = common.GetGinCompletion(ctx) 75 | ) 76 | 77 | if toolChoice(ctx, api.env, cookie, proxied, completion) { 78 | ok = true 79 | } 80 | return 81 | } 82 | 83 | func (api *api) Completion(ctx *gin.Context) (err error) { 84 | var ( 85 | proxied = api.env.GetString("server.proxied") 86 | completion = common.GetGinCompletion(ctx) 87 | ) 88 | 89 | request, err := convertRequest(ctx, api.env, completion) 90 | if err != nil { 91 | logger.Error(err) 92 | return 93 | } 94 | 95 | r, err := fetch(ctx, proxied, request) 96 | if err != nil { 97 | logger.Error(err) 98 | return 99 | } 100 | 101 | content := waitResponse(ctx, r, completion.Stream) 102 | if content == "" && response.NotResponse(ctx) { 103 | response.Error(ctx, -1, "EMPTY RESPONSE") 104 | } 105 | return 106 | } 107 | -------------------------------------------------------------------------------- /relay/llm/qodo/ctor.go: -------------------------------------------------------------------------------- 1 | package qodo 2 | 3 | import ( 4 | "chatgpt-adapter/core/gin/inter" 5 | "github.com/iocgo/sdk/env" 6 | 7 | _ "github.com/iocgo/sdk" 8 | ) 9 | 10 | // @Inject(name = "qodo-adapter") 11 | func New(env *env.Environment) inter.Adapter { 12 | return &api{env: env} 13 | } 14 | -------------------------------------------------------------------------------- /relay/llm/qodo/message.go: -------------------------------------------------------------------------------- 1 | package qodo 2 | 3 | import ( 4 | "bufio" 5 | "chatgpt-adapter/core/gin/model" 6 | "encoding/json" 7 | "errors" 8 | "io" 9 | "net/http" 10 | "strings" 11 | "sync" 12 | "time" 13 | 14 | "chatgpt-adapter/core/common" 15 | "chatgpt-adapter/core/common/vars" 16 | "chatgpt-adapter/core/gin/response" 17 | "chatgpt-adapter/core/logger" 18 | "github.com/gin-gonic/gin" 19 | "github.com/iocgo/sdk/env" 20 | ) 21 | 22 | const ( 23 | ginTokens = "__tokens__" 24 | ) 25 | 26 | type qodoResponse struct { 27 | SessionId string `json:"session_id"` 28 | Data struct { 29 | Message string `json:"message"` 30 | ErrorCode string `json:"error_code"` 31 | Tool string `json:"tool"` 32 | ToolArgs struct { 33 | Data string `json:"data"` 34 | } `json:"tool_args"` 35 | } `json:"data"` 36 | Type string `json:"type"` 37 | SubType string `json:"sub_type"` 38 | } 39 | 40 | func waitMessage(r *http.Response, cancel func(str string) bool) (content string, err error) { 41 | defer r.Body.Close() 42 | reader := bufio.NewReader(r.Body) 43 | var dataBytes []byte 44 | for { 45 | dataBytes, _, err = reader.ReadLine() 46 | if err == io.EOF { 47 | break 48 | } 49 | 50 | if err != nil { 51 | return 52 | } 53 | 54 | var res qodoResponse 55 | if len(dataBytes) == 0 { 56 | continue 57 | } 58 | 59 | err = json.Unmarshal(dataBytes, &res) 60 | if err != nil { 61 | logger.Warn(err) 62 | continue 63 | } 64 | 65 | delta := res.Data.ToolArgs 66 | if delta.Data == "" { 67 | continue 68 | } 69 | 70 | var obj model.Keyv[interface{}] 71 | if err = json.Unmarshal([]byte(delta.Data), &obj); err != nil { 72 | logger.Warn(err) 73 | continue 74 | } 75 | 76 | obj = obj.GetKeyv("data") 77 | if obj == nil { 78 | continue 79 | } 80 | 81 | if obj.GetString("title") != "Chat" { 82 | 
continue 83 | } 84 | 85 | raw := obj.GetString("content") 86 | logger.Debug("----- raw -----") 87 | logger.Debug(raw) 88 | content += raw 89 | if cancel != nil && cancel(content) { 90 | return content, nil 91 | } 92 | } 93 | return 94 | } 95 | 96 | func waitResponse(ctx *gin.Context, r *http.Response, sse bool) (content string) { 97 | created := time.Now().Unix() 98 | logger.Infof("waitResponse ...") 99 | tokens := ctx.GetInt(ginTokens) 100 | thinkReason := env.Env.GetBool("server.think_reason") 101 | reasoningContent := "" 102 | 103 | onceExec := sync.OnceFunc(func() { 104 | if !sse { 105 | ctx.Writer.WriteHeader(http.StatusOK) 106 | } 107 | }) 108 | 109 | var ( 110 | matchers = common.GetGinMatchers(ctx) 111 | ) 112 | 113 | //matchers = addUnpackMatcher(env.Env, matchers) 114 | 115 | defer r.Body.Close() 116 | reader := bufio.NewReader(r.Body) 117 | think := 0 118 | for { 119 | dataBytes, _, err := reader.ReadLine() 120 | if err == io.EOF { 121 | raw := response.ExecMatchers(matchers, "", true) 122 | if raw != "" && sse { 123 | response.SSEResponse(ctx, Model, raw, created) 124 | } 125 | content += raw 126 | break 127 | } 128 | 129 | if asError(ctx, err) { 130 | return 131 | } 132 | 133 | var res qodoResponse 134 | if len(dataBytes) == 0 { 135 | continue 136 | } 137 | 138 | err = json.Unmarshal(dataBytes, &res) 139 | if err != nil { 140 | logger.Warn(err) 141 | continue 142 | } 143 | 144 | if res.Data.ErrorCode != "" { 145 | asError(ctx, errors.New(res.Data.Message)) 146 | return 147 | } 148 | 149 | if res.SubType == "code_implementation_end" { 150 | continue 151 | } 152 | 153 | reasonContent := "" 154 | delta := res.Data.ToolArgs 155 | if delta.Data == "" { 156 | continue 157 | } 158 | 159 | var obj model.Keyv[interface{}] 160 | if err = json.Unmarshal([]byte(delta.Data), &obj); err != nil { 161 | logger.Warn(err) 162 | continue 163 | } 164 | 165 | obj = obj.GetKeyv("data") 166 | if obj == nil { 167 | continue 168 | } 169 | 170 | if obj.GetString("title") != "Chat" { 171 | continue 172 | } 173 | 174 | raw := obj.GetString("content") 175 | if thinkReason && think == 0 { 176 | if strings.HasPrefix(raw, "<think>") { 177 | reasonContent = raw[7:] 178 | raw = "" 179 | think = 1 180 | } 181 | } 182 | 183 | if thinkReason && think == 1 { 184 | reasonContent = raw 185 | if strings.HasPrefix(raw, "</think>") { 186 | reasonContent = "" 187 | think = 2 188 | } 189 | 190 | raw = "" 191 | logger.Debug("----- think raw -----") 192 | logger.Debug(reasonContent) 193 | reasoningContent += reasonContent 194 | goto label 195 | } 196 | 197 | logger.Debug("----- raw -----") 198 | logger.Debug(raw) 199 | onceExec() 200 | 201 | raw = response.ExecMatchers(matchers, raw, false) 202 | if len(raw) == 0 { 203 | continue 204 | } 205 | 206 | label: 207 | if raw == response.EOF { 208 | break 209 | } 210 | 211 | if sse { 212 | response.ReasonSSEResponse(ctx, Model, raw, reasonContent, created) 213 | } 214 | content += raw 215 | } 216 | 217 | if content == "" && response.NotSSEHeader(ctx) { 218 | return 219 | } 220 | ctx.Set(vars.GinCompletionUsage, response.CalcUsageTokens(reasoningContent+content, tokens)) 221 | if !sse { 222 | response.ReasonResponse(ctx, Model, content, reasoningContent) 223 | } else { 224 | response.SSEResponse(ctx, Model, "[DONE]", created) 225 | } 226 | return 227 | } 228 | 229 | func asError(ctx *gin.Context, err error) (ok bool) { 230 | if err == nil { 231 | return 232 | } 233 | 234 | logger.Error(err) 235 | if response.NotSSEHeader(ctx) { 236 | response.Error(ctx, -1, err) 237 | } 238 | ok = true 239 | 
return 240 | } 241 | -------------------------------------------------------------------------------- /relay/llm/qodo/toolcall.go: -------------------------------------------------------------------------------- 1 | package qodo 2 | 3 | import ( 4 | "chatgpt-adapter/core/common/toolcall" 5 | "chatgpt-adapter/core/common/vars" 6 | "chatgpt-adapter/core/gin/model" 7 | "chatgpt-adapter/core/gin/response" 8 | "chatgpt-adapter/core/logger" 9 | "github.com/gin-gonic/gin" 10 | "github.com/iocgo/sdk/env" 11 | ) 12 | 13 | func toolChoice(ctx *gin.Context, env *env.Environment, proxies, cookie string, completion model.Completion) bool { 14 | logger.Info("completeTools ...") 15 | echo := ctx.GetBool(vars.GinEcho) 16 | 17 | exec, err := toolcall.ToolChoice(ctx, completion, func(message string) (string, error) { 18 | if echo { 19 | logger.Infof("toolCall message: \n%s", message) 20 | return "", nil 21 | } 22 | completion.Messages = []model.Keyv[interface{}]{ 23 | { 24 | "role": "user", 25 | "content": message, 26 | }, 27 | } 28 | 29 | request, err := convertRequest(ctx, env, completion) 30 | if err != nil { 31 | return "", err 32 | } 33 | 34 | r, err := fetch(ctx, proxies, request) 35 | if err != nil { 36 | return "", err 37 | } 38 | 39 | return waitMessage(r, toolcall.Cancel) 40 | }) 41 | 42 | if err != nil { 43 | logger.Error(err) 44 | response.Error(ctx, -1, err) 45 | return true 46 | } 47 | 48 | return exec 49 | } 50 | -------------------------------------------------------------------------------- /relay/llm/v1/adapter.go: -------------------------------------------------------------------------------- 1 | package v1 2 | 3 | import ( 4 | "net/http" 5 | "strings" 6 | 7 | "chatgpt-adapter/core/common" 8 | "chatgpt-adapter/core/common/inited" 9 | "chatgpt-adapter/core/gin/inter" 10 | "chatgpt-adapter/core/gin/model" 11 | "chatgpt-adapter/core/gin/response" 12 | "chatgpt-adapter/core/logger" 13 | "github.com/bincooo/emit.io" 14 | "github.com/gin-gonic/gin" 15 | "github.com/iocgo/sdk/env" 16 | ) 17 | 18 | var ( 19 | Model = "custom" 20 | schema = make([]map[string]interface{}, 0) 21 | key = "__custom-url__" 22 | upKey = "__custom-proxies__" 23 | modKey = "__custom-model__" 24 | tcKey = "__custom-toolCall__" 25 | ) 26 | 27 | type api struct { 28 | inter.BaseAdapter 29 | env *env.Environment 30 | } 31 | 32 | func init() { 33 | inited.AddInitialized(func(env *env.Environment) { 34 | llm := env.Get("custom-llm") 35 | if slice, ok := llm.([]interface{}); ok { 36 | for _, it := range slice { 37 | item, o := it.(map[string]interface{}) 38 | if !o { 39 | continue 40 | } 41 | schema = append(schema, item) 42 | } 43 | } 44 | }) 45 | } 46 | 47 | func (*api) Match(ctx *gin.Context, model string) (ok bool, _ error) { 48 | for _, it := range schema { 49 | if prefix, o := it["prefix"].(string); o && strings.HasPrefix(model, prefix+"/") { 50 | ctx.Set(key, it["reversal"]) 51 | ctx.Set(upKey, it["proxied"] == "true") 52 | ctx.Set(modKey, model[len(prefix)+1:]) 53 | ctx.Set(tcKey, it["tc"] == "true") 54 | ok = true 55 | return 56 | } 57 | } 58 | return 59 | } 60 | 61 | func (*api) Models() []model.Model { 62 | return []model.Model{ 63 | { 64 | Id: "custom", 65 | Object: "model", 66 | Created: 1686935002, 67 | By: "custom-adapter", 68 | }, 69 | } 70 | } 71 | 72 | func (api *api) ToolChoice(ctx *gin.Context) (ok bool, err error) { 73 | var ( 74 | proxies = api.env.GetString("server.proxied") 75 | completion = common.GetGinCompletion(ctx) 76 | ) 77 | if !ctx.GetBool(tcKey) { 78 | return 79 | } 80 | if toolChoice(ctx, 
proxies, completion) { 81 | ok = true 82 | } 83 | return 84 | } 85 | 86 | func (api *api) Completion(ctx *gin.Context) (err error) { 87 | var ( 88 | cookie = ctx.GetString("token") 89 | proxies = api.env.GetString("server.proxied") 90 | completion = common.GetGinCompletion(ctx) 91 | ) 92 | 93 | r, err := fetch(ctx, proxies, cookie, completion) 94 | if err != nil { 95 | logger.Error(err) 96 | return 97 | } 98 | 99 | defer r.Body.Close() 100 | content := waitResponse(ctx, r, completion.Stream) 101 | if content == "" && response.NotResponse(ctx) { 102 | logger.Error("EMPTY RESPONSE") 103 | } 104 | return 105 | } 106 | 107 | func (api *api) Embedding(ctx *gin.Context) (err error) { 108 | embedding := common.GetGinEmbedding(ctx) 109 | embedding.Model = ctx.GetString(modKey) 110 | var ( 111 | token = ctx.GetString("token") 112 | proxies = api.env.GetString("proxied") 113 | baseUrl = ctx.GetString(key) 114 | ) 115 | if !ctx.GetBool(upKey) { 116 | proxies = "" 117 | } 118 | 119 | resp, err := emit.ClientBuilder(common.HTTPClient). 120 | Proxies(proxies). 121 | Context(ctx). 122 | POST(baseUrl+"/embeddings"). 123 | Header("Authorization", "Bearer "+token). 124 | JSONHeader(). 125 | Body(embedding).DoC(emit.Status(http.StatusOK), emit.IsJSON) 126 | if err != nil { 127 | logger.Error(err) 128 | return 129 | } 130 | 131 | obj, err := emit.ToMap(resp) 132 | if err != nil { 133 | logger.Error(err) 134 | return 135 | } 136 | 137 | ctx.JSON(http.StatusOK, obj) 138 | return 139 | } 140 | -------------------------------------------------------------------------------- /relay/llm/v1/ctor.go: -------------------------------------------------------------------------------- 1 | package v1 2 | 3 | import ( 4 | "chatgpt-adapter/core/gin/inter" 5 | "github.com/iocgo/sdk/env" 6 | 7 | _ "github.com/iocgo/sdk" 8 | ) 9 | 10 | // @Inject(name = "v1-adapter") 11 | func New(env *env.Environment) inter.Adapter { return &api{env: env} } 12 | -------------------------------------------------------------------------------- /relay/llm/v1/fetch.go: -------------------------------------------------------------------------------- 1 | package v1 2 | 3 | import ( 4 | "encoding/json" 5 | "net/http" 6 | 7 | "chatgpt-adapter/core/common" 8 | "chatgpt-adapter/core/gin/model" 9 | "chatgpt-adapter/core/gin/response" 10 | "github.com/bincooo/emit.io" 11 | "github.com/gin-gonic/gin" 12 | ) 13 | 14 | func fetch(ctx *gin.Context, proxies, token string, completion model.Completion) (r *http.Response, err error) { 15 | var ( 16 | baseUrl = ctx.GetString(key) 17 | ) 18 | 19 | if !ctx.GetBool(upKey) { 20 | proxies = "" 21 | } 22 | 23 | if completion.TopP == 0 { 24 | completion.TopP = 1 25 | } 26 | 27 | if completion.Temperature == 0 { 28 | completion.Temperature = 0.7 29 | } 30 | 31 | if completion.MaxTokens == 0 { 32 | completion.MaxTokens = 1024 33 | } 34 | 35 | tokens := 0 36 | for _, message := range completion.Messages { 37 | tokens += response.CalcTokens(message.GetString("content")) 38 | } 39 | ctx.Set(ginTokens, tokens) // prompt token count (int), read later via ctx.GetInt(ginTokens) 40 | 41 | completion.Stream = true 42 | completion.Model = ctx.GetString(modKey) 43 | obj, err := toMap(completion) 44 | if err != nil { 45 | return nil, err 46 | } 47 | 48 | if completion.TopK == 0 { 49 | delete(obj, "top_k") 50 | } 51 | 52 | r, err = emit.ClientBuilder(common.HTTPClient). 53 | Proxies(proxies). 54 | Context(ctx). 55 | POST(baseUrl+"/chat/completions"). 56 | Header("Authorization", "Bearer "+token). 57 | JSONHeader(). 58 | Body(obj). 
59 | DoC(emit.Status(http.StatusOK), emit.IsSTREAM) 60 | return 61 | } 62 | 63 | func toMap(obj interface{}) (mo map[string]interface{}, err error) { 64 | if obj == nil { 65 | return 66 | } 67 | 68 | bytes, err := json.Marshal(obj) 69 | if err != nil { 70 | return 71 | } 72 | 73 | err = json.Unmarshal(bytes, &mo) 74 | return 75 | } 76 | -------------------------------------------------------------------------------- /relay/llm/v1/message.go: -------------------------------------------------------------------------------- 1 | package v1 2 | 3 | import ( 4 | "bufio" 5 | "encoding/json" 6 | "net/http" 7 | "sync" 8 | "time" 9 | 10 | "chatgpt-adapter/core/common" 11 | "chatgpt-adapter/core/common/toolcall" 12 | "chatgpt-adapter/core/common/vars" 13 | "chatgpt-adapter/core/gin/model" 14 | "chatgpt-adapter/core/gin/response" 15 | "chatgpt-adapter/core/logger" 16 | "github.com/gin-gonic/gin" 17 | ) 18 | 19 | const ginTokens = "__tokens__" 20 | 21 | func waitMessage(r *http.Response, cancel func(str string) bool) (content string, err error) { 22 | defer r.Body.Close() 23 | 24 | scanner := bufio.NewScanner(r.Body) 25 | for { 26 | if !scanner.Scan() { 27 | if err = scanner.Err(); err != nil { 28 | logger.Error(err) 29 | } 30 | break 31 | } 32 | 33 | data := scanner.Text() 34 | if len(data) < 6 || data[:6] != "data: " { 35 | continue 36 | } 37 | 38 | data = data[6:] 39 | if data == "[DONE]" { 40 | break 41 | } 42 | 43 | var chat model.Response 44 | err = json.Unmarshal([]byte(data), &chat) 45 | if err != nil { 46 | logger.Error(err) 47 | continue 48 | } 49 | 50 | if len(chat.Choices) == 0 { 51 | continue 52 | } 53 | 54 | choice := chat.Choices[0] 55 | if choice.Delta.Role != "" && choice.Delta.Role != "assistant" { 56 | continue 57 | } 58 | 59 | if choice.FinishReason != nil && *choice.FinishReason == "stop" { 60 | continue 61 | } 62 | 63 | raw := choice.Delta.Content 64 | if len(raw) == 0 { 65 | continue 66 | } 67 | 68 | content += raw 69 | if cancel != nil && cancel(content) { 70 | return content, nil 71 | } 72 | } 73 | 74 | return content, nil 75 | } 76 | 77 | func waitResponse(ctx *gin.Context, r *http.Response, sse bool) (content string) { 78 | defer r.Body.Close() 79 | 80 | var ( 81 | matchers = common.GetGinMatchers(ctx) 82 | ) 83 | 84 | logger.Info("waitResponse ...") 85 | tokens := ctx.GetInt(ginTokens) 86 | completion := common.GetGinCompletion(ctx) 87 | toolId := common.GetGinToolValue(ctx).GetString("id") 88 | toolId = toolcall.Query(toolId, completion.Tools) 89 | var toolCall map[string]interface{} 90 | htc := false 91 | 92 | onceExec := sync.OnceFunc(func() { 93 | if !sse { 94 | ctx.Writer.WriteHeader(http.StatusOK) 95 | } 96 | }) 97 | 98 | scanner := bufio.NewScanner(r.Body) 99 | for { 100 | if !scanner.Scan() { 101 | if err := scanner.Err(); err != nil { 102 | logger.Error(err) 103 | } 104 | break 105 | } 106 | 107 | data := scanner.Text() 108 | logger.Tracef("--------- ORIGINAL MESSAGE ---------") 109 | logger.Tracef("%s", data) 110 | 111 | if len(data) < 6 || data[:6] != "data: " { 112 | continue 113 | } 114 | 115 | data = data[6:] 116 | if data == "[DONE]" { 117 | raw := response.ExecMatchers(matchers, "", true) 118 | if raw != "" && sse { 119 | response.Event(ctx, "", raw) 120 | } 121 | content += raw 122 | if htc && !sse { 123 | toolCall["args"] = content 124 | } 125 | break 126 | } 127 | 128 | var chat model.Response 129 | err := json.Unmarshal([]byte(data), &chat) 130 | if err != nil { 131 | logger.Error(err.Error()) 132 | continue 133 | } 134 | 135 | if len(chat.Choices) == 0 { 
136 | continue 137 | } 138 | 139 | choice := chat.Choices[0] 140 | if choice.Delta.Role != "" && choice.Delta.Role != "assistant" { 141 | continue 142 | } 143 | 144 | if choice.Delta.ToolCalls != nil && len(choice.Delta.ToolCalls) > 0 { 145 | htc = true 146 | if sse { 147 | response.Event(ctx, "", chat) 148 | continue 149 | } 150 | 151 | keyv := choice.Delta.ToolCalls[0].GetKeyv("function") 152 | if name := keyv.GetString("name"); name != "" { 153 | toolCall = map[string]interface{}{ 154 | "name": name, 155 | "args": "", 156 | } 157 | } 158 | content += keyv.GetString("arguments") 159 | continue 160 | } 161 | 162 | if choice.FinishReason != nil && *choice.FinishReason == "stop" { 163 | if chat.Usage == nil { 164 | chat.Usage = response.CalcUsageTokens(content, tokens) 165 | } 166 | ctx.Set(vars.GinCompletionUsage, chat.Usage) 167 | if sse { 168 | response.Event(ctx, "", chat) 169 | } 170 | continue 171 | } 172 | 173 | raw := choice.Delta.Content 174 | logger.Debug("----- raw -----") 175 | logger.Debug(raw) 176 | onceExec() 177 | 178 | raw = response.ExecMatchers(matchers, raw, false) 179 | if len(raw) == 0 { 180 | continue 181 | } 182 | 183 | if raw == response.EOF { 184 | break 185 | } 186 | 187 | if !htc && toolId != "-1" { 188 | toolCall = map[string]interface{}{ 189 | "name": toolId, 190 | "args": "", 191 | } 192 | break 193 | } 194 | 195 | choice.Delta.Content = raw 196 | if sse && len(raw) > 0 { 197 | response.Event(ctx, "", chat) 198 | } 199 | content += raw 200 | } 201 | 202 | if toolCall != nil { 203 | if !sse { 204 | response.ToolCallResponse(ctx, Model, toolCall["name"].(string), toolCall["args"].(string)) 205 | } else { 206 | response.SSEToolCallResponse(ctx, Model, toolCall["name"].(string), toolCall["args"].(string), time.Now().Unix()) 207 | } 208 | return 209 | } 210 | 211 | if content == "" && response.NotSSEHeader(ctx) { 212 | return 213 | } 214 | 215 | if !sse { 216 | response.Response(ctx, Model, content) 217 | } else { 218 | response.Event(ctx, "", "[DONE]") 219 | } 220 | return 221 | } 222 | -------------------------------------------------------------------------------- /relay/llm/v1/toolcall.go: -------------------------------------------------------------------------------- 1 | package v1 2 | 3 | import ( 4 | "chatgpt-adapter/core/common/toolcall" 5 | "chatgpt-adapter/core/gin/model" 6 | "chatgpt-adapter/core/gin/response" 7 | "chatgpt-adapter/core/logger" 8 | "github.com/gin-gonic/gin" 9 | ) 10 | 11 | func toolChoice(ctx *gin.Context, proxies string, completion model.Completion) bool { 12 | logger.Info("tool choice ...") 13 | cookie := ctx.GetString("token") 14 | exec, err := toolcall.ToolChoice(ctx, completion, func(message string) (string, error) { 15 | completion.Stream = true 16 | completion.Messages = []model.Keyv[interface{}]{ 17 | { 18 | "role": "user", 19 | "content": message, 20 | }, 21 | } 22 | 23 | r, err := fetch(ctx, proxies, cookie, completion) 24 | if err != nil { 25 | return "", err 26 | } 27 | 28 | return waitMessage(r, toolcall.Cancel) 29 | }) 30 | 31 | if err != nil { 32 | logger.Error(err) 33 | response.Error(ctx, -1, err) 34 | return true 35 | } 36 | 37 | return exec 38 | } 39 | -------------------------------------------------------------------------------- /relay/llm/windsurf/adapter.go: -------------------------------------------------------------------------------- 1 | package windsurf 2 | 3 | import ( 4 | "chatgpt-adapter/core/common" 5 | "chatgpt-adapter/core/common/vars" 6 | "chatgpt-adapter/core/gin/inter" 7 | 
"chatgpt-adapter/core/gin/model" 8 | "chatgpt-adapter/core/gin/response" 9 | "chatgpt-adapter/core/logger" 10 | "github.com/gin-gonic/gin" 11 | "github.com/iocgo/sdk/env" 12 | "strings" 13 | ) 14 | 15 | var ( 16 | Model = "windsurf" 17 | ) 18 | 19 | type api struct { 20 | inter.BaseAdapter 21 | 22 | env *env.Environment 23 | } 24 | 25 | func (api *api) Match(ctx *gin.Context, model string) (ok bool, err error) { 26 | if len(model) <= 9 || Model+"/" != model[:9] { 27 | return 28 | } 29 | for mod := range mapModel { 30 | if model[9:] == mod { 31 | if strings.HasPrefix(mod, "deepseek") { 32 | completion := common.GetGinCompletion(ctx) 33 | completion.StopSequences = append(completion.StopSequences, "", "", "") 34 | ctx.Set(vars.GinCompletion, completion) 35 | } 36 | ok = true 37 | return 38 | } 39 | } 40 | return 41 | } 42 | 43 | func (*api) Models() (slice []model.Model) { 44 | for mod := range mapModel { 45 | slice = append(slice, model.Model{ 46 | Id: Model + "/" + mod, 47 | Object: "model", 48 | Created: 1686935002, 49 | By: Model + "-adapter", 50 | }) 51 | } 52 | return 53 | } 54 | 55 | func (api *api) ToolChoice(ctx *gin.Context) (ok bool, err error) { 56 | var ( 57 | cookie = ctx.GetString("token") 58 | completion = common.GetGinCompletion(ctx) 59 | ) 60 | 61 | if toolChoice(ctx, api.env, cookie, completion) { 62 | ok = true 63 | } 64 | return 65 | } 66 | 67 | func (api *api) Completion(ctx *gin.Context) (err error) { 68 | var ( 69 | cookie = ctx.GetString("token") 70 | completion = common.GetGinCompletion(ctx) 71 | ) 72 | 73 | token, err := genToken(ctx.Request.Context(), api.env.GetString("server.proxied"), cookie) 74 | if err != nil { 75 | return 76 | } 77 | 78 | buffer, err := convertRequest(completion, cookie, token) 79 | if err != nil { 80 | return 81 | } 82 | 83 | r, err := fetch(ctx.Request.Context(), api.env, buffer) 84 | if err != nil { 85 | logger.Error(err) 86 | return 87 | } 88 | 89 | content := waitResponse(ctx, r, completion.Stream) 90 | if content == "" && response.NotResponse(ctx) { 91 | response.Error(ctx, -1, "EMPTY RESPONSE") 92 | } 93 | return 94 | } 95 | -------------------------------------------------------------------------------- /relay/llm/windsurf/ctor.go: -------------------------------------------------------------------------------- 1 | package windsurf 2 | 3 | import ( 4 | "chatgpt-adapter/core/gin/inter" 5 | "github.com/iocgo/sdk/env" 6 | 7 | _ "github.com/iocgo/sdk" 8 | ) 9 | 10 | // @Inject(name = "windsurf-adapter") 11 | func New(env *env.Environment) inter.Adapter { 12 | return &api{env: env} 13 | } 14 | -------------------------------------------------------------------------------- /relay/llm/windsurf/message.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | package windsurf; 3 | option go_package = "./windsurf"; 4 | 5 | message ChatMessage { 6 | 7 | message Schema { 8 | string name = 1; 9 | string version1 = 2; 10 | string id = 3; 11 | string lang = 4; 12 | string os = 5; 13 | string version2 = 7; 14 | string equi = 8; 15 | string title = 12; 16 | string token = 21; 17 | } 18 | 19 | message UserMessage { 20 | 21 | message Unknown_Field8 { 22 | uint32 value = 1; 23 | } 24 | 25 | message File { 26 | 27 | message Timestamp { 28 | fixed64 value = 13; 29 | } 30 | 31 | string value = 1; // base64 32 | Timestamp timestamp = 2; // ?? 
not sure whether this is a date; a fixed value is recommended 33 | } 34 | 35 | uint32 role = 2; // context type?? 1 user, 2 bot 36 | string message = 3; 37 | uint32 token = 4; 38 | uint32 unknown_field5 = 5; 39 | Unknown_Field8 unknown_field8 = 8; 40 | repeated File files = 10; 41 | } 42 | 43 | message Config { 44 | uint32 unknown_field1 = 1; 45 | uint32 maxTokens = 2; 46 | uint32 topK = 3; 47 | double topP = 5; // uncertain 48 | double temperature = 6; // uncertain 49 | int32 unknown_field7 = 7; 50 | double presence_penalty = 8; // uncertain 51 | repeated string stop = 9; 52 | double frequency_penalty = 11; // uncertain 53 | } 54 | 55 | message Tool { 56 | string name = 1; 57 | string desc = 2; 58 | string schema = 3; 59 | } 60 | 61 | message ToolChoice { 62 | string value = 1; 63 | } 64 | 65 | message Unknown_Field9 { 66 | message Data { 67 | uint32 unknown_field1 = 1; // 68 | string value3 = 3; 69 | string value5 = 5; 70 | uint32 unknown_field6 = 6; // 1 71 | uint32 unknown_field7 = 7; // 2 72 | } 73 | Data value = 6; 74 | } 75 | 76 | message Unknown_Field13 { 77 | uint32 value = 1; 78 | } 79 | 80 | message Unknown_Field15 { 81 | string uuid = 1; 82 | uint32 value = 2; 83 | } 84 | 85 | Schema schema = 1; 86 | string Instructions = 2; // not allowed to be empty? 87 | repeated UserMessage messages = 3; 88 | uint32 model = 6; // model ?? 109: gpt4o 166:claude3.5sonnet 89 | uint32 unknown_field7 = 7; 90 | Config config = 8; 91 | repeated Unknown_Field9 unknown_field9 = 9; 92 | repeated Tool tools = 10; 93 | ToolChoice choice = 12; 94 | Unknown_Field13 unknown_field13 = 13; 95 | Unknown_Field15 unknown_field15 = 15; 96 | string uuid = 16; 97 | } 98 | 99 | message ResMessage { 100 | 101 | message Unknown_Field2 { // probably two timestamps 102 | int32 value1 = 1; 103 | int32 value2 = 2; 104 | } 105 | 106 | 107 | message Tokens { // looks like token statistics, but the numbers don't match up 108 | message Args { 109 | string key = 1; 110 | string value = 2; 111 | } 112 | 113 | int32 model = 1; 114 | int32 unknown_field2 = 2; 115 | int32 unknown_field3 = 3; 116 | int32 unknown_field4 = 4; 117 | int32 unknown_field5 = 5; 118 | int32 unknown_field6 = 6; 119 | 120 | string uuid = 7; 121 | repeated Args args = 8; 122 | string unknown_field10 = 10; 123 | } 124 | 125 | // can't find a field for the think marker?? 
126 | string id = 1; 127 | Unknown_Field2 unknown_field2 = 2; 128 | string message = 3; 129 | uint32 count_token = 4; 130 | uint32 unknown_field5 = 5; 131 | Tokens tokens = 7; 132 | string think = 9; 133 | } 134 | 135 | message Jwt { 136 | 137 | message Args { 138 | string name = 1; 139 | string version1 = 2; 140 | string ident = 3; 141 | string lang = 4; 142 | string version2 = 7; 143 | string title = 12; 144 | } 145 | 146 | Args args = 1; 147 | } 148 | 149 | message JwtToken { 150 | string value = 1; 151 | } -------------------------------------------------------------------------------- /relay/llm/windsurf/toolcall.go: -------------------------------------------------------------------------------- 1 | package windsurf 2 | 3 | import ( 4 | "chatgpt-adapter/core/common/toolcall" 5 | "chatgpt-adapter/core/common/vars" 6 | "chatgpt-adapter/core/gin/model" 7 | "chatgpt-adapter/core/gin/response" 8 | "chatgpt-adapter/core/logger" 9 | "github.com/gin-gonic/gin" 10 | "github.com/iocgo/sdk/env" 11 | ) 12 | 13 | func toolChoice(ctx *gin.Context, env *env.Environment, cookie string, completion model.Completion) bool { 14 | logger.Info("completeTools ...") 15 | echo := ctx.GetBool(vars.GinEcho) 16 | 17 | exec, err := toolcall.ToolChoice(ctx, completion, func(message string) (string, error) { 18 | if echo { 19 | logger.Infof("toolCall message: \n%s", message) 20 | return "", nil 21 | } 22 | 23 | completion.Messages = []model.Keyv[interface{}]{ 24 | { 25 | "role": "user", 26 | "content": message, 27 | }, 28 | } 29 | 30 | token, err := genToken(ctx.Request.Context(), env.GetString("server.proxied"), cookie) 31 | if err != nil { 32 | return "", err 33 | } 34 | 35 | messageBuffer, err := convertRequest(completion, cookie, token) 36 | if err != nil { 37 | return "", err 38 | } 39 | 40 | r, err := fetch(ctx.Request.Context(), env, messageBuffer) 41 | if err != nil { 42 | return "", err 43 | } 44 | 45 | return waitMessage(r, toolcall.Cancel) 46 | }) 47 | 48 | if err != nil { 49 | logger.Error(err) 50 | response.Error(ctx, -1, err) 51 | return true 52 | } 53 | 54 | return exec 55 | } 56 | -------------------------------------------------------------------------------- /relay/llm/you/adapter.go: -------------------------------------------------------------------------------- 1 | package you 2 | 3 | import ( 4 | "encoding/json" 5 | "strings" 6 | 7 | "chatgpt-adapter/core/common" 8 | "chatgpt-adapter/core/gin/inter" 9 | "chatgpt-adapter/core/gin/model" 10 | "chatgpt-adapter/core/gin/response" 11 | "chatgpt-adapter/core/logger" 12 | "github.com/bincooo/you.com" 13 | "github.com/gin-gonic/gin" 14 | "github.com/iocgo/sdk/env" 15 | ) 16 | 17 | var ( 18 | Model = "you" 19 | ) 20 | 21 | type api struct { 22 | inter.BaseAdapter 23 | 24 | env *env.Environment 25 | } 26 | 27 | func (api *api) Match(ctx *gin.Context, model string) (ok bool, err error) { 28 | token := ctx.GetString("token") 29 | if !strings.HasPrefix(model, "you/") { 30 | return 31 | } 32 | 33 | slice := api.env.GetStringSlice("you.model") 34 | for _, mod := range append(slice, []string{ 35 | you.GPT_4, 36 | you.GPT_4_TURBO, 37 | you.GPT_4o, 38 | you.GPT_4o_MINI, 39 | you.OPENAI_O1, 40 | you.OPENAI_O1_MINI, 41 | you.CLAUDE_2, 42 | you.CLAUDE_3_HAIKU, 43 | you.CLAUDE_3_SONNET, 44 | you.CLAUDE_3_5_SONNET, 45 | you.CLAUDE_3_OPUS, 46 | you.GEMINI_1_0_PRO, 47 | you.GEMINI_1_5_PRO, 48 | you.GEMINI_1_5_FLASH, 49 | }...) 
{ 50 | if model[4:] == mod { 51 | password := api.env.GetString("server.password") 52 | if password != "" && password != token { 53 | err = response.UnauthorizedError 54 | return 55 | } 56 | ok = true 57 | return 58 | } 59 | } 60 | return 61 | } 62 | 63 | func (api *api) Models() (slice []model.Model) { 64 | s := api.env.GetStringSlice("you.model") 65 | for _, mod := range append(s, []string{ 66 | you.GPT_4, 67 | you.GPT_4_TURBO, 68 | you.GPT_4o, 69 | you.GPT_4o_MINI, 70 | you.OPENAI_O1, 71 | you.OPENAI_O1_MINI, 72 | you.CLAUDE_2, 73 | you.CLAUDE_3_HAIKU, 74 | you.CLAUDE_3_SONNET, 75 | you.CLAUDE_3_5_SONNET, 76 | you.CLAUDE_3_OPUS, 77 | you.GEMINI_1_0_PRO, 78 | you.GEMINI_1_5_PRO, 79 | you.GEMINI_1_5_FLASH, 80 | }...) { 81 | slice = append(slice, model.Model{ 82 | Id: "you/" + mod, 83 | Object: "model", 84 | Created: 1686935002, 85 | By: Model + "-adapter", 86 | }) 87 | } 88 | return 89 | } 90 | 91 | func (api *api) ToolChoice(ctx *gin.Context) (ok bool, err error) { 92 | var ( 93 | cookie = ctx.GetString("token") 94 | proxied = api.env.GetString("server.proxied") 95 | completion = common.GetGinCompletion(ctx) 96 | ) 97 | 98 | if toolChoice(ctx, cookie, proxied, completion) { 99 | ok = true 100 | } 101 | return 102 | } 103 | 104 | func (api *api) Completion(ctx *gin.Context) (err error) { 105 | var ( 106 | proxies = ctx.GetString("proxies") 107 | completion = common.GetGinCompletion(ctx) 108 | token = ctx.GetString("token") 109 | ) 110 | 111 | completion.Model = completion.Model[4:] 112 | fileMessage, chatM, message := mergeMessages(ctx, completion) 113 | 114 | chat := you.New(token, completion.Model, proxies) 115 | chat.LimitWithE(true) 116 | chat.Client(common.HTTPClient) 117 | if clearance := ctx.GetString("clearance"); clearance != "" { 118 | chat.CloudFlare(clearance, ctx.GetString("userAgent"), ctx.GetString("lang")) 119 | } 120 | 121 | var cancel chan error 122 | if api.env.GetBool("you.custom") { 123 | err = chat.Custom(ctx.Request.Context(), "custom-"+completion.Model, "", false) 124 | if err != nil { 125 | logger.Error(err) 126 | response.Error(ctx, -1, err) 127 | return 128 | } 129 | } 130 | 131 | var chats []you.Message 132 | if i := len(chatM); i > 2 && chatM[0] == '[' && chatM[i-1] == ']' { 133 | err = json.Unmarshal([]byte(chatM), &chats) 134 | if err != nil { 135 | logger.Error(err) 136 | } 137 | } 138 | 139 | ch, err := chat.Reply(ctx.Request.Context(), chats, fileMessage, message) 140 | if err != nil { 141 | return 142 | } 143 | 144 | content := waitResponse(ctx, cancel, ch, completion.Stream) 145 | if content == "" && response.NotResponse(ctx) { 146 | response.Error(ctx, -1, "EMPTY RESPONSE") 147 | } 148 | return 149 | } 150 | -------------------------------------------------------------------------------- /relay/llm/you/ctor.go: -------------------------------------------------------------------------------- 1 | package you 2 | 3 | import ( 4 | "chatgpt-adapter/core/gin/inter" 5 | "github.com/iocgo/sdk/env" 6 | 7 | _ "github.com/iocgo/sdk" 8 | ) 9 | 10 | // @Inject(name = "you-adapter") 11 | func New(env *env.Environment) inter.Adapter { 12 | return &api{env: env} 13 | } 14 | -------------------------------------------------------------------------------- /relay/llm/you/message.go: -------------------------------------------------------------------------------- 1 | package you 2 | 3 | import ( 4 | "errors" 5 | "net/http" 6 | "net/url" 7 | "strings" 8 | "sync" 9 | "time" 10 | 11 | "chatgpt-adapter/core/common" 12 | "chatgpt-adapter/core/common/vars" 13 | 
"chatgpt-adapter/core/gin/model" 14 | "chatgpt-adapter/core/gin/response" 15 | "chatgpt-adapter/core/logger" 16 | "github.com/gin-gonic/gin" 17 | "github.com/iocgo/sdk/env" 18 | ) 19 | 20 | const ginTokens = "__tokens__" 21 | 22 | func waitMessage(ch chan string, cancel func(str string) bool) (content string, err error) { 23 | 24 | for { 25 | message, ok := <-ch 26 | if !ok { 27 | break 28 | } 29 | 30 | if strings.HasPrefix(message, "error:") { 31 | return "", errors.New(message[6:]) 32 | } 33 | 34 | if strings.HasPrefix(message, "limits:") { 35 | continue 36 | } 37 | 38 | logger.Debug("----- raw -----") 39 | logger.Debug(message) 40 | if len(message) > 0 { 41 | content += message 42 | if cancel != nil && cancel(content) { 43 | return content, nil 44 | } 45 | } 46 | } 47 | 48 | return content, nil 49 | } 50 | 51 | func waitResponse(ctx *gin.Context, cancel chan error, ch chan string, sse bool) (content string) { 52 | var ( 53 | created = time.Now().Unix() 54 | tokens = ctx.GetInt(ginTokens) 55 | matchers = common.GetGinMatchers(ctx) 56 | ) 57 | 58 | onceExec := sync.OnceFunc(func() { 59 | if !sse { 60 | ctx.Writer.WriteHeader(http.StatusOK) 61 | } 62 | }) 63 | 64 | logger.Info("waitResponse ...") 65 | for { 66 | select { 67 | case err := <-cancel: 68 | if err != nil { 69 | logger.Error(err) 70 | if response.NotSSEHeader(ctx) { 71 | response.Error(ctx, -1, err) 72 | } 73 | return 74 | } 75 | goto label 76 | default: 77 | message, ok := <-ch 78 | if !ok { 79 | raw := response.ExecMatchers(matchers, "", true) 80 | if raw != "" && sse { 81 | response.SSEResponse(ctx, Model, raw, created) 82 | } 83 | content += raw 84 | goto label 85 | } 86 | 87 | if strings.HasPrefix(message, "error:") { 88 | logger.Error(message[6:]) 89 | if response.NotSSEHeader(ctx) { 90 | response.Error(ctx, -1, message[6:]) 91 | } 92 | return 93 | } 94 | 95 | if strings.HasPrefix(message, "limits:") { 96 | continue 97 | } 98 | 99 | var raw = message 100 | logger.Debug("----- raw -----") 101 | logger.Debug(raw) 102 | onceExec() 103 | 104 | raw = response.ExecMatchers(matchers, raw, false) 105 | if len(raw) == 0 { 106 | continue 107 | } 108 | 109 | if raw == response.EOF { 110 | goto label 111 | } 112 | 113 | if sse { 114 | response.SSEResponse(ctx, Model, raw, created) 115 | } 116 | content += raw 117 | } 118 | } 119 | 120 | label: 121 | if content == "" && response.NotSSEHeader(ctx) { 122 | return 123 | } 124 | 125 | ctx.Set(vars.GinCompletionUsage, response.CalcUsageTokens(content, tokens)) 126 | if !sse { 127 | response.Response(ctx, Model, content) 128 | } else { 129 | response.SSEResponse(ctx, Model, "[DONE]", created) 130 | } 131 | 132 | return 133 | } 134 | 135 | func mergeMessages(ctx *gin.Context, completion model.Completion) (fileMessage, chat, query string) { 136 | query = env.Env.GetString("you.notice") 137 | tokens := 0 138 | var ( 139 | messages = completion.Messages 140 | isC = response.IsClaude(ctx, completion.Model) 141 | ) 142 | defer func() { ctx.Set(ginTokens, tokens) }() 143 | 144 | messageL := len(messages) 145 | if messageL == 1 { 146 | var notice = query 147 | message := messages[0] 148 | fileMessage = message.GetString("content") 149 | chat = message.GetString("chat") 150 | query = message.GetString("query") 151 | if notice != "" { 152 | query += "\n\n" + notice 153 | } 154 | 155 | join := fileMessage 156 | if query != "" { 157 | join += "\n\n" + query 158 | } 159 | if encodingLen(join) <= 12499 { 160 | query = join 161 | fileMessage = "" 162 | } 163 | 164 | tokens += 
response.CalcTokens(fileMessage) 165 | tokens += response.CalcTokens(chat) 166 | tokens += response.CalcTokens(query) 167 | return 168 | } 169 | 170 | var ( 171 | pos = 0 172 | contents []string 173 | ) 174 | 175 | for { 176 | if pos > messageL-1 { 177 | break 178 | } 179 | 180 | message := messages[pos] 181 | convertRole, turn := response.ConvertRole(ctx, message.GetString("role")) 182 | if isC && message.Is("role", "system") { 183 | convertRole = "" 184 | } 185 | contents = append(contents, convertRole+message.GetString("content")+turn) 186 | pos++ 187 | } 188 | 189 | convertRole, _ := response.ConvertRole(ctx, "assistant") 190 | fileMessage = strings.Join(contents, "") + convertRole 191 | tokens += response.CalcTokens(fileMessage) 192 | if encodingLen(fileMessage) <= 12499 { 193 | query = fileMessage 194 | fileMessage = "" 195 | } 196 | 197 | return 198 | } 199 | 200 | func encodingLen(str string) (count int) { 201 | escape := url.QueryEscape(str) 202 | chars := []rune(escape) 203 | for _, ch := range chars { 204 | count++ 205 | if ch == '+' { 206 | count += 2 207 | } 208 | } 209 | return 210 | } 211 | -------------------------------------------------------------------------------- /relay/llm/you/toolcall.go: -------------------------------------------------------------------------------- 1 | package you 2 | 3 | import ( 4 | "chatgpt-adapter/core/common" 5 | "chatgpt-adapter/core/common/toolcall" 6 | "chatgpt-adapter/core/common/vars" 7 | "chatgpt-adapter/core/gin/model" 8 | "chatgpt-adapter/core/gin/response" 9 | "chatgpt-adapter/core/logger" 10 | "github.com/bincooo/you.com" 11 | "github.com/gin-gonic/gin" 12 | ) 13 | 14 | func toolChoice(ctx *gin.Context, cookie, proxies string, completion model.Completion) bool { 15 | logger.Infof("completeTools ...") 16 | 17 | var ( 18 | echo = ctx.GetBool(vars.GinEcho) 19 | ) 20 | 21 | exec, err := toolcall.ToolChoice(ctx, completion, func(message string) (string, error) { 22 | if echo { 23 | logger.Infof("toolCall message: \n%s", message) 24 | return "", nil 25 | } 26 | 27 | chat := you.New(cookie, completion.Model, proxies) 28 | chat.LimitWithE(true) 29 | chat.Client(common.HTTPClient) 30 | clearance := ctx.GetString("clearance") 31 | if clearance != "" { 32 | chat.CloudFlare(clearance, ctx.GetString("userAgent"), ctx.GetString("lang")) 33 | } 34 | 35 | chatResponse, err := chat.Reply(ctx.Request.Context(), nil, message, "Please review the attached prompt") 36 | if err != nil { 37 | return "", err 38 | } 39 | 40 | return waitMessage(chatResponse, toolcall.Cancel) 41 | }) 42 | 43 | if err != nil { 44 | logger.Error(err) 45 | response.Error(ctx, -1, err) 46 | return true 47 | } 48 | 49 | return exec 50 | } 51 | -------------------------------------------------------------------------------- /relay/pg/adapter.go: -------------------------------------------------------------------------------- 1 | package pg 2 | 3 | import ( 4 | "bytes" 5 | "context" 6 | "encoding/json" 7 | "errors" 8 | "io" 9 | "math/rand" 10 | "net/http" 11 | "regexp" 12 | "slices" 13 | "strings" 14 | "time" 15 | 16 | "chatgpt-adapter/core/common" 17 | "chatgpt-adapter/core/gin/inter" 18 | "chatgpt-adapter/core/gin/response" 19 | "chatgpt-adapter/core/logger" 20 | "github.com/bincooo/emit.io" 21 | "github.com/gin-gonic/gin" 22 | "github.com/google/uuid" 23 | "github.com/iocgo/sdk/env" 24 | ) 25 | 26 | type modelPayload struct { 27 | BatchId string `json:"batchId"` 28 | CfgScale int32 `json:"cfg_scale"` 29 | BoothModel string `json:"dream_booth_model"` 30 | Filter string 
`json:"filter"` 31 | GenerateVariants bool `json:"generateVariants"` 32 | GuidanceScale int32 `json:"guidance_scale"` 33 | Width int32 `json:"width"` 34 | Height int32 `json:"height"` 35 | HighNoiseFrac float32 `json:"high_noise_frac"` 36 | InitImageFromPlayground bool `json:"initImageFromPlayground"` 37 | IsPrivate bool `json:"isPrivate"` 38 | ModelType string `json:"modelType"` 39 | NegativePrompt string `json:"negativePrompt"` 40 | NumImages int32 `json:"num_images"` 41 | Prompt string `json:"prompt"` 42 | Sampler int32 `json:"sampler"` 43 | Seed int32 `json:"seed"` 44 | StatusUUID string `json:"statusUUID"` 45 | Steps int32 `json:"steps"` 46 | Strength float32 `json:"strength"` 47 | } 48 | 49 | type modelCompleted struct { 50 | Meta struct { 51 | NumImagesInLast24Hours int32 `json:"numImagesInLast24Hours"` 52 | } `json:"meta"` 53 | Images []struct { 54 | ImageKey string `json:"imageKey"` 55 | Prompt string `json:"prompt"` 56 | Url string `json:"url"` 57 | Loading bool `json:"loading"` 58 | } `json:"images"` 59 | } 60 | 61 | var ( 62 | models = []string{ 63 | "none", 64 | "Realism_Engine_SDXL", 65 | "Real_Cartoon_XL", 66 | "Blue_Pencil_XL", 67 | "Starlight_XL", 68 | "Juggernaut_XL", 69 | "RealVisXL", 70 | "ZavyChromaXL", 71 | "NightVision_XL", 72 | "Realistic_Stock_Photo", 73 | "DreamShaper", 74 | "MBBXL_Ultimate", 75 | "Mysterious", 76 | "Copax_TimeLessXL", 77 | "SDXL_Niji", 78 | "Pixel_Art_XL", 79 | "ProtoVision_XL", 80 | "DucHaiten_AIart_SDXL", 81 | "CounterfeitXL", 82 | "vibrant_glass", 83 | "dreamy_stickers", 84 | "ultra_lighting", 85 | "watercolor", 86 | "macro_realism", 87 | "delicate_detail", 88 | "radiant_symmetry", 89 | "lush_illustration", 90 | "saturated_space", 91 | "neon_mecha", 92 | "ethereal_low_poly", 93 | "warm_box", 94 | "cinematic", 95 | "cinematic_warm", 96 | "wasteland", 97 | "flat_palette", 98 | "ominous_escape", 99 | "spielberg", 100 | "royalistic", 101 | "masterpiece", 102 | "wall_art", 103 | "haze", 104 | "black_and_white_3d", 105 | } 106 | ) 107 | 108 | type pg struct { 109 | inter.BaseAdapter 110 | env *env.Environment 111 | } 112 | 113 | func (p *pg) Match(ctx *gin.Context, model string) (ok bool, err error) { 114 | token := ctx.GetString("token") 115 | if model == "dall-e-3" { 116 | ok, _ = regexp.MatchString(`\w{8,10}-\w{4}-\w{4}-\w{4}-\w{10,15}`, token) 117 | } 118 | return 119 | } 120 | 121 | func (p *pg) Generation(ctx *gin.Context) (err error) { 122 | 123 | var ( 124 | hash = emit.GioHash() 125 | cookie = ctx.GetString("token") 126 | generation = common.GetGinGeneration(ctx) 127 | ) 128 | 129 | mod := matchModel(generation.Style) 130 | var payload = modelPayload{ 131 | BatchId: hash, 132 | CfgScale: 8, 133 | GuidanceScale: 8, 134 | Width: 1024, 135 | Height: 1024, 136 | HighNoiseFrac: 0.8, 137 | GenerateVariants: false, 138 | InitImageFromPlayground: false, 139 | IsPrivate: false, 140 | ModelType: "stable-diffusion-xl", 141 | Filter: mod, 142 | BoothModel: mod, 143 | NegativePrompt: "ugly, deformed, noisy, blurry, distorted, out of focus, bad anatomy, extra limbs, poorly drawn face, poorly drawn hands, missing fingers, ugly, deformed, noisy, blurry, distorted, out of focus, bad anatomy, extra limbs, poorly drawn face, poorly drawn hands, missing fingers, photo, realistic, text, watermark, signature, username, artist name", 144 | NumImages: 1, 145 | Prompt: generation.Message, 146 | Sampler: 9, 147 | Seed: int32(rand.Intn(100000000) + 429650152), 148 | StatusUUID: uuid.NewString(), 149 | Steps: 30, 150 | Strength: 1.45, 151 | } 152 | 153 | marshal, _ := 
json.Marshal(payload) 154 | r, err := fetch(ctx, "", cookie, marshal) 155 | if err != nil { 156 | logger.Error(err) 157 | return 158 | } 159 | 160 | data, err := io.ReadAll(r.Body) 161 | if err != nil { 162 | logger.Error(err) 163 | return 164 | } 165 | 166 | // {"errorCode": 167 | if bytes.HasPrefix(data, []byte("{\"errorCode\":")) { 168 | logger.Error(err) 169 | return 170 | } 171 | 172 | var mc modelCompleted 173 | if err = json.Unmarshal(data, &mc); err != nil { 174 | logger.Error(err) 175 | response.Error(ctx, -1, err) 176 | return 177 | } 178 | 179 | if len(mc.Images) == 0 { 180 | err = errors.New("generate images failed") 181 | return 182 | } 183 | 184 | ctx.JSON(http.StatusOK, gin.H{ 185 | "created": time.Now().Unix(), 186 | "styles": models, 187 | "data": []map[string]string{ 188 | {"url": mc.Images[0].Url}, 189 | }, 190 | "currStyle": mod, 191 | }) 192 | return 193 | } 194 | 195 | func matchModel(style string) string { 196 | if slices.Contains(models, style) { 197 | return style 198 | } 199 | return models[rand.Intn(len(models))] 200 | } 201 | 202 | func fetch(ctx context.Context, proxies, cookie string, marshal []byte) (*http.Response, error) { 203 | if !strings.Contains(cookie, "__Secure-next-auth.session-token=") { 204 | cookie = "__Secure-next-auth.session-token=" + cookie 205 | } 206 | 207 | baseUrl := "https://playground.com" 208 | return emit.ClientBuilder(common.HTTPClient). 209 | Proxies(proxies). 210 | Context(ctx). 211 | POST(baseUrl+"/api/models"). 212 | Header("host", "playground.com"). 213 | Header("origin", "https://playground.com"). 214 | Header("referer", "https://playground.com/create"). 215 | Header("accept-language", "en-US,en;q=0.9"). 216 | Header("user-agent", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/122.0.0.0 Safari/537.36"). 217 | Header("x-forwarded-for", emit.RandIP()). 218 | Header("cookie", cookie). 219 | JSONHeader(). 220 | Bytes(marshal). 
221 | DoS(http.StatusOK) 222 | } 223 | -------------------------------------------------------------------------------- /relay/pg/ctor.go: -------------------------------------------------------------------------------- 1 | package pg 2 | 3 | import ( 4 | "chatgpt-adapter/core/gin/inter" 5 | "github.com/iocgo/sdk/env" 6 | 7 | _ "github.com/iocgo/sdk" 8 | ) 9 | 10 | // @Inject(name = "pg-adapter") 11 | func New(env *env.Environment) inter.Adapter { return &pg{env: env} } 12 | -------------------------------------------------------------------------------- /relay/scan/3rd.go: -------------------------------------------------------------------------------- 1 | //go:build !3rd 2 | 3 | package scan 4 | 5 | import "github.com/iocgo/sdk" 6 | 7 | func rejects(container *sdk.Container) (err error) { return } 8 | -------------------------------------------------------------------------------- /relay/scan/3rd_e.go: -------------------------------------------------------------------------------- 1 | //go:build 3rd 2 | 3 | package scan 4 | 5 | import ( 6 | "github.com/iocgo/sdk" 7 | 8 | "chatgpt-adapter/relay/3rd/llm/kilo" 9 | "chatgpt-adapter/relay/3rd/llm/trae" 10 | "chatgpt-adapter/relay/3rd/llm/zed" 11 | ) 12 | 13 | func rejects(container *sdk.Container) (err error) { 14 | err = trae.Injects(container) 15 | if err != nil { 16 | return 17 | } 18 | 19 | err = kilo.Injects(container) 20 | if err != nil { 21 | return 22 | } 23 | 24 | err = zed.Injects(container) 25 | if err != nil { 26 | return 27 | } 28 | 29 | return 30 | } 31 | -------------------------------------------------------------------------------- /relay/scan/export.go: -------------------------------------------------------------------------------- 1 | // 该包下仅提供给iocgo工具使用的,不需要理会 Injects 的错误,在编译过程中生成 2 | package scan 3 | 4 | import ( 5 | "github.com/iocgo/sdk" 6 | 7 | _ "chatgpt-adapter/relay/alloc/bing" 8 | _ "chatgpt-adapter/relay/alloc/coze" 9 | _ "chatgpt-adapter/relay/alloc/grok" 10 | _ "chatgpt-adapter/relay/alloc/you" 11 | 12 | "chatgpt-adapter/relay/hf" 13 | "chatgpt-adapter/relay/llm/bing" 14 | "chatgpt-adapter/relay/llm/blackbox" 15 | "chatgpt-adapter/relay/llm/coze" 16 | "chatgpt-adapter/relay/llm/cursor" 17 | "chatgpt-adapter/relay/llm/deepseek" 18 | "chatgpt-adapter/relay/llm/grok" 19 | "chatgpt-adapter/relay/llm/lmsys" 20 | "chatgpt-adapter/relay/llm/qodo" 21 | "chatgpt-adapter/relay/llm/v1" 22 | "chatgpt-adapter/relay/llm/windsurf" 23 | "chatgpt-adapter/relay/llm/you" 24 | "chatgpt-adapter/relay/pg" 25 | ) 26 | 27 | func Injects(container *sdk.Container) (err error) { 28 | err = v1.Injects(container) 29 | if err != nil { 30 | return 31 | } 32 | 33 | err = bing.Injects(container) 34 | if err != nil { 35 | return 36 | } 37 | 38 | err = coze.Injects(container) 39 | if err != nil { 40 | return 41 | } 42 | 43 | err = cursor.Injects(container) 44 | if err != nil { 45 | return 46 | } 47 | 48 | err = windsurf.Injects(container) 49 | if err != nil { 50 | return 51 | } 52 | 53 | err = you.Injects(container) 54 | if err != nil { 55 | return 56 | } 57 | 58 | err = deepseek.Injects(container) 59 | if err != nil { 60 | return 61 | } 62 | 63 | err = grok.Injects(container) 64 | if err != nil { 65 | return 66 | } 67 | 68 | err = lmsys.Injects(container) 69 | if err != nil { 70 | return 71 | } 72 | 73 | err = qodo.Injects(container) 74 | if err != nil { 75 | return 76 | } 77 | 78 | err = pg.Injects(container) 79 | if err != nil { 80 | return 81 | } 82 | 83 | err = hf.Injects(container) 84 | if err != nil { 85 | return 86 | } 87 | 88 | err = 
blackbox.Injects(container) 89 | if err != nil { 90 | return 91 | } 92 | 93 | err = rejects(container) 94 | if err != nil { 95 | return 96 | } 97 | 98 | return 99 | } 100 | -------------------------------------------------------------------------------- /wire/container.go: -------------------------------------------------------------------------------- 1 | // ------>>> Import the packages whose instance constructors should be managed by the container <<<----- 2 | // 3 | // Packages that use the Cobra / Inject / Bean annotations get a generated `func Injects(container *sdk.Container) error` function 4 | // In this file: underscore-imported packages are scanned and their Injects is called, so do not use an underscore alias for packages that carry no ioc annotations 5 | // 6 | 7 | package wire 8 | 9 | import ( 10 | "github.com/iocgo/sdk" 11 | 12 | _ "github.com/iocgo/sdk/cobra/scan" /* cobra auto-wiring */ 13 | _ "github.com/iocgo/sdk/scan" /* built-in environment variables */ 14 | 15 | // starter-style packages loaded here 16 | _ "chatgpt-adapter/core/scan" /* core auto-wiring */ 17 | _ "chatgpt-adapter/relay/scan" /* adapter auto-wiring */ 18 | ) 19 | 20 | // @Gen() 21 | func Injects(*sdk.Container) error { 22 | panic("auto implements") 23 | } 24 | --------------------------------------------------------------------------------
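
Taken together, the registration flow is: each adapter package exposes a constructor annotated with // @Inject(name = "..."), the iocgo generator emits that package's Injects(container *sdk.Container) error function at build time, relay/scan/export.go chains those calls, and wire/container.go pulls the scan packages in through blank imports. A minimal sketch of the ctor.go a new adapter would add under this convention (the acme package name and "acme-adapter" bean name are hypothetical, not files in this repository; a real adapter would still implement Match/Completion in its adapter.go):

package acme

import (
	"chatgpt-adapter/core/gin/inter"
	"github.com/iocgo/sdk/env"

	_ "github.com/iocgo/sdk"
)

// adapter state: only the injected environment, mirroring the other adapters
type api struct {
	inter.BaseAdapter
	env *env.Environment
}

// @Inject(name = "acme-adapter")
// iocgo scans this annotation and generates acme.Injects(container) during the build
func New(env *env.Environment) inter.Adapter { return &api{env: env} }

The remaining manual step would be an err = acme.Injects(container) block in relay/scan/export.go (or in 3rd_e.go behind the 3rd build tag); wire/container.go itself needs no change, since it already blank-imports chatgpt-adapter/relay/scan.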