├── typings-custom └── replicate-js.d.ts ├── jest.config.js ├── src ├── util │ └── auth.ts ├── tracing.ts ├── replicate.js ├── index.ts ├── queue.ts └── prediction.ts ├── package.json ├── wrangler.toml ├── .gitignore ├── README.md └── tsconfig.json /typings-custom/replicate-js.d.ts: -------------------------------------------------------------------------------- 1 | declare module 'replicate-js' -------------------------------------------------------------------------------- /jest.config.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | testEnvironment: 'miniflare', 3 | testMatch: ['**/test/**/*.+(ts|tsx|js)', '**/src/**/(*.)+(spec|test).+(ts|tsx|js)'], 4 | transform: { 5 | '^.+\\.(ts|tsx)$': 'esbuild-jest', 6 | }, 7 | } -------------------------------------------------------------------------------- /src/util/auth.ts: -------------------------------------------------------------------------------- 1 | 2 | export interface User { 3 | allow: boolean, 4 | replicate: string, 5 | runpod: string, 6 | worker: string 7 | } 8 | 9 | export default { 10 | async auth(request: Request, tokens: KVNamespace): Promise { 11 | try { 12 | let token; 13 | let authorization = request.headers.get("Authorization"); 14 | if (authorization && authorization.startsWith("Token ")) { 15 | token = authorization.substring(6); 16 | } else { 17 | token = request.headers.get("X-Cogflare-Token"); 18 | } 19 | if (token) { 20 | let auth = await tokens.get(token); 21 | if (auth) { 22 | return JSON.parse(auth); 23 | } 24 | } 25 | } 26 | catch (ex) { console.log("error parsing auth: " + ex) } 27 | return null; 28 | } 29 | } 30 | 31 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "cogflare", 3 | "version": "0.0.0", 4 | "devDependencies": { 5 | "@cloudflare/workers-types": "^3.14.0", 6 | "@types/jest": "^28.1.4", 7 | "@types/uuid": "^8.3.4", 8 | "esbuild": "^0.14.48", 9 | "esbuild-jest": "^0.5.0", 10 | "jest": "^28.1.2", 11 | "jest-environment-miniflare": "^2.5.1", 12 | "miniflare": "^2.5.1", 13 | "typescript": "^4.7.4", 14 | "wrangler": "2.0.15" 15 | }, 16 | "private": true, 17 | "type": "module", 18 | "scripts": { 19 | "test": "jest --verbose", 20 | "start": "wrangler dev", 21 | "deploy": "wrangler publish" 22 | }, 23 | "dependencies": { 24 | "@opentelemetry/api": "^1.1.0", 25 | "@opentelemetry/instrumentation": "^0.30.0", 26 | "@opentelemetry/resources": "^1.4.0", 27 | "@opentelemetry/sdk-trace-web": "^1.4.0", 28 | "@opentelemetry/semantic-conventions": "^1.4.0", 29 | "denque": "^2.0.1", 30 | "realm-web": "^1.7.1", 31 | "uuid": "^8.3.2" 32 | } 33 | } 34 | -------------------------------------------------------------------------------- /src/tracing.ts: -------------------------------------------------------------------------------- 1 | const opentelemetry = require("@opentelemetry/api"); 2 | import { Resource } from "@opentelemetry/resources"; 3 | import { SemanticResourceAttributes } from "@opentelemetry/semantic-conventions"; 4 | import { WebTracerProvider } from "@opentelemetry/sdk-trace-web" 5 | import { registerInstrumentations } from "@opentelemetry/instrumentation" 6 | import { ConsoleSpanExporter, BatchSpanProcessor } from "@opentelemetry/sdk-trace-base" 7 | 8 | registerInstrumentations({ 9 | instrumentations: [], 10 | }); 11 | 12 | const resource = 13 | Resource.default().merge( 14 | new Resource({ 15 | 
[SemanticResourceAttributes.SERVICE_NAME]: "cogflare-queue", 16 | [SemanticResourceAttributes.SERVICE_VERSION]: "0.1.0", 17 | }) 18 | ); 19 | const provider = new WebTracerProvider({ 20 | resource: resource 21 | }); 22 | const exporter = new ConsoleSpanExporter(); 23 | const processor = new BatchSpanProcessor(exporter); 24 | provider.addSpanProcessor(processor); 25 | provider.register(); 26 | 27 | 28 | -------------------------------------------------------------------------------- /wrangler.toml: -------------------------------------------------------------------------------- 1 | name = "cogflare-dev" 2 | main = "src/index.ts" 3 | compatibility_date = "2022-07-04" 4 | account_id = "0c52d0e521e103c3f330970a53c76fd6" 5 | workers_dev = true 6 | node_compat = false 7 | usage_model = "unbound" 8 | r2_buckets = [ { binding = "COG_OUTPUTS", bucket_name="cog-outputs", preview_bucket_name="cog-outputs-dev" } ] 9 | kv_namespaces = [ 10 | { binding = "PREDICTIONS_KV", id="9cba400d365c40bda3967624cb6a81fc", preview_id="90ef13f96c9448b2af70a2c912b80035" }, 11 | { binding = "TOKENS_KV", id="a706ad4144f44b52b96929d26797c3b2", preview_id="b88a209b079448f08097787b5b4ef67c" } 12 | ] 13 | 14 | [dev] 15 | ip = "localhost" 16 | port = 8422 17 | local_protocol = "http" 18 | 19 | [durable_objects] 20 | bindings = [ 21 | { name = "QUEUE", class_name = "Queue" }, 22 | { name = "PREDICTION", class_name = "Prediction" } 23 | ] 24 | 25 | 26 | [vars] 27 | COGFLARE_URL = "http://localhost:8422/v1" 28 | ENVIRONMENT = "dev" 29 | REALM_APP_ID = "cogflare-jzpry" 30 | 31 | [env.production] 32 | name = "cogflare" 33 | vars = { COGFLARE_URL = "https://cog.nmb.ai/v1", ENVIRONMENT="production", REALM_APP_ID = "cogflare-jzpry" } 34 | durable_objects = { bindings = [ { name = "QUEUE", class_name = "Queue" }, { name = "PREDICTION", class_name = "Prediction" } ]} 35 | r2_buckets = [ { binding = "COG_OUTPUTS", bucket_name="cog-outputs", preview_bucket_name="cog-outputs-dev" } ] 36 | kv_namespaces = [ 37 | { binding = "PREDICTIONS_KV", id="9cba400d365c40bda3967624cb6a81fc", preview_id="90ef13f96c9448b2af70a2c912b80035" }, 38 | { binding = "TOKENS_KV", id="a706ad4144f44b52b96929d26797c3b2", preview_id="b88a209b079448f08097787b5b4ef67c" } 39 | ] 40 | 41 | [[migrations]] 42 | tag = "v1" 43 | new_classes = ["Cogflare"] 44 | 45 | [[migrations]] 46 | tag = "v2" 47 | new_classes = ["Queue"] 48 | 49 | [[migrations]] 50 | tag = "v3" 51 | new_classes = ["Prediction"] 52 | deleted_classes = ["Cogflare"] -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Logs 2 | 3 | logs 4 | _.log 5 | npm-debug.log_ 6 | yarn-debug.log* 7 | yarn-error.log* 8 | lerna-debug.log* 9 | .pnpm-debug.log* 10 | 11 | # Diagnostic reports (https://nodejs.org/api/report.html) 12 | 13 | report.[0-9]_.[0-9]_.[0-9]_.[0-9]_.json 14 | 15 | # Runtime data 16 | 17 | pids 18 | _.pid 19 | _.seed 20 | \*.pid.lock 21 | 22 | # Directory for instrumented libs generated by jscoverage/JSCover 23 | 24 | lib-cov 25 | 26 | # Coverage directory used by tools like istanbul 27 | 28 | coverage 29 | \*.lcov 30 | 31 | # nyc test coverage 32 | 33 | .nyc_output 34 | 35 | # Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files) 36 | 37 | .grunt 38 | 39 | # Bower dependency directory (https://bower.io/) 40 | 41 | bower_components 42 | 43 | # node-waf configuration 44 | 45 | .lock-wscript 46 | 47 | # Compiled binary addons 
(https://nodejs.org/api/addons.html) 48 | 49 | build/Release 50 | 51 | # Dependency directories 52 | 53 | node_modules/ 54 | jspm_packages/ 55 | 56 | # Snowpack dependency directory (https://snowpack.dev/) 57 | 58 | web_modules/ 59 | 60 | # TypeScript cache 61 | 62 | \*.tsbuildinfo 63 | 64 | # Optional npm cache directory 65 | 66 | .npm 67 | 68 | # Optional eslint cache 69 | 70 | .eslintcache 71 | 72 | # Optional stylelint cache 73 | 74 | .stylelintcache 75 | 76 | # Microbundle cache 77 | 78 | .rpt2_cache/ 79 | .rts2_cache_cjs/ 80 | .rts2_cache_es/ 81 | .rts2_cache_umd/ 82 | 83 | # Optional REPL history 84 | 85 | .node_repl_history 86 | 87 | # Output of 'npm pack' 88 | 89 | \*.tgz 90 | 91 | # Yarn Integrity file 92 | 93 | .yarn-integrity 94 | 95 | # dotenv environment variable files 96 | 97 | .env 98 | .env.development.local 99 | .env.test.local 100 | .env.production.local 101 | .env.local 102 | 103 | # parcel-bundler cache (https://parceljs.org/) 104 | 105 | .cache 106 | .parcel-cache 107 | 108 | # Next.js build output 109 | 110 | .next 111 | out 112 | 113 | # Nuxt.js build / generate output 114 | 115 | .nuxt 116 | dist 117 | 118 | # Gatsby files 119 | 120 | .cache/ 121 | 122 | # Comment in the public line in if your project uses Gatsby and not Next.js 123 | 124 | # https://nextjs.org/blog/next-9-1#public-directory-support 125 | 126 | # public 127 | 128 | # vuepress build output 129 | 130 | .vuepress/dist 131 | 132 | # vuepress v2.x temp and cache directory 133 | 134 | .temp 135 | .cache 136 | 137 | # Docusaurus cache and generated files 138 | 139 | .docusaurus 140 | 141 | # Serverless directories 142 | 143 | .serverless/ 144 | 145 | # FuseBox cache 146 | 147 | .fusebox/ 148 | 149 | # DynamoDB Local files 150 | 151 | .dynamodb/ 152 | 153 | # TernJS port file 154 | 155 | .tern-port 156 | 157 | # Stores VSCode versions used for testing VSCode extensions 158 | 159 | .vscode-test 160 | 161 | # yarn v2 162 | 163 | .yarn/cache 164 | .yarn/unplugged 165 | .yarn/build-state.yml 166 | .yarn/install-state.gz 167 | .pnp.\* 168 | 169 | # wrangler project 170 | 171 | .dev.vars 172 | -------------------------------------------------------------------------------- /src/replicate.js: -------------------------------------------------------------------------------- 1 | const BASE_URL = "https://api.replicate.com/v1" 2 | 3 | const POLLING_INTERVAL = 5000 4 | 5 | const sleep = (ms) => new Promise((resolve) => setTimeout(() => resolve(), ms)) 6 | class Model { 7 | constructor(options) { 8 | if (!options.path) 9 | throw 'Missing Replicate model path' 10 | this.path = options.path; 11 | this.token = options.token; 12 | this.version = options.version; 13 | this.headers = { 'Authorization': `Token ${this.token}`, 'Content-Type': 'application/json', 'Accept': 'application/json' }; 14 | } 15 | 16 | async getModelDetails() { 17 | let response = await fetch(`${BASE_URL}/models/${this.path}/versions`, { headers: this.headers }); 18 | let data = await response.json(); 19 | let modelVersions = data.results; 20 | let mostRecentVersion = modelVersions[0]; 21 | let explicitlySelectedVersion = modelVersions.find((m) => m.id == this.version); 22 | this.modelDetails = explicitlySelectedVersion ? 
explicitlySelectedVersion : mostRecentVersion; 23 | } 24 | 25 | async *predictor(input) { 26 | let startJson = await this.startPrediction(input); // startPrediction() already returns parsed JSON 27 | let predictionStatus; 28 | 29 | do { 30 | let predictionId = startJson.id; 31 | let data = await this.getPrediction(predictionId); 32 | predictionStatus = data.status; 33 | let latestPrediction = data.output; 34 | //let latestPrediction = { output: data.output, logs: data.logs, status: data.status, error: data.error }; 35 | await sleep(POLLING_INTERVAL); 36 | yield latestPrediction; 37 | 38 | } while (['starting', 'processing'].includes(predictionStatus)) 39 | } 40 | 41 | async startPrediction(input) { 42 | if (!this.modelDetails) 43 | await this.getModelDetails() 44 | let startRequest = { "version": this.modelDetails.id, "input": input } 45 | let startResponse = await fetch(`${BASE_URL}/predictions`, { method: 'POST', headers: this.headers, body: JSON.stringify(startRequest) }); 46 | return await startResponse.json(); 47 | } 48 | 49 | async getPrediction(predictionId) { 50 | let checkResponse = await fetch(`${BASE_URL}/predictions/${predictionId}`, { headers: this.headers }); 51 | return await checkResponse.json(); 52 | } 53 | 54 | async predict(input) { 55 | let predictor = this.predictor(input); 56 | let prediction; 57 | for await (prediction of predictor) { 58 | // console.log(prediction); 59 | } 60 | return prediction; 61 | } 62 | } 63 | 64 | class Replicate { 65 | constructor(options) { 66 | options = options ?? {}; 67 | this.options = options; 68 | if (options.token) 69 | this.token = options.token 70 | if (!this.token) 71 | throw 'Missing Replicate token' 72 | this.models = { get: this.getModel.bind(this) } 73 | } 74 | 75 | async getModel(path, version) { 76 | let model = new Model({ path: path, version: version, token: this.token }); 77 | await model.getModelDetails(); 78 | return model; 79 | } 80 | } 81 | 82 | export default Replicate 83 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Cogflare 2 | *(Working title)* 3 | 4 | **Cogflare** is a [Cloudflare Workers](https://workers.cloudflare.com/) application that aims to simplify running distributed ML inference jobs through a central API. The jobs can (currently) be run using [Replicate](https://www.replicate.com/) or anywhere that Docker and a GPU are available, such as [RunPod](https://runpod.io) or your own hardware. 5 | 6 | ## How does it work? 7 | Cogflare provides an HTTP API that is similar in surface area to [Replicate's API](https://replicate.com/api), but adds flexibility in where and how predictions run. It provides a websocket server that workers connect to, and a lightweight queue with state managed by [Durable Objects](https://www.cloudflare.com/cloudflare-workers-durable-objects-beta) for running jobs on those workers. [R2](https://www.cloudflare.com/products/r2/) provides storage for results with high speed and no bandwidth costs, and Workers KV is used as the system of record for auth tokens and completed predictions. The entire setup should cost about $5/month plus $0.015/GB of storage plus GPU costs, at the scale tested. 8 | 9 | ## Why does this exist? 10 | I've been through a few iterations of the backend for [NightmareBot](https://github.com/NightmareAI/NightmareBot). I found Replicate a great solution overall, but needed the flexibility to run wherever I wanted.
I've found a number of people trying to solve the same problems I have, so I created this project in the hope of saving others from having to reinvent the wheel so many times. 11 | 12 | ## What's it useful for? 13 | You tell me! Bots are the big use case so far, but it could be a fit anywhere you need flexible API access to ML inference. 14 | 15 | ## How do I use it? 16 | Contact [palp@nmb.ai](mailto:palp@nmb.ai) if you're interested in using my hosted version - I'm not currently charging for it, but I can only accept people who are willing to act as testers. There is currently a hard dependency on the Replicate API using your own key, so you'll first need to sign up for API access there and get a key. 17 | 18 | ## No, not how do I use yours, how do I use it? 19 | *This is a preliminary guide and needs a lot of work; operation is subject to drastic changes, and it's not really recommended to do this yourself yet.* 20 | 21 | If you'd like to host it yourself, it should be as simple as updating the `wrangler.toml` file with your own account and resources (which you'll need to create) and deploying - no special sauce. Setting up Cloudflare Workers is beyond the scope of this document for now, but I hope to add some basic instructions soon. 22 | 23 | You'll need an entry in the TOKENS_KV namespace, where the key acts as an authorization token and the value is a JSON structure like this: 24 | ``` 25 | {"allow": true, "replicate": "YOUR_REPLICATE_TOKEN", "worker": "some-random-string" } 26 | ``` 27 | You can now make prediction requests by POSTing to the `/v1/predictions` endpoint and retrieve their status with `GET /v1/predictions/{id}` (see the example request at the end of this README). 28 | 29 | Jobs will run on Replicate unless there are workers available. There is also simple overflow logic in place right now that sends jobs to Replicate if the queue depth is greater than the number of available workers, but this is temporary and subject to change. 30 | 31 | Currently workers have to run Docker images built using a [fork of Replicate's Cog](https://github.com/NightmareAI/cog); however, the public image `r8.im/nightmareai/disco-diffusion` has been built with it, so I'll use it as an example. To start a worker for Disco Diffusion, a typical command would look like: 32 | ``` 33 | docker run --rm --gpus=all r8.im/nightmareai/disco-diffusion python -m cog.server.websockets wss://[WORKER-URL]/v1/models/nightmareai/disco-diffusion/websockets/[WORKER-TOKEN] https://[WORKER-URL]/v1/models/nightmareai/disco-diffusion/files nightmareai/disco-diffusion 34 | ``` 35 | This attaches the worker to your queue for this model, and it will continue to reconnect and run jobs until killed. 36 | 37 | ## This all sounds really complicated, I just want to run stuff and I don't want to talk to you! 38 | Stay tuned! Easy sign up and setup are on the list!
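## Example: creating a prediction

For reference, here is a minimal sketch of what a prediction request can look like from TypeScript against a self-hosted deployment. `COGFLARE_URL`, the token value, the model name, and the `input` fields are placeholders you would swap for your own; the request and response shapes follow `src/prediction.ts`.

```ts
// Minimal sketch (assumptions: your own worker URL, a token key you created in
// TOKENS_KV, and input fields matching the model you're running).
const COGFLARE_URL = "https://your-worker.example.com/v1";
const COGFLARE_TOKEN = "your-token-key-from-TOKENS_KV";

async function runPrediction() {
  // Queue a prediction; auth uses the same "Token" header scheme as Replicate.
  const createRes = await fetch(`${COGFLARE_URL}/predictions`, {
    method: "POST",
    headers: {
      "Authorization": `Token ${COGFLARE_TOKEN}`,
      "Content-Type": "application/json",
    },
    body: JSON.stringify({
      model: "nightmareai/disco-diffusion",
      input: { prompt: "a lighthouse in a storm" }, // input keys depend on the model's cog schema
      // callbackUrl: "https://example.com/hook",   // optional: status updates are POSTed here
    }),
  });
  const prediction = await createRes.json() as { id: string; status?: string; output?: unknown };
  console.log("created", prediction.id, prediction.status);

  // Poll for the result; completed predictions are served straight from Workers KV.
  const statusRes = await fetch(`${COGFLARE_URL}/predictions/${prediction.id}`);
  console.log(await statusRes.json());
}

runPrediction().catch(console.error);
```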
39 | -------------------------------------------------------------------------------- /src/index.ts: -------------------------------------------------------------------------------- 1 | import { v4 as uuidv4 } from 'uuid'; 2 | 3 | export { Queue } from './queue' 4 | export { Prediction } from './prediction' 5 | import auth, { User } from './util/auth'; 6 | 7 | export interface Env { 8 | QUEUE: DurableObjectNamespace 9 | PREDICTION: DurableObjectNamespace 10 | COGFLARE_URL: string 11 | COG_OUTPUTS: R2Bucket 12 | PREDICTIONS_KV: KVNamespace 13 | TOKENS_KV: KVNamespace 14 | } 15 | 16 | export default { 17 | async fetch(request: Request, env: Env, ctx: ExecutionContext): Promise { 18 | let url = new URL(request.url); 19 | let path = url.pathname.slice(1).split('/'); 20 | let authenticated = false; 21 | let user = await auth.auth(request, env.TOKENS_KV); 22 | authenticated = user?.allow ?? false; 23 | if (!path[0]) 24 | return new Response(); 25 | 26 | if (path[0] == "outputs") { 27 | const key = path.slice(1).join('/'); 28 | const value = await env.COG_OUTPUTS.get(key); 29 | if (value === null) { 30 | return new Response("Not found", { status: 404 }); 31 | } 32 | return new Response(value.body); 33 | } 34 | 35 | if (path[0] != 'v1') 36 | return new Response("Not found", { status: 404 }); 37 | 38 | switch (path[1]) { 39 | case 'models': { 40 | if (!path[2] || !path[3]) { 41 | // TODO: Get model list 42 | return new Response("Not found", { status: 404 }); 43 | } 44 | 45 | const model = path[2] + "/" + path[3]; 46 | if (path[4] == "websocket") { 47 | let id = env.QUEUE.idFromName(path[5] + "/" + model); 48 | let stub = env.QUEUE.get(id); 49 | let newUrl = new URL(request.url); 50 | newUrl.pathname = "/" + path.slice(4).join("/"); 51 | return stub.fetch(newUrl.toString(), request); 52 | } 53 | switch (request.method) { 54 | case 'GET': { 55 | if (!path[4] || !path[5]) { 56 | return new Response("Not found", { status: 404 }); 57 | } 58 | const key = path.slice(1).join('/'); 59 | console.log(key); 60 | const value = await env.COG_OUTPUTS.get(key); 61 | if (value === null) { 62 | return new Response("Not found", { status: 404 }); 63 | } 64 | return new Response(value.body); 65 | } 66 | case 'PUT': 67 | case 'POST': 68 | { 69 | // TODO: Authentication 70 | let id = uuidv4(); 71 | const formData = await request.formData(); 72 | const file = formData.get('file') as File; 73 | const key = `models/${model}/files/${id}/${file.name}`; 74 | await env.COG_OUTPUTS.put(key, file.stream()); 75 | return new Response(JSON.stringify({ url: `${env.COGFLARE_URL}/${key}` })); 76 | } 77 | } 78 | } 79 | case 'predictions': { 80 | let id: DurableObjectId | null = null; 81 | switch (request.method) { 82 | case 'GET': { 83 | if (!path[2]) { 84 | // TODO - List predictions 85 | return new Response("Not implemented", { status: 400 }); 86 | } 87 | // Check KV for completed job 88 | let value = await env.PREDICTIONS_KV.get(path[2]) 89 | if (value) 90 | return new Response(value); 91 | 92 | id = env.PREDICTION.idFromString(path[2]); 93 | break; 94 | } 95 | case 'POST': { 96 | if (!authenticated) 97 | return new Response("Not authorized", { status: 401 }); 98 | 99 | if (!path[2]) { 100 | id = env.PREDICTION.newUniqueId(); 101 | } else { 102 | id = env.PREDICTION.idFromString(path[2]); 103 | } 104 | break; 105 | } 106 | default: 107 | return new Response("Method not supported", { status: 400 }); 108 | } 109 | if (!id) 110 | return new Response("Not found", { status: 400 }); 111 | let stub = env.PREDICTION.get(id); 112 | if 
(!stub) 113 | return new Response("Not found", { status: 400 }); 114 | 115 | let newUrl = new URL(request.url); 116 | newUrl.pathname = "/" + path.slice(2).join("/"); 117 | let response = await stub.fetch(newUrl.toString(), request); 118 | return response; 119 | } 120 | default: 121 | return new Response("Not found", { status: 404 }); 122 | } 123 | } 124 | } 125 | -------------------------------------------------------------------------------- /src/queue.ts: -------------------------------------------------------------------------------- 1 | import * as Realm from 'realm-web'; 2 | import { PredictionResult } from "./prediction" 3 | import Denque from "denque" 4 | 5 | type Session = { 6 | id: string, 7 | connected: Date, 8 | quit?: boolean, 9 | request?: PredictionResult 10 | } 11 | 12 | export class Queue { 13 | state: DurableObjectState 14 | storage: DurableObjectStorage 15 | queuedItems: Denque 16 | sessions: Session[] 17 | sockets: Map 18 | baseUrl: string 19 | predictions: KVNamespace 20 | app: Realm.App 21 | user?: Realm.User 22 | constructor(state: DurableObjectState, env: Env) { 23 | this.state = state; 24 | this.storage = state.storage; 25 | this.sessions = [] 26 | this.sockets = new Map() 27 | this.baseUrl = env.COGFLARE_URL; 28 | this.predictions = env.PREDICTIONS_KV; 29 | this.queuedItems = new Denque() 30 | this.app = new Realm.App(env.REALM_APP_ID); 31 | this.state.blockConcurrencyWhile(async () => { 32 | this.user = await this.app.logIn(Realm.Credentials.apiKey(env.REALM_API_KEY)); 33 | try { 34 | let queue = await this.state.storage.get("queue"); 35 | if (queue) 36 | this.queuedItems = new Denque(queue); 37 | } catch { } 38 | try { 39 | this.sessions = await this.state.storage.get("sessions") || []; 40 | for (var session of this.sessions) { 41 | // no sessions are connected on startup, they will have to reconnect. 
42 | // TODO: Cleanup timed out sessions 43 | session.quit = true; 44 | } 45 | // Filter out any old sessions without jobs 46 | this.sessions = this.sessions.filter(s => s.request != null); 47 | } catch { } 48 | }); 49 | } 50 | 51 | async fetch(request: Request, env: Env, ctx: ExecutionContext): Promise { 52 | let url = new URL(request.url); 53 | let path = url.pathname.slice(1).split('/'); 54 | if (path[0] == 'v1') 55 | path = path.slice(1); 56 | switch (path[0]) { 57 | case "websocket": { 58 | if (request.headers.get("Upgrade") != "websocket") { 59 | return new Response("expected websocket", { status: 400 }); 60 | } 61 | let ip = request.headers.get("CF-Connecting-IP"); 62 | let session_id = url.searchParams.get("session_id"); 63 | if (!session_id) { 64 | return new Response("session_id parameter required", { status: 400 }); 65 | } 66 | let pair = new WebSocketPair(); 67 | await this.handleSession(pair[1], ip, session_id); 68 | if (!this.queuedItems.isEmpty()) 69 | await this.storage.setAlarm(Date.now() + 1000); 70 | return new Response(null, { status: 101, webSocket: pair[0] }); 71 | } 72 | case "predictions": { 73 | if (path[1]) { 74 | let kvItem = await this.predictions.get(path[1]); 75 | if (kvItem) { 76 | await this.storage.delete(path[1]); 77 | return new Response(kvItem); 78 | } 79 | let item = await this.storage.get(path[1]); 80 | if (!item) 81 | return new Response("Not found", { status: 404 }); 82 | else 83 | return new Response(JSON.stringify(item)); 84 | } 85 | let req = await request.json(); 86 | req.urls = { get: this.baseUrl + "/predictions/" + req.id, cancel: this.baseUrl + "/predictions/" + req.id + "/cancel" }; 87 | req.cogflare = true; 88 | req.created_at = new Date(); 89 | console.log("adding to queue"); 90 | req.status = "starting"; 91 | this.queuedItems.push(req); 92 | await this.storage.put(req.id, req); 93 | await this.storage.put("queue", this.queuedItems.toArray()); 94 | if (!(await this.storage.getAlarm())) 95 | await this.storage.setAlarm(Date.now() + 1000); 96 | return new Response(JSON.stringify(req)); 97 | } 98 | case "status": { 99 | return new Response(JSON.stringify({ available: this.sessions.filter(member => !member.quit && !member.request).length, total: this.sessions.length, queued: this.queuedItems.length })); 100 | } 101 | default: 102 | return new Response("Queue: Not found", { status: 404 }); 103 | } 104 | } 105 | 106 | async alarm() { 107 | let availableSessions: Session[] = this.sessions.filter(member => !member.quit && !member.request); 108 | for (let session of availableSessions) { 109 | try { 110 | let socket = this.sockets.get(session.id); 111 | if (socket) { 112 | let req = this.queuedItems.shift(); 113 | if (req) { 114 | console.log(`request ${req.id} dequeued`); 115 | socket.send(JSON.stringify(req)); 116 | await this.storage.put("queue", JSON.stringify(this.queuedItems.toArray())); 117 | } 118 | } 119 | } catch { 120 | 121 | } 122 | } 123 | } 124 | 125 | async handleSession(webSocket: WebSocket, ip: string | null, session_id: string) { 126 | webSocket.accept(); 127 | let session: Session = this.sessions.find(s => s.id == session_id) as Session; 128 | if (!session) { 129 | session = { id: session_id, connected: new Date() }; 130 | this.sessions.push(session); 131 | } 132 | else { 133 | // TODO: auth or some kind of duplicate protection 134 | let oldSocket = this.sockets.get(session_id); 135 | if (oldSocket) { 136 | oldSocket.close() 137 | } 138 | session.connected = new Date() 139 | session.quit = false 140 | } 141 | 142 | 
this.sockets.set(session_id, webSocket); 143 | await this.storage.put("sessions", this.sessions); 144 | webSocket.addEventListener("message", async msg => { 145 | 146 | if (session.quit) { 147 | webSocket.close(1011, "Websocket broken."); 148 | return; 149 | } 150 | 151 | let data = JSON.parse(msg.data.toString()); 152 | let req: PredictionResult = await this.storage.get(data.id) || data; 153 | try { 154 | session.request = req; 155 | session.request.output = data.output; 156 | session.request.logs = data.logs; 157 | session.request.error = data.error; 158 | session.request.completed_at = data.completed_at; 159 | session.request.metrics = data.metrics; 160 | session.request.status = data.status; 161 | if (["succeeded", "cancelled", "failed"].includes(data.status)) { 162 | await this.predictions.put(session.request.id, JSON.stringify(session.request)); 163 | session.request = undefined; 164 | if (!this.queuedItems.isEmpty() && !(await this.storage.getAlarm())) 165 | await this.storage.setAlarm(Date.now() + 1000); 166 | } 167 | } catch (err: any) { 168 | console.log("Failed to handle response:\n" + JSON.stringify({ error: err.stack })); 169 | } finally { 170 | await this.storage.put(req.id, req); 171 | } 172 | }); 173 | 174 | let closeOrErrorHandler = async () => { 175 | session.quit = true; 176 | console.log("Session closed/error"); 177 | this.sockets.delete(session_id); 178 | this.sessions = this.sessions.filter(member => member !== session); 179 | await this.storage.put("sessions", this.sessions); 180 | }; 181 | 182 | webSocket.addEventListener("close", closeOrErrorHandler); 183 | webSocket.addEventListener("error", closeOrErrorHandler); 184 | } 185 | } 186 | 187 | interface Env { 188 | COG_OUTPUTS: R2Bucket 189 | COGFLARE_URL: string 190 | PREDICTIONS_KV: KVNamespace 191 | REALM_APP_ID: string 192 | REALM_API_KEY: string 193 | } -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | /* Visit https://aka.ms/tsconfig.json to read more about this file */ 4 | 5 | /* Projects */ 6 | // "incremental": true, /* Enable incremental compilation */ 7 | // "composite": true, /* Enable constraints that allow a TypeScript project to be used with project references. */ 8 | // "tsBuildInfoFile": "./", /* Specify the folder for .tsbuildinfo incremental compilation files. */ 9 | // "disableSourceOfProjectReferenceRedirect": true, /* Disable preferring source files instead of declaration files when referencing composite projects */ 10 | // "disableSolutionSearching": true, /* Opt a project out of multi-project reference checking when editing. */ 11 | // "disableReferencedProjectLoad": true, /* Reduce the number of projects loaded automatically by TypeScript. */ 12 | 13 | /* Language and Environment */ 14 | "target": "es2021" /* Set the JavaScript language version for emitted JavaScript and include compatible library declarations. */, 15 | "lib": [ 16 | "es2021" 17 | ] /* Specify a set of bundled library declaration files that describe the target runtime environment. */, 18 | // "jsx": "preserve", /* Specify what JSX code is generated. */ 19 | // "experimentalDecorators": true, /* Enable experimental support for TC39 stage 2 draft decorators. */ 20 | // "emitDecoratorMetadata": true, /* Emit design-type metadata for decorated declarations in source files. 
*/ 21 | // "jsxFactory": "", /* Specify the JSX factory function used when targeting React JSX emit, e.g. 'React.createElement' or 'h' */ 22 | // "jsxFragmentFactory": "", /* Specify the JSX Fragment reference used for fragments when targeting React JSX emit e.g. 'React.Fragment' or 'Fragment'. */ 23 | // "jsxImportSource": "", /* Specify module specifier used to import the JSX factory functions when using `jsx: react-jsx*`.` */ 24 | // "reactNamespace": "", /* Specify the object invoked for `createElement`. This only applies when targeting `react` JSX emit. */ 25 | // "noLib": true, /* Disable including any library files, including the default lib.d.ts. */ 26 | // "useDefineForClassFields": true, /* Emit ECMAScript-standard-compliant class fields. */ 27 | 28 | /* Modules */ 29 | "module": "es2022" /* Specify what module code is generated. */, 30 | // "rootDir": "./", /* Specify the root folder within your source files. */ 31 | "moduleResolution": "node" /* Specify how TypeScript looks up a file from a given module specifier. */, 32 | // "baseUrl": "./", /* Specify the base directory to resolve non-relative module names. */ 33 | // "paths": {}, /* Specify a set of entries that re-map imports to additional lookup locations. */ 34 | // "rootDirs": [], /* Allow multiple folders to be treated as one when resolving modules. */ 35 | // "typeRoots": [], /* Specify multiple folders that act like `./node_modules/@types`. */ 36 | "types": [ 37 | "@cloudflare/workers-types" 38 | ] /* Specify type package names to be included without being referenced in a source file. */, 39 | // "allowUmdGlobalAccess": true, /* Allow accessing UMD globals from modules. */ 40 | "resolveJsonModule": true /* Enable importing .json files */, 41 | // "noResolve": true, /* Disallow `import`s, `require`s or ``s from expanding the number of files TypeScript should add to a project. */ 42 | 43 | /* JavaScript Support */ 44 | "allowJs": true /* Allow JavaScript files to be a part of your program. Use the `checkJS` option to get errors from these files. */, 45 | "checkJs": true /* Enable error reporting in type-checked JavaScript files. */, 46 | // "maxNodeModuleJsDepth": 1, /* Specify the maximum folder depth used for checking JavaScript files from `node_modules`. Only applicable with `allowJs`. */ 47 | 48 | /* Emit */ 49 | // "declaration": true, /* Generate .d.ts files from TypeScript and JavaScript files in your project. */ 50 | // "declarationMap": true, /* Create sourcemaps for d.ts files. */ 51 | // "emitDeclarationOnly": true, /* Only output d.ts files and not JavaScript files. */ 52 | // "sourceMap": true, /* Create source map files for emitted JavaScript files. */ 53 | // "outFile": "./", /* Specify a file that bundles all outputs into one JavaScript file. If `declaration` is true, also designates a file that bundles all .d.ts output. */ 54 | // "outDir": "./", /* Specify an output folder for all emitted files. */ 55 | // "removeComments": true, /* Disable emitting comments. */ 56 | "noEmit": true /* Disable emitting files from a compilation. */, 57 | // "importHelpers": true, /* Allow importing helper functions from tslib once per project, instead of including them per-file. */ 58 | // "importsNotUsedAsValues": "remove", /* Specify emit/checking behavior for imports that are only used for types */ 59 | // "downlevelIteration": true, /* Emit more compliant, but verbose and less performant JavaScript for iteration. */ 60 | // "sourceRoot": "", /* Specify the root path for debuggers to find the reference source code. 
*/ 61 | // "mapRoot": "", /* Specify the location where debugger should locate map files instead of generated locations. */ 62 | // "inlineSourceMap": true, /* Include sourcemap files inside the emitted JavaScript. */ 63 | // "inlineSources": true, /* Include source code in the sourcemaps inside the emitted JavaScript. */ 64 | // "emitBOM": true, /* Emit a UTF-8 Byte Order Mark (BOM) in the beginning of output files. */ 65 | // "newLine": "crlf", /* Set the newline character for emitting files. */ 66 | // "stripInternal": true, /* Disable emitting declarations that have `@internal` in their JSDoc comments. */ 67 | // "noEmitHelpers": true, /* Disable generating custom helper functions like `__extends` in compiled output. */ 68 | // "noEmitOnError": true, /* Disable emitting files if any type checking errors are reported. */ 69 | // "preserveConstEnums": true, /* Disable erasing `const enum` declarations in generated code. */ 70 | // "declarationDir": "./", /* Specify the output directory for generated declaration files. */ 71 | // "preserveValueImports": true, /* Preserve unused imported values in the JavaScript output that would otherwise be removed. */ 72 | 73 | /* Interop Constraints */ 74 | "isolatedModules": true /* Ensure that each file can be safely transpiled without relying on other imports. */, 75 | "allowSyntheticDefaultImports": true /* Allow 'import x from y' when a module doesn't have a default export. */, 76 | // "esModuleInterop": true /* Emit additional JavaScript to ease support for importing CommonJS modules. This enables `allowSyntheticDefaultImports` for type compatibility. */, 77 | // "preserveSymlinks": true, /* Disable resolving symlinks to their realpath. This correlates to the same flag in node. */ 78 | "forceConsistentCasingInFileNames": true /* Ensure that casing is correct in imports. */, 79 | 80 | /* Type Checking */ 81 | "strict": true /* Enable all strict type-checking options. */, 82 | // "noImplicitAny": true, /* Enable error reporting for expressions and declarations with an implied `any` type.. */ 83 | // "strictNullChecks": true, /* When type checking, take into account `null` and `undefined`. */ 84 | // "strictFunctionTypes": true, /* When assigning functions, check to ensure parameters and the return values are subtype-compatible. */ 85 | // "strictBindCallApply": true, /* Check that the arguments for `bind`, `call`, and `apply` methods match the original function. */ 86 | // "strictPropertyInitialization": true, /* Check for class properties that are declared but not set in the constructor. */ 87 | // "noImplicitThis": true, /* Enable error reporting when `this` is given the type `any`. */ 88 | // "useUnknownInCatchVariables": true, /* Type catch clause variables as 'unknown' instead of 'any'. */ 89 | // "alwaysStrict": true, /* Ensure 'use strict' is always emitted. */ 90 | // "noUnusedLocals": true, /* Enable error reporting when a local variables aren't read. */ 91 | // "noUnusedParameters": true, /* Raise an error when a function parameter isn't read */ 92 | // "exactOptionalPropertyTypes": true, /* Interpret optional property types as written, rather than adding 'undefined'. */ 93 | // "noImplicitReturns": true, /* Enable error reporting for codepaths that do not explicitly return in a function. */ 94 | // "noFallthroughCasesInSwitch": true, /* Enable error reporting for fallthrough cases in switch statements. 
*/ 95 | // "noUncheckedIndexedAccess": true, /* Include 'undefined' in index signature results */ 96 | // "noImplicitOverride": true, /* Ensure overriding members in derived classes are marked with an override modifier. */ 97 | // "noPropertyAccessFromIndexSignature": true, /* Enforces using indexed accessors for keys declared using an indexed type */ 98 | // "allowUnusedLabels": true, /* Disable error reporting for unused labels. */ 99 | // "allowUnreachableCode": true, /* Disable error reporting for unreachable code. */ 100 | 101 | /* Completeness */ 102 | // "skipDefaultLibCheck": true, /* Skip type checking .d.ts files that are included with TypeScript. */ 103 | "skipLibCheck": true /* Skip type checking all .d.ts files. */ 104 | } 105 | } 106 | -------------------------------------------------------------------------------- /src/prediction.ts: -------------------------------------------------------------------------------- 1 | import * as Realm from 'realm-web'; 2 | import Replicate from "./replicate"; 3 | import { Queue } from "./queue"; 4 | 5 | import { v4 as uuidv4 } from 'uuid'; 6 | const POLLING_INTERVAL = 5000; 7 | import auth, { User } from './util/auth'; 8 | 9 | // Represents a single prediction 10 | export class Prediction { 11 | state: DurableObjectState 12 | storage: DurableObjectStorage 13 | kv: KVNamespace 14 | tokens: KVNamespace 15 | result?: PredictionResult 16 | baseUrl: string 17 | model?: any 18 | outputBucket: R2Bucket 19 | queueNamespace: DurableObjectNamespace 20 | app: Realm.App 21 | user?: User | null 22 | constructor(state: DurableObjectState, env: Env) { 23 | this.state = state 24 | this.storage = state.storage 25 | this.kv = env.PREDICTIONS_KV 26 | this.tokens = env.TOKENS_KV 27 | this.baseUrl = env.COGFLARE_URL 28 | this.outputBucket = env.COG_OUTPUTS 29 | this.queueNamespace = env.QUEUE 30 | this.app = new Realm.App(env.REALM_APP_ID) 31 | } 32 | 33 | async fetch(request: Request, env: Env, ctx: ExecutionContext): Promise { 34 | let url = new URL(request.url); 35 | let path = url.pathname.slice(1).split('/'); 36 | this.user = await auth.auth(request, this.tokens); 37 | if (this.user) { 38 | this.storage.put("user", this.user); 39 | } 40 | switch (request.method) { 41 | case 'GET': { 42 | if (path[0] && path[0] == this.state.id.toString() && this.result) 43 | return new Response(JSON.stringify(this.result)); 44 | return new Response("Not found", { status: 404 }); 45 | } 46 | case 'POST': { 47 | if (!this.user || !this.user.allow) 48 | return new Response("Not authorized", { status: 401 }); 49 | if (!this.user.replicate) 50 | return new Response("Replicate token not configured", { status: 400 }); 51 | let replicate = new Replicate({ token: this.user.replicate }); 52 | if (!path[0]) { 53 | let req = await request.json(); 54 | let modelName = req.model; 55 | let version = req.version; 56 | let model = null; 57 | if (!req) 58 | return new Response("Request body missing or invalid", { status: 400 }); 59 | if (!req["version"] && !req["model"]) 60 | return new Response("Model and/or version must be specified", { status: 400 }); 61 | if (!version) { 62 | model = await replicate.models.get(modelName); 63 | await model.getModelDetails(); 64 | version = model.modelDetails.id; 65 | } else { 66 | if (!modelName) { 67 | // TODO: Cache versions? Pull them all from the site somehow? 
68 | return new Response("Version requests not implemented, please supply model name", { status: 400 }); 69 | } 70 | model = await replicate.models.get(modelName, version); 71 | await model.getModelDetails(); 72 | } 73 | 74 | if (!model) 75 | return new Response("Not found", { status: 404 }); 76 | 77 | const id = this.state.id.toString(); 78 | 79 | this.result = { 80 | id: id, 81 | urls: { 82 | get: this.baseUrl + "/predictions/" + id, 83 | cancel: this.baseUrl + "/predictions/" + id + "/cancel" 84 | }, 85 | source: "cogflare", 86 | created_at: new Date() 87 | } 88 | 89 | this.model = model; 90 | this.result.version = version; 91 | this.result.model = modelName; 92 | this.result.status = "creating"; 93 | this.result.input = req.input; 94 | 95 | await this.storage.put("result", this.result); 96 | 97 | if (req["callbackUrl"]) { 98 | let callbackUrl = req["callbackUrl"]; 99 | await this.storage.put("callbackUrl", callbackUrl); 100 | try { 101 | let callbackResult = await fetch(callbackUrl, { method: "POST", body: JSON.stringify(this.result), headers: { "content-type": "application/json" } }); 102 | if (callbackResult.status != 200) { 103 | console.log(`callback invoke ${callbackUrl} failed with ${callbackResult.status} ${callbackResult.statusText}, data follows`); 104 | console.log(JSON.stringify(this.result)); 105 | } 106 | } 107 | catch (ex) { 108 | console.log("callback url error" + ex); 109 | } 110 | } 111 | 112 | this.storage.setAlarm(Date.now() + 100); 113 | 114 | return new Response(JSON.stringify(this.result)); 115 | } 116 | } 117 | } 118 | 119 | return new Response("Not found", { status: 404 }); 120 | } 121 | 122 | async rehost(imageUrl: string, model: string): Promise { 123 | let url = new URL(imageUrl); 124 | if (!url) 125 | return imageUrl; 126 | if (url.host == new URL(this.baseUrl).host) 127 | return imageUrl; 128 | let headers = {}; 129 | let path = url.pathname.slice(1).split('/'); 130 | let id = uuidv4(); 131 | let key = `models/${model}/files/${id}/${path.slice(-1)}`; 132 | if (url.host.includes("replicate.com")) 133 | headers = { 'Authorization': `Token ${this.user?.replicate}` }; 134 | let imageResult = await fetch(imageUrl, { headers: headers }); 135 | if (imageResult.status != 200) { 136 | console.log(`failed to get ${url}: ${imageResult.status} ${imageResult.statusText}`); 137 | return imageUrl; 138 | } 139 | await this.outputBucket.put(key, imageResult.body); 140 | const newUrl = this.baseUrl + "/" + key; 141 | console.log(`${imageUrl} -> ${newUrl}`); 142 | return newUrl; 143 | } 144 | 145 | async alarm() { 146 | this.result = await this.storage.get("result"); 147 | this.user = await this.storage.get("user"); 148 | const callbackUrl = await this.storage.get("callbackUrl") as string; 149 | if (!this.result || !this.result.model || !this.result.version || !this.result.input) { 150 | console.log("alarm fired with no data for prediction " + this.state.id); 151 | return; 152 | } 153 | if (this.result.completed_at) { 154 | console.log("alarm fired on completed job" + this.state.id); 155 | return; 156 | } 157 | 158 | let replicate = new Replicate({ token: this.user?.replicate }); 159 | if (!this.model) 160 | this.model = await replicate.getModel(this.result.model, this.result.version); 161 | 162 | if (!this.result.replicateId && !this.result.cogflare) { 163 | console.log("Starting prediction " + this.state.id); 164 | let queueId = this.queueNamespace.idFromName(this.user?.worker + "/" + this.result.model); 165 | let queueStub = this.queueNamespace.get(queueId); 166 | let 
status = await (await queueStub.fetch(`${this.baseUrl}/status`, {})).json(); 167 | console.log(status); 168 | let startResult; 169 | if (status.total > 0 && status.queued <= status.total) { 170 | this.result.cogflare = true; 171 | this.result.runner = "runpod"; 172 | const startResponse = await queueStub.fetch(`${this.baseUrl}/predictions`, { body: JSON.stringify(this.result), method: "POST", headers: { "content-type": "application/json" } }); 173 | try { 174 | startResult = startResponse.json(); 175 | } catch (ex) { 176 | console.log("Exception parsing start response: " + ex); 177 | return; 178 | } 179 | } 180 | else { 181 | this.result.runner = "replicate"; 182 | startResult = await this.model.startPrediction(this.result.input); 183 | this.result.replicateId = startResult.id; 184 | } 185 | this.result.status = startResult.status; 186 | await this.storage.put("result", this.result); 187 | this.storage.setAlarm(Date.now() + POLLING_INTERVAL); 188 | return; 189 | } 190 | let result; 191 | if (this.result.replicateId) { 192 | result = await this.model.getPrediction(this.result.replicateId); 193 | } else { 194 | let queueId = this.queueNamespace.idFromName(this.user?.worker + "/" + this.result.model); 195 | let queueStub = this.queueNamespace.get(queueId); 196 | result = await (await queueStub.fetch(`${this.baseUrl}/predictions/${this.result.id}`)).json(); 197 | } 198 | console.log(result.status); 199 | let updated = false; 200 | try { 201 | if (this.result.output == null && result.output != null) { 202 | updated = true; 203 | if (typeof result.output == 'object') { 204 | this.result.output = {}; 205 | for (const prop in result.output) { 206 | let newUrl = await this.rehost(`${result.output[prop]}`, this.result.model); 207 | this.result.output[prop] = newUrl; 208 | } 209 | } else if (typeof result.output == 'string') { 210 | this.result.output = await this.rehost(result.output, this.result.model); 211 | } 212 | } else if (typeof this.result.output == 'object') { 213 | for (const prop in result.output) { 214 | if (!this.result.output[prop]) { 215 | updated = true; 216 | this.result.output[prop] = await this.rehost(result.output[prop], this.result.model) 217 | } 218 | } 219 | } 220 | 221 | if (updated || this.result.logs != result.logs || this.result.error != result.error || this.result.status != result.status || this.result.metrics != result.metrics) { 222 | this.result.logs = result.logs; 223 | this.result.error = result.error; 224 | this.result.status = result.status; 225 | this.result.metrics = result.metrics; 226 | this.result.completed_at = result.completed_at; 227 | try { 228 | if (callbackUrl) { 229 | let callbackResult = await fetch(callbackUrl, { method: "POST", body: JSON.stringify(this.result), headers: { "content-type": "application/json" } }); 230 | if (callbackResult.status != 200) { 231 | console.log(`callback invoke ${callbackUrl} failed with ${callbackResult.status} ${callbackResult.statusText}, data follows`); 232 | console.log(JSON.stringify(this.result)); 233 | } 234 | } 235 | } 236 | catch (ex) { 237 | console.log("callback url error" + ex); 238 | } 239 | } 240 | } catch (ex) { 241 | console.log("error updating result" + ex); 242 | } 243 | 244 | 245 | switch (this.result.status) { 246 | case "succeeded": 247 | case "failed": 248 | case "cancelled": 249 | await this.kv.put(this.state.id.toString(), JSON.stringify(this.result)); 250 | await this.storage.delete("result"); 251 | if (callbackUrl) 252 | await this.storage.delete("callbackUrl"); 253 | break; 254 | case 
"starting": 255 | case "processing": 256 | if (!this.result.created_at) 257 | this.result.created_at = new Date(); 258 | console.log(`${new Date(this.result.created_at).valueOf() + (30 * 60000)} ${new Date().valueOf()}`) 259 | if ((new Date(this.result.created_at).valueOf() + (30 * 60000)) > new Date().valueOf()) { 260 | await this.storage.put("result", this.result); 261 | this.storage.setAlarm(Date.now() + POLLING_INTERVAL); 262 | } else { 263 | console.log(`prediction timed out: ${this.state.id}`); 264 | // TODO: Cancel 265 | } 266 | break; 267 | default: 268 | console.log("unknown status: " + this.result.status) 269 | } 270 | } 271 | } 272 | 273 | export interface PredictionResult { 274 | id: string; 275 | runner?: string; 276 | replicateId?: string; 277 | cogflare?: boolean; 278 | version?: string; 279 | model?: string; 280 | urls?: { 281 | get?: string; 282 | cancel?: string; 283 | }, 284 | created_at?: Date, 285 | completed_at?: Date, 286 | source?: string, 287 | status?: string, 288 | input?: any, 289 | output?: any, 290 | error?: string, 291 | logs?: any, 292 | metrics?: any 293 | } 294 | 295 | interface Env { 296 | QUEUE: DurableObjectNamespace 297 | COG_OUTPUTS: R2Bucket 298 | COGFLARE_URL: string 299 | PREDICTIONS_KV: KVNamespace 300 | REALM_APP_ID: string 301 | TOKENS_KV: KVNamespace 302 | } --------------------------------------------------------------------------------