├── wrangler.toml
├── README.md
├── package.json
├── src
│   └── index.js
└── .gitignore


/wrangler.toml:
--------------------------------------------------------------------------------
name = "langchain-workers"
main = "src/index.js"
compatibility_date = "2023-04-25"

--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
# langchainjs-workers

[Setup instructions and guide](https://blog.cloudflare.com/langchain-and-cloudflare/)

--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
{
  "name": "langchain-workers",
  "version": "0.0.0",
  "devDependencies": {
    "wrangler": "2.15.0"
  },
  "private": true,
  "scripts": {
    "start": "wrangler dev",
    "deploy": "wrangler publish"
  },
  "dependencies": {
    "cheerio": "^1.0.0-rc.12",
    "chromadb": "^1.4.1",
    "langchain": "^0.0.63",
    "puppeteer": "^19.11.1"
  }
}

--------------------------------------------------------------------------------
/src/index.js:
--------------------------------------------------------------------------------
/**
 * Welcome to Cloudflare Workers! This is your first worker.
 *
 * - Run `npx wrangler dev src/index.js` in your terminal to start a development server
 * - Open a browser tab at http://localhost:8787/ to see your worker in action
 * - Run `npx wrangler publish src/index.js --name my-worker` to publish your worker
 *
 * Learn more at https://developers.cloudflare.com/workers/
 */

import { CheerioWebBaseLoader } from "langchain/document_loaders/web/cheerio";
import { MemoryVectorStore } from "langchain/vectorstores/memory";
import { OpenAI } from "langchain/llms/openai";
import { OpenAIEmbeddings } from "langchain/embeddings/openai";
import { RetrievalQAChain } from "langchain/chains";

export default {
  async fetch(request, env, ctx) {
    // Load the Wikipedia article and split it into smaller documents.
    const loader = new CheerioWebBaseLoader(
      "https://en.wikipedia.org/wiki/Brooklyn"
    );
    const docs = await loader.loadAndSplit();
    console.log(docs);

    // Embed the documents into an in-memory vector store.
    const store = await MemoryVectorStore.fromDocuments(
      docs,
      new OpenAIEmbeddings({ openAIApiKey: env.OPENAI_API_KEY })
    );

    // Build a retrieval QA chain over the vector store.
    const model = new OpenAI({ openAIApiKey: env.OPENAI_API_KEY });
    const chain = RetrievalQAChain.fromLLM(model, store.asRetriever());

    // Answer the question supplied via the `question` query parameter.
    const { searchParams } = new URL(request.url);
    const question =
      searchParams.get("question") ??
      "What is this article about? Can you give me 3 facts about it?";

    const res = await chain.call({
      query: question,
    });
    console.log(res.text);

    return new Response(res.text);
  },
};

--------------------------------------------------------------------------------
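Note: the worker reads `env.OPENAI_API_KEY`, so the key has to be provided as a binding. For a deployed worker that is typically done with `npx wrangler secret put OPENAI_API_KEY`; for local development a `.dev.vars` file works (the .gitignore below already excludes it). The snippet that follows is a minimal client-side sketch of how the worker's HTTP interface can be exercised once `npm start` (wrangler dev) is running; the local URL comes from the comment block above, and the sample question is just an illustrative assumption.

// Minimal sketch: query the local dev server started with `npm start`.
// Assumes wrangler dev's default address http://localhost:8787; the
// question text here is only an example.
const url = new URL("http://localhost:8787/");
url.searchParams.set(
  "question",
  "Which borough of New York City does the article describe?"
);

const response = await fetch(url);
// The worker returns the chain's answer as plain text.
console.log(await response.text());
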
/.gitignore:
--------------------------------------------------------------------------------
# Logs
logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
lerna-debug.log*
.pnpm-debug.log*

# Diagnostic reports (https://nodejs.org/api/report.html)
report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json

# Runtime data
pids
*.pid
*.seed
*.pid.lock

# Directory for instrumented libs generated by jscoverage/JSCover
lib-cov

# Coverage directory used by tools like istanbul
coverage
*.lcov

# nyc test coverage
.nyc_output

# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
.grunt

# Bower dependency directory (https://bower.io/)
bower_components

# node-waf configuration
.lock-wscript

# Compiled binary addons (https://nodejs.org/api/addons.html)
build/Release

# Dependency directories
node_modules/
jspm_packages/

# Snowpack dependency directory (https://snowpack.dev/)
web_modules/

# TypeScript cache
*.tsbuildinfo

# Optional npm cache directory
.npm

# Optional eslint cache
.eslintcache

# Optional stylelint cache
.stylelintcache

# Microbundle cache
.rpt2_cache/
.rts2_cache_cjs/
.rts2_cache_es/
.rts2_cache_umd/

# Optional REPL history
.node_repl_history

# Output of 'npm pack'
*.tgz

# Yarn Integrity file
.yarn-integrity

# dotenv environment variable files
.env
.env.development.local
.env.test.local
.env.production.local
.env.local

# parcel-bundler cache (https://parceljs.org/)
.cache
.parcel-cache

# Next.js build output
.next
out

# Nuxt.js build / generate output
.nuxt
dist

# Gatsby files
.cache/
# Comment in the public line in if your project uses Gatsby and not Next.js
# https://nextjs.org/blog/next-9-1#public-directory-support
# public

# vuepress build output
.vuepress/dist

# vuepress v2.x temp and cache directory
.temp
.cache

# Docusaurus cache and generated files
.docusaurus

# Serverless directories
.serverless/

# FuseBox cache
.fusebox/

# DynamoDB Local files
.dynamodb/

# TernJS port file
.tern-port

# Stores VSCode versions used for testing VSCode extensions
.vscode-test

# yarn v2
.yarn/cache
.yarn/unplugged
.yarn/build-state.yml
.yarn/install-state.gz
.pnp.*

# wrangler project
.dev.vars
.wrangler/
--------------------------------------------------------------------------------