├── .env.example ├── .gitignore ├── JSONChatHistory └── index.ts ├── README.md ├── agent.ipynb ├── assets └── poster.jpg ├── basic-langchain.ipynb ├── chat_data ├── test-history.json ├── test-server.json └── test.json ├── data ├── github-copliot.pdf ├── kong.txt └── qiu.txt ├── db ├── kongyiji │ ├── docstore.json │ └── faiss.index └── qiu │ ├── docstore.json │ └── faiss.index ├── deno.json ├── deno.lock ├── lc-tools.ipynb ├── learn-notebook-basic.ipynb ├── loader.ipynb ├── memory-2.ipynb ├── memory-3.ipynb ├── memory.ipynb ├── node ├── .env.example ├── LLMChainExtractor.ts ├── ScoreThresholdRetriever.ts ├── agent-react-rmb.ts ├── agent-tool-customized.ts ├── agent-tool.ts ├── gua │ ├── gua.json │ └── index.ts ├── load-kong-faiss.ts ├── mbti │ ├── index.ts │ └── mbti-info.json ├── multiQueryRetriever.ts ├── package.json ├── prepare-kong-faiss.ts ├── rag │ ├── client.ts │ ├── index.ts │ ├── prepare-qiu.ts │ └── server.ts ├── tsconfig.json └── yarn.lock ├── output-parser.ipynb ├── prompt-template.ipynb ├── rag.ipynb ├── splitter.ipynb ├── test └── data │ ├── 05-versions-space.pdf │ └── 05-versions-space.pdf.txt ├── tool-lesson.ipynb └── vector-store.ipynb /.env.example: -------------------------------------------------------------------------------- 1 | AZURE_OPENAI_API_KEY=abc 2 | AZURE_OPENAI_API_VERSION=2023-07-01-preview 3 | AZURE_OPENAI_API_DEPLOYMENT_NAME=abc 4 | AZURE_OPENAI_API_INSTANCE_NAME=abc 5 | AZURE_OPENAI_API_EMBEDDINGS_DEPLOYMENT_NAME=abc 6 | GITHUB_TOKEN=abc 7 | SERP_KEY=abc 8 | 9 | SMI_KEY=abc 10 | TAVILY_KEY=abc -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .ipynb_checkpoints 2 | .env 3 | node_modules -------------------------------------------------------------------------------- /JSONChatHistory/index.ts: -------------------------------------------------------------------------------- 1 | import { BaseListChatMessageHistory } from "@langchain/core/chat_history"; 2 | import { 3 | BaseMessage, 4 | StoredMessage, 5 | mapChatMessagesToStoredMessages, 6 | mapStoredMessagesToChatMessages, 7 | } from "@langchain/core/messages"; 8 | import fs from "node:fs"; 9 | import path from "node:path"; 10 | 11 | export interface JSONChatHistoryInput { 12 | sessionId: string; 13 | dir: string; 14 | } 15 | 16 | export class JSONChatHistory extends BaseListChatMessageHistory { 17 | lc_namespace = ["langchain", "stores", "message"]; 18 | 19 | sessionId: string; 20 | dir: string; 21 | 22 | constructor(fields: JSONChatHistoryInput) { 23 | super(fields); 24 | this.sessionId = fields.sessionId; 25 | this.dir = fields.dir; 26 | } 27 | 28 | async getMessages(): Promise { 29 | const filePath = path.join(this.dir, `${this.sessionId}.json`); 30 | try { 31 | if (!fs.existsSync(filePath)) { 32 | this.saveMessagesToFile([]); 33 | return []; 34 | } 35 | 36 | const data = fs.readFileSync(filePath, { encoding: "utf-8" }); 37 | const storedMessages = JSON.parse(data) as StoredMessage[]; 38 | return mapStoredMessagesToChatMessages(storedMessages); 39 | } catch (error) { 40 | console.error(`Failed to read chat history from ${filePath}`, error); 41 | return []; 42 | } 43 | } 44 | 45 | async addMessage(message: BaseMessage): Promise { 46 | const messages = await this.getMessages(); 47 | messages.push(message); 48 | await this.saveMessagesToFile(messages); 49 | } 50 | 51 | async addMessages(messages: BaseMessage[]): Promise { 52 | const existingMessages = await this.getMessages(); 53 
| const allMessages = existingMessages.concat(messages); 54 | await this.saveMessagesToFile(allMessages); 55 | } 56 | 57 | async clear(): Promise { 58 | const filePath = path.join(this.dir, `${this.sessionId}.json`); 59 | try { 60 | fs.unlinkSync(filePath); 61 | } catch (error) { 62 | console.error(`Failed to clear chat history from ${filePath}`, error); 63 | } 64 | } 65 | 66 | private async saveMessagesToFile(messages: BaseMessage[]): Promise { 67 | const filePath = path.join(this.dir, `${this.sessionId}.json`); 68 | const serializedMessages = mapChatMessagesToStoredMessages(messages); 69 | try { 70 | fs.writeFileSync(filePath, JSON.stringify(serializedMessages, null, 2), { 71 | encoding: "utf-8", 72 | }); 73 | } catch (error) { 74 | console.error(`Failed to save chat history to ${filePath}`, error); 75 | } 76 | } 77 | } 78 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | ## 小册链接: 2 | 3 | [从前端到 AI:langchain.js 入门和实战](https://s.juejin.cn/ds/i2qw5dKG/) 4 | 5 | ## 你会学到什么? 6 | 7 | - 从 0 到 1,掌握复杂大模型应用的构建思路; 8 | - 深入真实业务场景,打造有价值的大模型应用; 9 | - 打破常规,趣学大模型编程范式与交互; 10 | - 基于 JS,前端工程师也能玩转 AI 开发。 11 | 12 | ## 作者介绍 13 | 14 | ![作者简介(2100x389).jpg](https://p1-juejin.byteimg.com/tos-cn-i-k3u1fbpfcp/c362ac7e0a684ef9863d03cf275702fc~tplv-k3u1fbpfcp-jj-mark:0:0:0:0:q75.image#?w=2000&h=389&s=229447&e=jpg&b=cfcfd1) 15 | 16 | Kai,现某知名外企软件工程师,曾工作于字节跳动、Disney Streaming(Hulu)。GitHub 累计获得 `15k+` star,《Web Worker 播客》主播,前沿技术、开源爱好者。 17 | 18 | ## 小册介绍 19 | 20 | ![课程介绍(1624x623).jpg](https://p9-juejin.byteimg.com/tos-cn-i-k3u1fbpfcp/04626202d65c44d690002d45a5acccc3~tplv-k3u1fbpfcp-jj-mark:0:0:0:0:q75.image#?w=1624&h=623&s=355579&e=jpg) 21 | 22 | 大模型时代已经到来,大量基础模型的质量、速度和价格已经到了高度可用的程度,曾经无数难以解决的语义理解、逻辑推断、复杂对话任务,现在通过非常简单的 prompt 就能解决。这给应用层近乎无限的想象力,所有应用和交互模式都值得用大模型的思考方式重新审视。**这是不亚于移动互联网浪潮的机会,一次新的全民黑客松,一次新的重构现有应用格局的机会!** 23 | 24 | 大模型时代的语言将是 Python 和 JavaScript(后简称 JS),这是 OpenAI 官方 API 仅支持的两种语言,也是大模型最流行的应用框架 LangChain 仅支持的两种语言。前者的地位不言而喻。而 JS 是最接近应用层的语言,无论是网页、移动端,还是桌面端都可以使用 JS 进行构建,它也是唯一的全栈语言。 25 | 26 | 通过 JS,我们可以借助各种 Sass/Pass 云服务,快速构建出极高可用度的应用给用户,迅速验证自己的 idea 并获得市场反馈。现在,在 JS 丰富的生态又加入了大模型这一强大的工具,LangChain.js 并不是 Python 版本的简单封装,而是全部由 JS 构建,可见其对 JS 生态的重视。 27 | 28 | 这是给所有前端朋友的一次新机会,我们可以充分利用各种成熟的云服务,加上我们对大模型的理解和对用户需求的洞察,迅速构建出解决垂直领域的应用,并根据反馈持续更新和改进,**占领新时代的用户心智**。 29 | 30 | **那么,为什么是 LangChain 呢?** 31 | 32 | LangChain 就是目前最流行的大模型应用框架,适合 JS 选手去入门和上手大模型应用的开发。其`高度封装`的特点将复杂隔离,我们可以利用其提供的各种高质量工具模块化地构建 App。其最新推出的 `LCEL` 新编程范式更是将模块化构建推到了一个全新的高度。 33 | 34 | 使用 LangChain.js 构建复杂的 RAG Chatbot 或者是 Agents,往往只需要不到百行代码,就能生成一个高度可用的大模型应用基础。然后我们就可以将更多精力投入到更熟悉的 UI 和交互的构建之中,迅速交付给用户使用了。 35 | 36 | 本小册基于面向应用侧工程师去编写,仅预设读者有 JS 相关知识。 37 | 38 | 我们会从构建大模型应用的基础概念、LangChain 基础概念讲起,循序渐进地带大家从 0 到 1 构建复杂的大模型应用。我们也会深入业务,带大家写一些实际工程会用的自定义方案,例如,如何将 LangChian 中的 Memory 接入到现有的数据库存储体系中,或者在业务初期和测试中,如何将 memory 以文件形式进行持久化等,帮助大家迅速走上大模型应用开发之路。另外,我们的小册也会贴近开发实际情况,例如带大家申请 OpenAI 的服务,以及如何在 LangChain 中接入这些非 OpenAI 官方的服务。 39 | 40 |
(配图:从前端到 AI:langchain.js 入门和实战.png)
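上文提到的「将 memory 以文件形式进行持久化」,对应的就是本仓库里的 `JSONChatHistory/index.ts`。下面是一个极简的接入示意:假设直接复用该 `JSONChatHistory`,借助 LCEL 提供的 `RunnableWithMessageHistory`,按 sessionId 把任意 chain 的对话记录持久化成 JSON 文件。这只是一个示意草稿,路径和参数以仓库中的实际代码为准:

```ts
// 示意:为任意 chain 挂上文件形式的聊天记录(假设复用本仓库的 JSONChatHistory)
import { ChatOpenAI } from "@langchain/openai";
import { ChatPromptTemplate, MessagesPlaceholder } from "@langchain/core/prompts";
import { RunnableWithMessageHistory } from "@langchain/core/runnables";
import { JSONChatHistory } from "./JSONChatHistory/index.ts";

const prompt = ChatPromptTemplate.fromMessages([
  ["system", "你是一个乐于助人的助手"],
  new MessagesPlaceholder("history"),
  ["human", "{input}"],
]);
const chain = prompt.pipe(new ChatOpenAI());

// 按 sessionId 把历史消息读写到 chat_data 目录下的 JSON 文件
const chainWithHistory = new RunnableWithMessageHistory({
  runnable: chain,
  getMessageHistory: (sessionId) =>
    new JSONChatHistory({ sessionId, dir: "./chat_data" }),
  inputMessagesKey: "input",
  historyMessagesKey: "history",
});

await chainWithHistory.invoke(
  { input: "Hi, 我叫小明" },
  { configurable: { sessionId: "test" } }
);
```

多次 invoke 同一个 sessionId 时,历史消息会自动从对应的 JSON 文件读出并追加写回,效果就类似仓库中 `chat_data/test.json` 里保存的那段对话。
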
41 | 42 | 在实战章节,我们会突破传统的 ChatBot 常规实战,带大家尝试大模型的编程范式,讲解在图形化交互界面(GUI)下一个时代的大模型交互(LLM_UI)应该是什么样的,应该如何交互和实现,相信会给大家很多大模型时代应用模式的启发。同时,我也会带大家做一些贴合大模型优势的实战,去分享我对大模型优势和擅长点的思考,并带大家做一些好玩且有趣的实战出来。我相信大家做完后会玩得不亦乐乎,也会兴奋地去跟朋友分享,就像我一样。 43 | 44 | 最后,大模型所展示出来的强大智能和编码能力让无数程序员开始紧张。它对所有人来说都是新物种,对深入模型侧的研发和工程的背景要求较高。事实上,编程和工程的发展就是将不需要关心的复杂度封装起来,给上层工程师减少思维复杂度,将更多的精力投入到自己擅长的领域。 45 | 46 | 大模型亦是这样,我们需要迅速掌握其使用方式和特点,然后思考如何将其与我们的技能结合起来,搭配成有价值的应用给用户。用户并不关心你是如何实现的、你是否是从 0 自己训练的模型,用户只关心应用是否有价值,以及我能否在下一秒就能用到。而这恰恰是我们应用侧工程师所擅长的,**我们懂用户、懂需求、懂交互,只需要掌握构建大模型应用的能力,就能 “交付” 高价值的应用。** 47 | 48 | 相信这本小册能学到你所需要的,一起加油吧! 49 | 50 | ## 适宜人群 51 | 52 | 1. 有一定 JS 基础的前端开发者,并想动手尝试大模型应用开发; 53 | 2. 想快速落地大模型应用的独立开发者; 54 | 3. 想体验大模型开发,更深入地理解其效果和优势; 55 | 4. 想要学会大模型技能增强自己职业竞争力。 56 | 57 | ## 购买须知 58 | 59 | 1. 本小册为图文形式内容服务,共计 **`24`** 节; 60 | 2. 小册于 2024 年 5 月 8 日上线,全部文章已更新完成; 61 | 3. 购买用户可享有小册永久的阅读权限; 62 | 4. 购买用户可进入小册微信群,与作者互动; 63 | 5. 掘金小册为虚拟内容服务,一经购买成功概不退款; 64 | 6. 掘金小册版权归北京北比信息技术有限公司所有,任何机构、媒体、网站或个人未经本网协议授权不得转载、链接、转贴或以其他方式复制发布/发表,违者将依法追究责任; 65 | 7. 在掘金小册阅读过程中,如有任何问题,请邮件联系 xiaoce@xitu.io 66 | 67 | ## 小册海报 68 | 69 | ![poster](/assets/poster.jpg) 70 | -------------------------------------------------------------------------------- /agent.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": null, 6 | "id": "4e51ba54-3bfd-4384-b476-e793544191d0", 7 | "metadata": {}, 8 | "outputs": [], 9 | "source": [ 10 | "import { load } from \"dotenv\";\n", 11 | "const env = await load();\n", 12 | "\n", 13 | "const process = {\n", 14 | " env\n", 15 | "}" 16 | ] 17 | }, 18 | { 19 | "cell_type": "code", 20 | "execution_count": null, 21 | "id": "a8216718-c062-4755-a749-c9b80ff8467c", 22 | "metadata": {}, 23 | "outputs": [], 24 | "source": [ 25 | "import { z } from \"zod\";\n", 26 | "import { ChatOpenAI } from \"@langchain/openai\";\n", 27 | "import { JsonOutputToolsParser } from \"@langchain/core/output_parsers/openai_tools\";\n", 28 | "import { RunnableSequence, RunnableBranch, RunnablePassthrough } from \"@langchain/core/runnables\";\n", 29 | "import { zodToJsonSchema } from \"zod-to-json-schema\";\n", 30 | "import { ChatPromptTemplate, PromptTemplate } from \"@langchain/core/prompts\";\n", 31 | " \n", 32 | "const classifySchema = z.object({\n", 33 | " type: z.enum([\"科普\", \"编程\", \"一般问题\"]).describe(\"用户提问的分类\")\n", 34 | "})\n", 35 | "\n", 36 | "const model = new ChatOpenAI({\n", 37 | " temperature: 0 \n", 38 | "})\n", 39 | "\n", 40 | "const modelWithTools = model.bind({\n", 41 | " tools: [\n", 42 | " {\n", 43 | " type: \"function\",\n", 44 | " function: {\n", 45 | " name: \"classifyQuestion\",\n", 46 | " description: \"对用户的提问进行分类\",\n", 47 | " parameters: zodToJsonSchema(classifySchema),\n", 48 | " }\n", 49 | " }\n", 50 | " ],\n", 51 | " tool_choice: {\n", 52 | " type: \"function\",\n", 53 | " function: {\n", 54 | " name: \"classifyQuestion\"\n", 55 | " }\n", 56 | " }\n", 57 | "})\n", 58 | "\n", 59 | "const prompt = ChatPromptTemplate.fromMessages([\n", 60 | " [\"system\", `仔细思考,你有充足的时间进行严谨的思考,然后对用户的问题进行分类,\n", 61 | " 当你无法分类到特定分类时,可以分类到 \"一般问题\"`],\n", 62 | " [\"human\", \"{input}\"]\n", 63 | "])\n", 64 | "\n", 65 | "const classifyChain = RunnableSequence.from([\n", 66 | " prompt,\n", 67 | " modelWithTools,\n", 68 | " new JsonOutputToolsParser(),\n", 69 | " (input) => {\n", 70 | " const type = input[0]?.args?.type\n", 71 | " return type ? 
type : \"一般问题\"\n", 72 | " }\n", 73 | "])" 74 | ] 75 | }, 76 | { 77 | "cell_type": "code", 78 | "execution_count": null, 79 | "id": "4dd65030-e4a8-4dae-b859-42c54251012e", 80 | "metadata": {}, 81 | "outputs": [], 82 | "source": [ 83 | "await classifyChain.invoke({\n", 84 | " \"input\": \"鲸鱼是哺乳动物么?\"\n", 85 | "})" 86 | ] 87 | }, 88 | { 89 | "cell_type": "code", 90 | "execution_count": null, 91 | "id": "b3817a63-926c-4d2c-bc15-b630d372668d", 92 | "metadata": {}, 93 | "outputs": [], 94 | "source": [ 95 | "import { StringOutputParser } from \"@langchain/core/output_parsers\";\n", 96 | "\n", 97 | "const answeringModel = new ChatOpenAI({\n", 98 | " temperature: 0.7,\n", 99 | "})\n", 100 | "\n", 101 | "const sciencePrompt = PromptTemplate.fromTemplate(\n", 102 | " `作为一位科普专家,你需要解答以下问题,尽可能提供详细、准确和易于理解的答案:\n", 103 | "\n", 104 | "问题:{input}\n", 105 | "答案:`\n", 106 | ")\n", 107 | " \n", 108 | "const programmingPrompt = PromptTemplate.fromTemplate(\n", 109 | " `作为一位编程专家,你需要解答以下编程相关的问题,尽可能提供详细、准确和实用的答案:\n", 110 | "\n", 111 | "问题:{input}\n", 112 | "答案:`\n", 113 | ")\n", 114 | "\n", 115 | "const generalPrompt = PromptTemplate.fromTemplate(\n", 116 | " `请回答以下一般性问题,尽可能提供全面和有深度的答案:\n", 117 | "\n", 118 | "问题:{input}\n", 119 | "答案:`\n", 120 | ")\n", 121 | "\n", 122 | "\n", 123 | "const scienceChain = RunnableSequence.from([\n", 124 | " sciencePrompt,\n", 125 | " answeringModel,\n", 126 | " new StringOutputParser(),\n", 127 | " {\n", 128 | " output: input => input,\n", 129 | " role: () => \"科普专家\"\n", 130 | " }\n", 131 | " \n", 132 | "])\n", 133 | "\n", 134 | "const programmingChain = RunnableSequence.from([\n", 135 | " programmingPrompt,\n", 136 | " answeringModel,\n", 137 | " new StringOutputParser(),\n", 138 | " {\n", 139 | " output: input => input,\n", 140 | " role: () => \"编程大师\"\n", 141 | " }\n", 142 | " \n", 143 | "])\n", 144 | "\n", 145 | "const generalChain = RunnableSequence.from([\n", 146 | " generalPrompt,\n", 147 | " answeringModel,\n", 148 | " new StringOutputParser(),\n", 149 | " {\n", 150 | " output: input => input,\n", 151 | " role: () => \"通识专家\"\n", 152 | " }\n", 153 | " \n", 154 | "])" 155 | ] 156 | }, 157 | { 158 | "cell_type": "code", 159 | "execution_count": null, 160 | "id": "6e76cf62-cbdf-4a72-aa0f-9173b7e81aa5", 161 | "metadata": {}, 162 | "outputs": [], 163 | "source": [ 164 | "const branch = RunnableBranch.from([\n", 165 | " [\n", 166 | " (input => input.type.includes(\"科普\")),\n", 167 | " scienceChain,\n", 168 | " ],\n", 169 | " [\n", 170 | " (input => input.type.includes(\"编程\")),\n", 171 | " programmingChain,\n", 172 | " ],\n", 173 | " generalChain\n", 174 | "]);" 175 | ] 176 | }, 177 | { 178 | "cell_type": "code", 179 | "execution_count": null, 180 | "id": "da1fe98b-8887-452e-8007-a2aa78992e98", 181 | "metadata": {}, 182 | "outputs": [], 183 | "source": [ 184 | "const outputTemplate = PromptTemplate.fromTemplate(\n", 185 | "`感谢您的提问,这是来自 {role} 的专业回答:\n", 186 | "\n", 187 | "{output}\n", 188 | "`)\n", 189 | "\n", 190 | "\n", 191 | "const finalChain = RunnableSequence.from([\n", 192 | " {\n", 193 | " type: classifyChain,\n", 194 | " input: input => input.input\n", 195 | " },\n", 196 | " branch,\n", 197 | " (input) => outputTemplate.format(input),\n", 198 | "])" 199 | ] 200 | }, 201 | { 202 | "cell_type": "code", 203 | "execution_count": null, 204 | "id": "543fb03e-c1ad-404a-a5a8-36ed84d726be", 205 | "metadata": {}, 206 | "outputs": [], 207 | "source": [ 208 | "const res = await finalChain.invoke({\n", 209 | " \"input\": \"鲸鱼是哺乳动物么?\"\n", 210 | "})\n", 211 | "\n", 212 | 
"console.log(res)" 213 | ] 214 | }, 215 | { 216 | "cell_type": "code", 217 | "execution_count": null, 218 | "id": "57dafeab-a265-4db2-8111-0691486cf6da", 219 | "metadata": {}, 220 | "outputs": [], 221 | "source": [ 222 | "const route = ({ type }) => {\n", 223 | " if(type.includes(\"科普\")){\n", 224 | " return scienceChain\n", 225 | " }else if(type.includes(\"编程\")){\n", 226 | " return programmingChain\n", 227 | " }\n", 228 | "\n", 229 | " return generalChain\n", 230 | "}" 231 | ] 232 | }, 233 | { 234 | "cell_type": "code", 235 | "execution_count": null, 236 | "id": "bb9a09a0-9d4a-4a1c-a259-15d4cabd9492", 237 | "metadata": {}, 238 | "outputs": [], 239 | "source": [ 240 | "const finalChain2 = RunnableSequence.from([\n", 241 | " {\n", 242 | " type: classifyChain,\n", 243 | " input: input => input.input\n", 244 | " },\n", 245 | " route,\n", 246 | " (input) => outputTemplate.format(input),\n", 247 | "])" 248 | ] 249 | }, 250 | { 251 | "cell_type": "code", 252 | "execution_count": null, 253 | "id": "ffd3d7ee-85c1-4ae8-8e59-88d214e2e8ed", 254 | "metadata": {}, 255 | "outputs": [], 256 | "source": [ 257 | "await finalChain2.invoke({\n", 258 | " \"input\": \"鲸鱼是哺乳动物么?\"\n", 259 | "})" 260 | ] 261 | } 262 | ], 263 | "metadata": { 264 | "kernelspec": { 265 | "display_name": "Deno", 266 | "language": "typescript", 267 | "name": "deno" 268 | }, 269 | "language_info": { 270 | "file_extension": ".ts", 271 | "mimetype": "text/x.typescript", 272 | "name": "typescript", 273 | "nb_converter": "script", 274 | "pygments_lexer": "typescript", 275 | "version": "5.3.3" 276 | } 277 | }, 278 | "nbformat": 4, 279 | "nbformat_minor": 5 280 | } 281 | -------------------------------------------------------------------------------- /assets/poster.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/RealKai42/langchainjs-juejin/49b8764d37f76e02861bc3d8b01508fe4805ca64/assets/poster.jpg -------------------------------------------------------------------------------- /basic-langchain.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": null, 6 | "metadata": {}, 7 | "outputs": [], 8 | "source": [ 9 | "import { load } from \"dotenv\";\n", 10 | "const env = await load();\n", 11 | "\n", 12 | "const process = {\n", 13 | " env\n", 14 | "}" 15 | ] 16 | }, 17 | { 18 | "cell_type": "code", 19 | "execution_count": null, 20 | "metadata": {}, 21 | "outputs": [], 22 | "source": [ 23 | "import { ChatOpenAI } from \"@langchain/openai\";\n", 24 | "import { HumanMessage } from \"@langchain/core/messages\";\n", 25 | "\n", 26 | "const model = new ChatOpenAI();\n", 27 | "\n", 28 | "await model.invoke([\n", 29 | " new HumanMessage(\"Tell me a joke\")\n", 30 | "])" 31 | ] 32 | }, 33 | { 34 | "cell_type": "code", 35 | "execution_count": null, 36 | "metadata": {}, 37 | "outputs": [], 38 | "source": [ 39 | "import { ChatOpenAI } from \"@langchain/openai\";\n", 40 | "import { HumanMessage } from \"@langchain/core/messages\";\n", 41 | "import { StringOutputParser } from \"@langchain/core/output_parsers\";\n", 42 | "\n", 43 | "const chatModel = new ChatOpenAI();\n", 44 | "const outputPrase = new StringOutputParser();\n", 45 | "\n", 46 | "const simpleChain = chatModel.pipe(outputPrase)\n", 47 | "\n", 48 | "await simpleChain.invoke([\n", 49 | " new HumanMessage(\"Tell me a joke\")\n", 50 | "])" 51 | ] 52 | }, 53 | { 54 | "cell_type": "code", 55 | "execution_count": null, 56 | 
"metadata": {}, 57 | "outputs": [], 58 | "source": [ 59 | "await simpleChain.batch([\n", 60 | " [ new HumanMessage(\"Tell me a joke\") ],\n", 61 | " [ new HumanMessage(\"Hi, Who are you?\") ],\n", 62 | "])" 63 | ] 64 | }, 65 | { 66 | "cell_type": "code", 67 | "execution_count": null, 68 | "metadata": {}, 69 | "outputs": [], 70 | "source": [ 71 | "const stream = await simpleChain.stream([\n", 72 | " new HumanMessage(\"Tell me a joke\")\n", 73 | "])\n", 74 | "\n", 75 | "for await (const chunk of stream){\n", 76 | " console.log(chunk)\n", 77 | "}" 78 | ] 79 | }, 80 | { 81 | "cell_type": "code", 82 | "execution_count": null, 83 | "metadata": {}, 84 | "outputs": [], 85 | "source": [ 86 | "const stream = await simpleChain.streamLog([\n", 87 | " new HumanMessage(\"Tell me a joke\")\n", 88 | "])\n", 89 | "\n", 90 | "for await (const chunk of stream){\n", 91 | " console.log(chunk)\n", 92 | "}" 93 | ] 94 | }, 95 | { 96 | "cell_type": "code", 97 | "execution_count": null, 98 | "metadata": {}, 99 | "outputs": [], 100 | "source": [ 101 | "import { ChatOpenAI } from \"@langchain/openai\";\n", 102 | "\n", 103 | "const fakeLLM = new ChatOpenAI({\n", 104 | " azureOpenAIApiKey: \"123\",\n", 105 | " maxRetries: 0,\n", 106 | "});\n", 107 | "\n", 108 | "await fakeLLM.invoke(\"你好\")\n", 109 | "\n", 110 | "const realLLM = new ChatOpenAI()\n", 111 | "const llmWithFallback = fakeLLM.withFallbacks({\n", 112 | " fallbacks: [realLLM]\n", 113 | "})\n", 114 | "\n", 115 | "await llmWithFallback.invoke(\"你好\")\n" 116 | ] 117 | } 118 | ], 119 | "metadata": { 120 | "kernelspec": { 121 | "display_name": "Deno", 122 | "language": "typescript", 123 | "name": "deno" 124 | }, 125 | "language_info": { 126 | "file_extension": ".ts", 127 | "mimetype": "text/x.typescript", 128 | "name": "typescript", 129 | "nb_converter": "script", 130 | "pygments_lexer": "typescript", 131 | "version": "5.3.3" 132 | } 133 | }, 134 | "nbformat": 4, 135 | "nbformat_minor": 4 136 | } 137 | -------------------------------------------------------------------------------- /chat_data/test-history.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "type": "human", 4 | "data": { 5 | "content": "什么是球状闪电", 6 | "additional_kwargs": {}, 7 | "response_metadata": {} 8 | } 9 | }, 10 | { 11 | "type": "ai", 12 | "data": { 13 | "content": "根据原文,球状闪电是一种极其罕见的现象,是一个充盈着能量的弯曲的空间,一个似有似无的空泡,一个足球大小的电子。它被描述为一个超现实的小东西,仿佛梦之乡溢出的一粒灰尘,暗示着宇宙的博大和神秘,暗示着可能存在的与我们现实完全不同的其他世界。球状闪电的确切性质和构成目前仍然是科学之谜,但它不是小说中所描述的那种东西,而是一种真实存在的自然现象。", 14 | "additional_kwargs": {}, 15 | "response_metadata": {} 16 | } 17 | }, 18 | { 19 | "type": "human", 20 | "data": { 21 | "content": "这个现象在文中有什么故事", 22 | "additional_kwargs": {}, 23 | "response_metadata": {} 24 | } 25 | }, 26 | { 27 | "type": "ai", 28 | "data": { 29 | "content": "球状闪电在《球状闪电》这本小说中有着丰富的故事情节。小说中描述了一个年轻人因为观察到球状闪电而开始对它展开研究的旅程。他发现球状闪电的特性和行为与以往所知的闪电形式有着明显不同,它具有弯曲的空间、充盈的能量和神秘的存在状态。在寻求解释和了解球状闪电的过程中,他秘密调查了死去科学家的笔迹,探索了前苏联的地下科技城,还遭遇了次世代的世界大战的种种阻碍。最终,他发现球状闪电並非只是自然现象,而是一种可以用作战争武器的存在,成为了决定祖国存亡的终极武器。\n\n这个故事展示了球状闪电的不寻常和神秘之处,以及对它进行研究和利用的影响和后果。球状闪电在小说中被描绘为一种引人入胜的现象,同时也成为了战争中的重要元素,改变了整个世界的格局。", 30 | "additional_kwargs": {}, 31 | "response_metadata": {} 32 | } 33 | } 34 | ] -------------------------------------------------------------------------------- /chat_data/test-server.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "type": "human", 4 | "data": { 5 | "content": "什么是球状闪电", 6 | "additional_kwargs": {}, 7 | "response_metadata": {} 8 | } 9 | }, 10 | { 11 | 
"type": "ai", 12 | "data": { 13 | "content": "根据原文描写,球状闪电是一个充盈着能量的弯曲的空间,似有似无的空泡,大约足球大小的电子。它被形容为一个超现实的存在,仿佛梦之乡溢出的一粒灰尘,暗示着宇宙的博大和神秘。", 14 | "additional_kwargs": {}, 15 | "response_metadata": {} 16 | } 17 | } 18 | ] -------------------------------------------------------------------------------- /chat_data/test.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "type": "human", 4 | "data": { 5 | "content": "Hi, 我叫小明", 6 | "additional_kwargs": {}, 7 | "response_metadata": {} 8 | } 9 | }, 10 | { 11 | "type": "ai", 12 | "data": { 13 | "content": "你好", 14 | "additional_kwargs": {}, 15 | "response_metadata": {} 16 | } 17 | }, 18 | { 19 | "type": "human", 20 | "data": { 21 | "content": "我叫什么?", 22 | "additional_kwargs": {}, 23 | "response_metadata": {} 24 | } 25 | }, 26 | { 27 | "type": "ai", 28 | "data": { 29 | "content": "你叫小明。", 30 | "additional_kwargs": {}, 31 | "response_metadata": {} 32 | } 33 | } 34 | ] -------------------------------------------------------------------------------- /data/github-copliot.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/RealKai42/langchainjs-juejin/49b8764d37f76e02861bc3d8b01508fe4805ca64/data/github-copliot.pdf -------------------------------------------------------------------------------- /data/kong.txt: -------------------------------------------------------------------------------- 1 | 鲁镇的酒店的格局,是和别处不同的:都是当街一个曲尺形的大柜台,柜里面预备着热水,可以随时温酒。做工的人,傍午傍晚散了工,每每花四文铜钱,买一碗酒,——这是二十多年前的事,现在每碗要涨到十文,——靠柜外站着,热热的喝了休息;倘肯多花一文,便可以买一碟盐煮笋,或者茴香豆,做下酒物了,如果出到十几文,那就能买一样荤菜,但这些顾客,多是短衣帮,大抵没有这样阔绰。只有穿长衫的,才踱进店面隔壁的房子里,要酒要菜,慢慢地坐喝。 2 | 3 | 我从十二岁起,便在镇口的咸亨酒店里当伙计,掌柜说,我样子太傻,怕侍候不了长衫主顾,就在外面做点事罢。外面的短衣主顾,虽然容易说话,但唠唠叨叨缠夹不清的也很不少。他们往往要亲眼看着黄酒从坛子里舀出,看过壶子底里有水没有,又亲看将壶子放在热水里,然后放心:在这严重监督下,羼水也很为难。所以过了几天,掌柜又说我干不了这事。幸亏荐头的情面大,辞退不得,便改为专管温酒的一种无聊职务了。 4 | 5 | 我从此便整天的站在柜台里,专管我的职务。虽然没有什么失职,但总觉得有些单调,有些无聊。掌柜是一副凶脸孔,主顾也没有好声气,教人活泼不得;只有孔乙己到店,才可以笑几声,所以至今还记得。 6 | 7 | 孔乙己是站着喝酒而穿长衫的唯一的人。他身材很高大;青白脸色,皱纹间时常夹些伤痕;一部乱蓬蓬的花白的胡子。穿的虽然是长衫,可是又脏又破,似乎十多年没有补,也没有洗。他对人说话,总是满口之乎者也,叫人半懂不懂的。因为他姓孔,别人便从描红纸上的“上大人孔乙己”这半懂不懂的话里,替他取下一个绰号,叫作孔乙己。孔乙己一到店,所有喝酒的人便都看着他笑,有的叫道,“孔乙己,你脸上又添上新伤疤了!”他不回答,对柜里说,“温两碗酒,要一碟茴香豆。”便排出九文大钱。他们又故意的高声嚷道,“你一定又偷了人家的东西了!”孔乙己睁大眼睛说,“你怎么这样凭空污人清白……”“什么清白?我前天亲眼见你偷了何家的书,吊着打。”孔乙己便涨红了脸,额上的青筋条条绽出,争辩道,“窃书不能算偷……窃书!……读书人的事,能算偷么?”接连便是难懂的话,什么“君子固穷”,什么“者乎”之类,引得众人都哄笑起来:店内外充满了快活的空气。 8 | 9 | 听人家背地里谈论,孔乙己原来也读过书,但终于没有进学,又不会营生;于是愈过愈穷,弄到将要讨饭了。幸而写得一笔好字,便替人家抄抄书,换一碗饭吃。可惜他又有一样坏脾气,便是好喝懒做。坐不到几天,便连人和书籍纸张笔砚,一齐失踪。如是几次,叫他抄书的人也没有了。孔乙己没有法,便免不了偶然做些偷窃的事。但他在我们店里,品行却比别人都好,就是从不拖欠;虽然间或没有现钱,暂时记在粉板上,但不出一月,定然还清,从粉板上拭去了孔乙己的名字。 10 | 11 | 孔乙己喝过半碗酒,涨红的脸色渐渐复了原,旁人便又问道,“孔乙己,你当真认识字么?”孔乙己看着问他的人,显出不屑置辩的神气。他们便接着说道,“你怎的连半个秀才也捞不到呢?”孔乙己立刻显出颓唐不安模样,脸上笼上了一层灰色,嘴里说些话;这回可是全是之乎者也之类,一些不懂了。在这时候,众人也都哄笑起来:店内外充满了快活的空气。 12 | 13 | “多乎哉?不多也。” 14 | 15 | 有几回,邻居孩子听得笑声,也赶热闹,围住了孔乙己。他便给他们一人一颗。孩子吃完豆,仍然不散,眼睛都望着碟子。孔乙己着了慌,伸开五指将碟子罩住,弯腰下去说道,“不多了,我已经不多了。”直起身又看一看豆,自己摇头说,“不多不多!多乎哉?不多也。”于是这一群孩子都在笑声里走散了。 16 | 17 | 孔乙己是这样的使人快活,可是没有他,别人也便这么过。 18 | 19 | 有一天,大约是中秋前的两三天,掌柜正在慢慢的结账,取下粉板,忽然说,“孔乙己长久没有来了。还欠十九个钱呢!”我才也觉得他的确长久没有来了。一个喝酒的人说道,“他怎么会来?……他打折了腿了。”掌柜说,“哦!”“他总仍旧是偷。这一回,是自己发昏,竟偷到丁举人家里去了。他家的东西,偷得的吗?”“后来怎么样?”“怎么样?先写服辩,后来是打,打了大半夜,再打折了腿。”“后来呢?”“后来打折了腿了。”“打折了怎样呢?”“怎样?……谁晓得?许是死了。”掌柜也不再问,仍然慢慢的算他的账。 20 | 21 | 
中秋过后,秋风是一天凉比一天,看看将近初冬;我整天的靠着火,也须穿上棉袄了。一天的下半天,没有一个顾客,我正合了眼坐着。忽然间听得一个声音,“温一碗酒。”这声音虽然极低,却很耳熟。看时又全没有人。站起来向外一望,那孔乙己便在柜台下对了门槛坐着。他脸上黑而且瘦,已经不成样子;穿一件破夹袄,盘着两腿,下面垫一个蒲包,用草绳在肩上挂住;见了我,又说道,“温一碗酒。”掌柜也伸出头去,一面说,“孔乙己么?你还欠十九个钱呢!”孔乙己很颓唐的仰面答道,“这……下回还清罢。这一回是现钱,酒要好。”掌柜仍然同平常一样,笑着对他说,“孔乙己,你又偷了东西了!”但他这回却不十分分辩,单说了一句“不要取笑!”“取笑?要是不偷,怎么会打断腿?”孔乙己低声说道,“跌断,跌,跌……”他的眼色,很像恳求掌柜,不要再提。此时已经聚集了几个人,便和掌柜都笑了。我温了酒,端出去,放在门槛上。他从破衣袋里摸出四文大钱,放在我手里,见他满手是泥,原来他便用这手走来的。不一会,他喝完酒,便又在旁人的说笑声中,坐着用这手慢慢走去了。 22 | 23 | 自此以后,又长久没有看见孔乙己。到了年关,掌柜取下粉板说,“孔乙己还欠十九个钱呢!”到第二年的端午,又说“孔乙己还欠十九个钱呢!”到中秋可是没有说,再到年关也没有看见他。 24 | 25 | 我到现在终于没有见——大约孔乙己的确死了。 -------------------------------------------------------------------------------- /db/kongyiji/docstore.json: -------------------------------------------------------------------------------- 1 | [[["7c6355d8-496d-4b26-8230-753bdf6f813b",{"pageContent":"鲁镇的酒店的格局,是和别处不同的:都是当街一个曲尺形的大柜台,柜里面预备着热水,可以随时温酒。做工的人,傍午傍晚散了工,每每花四文铜钱,买一碗酒,——这是二十多年前的事,现在每碗要涨到十文,——靠柜外","metadata":{"source":"../data/kong.txt","loc":{"lines":{"from":1,"to":1}}}}],["73611811-4339-44cd-85fc-07965644c7e0",{"pageContent":"年前的事,现在每碗要涨到十文,——靠柜外站着,热热的喝了休息;倘肯多花一文,便可以买一碟盐煮笋,或者茴香豆,做下酒物了,如果出到十几文,那就能买一样荤菜,但这些顾客,多是短衣帮,大抵没有这样阔绰。只有","metadata":{"source":"../data/kong.txt","loc":{"lines":{"from":1,"to":1}}}}],["f1cf3311-b5bf-4340-a58a-276b0dd58e4a",{"pageContent":"顾客,多是短衣帮,大抵没有这样阔绰。只有穿长衫的,才踱进店面隔壁的房子里,要酒要菜,慢慢地坐喝。","metadata":{"source":"../data/kong.txt","loc":{"lines":{"from":1,"to":1}}}}],["25425e44-05fd-4077-88d6-5230d15a4a9b",{"pageContent":"我从十二岁起,便在镇口的咸亨酒店里当伙计,掌柜说,我样子太傻,怕侍候不了长衫主顾,就在外面做点事罢。外面的短衣主顾,虽然容易说话,但唠唠叨叨缠夹不清的也很不少。他们往往要亲眼看着黄酒从坛子里舀出,看","metadata":{"source":"../data/kong.txt","loc":{"lines":{"from":3,"to":3}}}}],["8fae72e0-b236-4a48-a1a9-26de87f97328",{"pageContent":"。他们往往要亲眼看着黄酒从坛子里舀出,看过壶子底里有水没有,又亲看将壶子放在热水里,然后放心:在这严重监督下,羼水也很为难。所以过了几天,掌柜又说我干不了这事。幸亏荐头的情面大,辞退不得,便改为专管温","metadata":{"source":"../data/kong.txt","loc":{"lines":{"from":3,"to":3}}}}],["8daea425-33aa-4f79-88c8-55b2167027de",{"pageContent":"幸亏荐头的情面大,辞退不得,便改为专管温酒的一种无聊职务了。","metadata":{"source":"../data/kong.txt","loc":{"lines":{"from":3,"to":3}}}}],["0506b6ce-1f78-4913-944c-bd05c2727c55",{"pageContent":"我从此便整天的站在柜台里,专管我的职务。虽然没有什么失职,但总觉得有些单调,有些无聊。掌柜是一副凶脸孔,主顾也没有好声气,教人活泼不得;只有孔乙己到店,才可以笑几声,所以至今还记得。","metadata":{"source":"../data/kong.txt","loc":{"lines":{"from":5,"to":5}}}}],["bf96613c-47dc-4ff4-a141-4042f7ed9c97",{"pageContent":"孔乙己是站着喝酒而穿长衫的唯一的人。他身材很高大;青白脸色,皱纹间时常夹些伤痕;一部乱蓬蓬的花白的胡子。穿的虽然是长衫,可是又脏又破,似乎十多年没有补,也没有洗。他对人说话,总是满口之乎者也,叫人半","metadata":{"source":"../data/kong.txt","loc":{"lines":{"from":7,"to":7}}}}],["460f1143-9df1-4bba-9df6-b7eba5e7a4f8",{"pageContent":"洗。他对人说话,总是满口之乎者也,叫人半懂不懂的。因为他姓孔,别人便从描红纸上的“上大人孔乙己”这半懂不懂的话里,替他取下一个绰号,叫作孔乙己。孔乙己一到店,所有喝酒的人便都看着他笑,有的叫道,“孔乙","metadata":{"source":"../data/kong.txt","loc":{"lines":{"from":7,"to":7}}}}],["b7146b56-8da4-4dc8-b0cc-19087c951f48",{"pageContent":"有喝酒的人便都看着他笑,有的叫道,“孔乙己,你脸上又添上新伤疤了!”他不回答,对柜里说,“温两碗酒,要一碟茴香豆。”便排出九文大钱。他们又故意的高声嚷道,“你一定又偷了人家的东西了!”孔乙己睁大眼睛说","metadata":{"source":"../data/kong.txt","loc":{"lines":{"from":7,"to":7}}}}],["94ef7191-3766-4c6c-93b7-9eacdf55b0d2",{"pageContent":"定又偷了人家的东西了!”孔乙己睁大眼睛说,“你怎么这样凭空污人清白……”“什么清白?我前天亲眼见你偷了何家的书,吊着打。”孔乙己便涨红了脸,额上的青筋条条绽出,争辩道,“窃书不能算偷……窃书!……读书","metadata":{"source":"../data/kong.txt","loc":{"lines":{"from":7,"to":7}}}}],["7fb4598f-d218-4542-97a4-e9d919520a85",{"pageContent":"争辩道,“窃书不能算偷……窃书!……读书人的事,能算偷么?”接连便是难懂的话,什么“君子固穷”,什么“者乎”之类,引得众人都哄笑起来:店内外充满了快活的空气。","metadata":{"source":"../data/kong.txt","loc":{"lines":{"from":7,"to":7}}}}],["eff15b64-890a-45c1-bbbb-0e59fe9685b5",{"pageContent":"听人家背地里谈论,孔乙己原来也读过书,但终于没有进学,又不会营生;于是愈过愈穷,弄到将要讨饭了。幸而写得一笔好字,便
替人家抄抄书,换一碗饭吃。可惜他又有一样坏脾气,便是好喝懒做。坐不到几天,便连人和","metadata":{"source":"../data/kong.txt","loc":{"lines":{"from":9,"to":9}}}}],["11aef83a-1b75-4fd6-bcc8-4a3cf692a336",{"pageContent":"脾气,便是好喝懒做。坐不到几天,便连人和书籍纸张笔砚,一齐失踪。如是几次,叫他抄书的人也没有了。孔乙己没有法,便免不了偶然做些偷窃的事。但他在我们店里,品行却比别人都好,就是从不拖欠;虽然间或没有现钱","metadata":{"source":"../data/kong.txt","loc":{"lines":{"from":9,"to":9}}}}],["25e3a3e2-b4b0-4bbc-9c6b-1a9e1a5a6168",{"pageContent":"别人都好,就是从不拖欠;虽然间或没有现钱,暂时记在粉板上,但不出一月,定然还清,从粉板上拭去了孔乙己的名字。","metadata":{"source":"../data/kong.txt","loc":{"lines":{"from":9,"to":9}}}}],["be5ac9a6-f9c2-48e2-86d0-72cd205b09f2",{"pageContent":"孔乙己喝过半碗酒,涨红的脸色渐渐复了原,旁人便又问道,“孔乙己,你当真认识字么?”孔乙己看着问他的人,显出不屑置辩的神气。他们便接着说道,“你怎的连半个秀才也捞不到呢?”孔乙己立刻显出颓唐不安模样,","metadata":{"source":"../data/kong.txt","loc":{"lines":{"from":11,"to":11}}}}],["2a13135b-ed23-4a7e-8d9d-8a2ef2005f97",{"pageContent":"捞不到呢?”孔乙己立刻显出颓唐不安模样,脸上笼上了一层灰色,嘴里说些话;这回可是全是之乎者也之类,一些不懂了。在这时候,众人也都哄笑起来:店内外充满了快活的空气。","metadata":{"source":"../data/kong.txt","loc":{"lines":{"from":11,"to":11}}}}],["a43161fb-9204-4f3f-b41f-2fd3befec304",{"pageContent":"“多乎哉?不多也。”","metadata":{"source":"../data/kong.txt","loc":{"lines":{"from":13,"to":13}}}}],["1526f8bd-685e-406f-8933-ac7983d2b463",{"pageContent":"有几回,邻居孩子听得笑声,也赶热闹,围住了孔乙己。他便给他们一人一颗。孩子吃完豆,仍然不散,眼睛都望着碟子。孔乙己着了慌,伸开五指将碟子罩住,弯腰下去说道,“不多了,我已经不多了。”直起身又看一看豆","metadata":{"source":"../data/kong.txt","loc":{"lines":{"from":15,"to":15}}}}],["d95e5457-65d1-4e50-a4f6-cc2c25bd68f3",{"pageContent":"不多了,我已经不多了。”直起身又看一看豆,自己摇头说,“不多不多!多乎哉?不多也。”于是这一群孩子都在笑声里走散了。","metadata":{"source":"../data/kong.txt","loc":{"lines":{"from":15,"to":15}}}}],["10a1c13d-574b-4923-93f2-f95d726e94df",{"pageContent":"孔乙己是这样的使人快活,可是没有他,别人也便这么过。","metadata":{"source":"../data/kong.txt","loc":{"lines":{"from":17,"to":17}}}}],["97e6e2d9-bad2-4cbc-bdb9-12cb46b07f0d",{"pageContent":"有一天,大约是中秋前的两三天,掌柜正在慢慢的结账,取下粉板,忽然说,“孔乙己长久没有来了。还欠十九个钱呢!”我才也觉得他的确长久没有来了。一个喝酒的人说道,“他怎么会来?……他打折了腿了。”掌柜说,","metadata":{"source":"../data/kong.txt","loc":{"lines":{"from":19,"to":19}}}}],["b30eb821-87c9-4606-bedf-b580157eab6b",{"pageContent":"他怎么会来?……他打折了腿了。”掌柜说,“哦!”“他总仍旧是偷。这一回,是自己发昏,竟偷到丁举人家里去了。他家的东西,偷得的吗?”“后来怎么样?”“怎么样?先写服辩,后来是打,打了大半夜,再打折了腿。","metadata":{"source":"../data/kong.txt","loc":{"lines":{"from":19,"to":19}}}}],["cb3b888a-3071-4828-8af9-a8a8e90ec7c5",{"pageContent":"服辩,后来是打,打了大半夜,再打折了腿。”“后来呢?”“后来打折了腿了。”“打折了怎样呢?”“怎样?……谁晓得?许是死了。”掌柜也不再问,仍然慢慢的算他的账。","metadata":{"source":"../data/kong.txt","loc":{"lines":{"from":19,"to":19}}}}],["99ec6a28-089b-432c-84df-2f1901a13a08",{"pageContent":"中秋过后,秋风是一天凉比一天,看看将近初冬;我整天的靠着火,也须穿上棉袄了。一天的下半天,没有一个顾客,我正合了眼坐着。忽然间听得一个声音,“温一碗酒。”这声音虽然极低,却很耳熟。看时又全没有人。站","metadata":{"source":"../data/kong.txt","loc":{"lines":{"from":21,"to":21}}}}],["40c1a642-f2d6-4a0a-8859-8f341bffc5b3",{"pageContent":"音虽然极低,却很耳熟。看时又全没有人。站起来向外一望,那孔乙己便在柜台下对了门槛坐着。他脸上黑而且瘦,已经不成样子;穿一件破夹袄,盘着两腿,下面垫一个蒲包,用草绳在肩上挂住;见了我,又说道,“温一碗酒","metadata":{"source":"../data/kong.txt","loc":{"lines":{"from":21,"to":21}}}}],["f751f461-c09b-4766-9ad0-b02fb8d2524a",{"pageContent":"绳在肩上挂住;见了我,又说道,“温一碗酒。”掌柜也伸出头去,一面说,“孔乙己么?你还欠十九个钱呢!”孔乙己很颓唐的仰面答道,“这……下回还清罢。这一回是现钱,酒要好。”掌柜仍然同平常一样,笑着对他说,","metadata":{"source":"../data/kong.txt","loc":{"lines":{"from":21,"to":21}}}}],["eca71fc0-80aa-4ac7-823a-477f45aa83bd",{"pageContent":"要好。”掌柜仍然同平常一样,笑着对他说,“孔乙己,你又偷了东西了!”但他这回却不十分分辩,单说了一句“不要取笑!”“取笑?要是不偷,怎么会打断腿?”孔乙己低声说道,“跌断,跌,跌……”他的眼色,很像恳","metadata":{"source":"../data/kong.txt","loc":{"lines":{"from":21,"to":21}}}}],["687a89f6-e84b-4a12-8895-ab1d8ec96afc",{"pageContent":"道,“跌断,跌,跌……”他的眼色,很像恳求掌柜,不要再提。此时已经聚集了几个人,便和掌柜都笑了。我温了酒,端出去,放在门槛上。他从破衣袋里摸出四文大钱,放在我手里,见他满手是泥,原来他便用这手走来的。","metadata":{"source":"
../data/kong.txt","loc":{"lines":{"from":21,"to":21}}}}],["7c7cc392-8a07-4d05-9f90-af1bdc8a601c",{"pageContent":"里,见他满手是泥,原来他便用这手走来的。不一会,他喝完酒,便又在旁人的说笑声中,坐着用这手慢慢走去了。","metadata":{"source":"../data/kong.txt","loc":{"lines":{"from":21,"to":21}}}}],["14e7a808-25dc-4227-92cd-9977ce4a933f",{"pageContent":"自此以后,又长久没有看见孔乙己。到了年关,掌柜取下粉板说,“孔乙己还欠十九个钱呢!”到第二年的端午,又说“孔乙己还欠十九个钱呢!”到中秋可是没有说,再到年关也没有看见他。","metadata":{"source":"../data/kong.txt","loc":{"lines":{"from":23,"to":23}}}}],["42144b86-fa11-4b80-b7e3-e259eb65b033",{"pageContent":"我到现在终于没有见——大约孔乙己的确死了。","metadata":{"source":"../data/kong.txt","loc":{"lines":{"from":25,"to":25}}}}]],{"0":"7c6355d8-496d-4b26-8230-753bdf6f813b","1":"73611811-4339-44cd-85fc-07965644c7e0","2":"f1cf3311-b5bf-4340-a58a-276b0dd58e4a","3":"25425e44-05fd-4077-88d6-5230d15a4a9b","4":"8fae72e0-b236-4a48-a1a9-26de87f97328","5":"8daea425-33aa-4f79-88c8-55b2167027de","6":"0506b6ce-1f78-4913-944c-bd05c2727c55","7":"bf96613c-47dc-4ff4-a141-4042f7ed9c97","8":"460f1143-9df1-4bba-9df6-b7eba5e7a4f8","9":"b7146b56-8da4-4dc8-b0cc-19087c951f48","10":"94ef7191-3766-4c6c-93b7-9eacdf55b0d2","11":"7fb4598f-d218-4542-97a4-e9d919520a85","12":"eff15b64-890a-45c1-bbbb-0e59fe9685b5","13":"11aef83a-1b75-4fd6-bcc8-4a3cf692a336","14":"25e3a3e2-b4b0-4bbc-9c6b-1a9e1a5a6168","15":"be5ac9a6-f9c2-48e2-86d0-72cd205b09f2","16":"2a13135b-ed23-4a7e-8d9d-8a2ef2005f97","17":"a43161fb-9204-4f3f-b41f-2fd3befec304","18":"1526f8bd-685e-406f-8933-ac7983d2b463","19":"d95e5457-65d1-4e50-a4f6-cc2c25bd68f3","20":"10a1c13d-574b-4923-93f2-f95d726e94df","21":"97e6e2d9-bad2-4cbc-bdb9-12cb46b07f0d","22":"b30eb821-87c9-4606-bedf-b580157eab6b","23":"cb3b888a-3071-4828-8af9-a8a8e90ec7c5","24":"99ec6a28-089b-432c-84df-2f1901a13a08","25":"40c1a642-f2d6-4a0a-8859-8f341bffc5b3","26":"f751f461-c09b-4766-9ad0-b02fb8d2524a","27":"eca71fc0-80aa-4ac7-823a-477f45aa83bd","28":"687a89f6-e84b-4a12-8895-ab1d8ec96afc","29":"7c7cc392-8a07-4d05-9f90-af1bdc8a601c","30":"14e7a808-25dc-4227-92cd-9977ce4a933f","31":"42144b86-fa11-4b80-b7e3-e259eb65b033"}] -------------------------------------------------------------------------------- /db/kongyiji/faiss.index: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/RealKai42/langchainjs-juejin/49b8764d37f76e02861bc3d8b01508fe4805ca64/db/kongyiji/faiss.index -------------------------------------------------------------------------------- /db/qiu/faiss.index: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/RealKai42/langchainjs-juejin/49b8764d37f76e02861bc3d8b01508fe4805ca64/db/qiu/faiss.index -------------------------------------------------------------------------------- /deno.json: -------------------------------------------------------------------------------- 1 | { 2 | "imports": { 3 | "lodash": "npm:/lodash@4.17.21", 4 | "dotenv": "https://deno.land/std@0.220.0/dotenv/mod.ts", 5 | "langchain": "npm:/langchain@0.1.29", 6 | "langchain/": "npm:/langchain@0.1.29/", 7 | "@langchain/openai": "npm:/@langchain/openai@0.0.22", 8 | "@langchain/openai/": "npm:/@langchain/openai@0.0.22/", 9 | "@langchain/core": "npm:/@langchain/core@0.1.48", 10 | "@langchain/core/": "npm:/@langchain/core@0.1.48/", 11 | "zod": "npm:/zod", 12 | "pdf-parse": "npm:/pdf-parse/lib/pdf-parse.js", 13 | "ignore": "npm:/ignore", 14 | "cheerio": "npm:/cheerio", 15 | "vectordb": "npm:/vectordb", 16 | "@lancedb/lancedb": "npm:/@lancedb/lancedb", 17 | "@langchain/community/": "npm:/@langchain/community/", 18 | 
"faiss-node": "npm:/faiss-node", 19 | "openai": "npm:/openai", 20 | "zod-to-json-schema": "npm:/zod-to-json-schema" 21 | }, 22 | "deno.enable": true 23 | } -------------------------------------------------------------------------------- /deno.lock: -------------------------------------------------------------------------------- 1 | { 2 | "version": "3", 3 | "packages": { 4 | "specifiers": { 5 | "npm:@langchain/community": "npm:@langchain/community@0.0.42", 6 | "npm:@langchain/core@0.1.48": "npm:@langchain/core@0.1.48_zod@3.22.4", 7 | "npm:@langchain/openai@0.0.22": "npm:@langchain/openai@0.0.22_zod@3.22.4", 8 | "npm:faiss-node": "npm:faiss-node@0.5.1", 9 | "npm:ignore": "npm:ignore@5.3.1", 10 | "npm:langchain@0.1.29": "npm:langchain@0.1.29_ignore@5.3.1_zod@3.22.4", 11 | "npm:vectordb": "npm:vectordb@0.4.13_@apache-arrow+ts@14.0.2_apache-arrow@14.0.2" 12 | }, 13 | "npm": { 14 | "@75lb/deep-merge@1.1.1": { 15 | "integrity": "sha512-xvgv6pkMGBA6GwdyJbNAnDmfAIR/DfWhrj9jgWh3TY7gRm3KO46x/GPjRg6wJ0nOepwqrNxFfojebh0Df4h4Tw==", 16 | "dependencies": { 17 | "lodash.assignwith": "lodash.assignwith@4.2.0", 18 | "typical": "typical@7.1.1" 19 | } 20 | }, 21 | "@anthropic-ai/sdk@0.9.1": { 22 | "integrity": "sha512-wa1meQ2WSfoY8Uor3EdrJq0jTiZJoKoSii2ZVWRY1oN4Tlr5s59pADg9T79FTbPe1/se5c3pBeZgJL63wmuoBA==", 23 | "dependencies": { 24 | "@types/node": "@types/node@18.19.26", 25 | "@types/node-fetch": "@types/node-fetch@2.6.11", 26 | "abort-controller": "abort-controller@3.0.0", 27 | "agentkeepalive": "agentkeepalive@4.5.0", 28 | "digest-fetch": "digest-fetch@1.3.0", 29 | "form-data-encoder": "form-data-encoder@1.7.2", 30 | "formdata-node": "formdata-node@4.4.1", 31 | "node-fetch": "node-fetch@2.7.0", 32 | "web-streams-polyfill": "web-streams-polyfill@3.3.3" 33 | } 34 | }, 35 | "@apache-arrow/ts@14.0.2": { 36 | "integrity": "sha512-CtwAvLkK0CZv7xsYeCo91ml6PvlfzAmAJZkRYuz2GNBwfYufj5SVi0iuSMwIMkcU/szVwvLdzORSLa5PlF/2ug==", 37 | "dependencies": { 38 | "@types/command-line-args": "@types/command-line-args@5.2.0", 39 | "@types/command-line-usage": "@types/command-line-usage@5.0.2", 40 | "@types/node": "@types/node@20.3.0", 41 | "@types/pad-left": "@types/pad-left@2.1.1", 42 | "command-line-args": "command-line-args@5.2.1", 43 | "command-line-usage": "command-line-usage@7.0.1", 44 | "flatbuffers": "flatbuffers@23.5.26", 45 | "json-bignum": "json-bignum@0.0.3", 46 | "pad-left": "pad-left@2.1.0", 47 | "tslib": "tslib@2.6.2" 48 | } 49 | }, 50 | "@lancedb/vectordb-darwin-arm64@0.4.13": { 51 | "integrity": "sha512-JfroNCG8yKIU931Y+x8d0Fp8C9DHUSC5j+CjI+e5err7rTWtie4j3JbsXlWAnPFaFEOg0Xk3BWkSikCvhPGJGg==", 52 | "dependencies": {} 53 | }, 54 | "@lancedb/vectordb-darwin-x64@0.4.13": { 55 | "integrity": "sha512-dG6IMvfpHpnHdbJ0UffzJ7cZfMiC02MjIi6YJzgx+hKz2UNXWNBIfTvvhqli85mZsGRXL1OYDdYv0K1YzNjXlA==", 56 | "dependencies": {} 57 | }, 58 | "@lancedb/vectordb-linux-arm64-gnu@0.4.13": { 59 | "integrity": "sha512-BRR1VzaMviXby7qmLm0axNZM8eUZF3ZqfvnDKdVRpC3LaRueD6pMXHuC2IUKaFkn7xktf+8BlDZb6foFNEj8bQ==", 60 | "dependencies": {} 61 | }, 62 | "@lancedb/vectordb-linux-x64-gnu@0.4.13": { 63 | "integrity": "sha512-WnekZ7ZMlria+NODZ6aBCljCFQSe2bBNUS9ZpyFl/Y1vHduSQPuBxM6V7vp2QubC0daq/rifgjDob89DF+x3xw==", 64 | "dependencies": {} 65 | }, 66 | "@lancedb/vectordb-win32-x64-msvc@0.4.13": { 67 | "integrity": "sha512-3NDpMWBL2ksDHXAraXhowiLqQcNWM5bdbeHwze4+InYMD54hyQ2ODNc+4usxp63Nya9biVnFS27yXULqkzIEqQ==", 68 | "dependencies": {} 69 | }, 70 | "@langchain/community@0.0.42": { 71 | "integrity": 
"sha512-Xp3H0w23X9viHRpb3GUiFl+qjB68h/V7ztPOxR1AtaKlmXkjrSwiLqhMjlVsJ9jyPaKIr5ZOV07gYAXJUz/P8Q==", 72 | "dependencies": { 73 | "@langchain/core": "@langchain/core@0.1.48_zod@3.22.4", 74 | "@langchain/openai": "@langchain/openai@0.0.23_zod@3.22.4", 75 | "expr-eval": "expr-eval@2.0.2", 76 | "flat": "flat@5.0.2", 77 | "langsmith": "langsmith@0.1.13", 78 | "uuid": "uuid@9.0.1", 79 | "zod": "zod@3.22.4" 80 | } 81 | }, 82 | "@langchain/core@0.1.48_zod@3.22.4": { 83 | "integrity": "sha512-kGggyDbaYzCIPGkzvMvm/v0+lcTy1jlX6QZ7PSzUQFYJg5JK399x3AOYIDkbbUVxBAyHRgrWlxVQXH0FW3N6Bg==", 84 | "dependencies": { 85 | "ansi-styles": "ansi-styles@5.2.0", 86 | "camelcase": "camelcase@6.3.0", 87 | "decamelize": "decamelize@1.2.0", 88 | "js-tiktoken": "js-tiktoken@1.0.10", 89 | "langsmith": "langsmith@0.1.13", 90 | "ml-distance": "ml-distance@4.0.1", 91 | "p-queue": "p-queue@6.6.2", 92 | "p-retry": "p-retry@4.6.2", 93 | "uuid": "uuid@9.0.1", 94 | "zod": "zod@3.22.4", 95 | "zod-to-json-schema": "zod-to-json-schema@3.22.4_zod@3.22.4" 96 | } 97 | }, 98 | "@langchain/openai@0.0.22_zod@3.22.4": { 99 | "integrity": "sha512-mkXRSeedwCzPnfvp1kvaGBXyVWTB86n77pUyw5bC9ka43/cPBHZdHlSnTZMuJi3H4RnV4CMxunaOlm+s7NWfxQ==", 100 | "dependencies": { 101 | "@langchain/core": "@langchain/core@0.1.48_zod@3.22.4", 102 | "js-tiktoken": "js-tiktoken@1.0.10", 103 | "openai": "openai@4.29.2", 104 | "zod": "zod@3.22.4", 105 | "zod-to-json-schema": "zod-to-json-schema@3.22.4_zod@3.22.4" 106 | } 107 | }, 108 | "@langchain/openai@0.0.23_zod@3.22.4": { 109 | "integrity": "sha512-H5yv2hKQ5JVa6jC1wQxiN2299lJbPc5JUv93c6IUw+0jr0kFqH48NWbcythz1UFj2rOpZdaFJSYJs2nr9bhVLg==", 110 | "dependencies": { 111 | "@langchain/core": "@langchain/core@0.1.48_zod@3.22.4", 112 | "js-tiktoken": "js-tiktoken@1.0.10", 113 | "openai": "openai@4.29.2", 114 | "zod": "zod@3.22.4", 115 | "zod-to-json-schema": "zod-to-json-schema@3.22.4_zod@3.22.4" 116 | } 117 | }, 118 | "@neon-rs/load@0.0.74": { 119 | "integrity": "sha512-/cPZD907UNz55yrc/ud4wDgQKtU1TvkD9jeqZWG6J4IMmZkp6zgjkQcKA8UvpkZlcpPHvc8J17sGzLFbP/LUYg==", 120 | "dependencies": {} 121 | }, 122 | "@types/command-line-args@5.2.0": { 123 | "integrity": "sha512-UuKzKpJJ/Ief6ufIaIzr3A/0XnluX7RvFgwkV89Yzvm77wCh1kFaFmqN8XEnGcN62EuHdedQjEMb8mYxFLGPyA==", 124 | "dependencies": {} 125 | }, 126 | "@types/command-line-usage@5.0.2": { 127 | "integrity": "sha512-n7RlEEJ+4x4TS7ZQddTmNSxP+zziEG0TNsMfiRIxcIVXt71ENJ9ojeXmGO3wPoTdn7pJcU2xc3CJYMktNT6DPg==", 128 | "dependencies": {} 129 | }, 130 | "@types/node-fetch@2.6.11": { 131 | "integrity": "sha512-24xFj9R5+rfQJLRyM56qh+wnVSYhyXC2tkoBndtY0U+vubqNsYXGjufB2nn8Q6gt0LrARwL6UBtMCSVCwl4B1g==", 132 | "dependencies": { 133 | "@types/node": "@types/node@18.16.19", 134 | "form-data": "form-data@4.0.0" 135 | } 136 | }, 137 | "@types/node@18.16.19": { 138 | "integrity": "sha512-IXl7o+R9iti9eBW4Wg2hx1xQDig183jj7YLn8F7udNceyfkbn1ZxmzZXuak20gR40D7pIkIY1kYGx5VIGbaHKA==", 139 | "dependencies": {} 140 | }, 141 | "@types/node@18.19.26": { 142 | "integrity": "sha512-+wiMJsIwLOYCvUqSdKTrfkS8mpTp+MPINe6+Np4TAGFWWRWiBQ5kSq9nZGCSPkzx9mvT+uEukzpX4MOSCydcvw==", 143 | "dependencies": { 144 | "undici-types": "undici-types@5.26.5" 145 | } 146 | }, 147 | "@types/node@20.3.0": { 148 | "integrity": "sha512-cumHmIAf6On83X7yP+LrsEyUOf/YlociZelmpRYaGFydoaPdxdt80MAbu6vWerQT2COCp2nPvHdsbD7tHn/YlQ==", 149 | "dependencies": {} 150 | }, 151 | "@types/pad-left@2.1.1": { 152 | "integrity": "sha512-Xd22WCRBydkGSApl5Bw0PhAOHKSVjNL3E3AwzKaps96IMraPqy5BvZIsBVK6JLwdybUzjHnuWVwpDd0JjTfHXA==", 153 | "dependencies": {} 154 | }, 155 | 
"@types/retry@0.12.0": { 156 | "integrity": "sha512-wWKOClTTiizcZhXnPY4wikVAwmdYHp8q6DmC+EJUzAMsycb7HB32Kh9RN4+0gExjmPmZSAQjgURXIGATPegAvA==", 157 | "dependencies": {} 158 | }, 159 | "@types/uuid@9.0.8": { 160 | "integrity": "sha512-jg+97EGIcY9AGHJJRaaPVgetKDsrTgbRjQ5Msgjh/DQKEFl0DtyRr/VCOyD1T2R1MNeWPK/u7JoGhlDZnKBAfA==", 161 | "dependencies": {} 162 | }, 163 | "abort-controller@3.0.0": { 164 | "integrity": "sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg==", 165 | "dependencies": { 166 | "event-target-shim": "event-target-shim@5.0.1" 167 | } 168 | }, 169 | "agentkeepalive@4.5.0": { 170 | "integrity": "sha512-5GG/5IbQQpC9FpkRGsSvZI5QYeSCzlJHdpBQntCsuTOxhKD8lqKhrleg2Yi7yvMIf82Ycmmqln9U8V9qwEiJew==", 171 | "dependencies": { 172 | "humanize-ms": "humanize-ms@1.2.1" 173 | } 174 | }, 175 | "ansi-styles@4.3.0": { 176 | "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", 177 | "dependencies": { 178 | "color-convert": "color-convert@2.0.1" 179 | } 180 | }, 181 | "ansi-styles@5.2.0": { 182 | "integrity": "sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA==", 183 | "dependencies": {} 184 | }, 185 | "apache-arrow@14.0.2": { 186 | "integrity": "sha512-EBO2xJN36/XoY81nhLcwCJgFwkboDZeyNQ+OPsG7bCoQjc2BT0aTyH/MR6SrL+LirSNz+cYqjGRlupMMlP1aEg==", 187 | "dependencies": { 188 | "@types/command-line-args": "@types/command-line-args@5.2.0", 189 | "@types/command-line-usage": "@types/command-line-usage@5.0.2", 190 | "@types/node": "@types/node@20.3.0", 191 | "@types/pad-left": "@types/pad-left@2.1.1", 192 | "command-line-args": "command-line-args@5.2.1", 193 | "command-line-usage": "command-line-usage@7.0.1", 194 | "flatbuffers": "flatbuffers@23.5.26", 195 | "json-bignum": "json-bignum@0.0.3", 196 | "pad-left": "pad-left@2.1.0", 197 | "tslib": "tslib@2.6.2" 198 | } 199 | }, 200 | "argparse@2.0.1": { 201 | "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", 202 | "dependencies": {} 203 | }, 204 | "array-back@3.1.0": { 205 | "integrity": "sha512-TkuxA4UCOvxuDK6NZYXCalszEzj+TLszyASooky+i742l9TqsOdYCMJJupxRic61hwquNtppB3hgcuq9SVSH1Q==", 206 | "dependencies": {} 207 | }, 208 | "array-back@6.2.2": { 209 | "integrity": "sha512-gUAZ7HPyb4SJczXAMUXMGAvI976JoK3qEx9v1FTmeYuJj0IBiaKttG1ydtGKdkfqWkIkouke7nG8ufGy77+Cvw==", 210 | "dependencies": {} 211 | }, 212 | "asynckit@0.4.0": { 213 | "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==", 214 | "dependencies": {} 215 | }, 216 | "axios@1.6.8": { 217 | "integrity": "sha512-v/ZHtJDU39mDpyBoFVkETcd/uNdxrWRrg3bKpOKzXFA6Bvqopts6ALSMU3y6ijYxbw2B+wPrIv46egTzJXCLGQ==", 218 | "dependencies": { 219 | "follow-redirects": "follow-redirects@1.15.6", 220 | "form-data": "form-data@4.0.0", 221 | "proxy-from-env": "proxy-from-env@1.1.0" 222 | } 223 | }, 224 | "base-64@0.1.0": { 225 | "integrity": "sha512-Y5gU45svrR5tI2Vt/X9GPd3L0HNIKzGu202EjxrXMpuc2V2CiKgemAbUUsqYmZJvPtCXoUKjNZwBJzsNScUbXA==", 226 | "dependencies": {} 227 | }, 228 | "base64-js@1.5.1": { 229 | "integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==", 230 | "dependencies": {} 231 | }, 232 | "binary-extensions@2.3.0": { 233 | "integrity": "sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw==", 234 | "dependencies": {} 235 | }, 236 | "binary-search@1.3.6": { 237 | 
"integrity": "sha512-nbE1WxOTTrUWIfsfZ4aHGYu5DOuNkbxGokjV6Z2kxfJK3uaAb8zNK1muzOeipoLHZjInT4Br88BHpzevc681xA==", 238 | "dependencies": {} 239 | }, 240 | "bindings@1.5.0": { 241 | "integrity": "sha512-p2q/t/mhvuOj/UeLlV6566GD/guowlr0hHxClI0W9m7MWYkL1F0hLo+0Aexs9HSPCtR1SXQ0TD3MMKrXZajbiQ==", 242 | "dependencies": { 243 | "file-uri-to-path": "file-uri-to-path@1.0.0" 244 | } 245 | }, 246 | "bl@4.1.0": { 247 | "integrity": "sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w==", 248 | "dependencies": { 249 | "buffer": "buffer@5.7.1", 250 | "inherits": "inherits@2.0.4", 251 | "readable-stream": "readable-stream@3.6.2" 252 | } 253 | }, 254 | "buffer@5.7.1": { 255 | "integrity": "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==", 256 | "dependencies": { 257 | "base64-js": "base64-js@1.5.1", 258 | "ieee754": "ieee754@1.2.1" 259 | } 260 | }, 261 | "camelcase@6.3.0": { 262 | "integrity": "sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA==", 263 | "dependencies": {} 264 | }, 265 | "chalk-template@0.4.0": { 266 | "integrity": "sha512-/ghrgmhfY8RaSdeo43hNXxpoHAtxdbskUHjPpfqUWGttFgycUhYPGx3YZBCnUCvOa7Doivn1IZec3DEGFoMgLg==", 267 | "dependencies": { 268 | "chalk": "chalk@4.1.2" 269 | } 270 | }, 271 | "chalk@4.1.2": { 272 | "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", 273 | "dependencies": { 274 | "ansi-styles": "ansi-styles@4.3.0", 275 | "supports-color": "supports-color@7.2.0" 276 | } 277 | }, 278 | "charenc@0.0.2": { 279 | "integrity": "sha512-yrLQ/yVUFXkzg7EDQsPieE/53+0RlaWTs+wBrvW36cyilJ2SaDWfl4Yj7MtLTXleV9uEKefbAGUPv2/iWSooRA==", 280 | "dependencies": {} 281 | }, 282 | "chownr@1.1.4": { 283 | "integrity": "sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg==", 284 | "dependencies": {} 285 | }, 286 | "color-convert@2.0.1": { 287 | "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", 288 | "dependencies": { 289 | "color-name": "color-name@1.1.4" 290 | } 291 | }, 292 | "color-name@1.1.4": { 293 | "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", 294 | "dependencies": {} 295 | }, 296 | "combined-stream@1.0.8": { 297 | "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", 298 | "dependencies": { 299 | "delayed-stream": "delayed-stream@1.0.0" 300 | } 301 | }, 302 | "command-line-args@5.2.1": { 303 | "integrity": "sha512-H4UfQhZyakIjC74I9d34fGYDwk3XpSr17QhEd0Q3I9Xq1CETHo4Hcuo87WyWHpAF1aSLjLRf5lD9ZGX2qStUvg==", 304 | "dependencies": { 305 | "array-back": "array-back@3.1.0", 306 | "find-replace": "find-replace@3.0.0", 307 | "lodash.camelcase": "lodash.camelcase@4.3.0", 308 | "typical": "typical@4.0.0" 309 | } 310 | }, 311 | "command-line-usage@7.0.1": { 312 | "integrity": "sha512-NCyznE//MuTjwi3y84QVUGEOT+P5oto1e1Pk/jFPVdPPfsG03qpTIl3yw6etR+v73d0lXsoojRpvbru2sqePxQ==", 313 | "dependencies": { 314 | "array-back": "array-back@6.2.2", 315 | "chalk-template": "chalk-template@0.4.0", 316 | "table-layout": "table-layout@3.0.2", 317 | "typical": "typical@7.1.1" 318 | } 319 | }, 320 | "commander@10.0.1": { 321 | "integrity": "sha512-y4Mg2tXshplEbSGzx7amzPwKKOCGuoSRP/CjEdwwk0FOGlUbq6lKuoyDZTNZkmxHdJtp54hdfY/JUrdL7Xfdug==", 322 | "dependencies": {} 323 | }, 324 | "crypt@0.0.2": { 325 | "integrity": 
"sha512-mCxBlsHFYh9C+HVpiEacem8FEBnMXgU9gy4zmNC+SXAZNB/1idgp/aulFJ4FgCi7GPEVbfyng092GqL2k2rmow==", 326 | "dependencies": {} 327 | }, 328 | "decamelize@1.2.0": { 329 | "integrity": "sha512-z2S+W9X73hAUUki+N+9Za2lBlun89zigOyGrsax+KUQ6wKW4ZoWpEYBkGhQjwAjjDCkWxhY0VKEhk8wzY7F5cA==", 330 | "dependencies": {} 331 | }, 332 | "decompress-response@6.0.0": { 333 | "integrity": "sha512-aW35yZM6Bb/4oJlZncMH2LCoZtJXTRxES17vE3hoRiowU2kWHaJKFkSBDnDR+cm9J+9QhXmREyIfv0pji9ejCQ==", 334 | "dependencies": { 335 | "mimic-response": "mimic-response@3.1.0" 336 | } 337 | }, 338 | "deep-extend@0.6.0": { 339 | "integrity": "sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA==", 340 | "dependencies": {} 341 | }, 342 | "delayed-stream@1.0.0": { 343 | "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==", 344 | "dependencies": {} 345 | }, 346 | "detect-libc@2.0.3": { 347 | "integrity": "sha512-bwy0MGW55bG41VqxxypOsdSdGqLwXPI/focwgTYCFMbdUiBAxLg9CFzG08sz2aqzknwiX7Hkl0bQENjg8iLByw==", 348 | "dependencies": {} 349 | }, 350 | "digest-fetch@1.3.0": { 351 | "integrity": "sha512-CGJuv6iKNM7QyZlM2T3sPAdZWd/p9zQiRNS9G+9COUCwzWFTs0Xp8NF5iePx7wtvhDykReiRRrSeNb4oMmB8lA==", 352 | "dependencies": { 353 | "base-64": "base-64@0.1.0", 354 | "md5": "md5@2.3.0" 355 | } 356 | }, 357 | "end-of-stream@1.4.4": { 358 | "integrity": "sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q==", 359 | "dependencies": { 360 | "once": "once@1.4.0" 361 | } 362 | }, 363 | "event-target-shim@5.0.1": { 364 | "integrity": "sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ==", 365 | "dependencies": {} 366 | }, 367 | "eventemitter3@4.0.7": { 368 | "integrity": "sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw==", 369 | "dependencies": {} 370 | }, 371 | "expand-template@2.0.3": { 372 | "integrity": "sha512-XYfuKMvj4O35f/pOXLObndIRvyQ+/+6AhODh+OKWj9S9498pHHn/IMszH+gt0fBCRWMNfk1ZSp5x3AifmnI2vg==", 373 | "dependencies": {} 374 | }, 375 | "expr-eval@2.0.2": { 376 | "integrity": "sha512-4EMSHGOPSwAfBiibw3ndnP0AvjDWLsMvGOvWEZ2F96IGk0bIVdjQisOHxReSkE13mHcfbuCiXw+G4y0zv6N8Eg==", 377 | "dependencies": {} 378 | }, 379 | "faiss-node@0.5.1": { 380 | "integrity": "sha512-zD8wobJn8C6OLWo68Unho+Ih8l6nSRB2w3Amj01a+xc4bsEvd2mBDLklAn7VocA9XO3WDvQL/bLpi5flkCn/XQ==", 381 | "dependencies": { 382 | "bindings": "bindings@1.5.0", 383 | "node-addon-api": "node-addon-api@6.1.0", 384 | "prebuild-install": "prebuild-install@7.1.2" 385 | } 386 | }, 387 | "file-uri-to-path@1.0.0": { 388 | "integrity": "sha512-0Zt+s3L7Vf1biwWZ29aARiVYLx7iMGnEUl9x33fbB/j3jR81u/O2LbqK+Bm1CDSNDKVtJ/YjwY7TUd5SkeLQLw==", 389 | "dependencies": {} 390 | }, 391 | "find-replace@3.0.0": { 392 | "integrity": "sha512-6Tb2myMioCAgv5kfvP5/PkZZ/ntTpVK39fHY7WkWBgvbeE+VHd/tZuZ4mrC+bxh4cfOZeYKVPaJIZtZXV7GNCQ==", 393 | "dependencies": { 394 | "array-back": "array-back@3.1.0" 395 | } 396 | }, 397 | "flat@5.0.2": { 398 | "integrity": "sha512-b6suED+5/3rTpUBdG1gupIl8MPFCAMA0QXwmljLhvCUKcUvdE4gWky9zpuGCcXHOsz4J9wPGNWq6OKpmIzz3hQ==", 399 | "dependencies": {} 400 | }, 401 | "flatbuffers@23.5.26": { 402 | "integrity": "sha512-vE+SI9vrJDwi1oETtTIFldC/o9GsVKRM+s6EL0nQgxXlYV1Vc4Tk30hj4xGICftInKQKj1F3up2n8UbIVobISQ==", 403 | "dependencies": {} 404 | }, 405 | "follow-redirects@1.15.6": { 406 | "integrity": "sha512-wWN62YITEaOpSK584EZXJafH1AGpO8RVgElfkuXbTOrPX4fIfOyEpW/CsiNd8JdYrAoOvafRTOEnvsO++qCqFA==", 407 | 
"dependencies": {} 408 | }, 409 | "form-data-encoder@1.7.2": { 410 | "integrity": "sha512-qfqtYan3rxrnCk1VYaA4H+Ms9xdpPqvLZa6xmMgFvhO32x7/3J/ExcTd6qpxM0vH2GdMI+poehyBZvqfMTto8A==", 411 | "dependencies": {} 412 | }, 413 | "form-data@4.0.0": { 414 | "integrity": "sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==", 415 | "dependencies": { 416 | "asynckit": "asynckit@0.4.0", 417 | "combined-stream": "combined-stream@1.0.8", 418 | "mime-types": "mime-types@2.1.35" 419 | } 420 | }, 421 | "formdata-node@4.4.1": { 422 | "integrity": "sha512-0iirZp3uVDjVGt9p49aTaqjk84TrglENEDuqfdlZQ1roC9CWlPk6Avf8EEnZNcAqPonwkG35x4n3ww/1THYAeQ==", 423 | "dependencies": { 424 | "node-domexception": "node-domexception@1.0.0", 425 | "web-streams-polyfill": "web-streams-polyfill@4.0.0-beta.3" 426 | } 427 | }, 428 | "fs-constants@1.0.0": { 429 | "integrity": "sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow==", 430 | "dependencies": {} 431 | }, 432 | "github-from-package@0.0.0": { 433 | "integrity": "sha512-SyHy3T1v2NUXn29OsWdxmK6RwHD+vkj3v8en8AOBZ1wBQ/hCAQ5bAQTD02kW4W9tUp/3Qh6J8r9EvntiyCmOOw==", 434 | "dependencies": {} 435 | }, 436 | "has-flag@4.0.0": { 437 | "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", 438 | "dependencies": {} 439 | }, 440 | "humanize-ms@1.2.1": { 441 | "integrity": "sha512-Fl70vYtsAFb/C06PTS9dZBo7ihau+Tu/DNCk/OyHhea07S+aeMWpFFkUaXRa8fI+ScZbEI8dfSxwY7gxZ9SAVQ==", 442 | "dependencies": { 443 | "ms": "ms@2.1.3" 444 | } 445 | }, 446 | "ieee754@1.2.1": { 447 | "integrity": "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==", 448 | "dependencies": {} 449 | }, 450 | "ignore@5.3.1": { 451 | "integrity": "sha512-5Fytz/IraMjqpwfd34ke28PTVMjZjJG2MPn5t7OE4eUCUNf8BAa7b5WUS9/Qvr6mwOQS7Mk6vdsMno5he+T8Xw==", 452 | "dependencies": {} 453 | }, 454 | "inherits@2.0.4": { 455 | "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", 456 | "dependencies": {} 457 | }, 458 | "ini@1.3.8": { 459 | "integrity": "sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew==", 460 | "dependencies": {} 461 | }, 462 | "is-any-array@2.0.1": { 463 | "integrity": "sha512-UtilS7hLRu++wb/WBAw9bNuP1Eg04Ivn1vERJck8zJthEvXCBEBpGR/33u/xLKWEQf95803oalHrVDptcAvFdQ==", 464 | "dependencies": {} 465 | }, 466 | "is-buffer@1.1.6": { 467 | "integrity": "sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w==", 468 | "dependencies": {} 469 | }, 470 | "js-tiktoken@1.0.10": { 471 | "integrity": "sha512-ZoSxbGjvGyMT13x6ACo9ebhDha/0FHdKA+OsQcMOWcm1Zs7r90Rhk5lhERLzji+3rA7EKpXCgwXcM5fF3DMpdA==", 472 | "dependencies": { 473 | "base64-js": "base64-js@1.5.1" 474 | } 475 | }, 476 | "js-yaml@4.1.0": { 477 | "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", 478 | "dependencies": { 479 | "argparse": "argparse@2.0.1" 480 | } 481 | }, 482 | "json-bignum@0.0.3": { 483 | "integrity": "sha512-2WHyXj3OfHSgNyuzDbSxI1w2jgw5gkWSWhS7Qg4bWXx1nLk3jnbwfUeS0PSba3IzpTUWdHxBieELUzXRjQB2zg==", 484 | "dependencies": {} 485 | }, 486 | "jsonpointer@5.0.1": { 487 | "integrity": "sha512-p/nXbhSEcu3pZRdkW1OfJhpsVtW1gd4Wa1fnQc9YLiTfAjn0312eMKimbdIQzuZl9aa9xUGaRlP9T/CJE/ditQ==", 488 | "dependencies": {} 489 | }, 490 | "langchain@0.1.29_ignore@5.3.1_zod@3.22.4": { 491 | "integrity": 
"sha512-IVS+eyQXp/9oy3+yFoCFgi2ZAZPak9nQJARvcjn8a0PwSXRCUIy8OkNjcV2kdoypbUhGhTyDw19BzFhLKq4vYg==", 492 | "dependencies": { 493 | "@anthropic-ai/sdk": "@anthropic-ai/sdk@0.9.1", 494 | "@langchain/community": "@langchain/community@0.0.42", 495 | "@langchain/core": "@langchain/core@0.1.48_zod@3.22.4", 496 | "@langchain/openai": "@langchain/openai@0.0.23_zod@3.22.4", 497 | "binary-extensions": "binary-extensions@2.3.0", 498 | "ignore": "ignore@5.3.1", 499 | "js-tiktoken": "js-tiktoken@1.0.10", 500 | "js-yaml": "js-yaml@4.1.0", 501 | "jsonpointer": "jsonpointer@5.0.1", 502 | "langchainhub": "langchainhub@0.0.8", 503 | "langsmith": "langsmith@0.1.13", 504 | "ml-distance": "ml-distance@4.0.1", 505 | "openapi-types": "openapi-types@12.1.3", 506 | "p-retry": "p-retry@4.6.2", 507 | "uuid": "uuid@9.0.1", 508 | "yaml": "yaml@2.4.1", 509 | "zod": "zod@3.22.4", 510 | "zod-to-json-schema": "zod-to-json-schema@3.22.4_zod@3.22.4" 511 | } 512 | }, 513 | "langchainhub@0.0.8": { 514 | "integrity": "sha512-Woyb8YDHgqqTOZvWIbm2CaFDGfZ4NTSyXV687AG4vXEfoNo7cGQp7nhl7wL3ehenKWmNEmcxCLgOZzW8jE6lOQ==", 515 | "dependencies": {} 516 | }, 517 | "langsmith@0.1.13": { 518 | "integrity": "sha512-iyGrsaWhZ70F1aG8T8Nd4iH33Z0JFMdxbfBbaRV/+LkJDH4PByZHNJbApT6G2pQmmYD0cei9oW7kXp89N5SXXQ==", 519 | "dependencies": { 520 | "@types/uuid": "@types/uuid@9.0.8", 521 | "commander": "commander@10.0.1", 522 | "p-queue": "p-queue@6.6.2", 523 | "p-retry": "p-retry@4.6.2", 524 | "uuid": "uuid@9.0.1" 525 | } 526 | }, 527 | "lodash.assignwith@4.2.0": { 528 | "integrity": "sha512-ZznplvbvtjK2gMvnQ1BR/zqPFZmS6jbK4p+6Up4xcRYA7yMIwxHCfbTcrYxXKzzqLsQ05eJPVznEW3tuwV7k1g==", 529 | "dependencies": {} 530 | }, 531 | "lodash.camelcase@4.3.0": { 532 | "integrity": "sha512-TwuEnCnxbc3rAvhf/LbG7tJUDzhqXyFnv3dtzLOPgCG/hODL7WFnsbwktkD7yUV0RrreP/l1PALq/YSg6VvjlA==", 533 | "dependencies": {} 534 | }, 535 | "lru-cache@6.0.0": { 536 | "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", 537 | "dependencies": { 538 | "yallist": "yallist@4.0.0" 539 | } 540 | }, 541 | "md5@2.3.0": { 542 | "integrity": "sha512-T1GITYmFaKuO91vxyoQMFETst+O71VUPEU3ze5GNzDm0OWdP8v1ziTaAEPUr/3kLsY3Sftgz242A1SetQiDL7g==", 543 | "dependencies": { 544 | "charenc": "charenc@0.0.2", 545 | "crypt": "crypt@0.0.2", 546 | "is-buffer": "is-buffer@1.1.6" 547 | } 548 | }, 549 | "mime-db@1.52.0": { 550 | "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", 551 | "dependencies": {} 552 | }, 553 | "mime-types@2.1.35": { 554 | "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", 555 | "dependencies": { 556 | "mime-db": "mime-db@1.52.0" 557 | } 558 | }, 559 | "mimic-response@3.1.0": { 560 | "integrity": "sha512-z0yWI+4FDrrweS8Zmt4Ej5HdJmky15+L2e6Wgn3+iK5fWzb6T3fhNFq2+MeTRb064c6Wr4N/wv0DzQTjNzHNGQ==", 561 | "dependencies": {} 562 | }, 563 | "minimist@1.2.8": { 564 | "integrity": "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==", 565 | "dependencies": {} 566 | }, 567 | "mkdirp-classic@0.5.3": { 568 | "integrity": "sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A==", 569 | "dependencies": {} 570 | }, 571 | "ml-array-mean@1.1.6": { 572 | "integrity": "sha512-MIdf7Zc8HznwIisyiJGRH9tRigg3Yf4FldW8DxKxpCCv/g5CafTw0RRu51nojVEOXuCQC7DRVVu5c7XXO/5joQ==", 573 | "dependencies": { 574 | "ml-array-sum": "ml-array-sum@1.1.6" 575 | } 576 | }, 577 | 
"ml-array-sum@1.1.6": { 578 | "integrity": "sha512-29mAh2GwH7ZmiRnup4UyibQZB9+ZLyMShvt4cH4eTK+cL2oEMIZFnSyB3SS8MlsTh6q/w/yh48KmqLxmovN4Dw==", 579 | "dependencies": { 580 | "is-any-array": "is-any-array@2.0.1" 581 | } 582 | }, 583 | "ml-distance-euclidean@2.0.0": { 584 | "integrity": "sha512-yC9/2o8QF0A3m/0IXqCTXCzz2pNEzvmcE/9HFKOZGnTjatvBbsn4lWYJkxENkA4Ug2fnYl7PXQxnPi21sgMy/Q==", 585 | "dependencies": {} 586 | }, 587 | "ml-distance@4.0.1": { 588 | "integrity": "sha512-feZ5ziXs01zhyFUUUeZV5hwc0f5JW0Sh0ckU1koZe/wdVkJdGxcP06KNQuF0WBTj8FttQUzcvQcpcrOp/XrlEw==", 589 | "dependencies": { 590 | "ml-array-mean": "ml-array-mean@1.1.6", 591 | "ml-distance-euclidean": "ml-distance-euclidean@2.0.0", 592 | "ml-tree-similarity": "ml-tree-similarity@1.0.0" 593 | } 594 | }, 595 | "ml-tree-similarity@1.0.0": { 596 | "integrity": "sha512-XJUyYqjSuUQkNQHMscr6tcjldsOoAekxADTplt40QKfwW6nd++1wHWV9AArl0Zvw/TIHgNaZZNvr8QGvE8wLRg==", 597 | "dependencies": { 598 | "binary-search": "binary-search@1.3.6", 599 | "num-sort": "num-sort@2.1.0" 600 | } 601 | }, 602 | "ms@2.1.3": { 603 | "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", 604 | "dependencies": {} 605 | }, 606 | "napi-build-utils@1.0.2": { 607 | "integrity": "sha512-ONmRUqK7zj7DWX0D9ADe03wbwOBZxNAfF20PlGfCWQcD3+/MakShIHrMqx9YwPTfxDdF1zLeL+RGZiR9kGMLdg==", 608 | "dependencies": {} 609 | }, 610 | "node-abi@3.57.0": { 611 | "integrity": "sha512-Dp+A9JWxRaKuHP35H77I4kCKesDy5HUDEmScia2FyncMTOXASMyg251F5PhFoDA5uqBrDDffiLpbqnrZmNXW+g==", 612 | "dependencies": { 613 | "semver": "semver@7.6.0" 614 | } 615 | }, 616 | "node-addon-api@6.1.0": { 617 | "integrity": "sha512-+eawOlIgy680F0kBzPUNFhMZGtJ1YmqM6l4+Crf4IkImjYrO/mqPwRMh352g23uIaQKFItcQ64I7KMaJxHgAVA==", 618 | "dependencies": {} 619 | }, 620 | "node-domexception@1.0.0": { 621 | "integrity": "sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ==", 622 | "dependencies": {} 623 | }, 624 | "node-fetch@2.7.0": { 625 | "integrity": "sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==", 626 | "dependencies": { 627 | "whatwg-url": "whatwg-url@5.0.0" 628 | } 629 | }, 630 | "num-sort@2.1.0": { 631 | "integrity": "sha512-1MQz1Ed8z2yckoBeSfkQHHO9K1yDRxxtotKSJ9yvcTUUxSvfvzEq5GwBrjjHEpMlq/k5gvXdmJ1SbYxWtpNoVg==", 632 | "dependencies": {} 633 | }, 634 | "once@1.4.0": { 635 | "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", 636 | "dependencies": { 637 | "wrappy": "wrappy@1.0.2" 638 | } 639 | }, 640 | "openai@4.29.2": { 641 | "integrity": "sha512-cPkT6zjEcE4qU5OW/SoDDuXEsdOLrXlAORhzmaguj5xZSPlgKvLhi27sFWhLKj07Y6WKNWxcwIbzm512FzTBNQ==", 642 | "dependencies": { 643 | "@types/node": "@types/node@18.19.26", 644 | "@types/node-fetch": "@types/node-fetch@2.6.11", 645 | "abort-controller": "abort-controller@3.0.0", 646 | "agentkeepalive": "agentkeepalive@4.5.0", 647 | "digest-fetch": "digest-fetch@1.3.0", 648 | "form-data-encoder": "form-data-encoder@1.7.2", 649 | "formdata-node": "formdata-node@4.4.1", 650 | "node-fetch": "node-fetch@2.7.0", 651 | "web-streams-polyfill": "web-streams-polyfill@3.3.3" 652 | } 653 | }, 654 | "openapi-types@12.1.3": { 655 | "integrity": "sha512-N4YtSYJqghVu4iek2ZUvcN/0aqH1kRDuNqzcycDxhOUpg7GdvLa2F3DgS6yBNhInhv2r/6I0Flkn7CqL8+nIcw==", 656 | "dependencies": {} 657 | }, 658 | "p-finally@1.0.0": { 659 | "integrity": 
"sha512-LICb2p9CB7FS+0eR1oqWnHhp0FljGLZCWBE9aix0Uye9W8LTQPwMTYVGWQWIw9RdQiDg4+epXQODwIYJtSJaow==", 660 | "dependencies": {} 661 | }, 662 | "p-queue@6.6.2": { 663 | "integrity": "sha512-RwFpb72c/BhQLEXIZ5K2e+AhgNVmIejGlTgiB9MzZ0e93GRvqZ7uSi0dvRF7/XIXDeNkra2fNHBxTyPDGySpjQ==", 664 | "dependencies": { 665 | "eventemitter3": "eventemitter3@4.0.7", 666 | "p-timeout": "p-timeout@3.2.0" 667 | } 668 | }, 669 | "p-retry@4.6.2": { 670 | "integrity": "sha512-312Id396EbJdvRONlngUx0NydfrIQ5lsYu0znKVUzVvArzEIt08V1qhtyESbGVd1FGX7UKtiFp5uwKZdM8wIuQ==", 671 | "dependencies": { 672 | "@types/retry": "@types/retry@0.12.0", 673 | "retry": "retry@0.13.1" 674 | } 675 | }, 676 | "p-timeout@3.2.0": { 677 | "integrity": "sha512-rhIwUycgwwKcP9yTOOFK/AKsAopjjCakVqLHePO3CC6Mir1Z99xT+R63jZxAT5lFZLa2inS5h+ZS2GvR99/FBg==", 678 | "dependencies": { 679 | "p-finally": "p-finally@1.0.0" 680 | } 681 | }, 682 | "pad-left@2.1.0": { 683 | "integrity": "sha512-HJxs9K9AztdIQIAIa/OIazRAUW/L6B9hbQDxO4X07roW3eo9XqZc2ur9bn1StH9CnbbI9EgvejHQX7CBpCF1QA==", 684 | "dependencies": { 685 | "repeat-string": "repeat-string@1.6.1" 686 | } 687 | }, 688 | "prebuild-install@7.1.2": { 689 | "integrity": "sha512-UnNke3IQb6sgarcZIDU3gbMeTp/9SSU1DAIkil7PrqG1vZlBtY5msYccSKSHDqa3hNg436IXK+SNImReuA1wEQ==", 690 | "dependencies": { 691 | "detect-libc": "detect-libc@2.0.3", 692 | "expand-template": "expand-template@2.0.3", 693 | "github-from-package": "github-from-package@0.0.0", 694 | "minimist": "minimist@1.2.8", 695 | "mkdirp-classic": "mkdirp-classic@0.5.3", 696 | "napi-build-utils": "napi-build-utils@1.0.2", 697 | "node-abi": "node-abi@3.57.0", 698 | "pump": "pump@3.0.0", 699 | "rc": "rc@1.2.8", 700 | "simple-get": "simple-get@4.0.1", 701 | "tar-fs": "tar-fs@2.1.1", 702 | "tunnel-agent": "tunnel-agent@0.6.0" 703 | } 704 | }, 705 | "proxy-from-env@1.1.0": { 706 | "integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==", 707 | "dependencies": {} 708 | }, 709 | "pump@3.0.0": { 710 | "integrity": "sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww==", 711 | "dependencies": { 712 | "end-of-stream": "end-of-stream@1.4.4", 713 | "once": "once@1.4.0" 714 | } 715 | }, 716 | "rc@1.2.8": { 717 | "integrity": "sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw==", 718 | "dependencies": { 719 | "deep-extend": "deep-extend@0.6.0", 720 | "ini": "ini@1.3.8", 721 | "minimist": "minimist@1.2.8", 722 | "strip-json-comments": "strip-json-comments@2.0.1" 723 | } 724 | }, 725 | "readable-stream@3.6.2": { 726 | "integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==", 727 | "dependencies": { 728 | "inherits": "inherits@2.0.4", 729 | "string_decoder": "string_decoder@1.3.0", 730 | "util-deprecate": "util-deprecate@1.0.2" 731 | } 732 | }, 733 | "repeat-string@1.6.1": { 734 | "integrity": "sha512-PV0dzCYDNfRi1jCDbJzpW7jNNDRuCOG/jI5ctQcGKt/clZD+YcPS3yIlWuTJMmESC8aevCFmWJy5wjAFgNqN6w==", 735 | "dependencies": {} 736 | }, 737 | "retry@0.13.1": { 738 | "integrity": "sha512-XQBQ3I8W1Cge0Seh+6gjj03LbmRFWuoszgK9ooCpwYIrhhoO80pfq4cUkU5DkknwfOfFteRwlZ56PYOGYyFWdg==", 739 | "dependencies": {} 740 | }, 741 | "safe-buffer@5.2.1": { 742 | "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", 743 | "dependencies": {} 744 | }, 745 | "semver@7.6.0": { 746 | "integrity": 
"sha512-EnwXhrlwXMk9gKu5/flx5sv/an57AkRplG3hTK68W7FRDN+k+OWBj65M7719OkA82XLBxrcX0KSHj+X5COhOVg==", 747 | "dependencies": { 748 | "lru-cache": "lru-cache@6.0.0" 749 | } 750 | }, 751 | "simple-concat@1.0.1": { 752 | "integrity": "sha512-cSFtAPtRhljv69IK0hTVZQ+OfE9nePi/rtJmw5UjHeVyVroEqJXP1sFztKUy1qU+xvz3u/sfYJLa947b7nAN2Q==", 753 | "dependencies": {} 754 | }, 755 | "simple-get@4.0.1": { 756 | "integrity": "sha512-brv7p5WgH0jmQJr1ZDDfKDOSeWWg+OVypG99A/5vYGPqJ6pxiaHLy8nxtFjBA7oMa01ebA9gfh1uMCFqOuXxvA==", 757 | "dependencies": { 758 | "decompress-response": "decompress-response@6.0.0", 759 | "once": "once@1.4.0", 760 | "simple-concat": "simple-concat@1.0.1" 761 | } 762 | }, 763 | "stream-read-all@3.0.1": { 764 | "integrity": "sha512-EWZT9XOceBPlVJRrYcykW8jyRSZYbkb/0ZK36uLEmoWVO5gxBOnntNTseNzfREsqxqdfEGQrD8SXQ3QWbBmq8A==", 765 | "dependencies": {} 766 | }, 767 | "string_decoder@1.3.0": { 768 | "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", 769 | "dependencies": { 770 | "safe-buffer": "safe-buffer@5.2.1" 771 | } 772 | }, 773 | "strip-json-comments@2.0.1": { 774 | "integrity": "sha512-4gB8na07fecVVkOI6Rs4e7T6NOTki5EmL7TUduTs6bu3EdnSycntVJ4re8kgZA+wx9IueI2Y11bfbgwtzuE0KQ==", 775 | "dependencies": {} 776 | }, 777 | "supports-color@7.2.0": { 778 | "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", 779 | "dependencies": { 780 | "has-flag": "has-flag@4.0.0" 781 | } 782 | }, 783 | "table-layout@3.0.2": { 784 | "integrity": "sha512-rpyNZYRw+/C+dYkcQ3Pr+rLxW4CfHpXjPDnG7lYhdRoUcZTUt+KEsX+94RGp/aVp/MQU35JCITv2T/beY4m+hw==", 785 | "dependencies": { 786 | "@75lb/deep-merge": "@75lb/deep-merge@1.1.1", 787 | "array-back": "array-back@6.2.2", 788 | "command-line-args": "command-line-args@5.2.1", 789 | "command-line-usage": "command-line-usage@7.0.1", 790 | "stream-read-all": "stream-read-all@3.0.1", 791 | "typical": "typical@7.1.1", 792 | "wordwrapjs": "wordwrapjs@5.1.0" 793 | } 794 | }, 795 | "tar-fs@2.1.1": { 796 | "integrity": "sha512-V0r2Y9scmbDRLCNex/+hYzvp/zyYjvFbHPNgVTKfQvVrb6guiE/fxP+XblDNR011utopbkex2nM4dHNV6GDsng==", 797 | "dependencies": { 798 | "chownr": "chownr@1.1.4", 799 | "mkdirp-classic": "mkdirp-classic@0.5.3", 800 | "pump": "pump@3.0.0", 801 | "tar-stream": "tar-stream@2.2.0" 802 | } 803 | }, 804 | "tar-stream@2.2.0": { 805 | "integrity": "sha512-ujeqbceABgwMZxEJnk2HDY2DlnUZ+9oEcb1KzTVfYHio0UE6dG71n60d8D2I4qNvleWrrXpmjpt7vZeF1LnMZQ==", 806 | "dependencies": { 807 | "bl": "bl@4.1.0", 808 | "end-of-stream": "end-of-stream@1.4.4", 809 | "fs-constants": "fs-constants@1.0.0", 810 | "inherits": "inherits@2.0.4", 811 | "readable-stream": "readable-stream@3.6.2" 812 | } 813 | }, 814 | "tr46@0.0.3": { 815 | "integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==", 816 | "dependencies": {} 817 | }, 818 | "tslib@2.6.2": { 819 | "integrity": "sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q==", 820 | "dependencies": {} 821 | }, 822 | "tunnel-agent@0.6.0": { 823 | "integrity": "sha512-McnNiV1l8RYeY8tBgEpuodCC1mLUdbSN+CYBL7kJsJNInOP8UjDDEwdk6Mw60vdLLrr5NHKZhMAOSrR2NZuQ+w==", 824 | "dependencies": { 825 | "safe-buffer": "safe-buffer@5.2.1" 826 | } 827 | }, 828 | "typical@4.0.0": { 829 | "integrity": "sha512-VAH4IvQ7BDFYglMd7BPRDfLgxZZX4O4TFcRDA6EN5X7erNJJq+McIEp8np9aVtxrCJ6qx4GTYVfOWNjcqwZgRw==", 830 | "dependencies": {} 831 | }, 832 | "typical@7.1.1": { 833 | "integrity": 
"sha512-T+tKVNs6Wu7IWiAce5BgMd7OZfNYUndHwc5MknN+UHOudi7sGZzuHdCadllRuqJ3fPtgFtIH9+lt9qRv6lmpfA==", 834 | "dependencies": {} 835 | }, 836 | "undici-types@5.26.5": { 837 | "integrity": "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==", 838 | "dependencies": {} 839 | }, 840 | "util-deprecate@1.0.2": { 841 | "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==", 842 | "dependencies": {} 843 | }, 844 | "uuid@9.0.1": { 845 | "integrity": "sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA==", 846 | "dependencies": {} 847 | }, 848 | "vectordb@0.4.13_@apache-arrow+ts@14.0.2_apache-arrow@14.0.2": { 849 | "integrity": "sha512-8M1/KAKLsGp50wqPcf+UmPjWQP6T74fWaOCCDnUMVEPsu39Q4w/tGbVlAbqVLpb6ChUlQvEpz+Cq0cLOaOw8xQ==", 850 | "dependencies": { 851 | "@apache-arrow/ts": "@apache-arrow/ts@14.0.2", 852 | "@lancedb/vectordb-darwin-arm64": "@lancedb/vectordb-darwin-arm64@0.4.13", 853 | "@lancedb/vectordb-darwin-x64": "@lancedb/vectordb-darwin-x64@0.4.13", 854 | "@lancedb/vectordb-linux-arm64-gnu": "@lancedb/vectordb-linux-arm64-gnu@0.4.13", 855 | "@lancedb/vectordb-linux-x64-gnu": "@lancedb/vectordb-linux-x64-gnu@0.4.13", 856 | "@lancedb/vectordb-win32-x64-msvc": "@lancedb/vectordb-win32-x64-msvc@0.4.13", 857 | "@neon-rs/load": "@neon-rs/load@0.0.74", 858 | "apache-arrow": "apache-arrow@14.0.2", 859 | "axios": "axios@1.6.8" 860 | } 861 | }, 862 | "web-streams-polyfill@3.3.3": { 863 | "integrity": "sha512-d2JWLCivmZYTSIoge9MsgFCZrt571BikcWGYkjC1khllbTeDlGqZ2D8vD8E/lJa8WGWbb7Plm8/XJYV7IJHZZw==", 864 | "dependencies": {} 865 | }, 866 | "web-streams-polyfill@4.0.0-beta.3": { 867 | "integrity": "sha512-QW95TCTaHmsYfHDybGMwO5IJIM93I/6vTRk+daHTWFPhwh+C8Cg7j7XyKrwrj8Ib6vYXe0ocYNrmzY4xAAN6ug==", 868 | "dependencies": {} 869 | }, 870 | "webidl-conversions@3.0.1": { 871 | "integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==", 872 | "dependencies": {} 873 | }, 874 | "whatwg-url@5.0.0": { 875 | "integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==", 876 | "dependencies": { 877 | "tr46": "tr46@0.0.3", 878 | "webidl-conversions": "webidl-conversions@3.0.1" 879 | } 880 | }, 881 | "wordwrapjs@5.1.0": { 882 | "integrity": "sha512-JNjcULU2e4KJwUNv6CHgI46UvDGitb6dGryHajXTDiLgg1/RiGoPSDw4kZfYnwGtEXf2ZMeIewDQgFGzkCB2Sg==", 883 | "dependencies": {} 884 | }, 885 | "wrappy@1.0.2": { 886 | "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", 887 | "dependencies": {} 888 | }, 889 | "yallist@4.0.0": { 890 | "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", 891 | "dependencies": {} 892 | }, 893 | "yaml@2.4.1": { 894 | "integrity": "sha512-pIXzoImaqmfOrL7teGUBt/T7ZDnyeGBWyXQBvOVhLkWLN37GXv8NMLK406UY6dS51JfcQHsmcW5cJ441bHg6Lg==", 895 | "dependencies": {} 896 | }, 897 | "zod-to-json-schema@3.22.4_zod@3.22.4": { 898 | "integrity": "sha512-2Ed5dJ+n/O3cU383xSY28cuVi0BCQhF8nYqWU5paEpl7fVdqdAmiLdqLyfblbNdfOFwFfi/mqU4O1pwc60iBhQ==", 899 | "dependencies": { 900 | "zod": "zod@3.22.4" 901 | } 902 | }, 903 | "zod@3.22.4": { 904 | "integrity": "sha512-iC+8Io04lddc+mVqQ9AZ7OQ2MrUKGN+oIQyq1vemgt46jwCwLfhq7/pwnBnNXXXZb8VTVLKwp9EDkx+ryxIWmg==", 905 | "dependencies": {} 906 | } 907 | } 908 | }, 909 | "remote": { 910 | "https://deno.land/std@0.220.0/dotenv/mod.ts": 
"0180eaeedaaf88647318811cdaa418cc64dc51fb08354f91f5f480d0a1309f7d", 911 | "https://deno.land/std@0.220.0/dotenv/parse.ts": "09977ff88dfd1f24f9973a338f0f91bbdb9307eb5ff6085446e7c423e4c7ba0c", 912 | "https://deno.land/std@0.220.0/dotenv/stringify.ts": "0047ad7068289735d08964046aea267a750c141b494ca0e38831b89be6c020c2" 913 | }, 914 | "workspace": { 915 | "dependencies": [ 916 | "npm:@lancedb/lancedb", 917 | "npm:@langchain/community", 918 | "npm:@langchain/core@0.1.48", 919 | "npm:@langchain/openai@0.0.22", 920 | "npm:cheerio", 921 | "npm:faiss-node", 922 | "npm:ignore", 923 | "npm:langchain@0.1.29", 924 | "npm:lodash@4.17.21", 925 | "npm:openai", 926 | "npm:pdf-parse", 927 | "npm:vectordb", 928 | "npm:zod", 929 | "npm:zod-to-json-schema" 930 | ] 931 | } 932 | } 933 | -------------------------------------------------------------------------------- /lc-tools.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": null, 6 | "id": "11ee0f16-747d-463b-ba59-f6a22fc13d1e", 7 | "metadata": {}, 8 | "outputs": [], 9 | "source": [ 10 | "import { load } from \"dotenv\";\n", 11 | "const env = await load();\n", 12 | "\n", 13 | "const process = {\n", 14 | " env\n", 15 | "}" 16 | ] 17 | }, 18 | { 19 | "cell_type": "code", 20 | "execution_count": null, 21 | "id": "cd1f52e3-98ec-4277-aae8-560dc36cb492", 22 | "metadata": {}, 23 | "outputs": [], 24 | "source": [ 25 | "import { z } from \"zod\";\n", 26 | "\n", 27 | "// 简单使用\n", 28 | "const stringSchema = z.string();\n", 29 | "stringSchema.parse(\"Hello, Zod!\");" 30 | ] 31 | }, 32 | { 33 | "cell_type": "code", 34 | "execution_count": null, 35 | "id": "f3362645-bc52-4ca0-8718-4fca2905914b", 36 | "metadata": {}, 37 | "outputs": [], 38 | "source": [ 39 | "stringSchema.parse(2323);" 40 | ] 41 | }, 42 | { 43 | "cell_type": "code", 44 | "execution_count": null, 45 | "id": "79d2ea09-ec90-4592-992b-2e5635f39cb1", 46 | "metadata": {}, 47 | "outputs": [], 48 | "source": [ 49 | "// 基础类型\n", 50 | "const stringSchema = z.string();\n", 51 | "const numberSchema = z.number();\n", 52 | "const booleanSchema = z.boolean();\n", 53 | "\n", 54 | "// 数组\n", 55 | "const stringArraySchema = z.array(z.string());\n", 56 | "stringArraySchema.parse([\"apple\", \"banana\", \"cherry\"]); \n", 57 | "\n", 58 | "// 对象\n", 59 | "const personSchema = z.object({\n", 60 | " name: z.string(),\n", 61 | " age: z.number(),\n", 62 | " // 可选类型\n", 63 | " isStudent: z.boolean().optional(),\n", 64 | " // 默认值\n", 65 | " home: z.string().default(\"no home\")\n", 66 | "});\n", 67 | "\n", 68 | "// 联合类型\n", 69 | "const mixedTypeSchema = z.union([z.string(), z.number()]);\n", 70 | "mixedTypeSchema.parse(\"hello\"); \n", 71 | "mixedTypeSchema.parse(42); " 72 | ] 73 | }, 74 | { 75 | "cell_type": "code", 76 | "execution_count": null, 77 | "id": "356243dc-0e87-45c6-a66b-9b4149f4fb4f", 78 | "metadata": {}, 79 | "outputs": [], 80 | "source": [ 81 | "import { z } from \"zod\";\n", 82 | "\n", 83 | "const getCurrentWeatherSchema = z.object({\n", 84 | " location: z.string().describe(\"The city and state, e.g. 
San Francisco, CA\"),\n", 85 | " unit: z.enum([\"celsius\", \"fahrenheit\"]).describe(\"The unit of temperature\"),\n", 86 | "});" 87 | ] 88 | }, 89 | { 90 | "cell_type": "code", 91 | "execution_count": null, 92 | "id": "b8a9a882-c08a-431a-aea2-6062773ac532", 93 | "metadata": {}, 94 | "outputs": [], 95 | "source": [ 96 | "import { zodToJsonSchema } from \"zod-to-json-schema\";\n", 97 | "\n", 98 | "const paramSchema = zodToJsonSchema(getCurrentWeatherSchema)\n", 99 | "paramSchema" 100 | ] 101 | }, 102 | { 103 | "cell_type": "code", 104 | "execution_count": null, 105 | "id": "63fc3643-a7d7-437a-b474-45ca7d72ba3a", 106 | "metadata": {}, 107 | "outputs": [], 108 | "source": [ 109 | "const model = new ChatOpenAI({\n", 110 | " temperature: 0 \n", 111 | "})\n", 112 | "\n", 113 | "const modelWithTools = model.bind({\n", 114 | " tools: [\n", 115 | " {\n", 116 | " type: \"function\",\n", 117 | " function: {\n", 118 | " name: \"getCurrentWeather\",\n", 119 | " description: \"Get the current weather in a given location\",\n", 120 | " parameters: zodToJsonSchema(getCurrentWeatherSchema),\n", 121 | " }\n", 122 | " }\n", 123 | " ]\n", 124 | "})\n", 125 | "\n", 126 | "await modelWithTools.invoke(\"北京的天气怎么样\");" 127 | ] 128 | }, 129 | { 130 | "cell_type": "code", 131 | "execution_count": null, 132 | "id": "d9683945-c0de-4bf0-b1e3-28f8190ab943", 133 | "metadata": {}, 134 | "outputs": [], 135 | "source": [ 136 | "import { ChatPromptTemplate } from \"@langchain/core/prompts\";\n", 137 | "\n", 138 | "const prompt = ChatPromptTemplate.fromMessages([\n", 139 | " [\"system\", \"You are a helpful assistant\"],\n", 140 | " [\"human\", \"{input}\"]\n", 141 | "])\n", 142 | "\n", 143 | "const chain = prompt.pipe(modelWithTools)\n", 144 | "\n", 145 | "await chain.invoke({\n", 146 | " input: \"北京的天气怎么样\"\n", 147 | "});" 148 | ] 149 | }, 150 | { 151 | "cell_type": "code", 152 | "execution_count": null, 153 | "id": "a960463a-c711-4a60-8965-7a7c8b65f336", 154 | "metadata": {}, 155 | "outputs": [], 156 | "source": [ 157 | "const getCurrentTimeSchema = z.object({\n", 158 | " format: z\n", 159 | " .enum([\"iso\", \"locale\", \"string\"])\n", 160 | " .optional()\n", 161 | " .describe(\"The format of the time, e.g. 
iso, locale, string\"),\n", 162 | "});\n", 163 | "\n", 164 | "zodToJsonSchema(getCurrentTimeSchema)" 165 | ] 166 | }, 167 | { 168 | "cell_type": "code", 169 | "execution_count": null, 170 | "id": "5194e71a-37a4-4339-9e91-2e1b7847d3f7", 171 | "metadata": {}, 172 | "outputs": [], 173 | "source": [ 174 | "const model = new ChatOpenAI({\n", 175 | " temperature: 0 \n", 176 | "})\n", 177 | "\n", 178 | "const modelWithMultiTools = model.bind({\n", 179 | " tools: [\n", 180 | " {\n", 181 | " type: \"function\",\n", 182 | " function: {\n", 183 | " name: \"getCurrentWeather\",\n", 184 | " description: \"Get the current weather in a given location\",\n", 185 | " parameters: zodToJsonSchema(getCurrentWeatherSchema)\n", 186 | " }\n", 187 | " },\n", 188 | " {\n", 189 | " type: \"function\",\n", 190 | " function: {\n", 191 | " name: \"getCurrentTime\",\n", 192 | " description: \"Get the current time in a given format\",\n", 193 | " parameters: zodToJsonSchema(getCurrentTimeSchema)\n", 194 | " }\n", 195 | " }\n", 196 | " ]\n", 197 | "})\n", 198 | "\n", 199 | "await modelWithMultiTools.invoke(\"现在几点了?\");\n" 200 | ] 201 | }, 202 | { 203 | "cell_type": "code", 204 | "execution_count": null, 205 | "id": "e6256e6a-a433-4368-a65b-b2fb3bedca68", 206 | "metadata": {}, 207 | "outputs": [], 208 | "source": [ 209 | "await modelWithMultiTools.invoke(\"现在 iso 格式的时间是什么?\");" 210 | ] 211 | }, 212 | { 213 | "cell_type": "code", 214 | "execution_count": null, 215 | "id": "50a931ec-a737-4472-a3a4-cbb72aecd192", 216 | "metadata": {}, 217 | "outputs": [], 218 | "source": [ 219 | "const model = new ChatOpenAI({\n", 220 | " temperature: 0 \n", 221 | "})\n", 222 | "\n", 223 | "const modelWithForce = model.bind({\n", 224 | " tools: [\n", 225 | " {\n", 226 | " type: \"function\",\n", 227 | " function: {\n", 228 | " name: \"getCurrentWeather\",\n", 229 | " description: \"Get the current weather in a given location\",\n", 230 | " parameters: zodToJsonSchema(getCurrentWeatherSchema)\n", 231 | " }\n", 232 | " },\n", 233 | " {\n", 234 | " type: \"function\",\n", 235 | " function: {\n", 236 | " name: \"getCurrentTime\",\n", 237 | " description: \"Get the current time in a given format\",\n", 238 | " parameters: zodToJsonSchema(getCurrentTimeSchema)\n", 239 | " }\n", 240 | " }\n", 241 | " ],\n", 242 | " tool_choice: {\n", 243 | " type: \"function\",\n", 244 | " function: {\n", 245 | " name: \"getCurrentWeather\"\n", 246 | " }\n", 247 | " }\n", 248 | "})\n", 249 | "\n", 250 | "await modelWithForce.invoke(\"现在几点了?\");" 251 | ] 252 | }, 253 | { 254 | "cell_type": "markdown", 255 | "id": "c54e4e1e-40bc-41cb-969b-c01efb2882c5", 256 | "metadata": {}, 257 | "source": [ 258 | "## Tagging" 259 | ] 260 | }, 261 | { 262 | "cell_type": "code", 263 | "execution_count": null, 264 | "id": "b04f0485-eae1-462e-940d-9261edc38f29", 265 | "metadata": {}, 266 | "outputs": [], 267 | "source": [ 268 | "const taggingSchema = z.object({\n", 269 | " emotion:z.enum([\"pos\", \"neg\", \"neutral\"]).describe(\"文本的情感\"),\n", 270 | " language: z.string().describe(\"文本的核心语言(应为ISO 639-1代码)\"),\n", 271 | "});" 272 | ] 273 | }, 274 | { 275 | "cell_type": "code", 276 | "execution_count": null, 277 | "id": "ed1fdd8e-872c-4feb-9a42-47063f11d647", 278 | "metadata": {}, 279 | "outputs": [], 280 | "source": [ 281 | "import { JsonOutputToolsParser } from \"@langchain/core/output_parsers/openai_tools\";\n", 282 | "\n", 283 | "const model = new ChatOpenAI({\n", 284 | " temperature: 0 \n", 285 | "})\n", 286 | "\n", 287 | "const modelTagging = model.bind({\n", 288 | " tools: [\n", 
289 | " {\n", 290 | " type: \"function\",\n", 291 | " function: {\n", 292 | " name: \"tagging\",\n", 293 | " description: \"为特定的文本片段打上标签\",\n", 294 | " parameters: zodToJsonSchema(taggingSchema)\n", 295 | " }\n", 296 | " }\n", 297 | " ],\n", 298 | " tool_choice: {\n", 299 | " type: \"function\",\n", 300 | " function: {\n", 301 | " name: \"tagging\"\n", 302 | " }\n", 303 | " }\n", 304 | "})\n", 305 | "\n", 306 | "const prompt = ChatPromptTemplate.fromMessages([\n", 307 | " [\"system\", \"仔细思考,你有充足的时间进行严谨的思考,然后按照指示对文本进行标记\"],\n", 308 | " [\"human\", \"{input}\"]\n", 309 | "])\n", 310 | "\n", 311 | "const chain = prompt.pipe(modelTagging).pipe(new JsonOutputToolsParser())" 312 | ] 313 | }, 314 | { 315 | "cell_type": "code", 316 | "execution_count": null, 317 | "id": "1c87749a-b8b7-4909-b59e-29a9f8dfbab0", 318 | "metadata": {}, 319 | "outputs": [], 320 | "source": [ 321 | "await chain.invoke({\n", 322 | " input: \"hello world\"\n", 323 | "})" 324 | ] 325 | }, 326 | { 327 | "cell_type": "code", 328 | "execution_count": null, 329 | "id": "1cf4ec15-b0f3-4541-8a66-32bbbd7392ef", 330 | "metadata": {}, 331 | "outputs": [], 332 | "source": [ 333 | "await chain.invoke({\n", 334 | " input: \"写代码太难了,👴 不干了\"\n", 335 | "})" 336 | ] 337 | }, 338 | { 339 | "cell_type": "code", 340 | "execution_count": null, 341 | "id": "6f81d723-ef03-4c30-a49a-7454bf6ee1c8", 342 | "metadata": {}, 343 | "outputs": [], 344 | "source": [ 345 | "await chain.invoke({\n", 346 | " // 日语,圣诞快乐\n", 347 | " input: \"メリークリスマス!\"\n", 348 | "})" 349 | ] 350 | }, 351 | { 352 | "cell_type": "code", 353 | "execution_count": null, 354 | "id": "36a04165-3f34-439d-a036-601d96a5ddd5", 355 | "metadata": {}, 356 | "outputs": [], 357 | "source": [ 358 | "await chain.invoke({\n", 359 | " input: \"我非常喜欢 AI,特别是 LLM,因为它非常 powerful\"\n", 360 | "})" 361 | ] 362 | }, 363 | { 364 | "cell_type": "markdown", 365 | "id": "a3f2ed88-24fa-4192-91c8-990b5c16184e", 366 | "metadata": {}, 367 | "source": [ 368 | "## Extraction" 369 | ] 370 | }, 371 | { 372 | "cell_type": "code", 373 | "execution_count": null, 374 | "id": "6934498f-ba55-4276-8430-f661f76211ed", 375 | "metadata": {}, 376 | "outputs": [], 377 | "source": [ 378 | "const personExtractionSchema = z.object({\n", 379 | " name: z.string().describe(\"人的名字\"),\n", 380 | " age: z.number().optional().describe(\"人的年龄\")\n", 381 | "}).describe(\"提取关于一个人的信息\");\n", 382 | "\n", 383 | "const relationExtractSchema = z.object({\n", 384 | " people: z.array(personExtractionSchema).describe(\"提取所有人\"),\n", 385 | " relation: z.string().describe(\"人之间的关系, 尽量简洁\")\n", 386 | "})" 387 | ] 388 | }, 389 | { 390 | "cell_type": "code", 391 | "execution_count": null, 392 | "id": "1a9709f8-8e38-46de-b311-e7252932db78", 393 | "metadata": {}, 394 | "outputs": [], 395 | "source": [ 396 | "const schema = zodToJsonSchema(relationExtractSchema)" 397 | ] 398 | }, 399 | { 400 | "cell_type": "code", 401 | "execution_count": null, 402 | "id": "4ff5d594-6aa3-4911-996a-fd2c53afbd7b", 403 | "metadata": {}, 404 | "outputs": [], 405 | "source": [ 406 | "console.log(schema)" 407 | ] 408 | }, 409 | { 410 | "cell_type": "code", 411 | "execution_count": null, 412 | "id": "adbae073-5791-4d4b-b061-b46a89318019", 413 | "metadata": {}, 414 | "outputs": [], 415 | "source": [ 416 | "console.log(schema.properties.people)" 417 | ] 418 | }, 419 | { 420 | "cell_type": "code", 421 | "execution_count": null, 422 | "id": "d0f6dfbb-add1-47be-8891-33261e547842", 423 | "metadata": {}, 424 | "outputs": [], 425 | "source": [ 426 | "const model = new ChatOpenAI({\n", 
427 | " temperature: 0 \n", 428 | "})\n", 429 | "\n", 430 | "const modelExtract = model.bind({\n", 431 | " tools: [\n", 432 | " {\n", 433 | " type: \"function\",\n", 434 | " function: {\n", 435 | " name: \"relationExtract\",\n", 436 | " description: \"提取数据中人的信息和人的关系\",\n", 437 | " parameters: zodToJsonSchema(relationExtractSchema)\n", 438 | " }\n", 439 | " }\n", 440 | " ],\n", 441 | " tool_choice: {\n", 442 | " type: \"function\",\n", 443 | " function: {\n", 444 | " name: \"relationExtract\"\n", 445 | " }\n", 446 | " }\n", 447 | "})\n", 448 | "\n", 449 | "const prompt = ChatPromptTemplate.fromMessages([\n", 450 | " [\"system\", \"仔细思考,你有充足的时间进行严谨的思考,然后提取文中的相关信息,如果没有明确提供,请不要猜测,可以仅提取部分信息\"],\n", 451 | " [\"human\", \"{input}\"]\n", 452 | "])\n", 453 | "\n", 454 | "const chain = prompt.pipe(modelExtract).pipe(new JsonOutputToolsParser())" 455 | ] 456 | }, 457 | { 458 | "cell_type": "code", 459 | "execution_count": null, 460 | "id": "cb6e5ed9-4a69-4cb7-8b2f-bf12d5201d4a", 461 | "metadata": {}, 462 | "outputs": [], 463 | "source": [ 464 | "await chain.invoke({\n", 465 | " input: \"小明现在 18 岁了,她妈妈是小丽\"\n", 466 | "})" 467 | ] 468 | }, 469 | { 470 | "cell_type": "code", 471 | "execution_count": null, 472 | "id": "c6df8881-2aa2-4b6e-99b2-2e1acf97a9ff", 473 | "metadata": {}, 474 | "outputs": [], 475 | "source": [ 476 | "await chain.invoke({\n", 477 | " input: \"我是小明现在 18 岁了,我和小 A、小 B 是好朋友,都一样大\"\n", 478 | "})" 479 | ] 480 | }, 481 | { 482 | "cell_type": "code", 483 | "execution_count": null, 484 | "id": "375b412f-d516-4dcf-a54b-502076c7c93d", 485 | "metadata": {}, 486 | "outputs": [], 487 | "source": [ 488 | "await chain.invoke({\n", 489 | " input: \"我是小明\"\n", 490 | "})" 491 | ] 492 | } 493 | ], 494 | "metadata": { 495 | "kernelspec": { 496 | "display_name": "Deno", 497 | "language": "typescript", 498 | "name": "deno" 499 | }, 500 | "language_info": { 501 | "file_extension": ".ts", 502 | "mimetype": "text/x.typescript", 503 | "name": "typescript", 504 | "nb_converter": "script", 505 | "pygments_lexer": "typescript", 506 | "version": "5.3.3" 507 | } 508 | }, 509 | "nbformat": 4, 510 | "nbformat_minor": 5 511 | } 512 | -------------------------------------------------------------------------------- /learn-notebook-basic.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": null, 6 | "id": "3c1cf485-8f84-4d3b-ba4c-dad163f29c16", 7 | "metadata": {}, 8 | "outputs": [], 9 | "source": [ 10 | "// const text = await Deno.readTextFile(\"./data/qiu.txt\");\n", 11 | "const text = \"abc\"" 12 | ] 13 | }, 14 | { 15 | "cell_type": "code", 16 | "execution_count": null, 17 | "id": "85e2ae92-cad9-4b5a-96a5-0e888262f1a6", 18 | "metadata": {}, 19 | "outputs": [], 20 | "source": [ 21 | "text" 22 | ] 23 | }, 24 | { 25 | "cell_type": "code", 26 | "execution_count": null, 27 | "id": "7461d3b1-ed9c-47db-afcd-4609934fe82f", 28 | "metadata": {}, 29 | "outputs": [], 30 | "source": [ 31 | "const lines = text.split(\"\\r\")\n", 32 | "lines" 33 | ] 34 | }, 35 | { 36 | "cell_type": "markdown", 37 | "id": "49a0ec0f-10d9-47ee-b22e-600fd08bcacf", 38 | "metadata": {}, 39 | "source": [ 40 | "# Deno 测试" 41 | ] 42 | }, 43 | { 44 | "cell_type": "code", 45 | "execution_count": null, 46 | "id": "c3caefb0", 47 | "metadata": {}, 48 | "outputs": [], 49 | "source": [ 50 | "import _ from \"npm:/lodash@4.17.21\"" 51 | ] 52 | }, 53 | { 54 | "cell_type": "code", 55 | "execution_count": null, 56 | "id": "e2b01b50", 57 | "metadata": {}, 58 | 
"outputs": [], 59 | "source": [ 60 | "const a = _.random(0, 5);\n", 61 | "a" 62 | ] 63 | }, 64 | { 65 | "cell_type": "code", 66 | "execution_count": null, 67 | "id": "96d7b455", 68 | "metadata": {}, 69 | "outputs": [], 70 | "source": [ 71 | "import _ from \"lodash\"\n", 72 | "\n", 73 | "const a = _.random(0, 5);\n", 74 | "a" 75 | ] 76 | } 77 | ], 78 | "metadata": { 79 | "kernelspec": { 80 | "display_name": "Deno", 81 | "language": "typescript", 82 | "name": "deno" 83 | }, 84 | "language_info": { 85 | "file_extension": ".ts", 86 | "mimetype": "text/x.typescript", 87 | "name": "typescript", 88 | "nb_converter": "script", 89 | "pygments_lexer": "typescript", 90 | "version": "5.3.3" 91 | } 92 | }, 93 | "nbformat": 4, 94 | "nbformat_minor": 5 95 | } 96 | -------------------------------------------------------------------------------- /loader.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": null, 6 | "metadata": {}, 7 | "outputs": [], 8 | "source": [ 9 | "import { Document } from \"langchain/document\";\n", 10 | "\n", 11 | "const test = new Document({ pageContent: \"test text\", metadata: { source: \"ABC Title\" } });" 12 | ] 13 | }, 14 | { 15 | "cell_type": "code", 16 | "execution_count": null, 17 | "metadata": {}, 18 | "outputs": [], 19 | "source": [ 20 | "test" 21 | ] 22 | }, 23 | { 24 | "cell_type": "code", 25 | "execution_count": null, 26 | "metadata": {}, 27 | "outputs": [], 28 | "source": [ 29 | "import { TextLoader } from \"langchain/document_loaders/fs/text\";\n", 30 | "const loader = new TextLoader(\"data/qiu.txt\");\n", 31 | "\n", 32 | "const docs = await loader.load();" 33 | ] 34 | }, 35 | { 36 | "cell_type": "code", 37 | "execution_count": null, 38 | "metadata": {}, 39 | "outputs": [], 40 | "source": [ 41 | "console.log(docs)" 42 | ] 43 | }, 44 | { 45 | "cell_type": "code", 46 | "execution_count": null, 47 | "metadata": {}, 48 | "outputs": [], 49 | "source": [ 50 | "import * as pdfParse from \"pdf-parse\";\n", 51 | "import { PDFLoader } from \"langchain/document_loaders/fs/pdf\";\n", 52 | "\n", 53 | "const loader = new PDFLoader(\"data/github-copliot.pdf\");\n", 54 | "const pdfs = await loader.load()" 55 | ] 56 | }, 57 | { 58 | "cell_type": "code", 59 | "execution_count": null, 60 | "metadata": {}, 61 | "outputs": [], 62 | "source": [ 63 | "pdfs" 64 | ] 65 | }, 66 | { 67 | "cell_type": "code", 68 | "execution_count": null, 69 | "metadata": {}, 70 | "outputs": [], 71 | "source": [ 72 | "const loader = new PDFLoader(\"data/github-copliot.pdf\", { splitPages: false });\n", 73 | "const pdf = await loader.load()" 74 | ] 75 | }, 76 | { 77 | "cell_type": "code", 78 | "execution_count": null, 79 | "metadata": {}, 80 | "outputs": [], 81 | "source": [ 82 | "pdf" 83 | ] 84 | }, 85 | { 86 | "cell_type": "code", 87 | "execution_count": null, 88 | "metadata": {}, 89 | "outputs": [], 90 | "source": [ 91 | "pdf[0]" 92 | ] 93 | }, 94 | { 95 | "cell_type": "code", 96 | "execution_count": null, 97 | "metadata": {}, 98 | "outputs": [], 99 | "source": [ 100 | "import { load } from \"dotenv\";\n", 101 | "const env = await load();" 102 | ] 103 | }, 104 | { 105 | "cell_type": "code", 106 | "execution_count": null, 107 | "metadata": {}, 108 | "outputs": [], 109 | "source": [ 110 | "import { GithubRepoLoader } from \"langchain/document_loaders/web/github\";\n", 111 | "import ignore from \"ignore\";\n", 112 | "\n", 113 | "const loader = new GithubRepoLoader(\n", 114 | " 
\"https://github.com/RealKai42/qwerty-learner\",\n", 115 | " { \n", 116 | " branch: \"master\",\n", 117 | " recursive: false, \n", 118 | " unknown: \"warn\", \n", 119 | " ignorePaths: [\"*.md\", \"yarn.lock\", \"*.json\"],\n", 120 | " accessToken: env[\"GITHUB_TOKEN\"]\n", 121 | " }\n", 122 | " );" 123 | ] 124 | }, 125 | { 126 | "cell_type": "code", 127 | "execution_count": null, 128 | "metadata": {}, 129 | "outputs": [], 130 | "source": [ 131 | "const docs = await loader.load();" 132 | ] 133 | }, 134 | { 135 | "cell_type": "code", 136 | "execution_count": null, 137 | "metadata": {}, 138 | "outputs": [], 139 | "source": [ 140 | "console.log(docs)" 141 | ] 142 | }, 143 | { 144 | "cell_type": "code", 145 | "execution_count": null, 146 | "metadata": {}, 147 | "outputs": [], 148 | "source": [ 149 | "docs.length" 150 | ] 151 | }, 152 | { 153 | "cell_type": "code", 154 | "execution_count": null, 155 | "metadata": {}, 156 | "outputs": [], 157 | "source": [ 158 | "import \"cheerio\";\n", 159 | "import { CheerioWebBaseLoader } from \"langchain/document_loaders/web/cheerio\";\n", 160 | "\n", 161 | "const loader = new CheerioWebBaseLoader(\n", 162 | " \"https://kaiyi.cool/blog/github-copilot\",\n", 163 | " {\n", 164 | " selector: \"h3\",\n", 165 | " }\n", 166 | ");\n", 167 | "\n", 168 | "const docs = await loader.load();" 169 | ] 170 | }, 171 | { 172 | "cell_type": "code", 173 | "execution_count": null, 174 | "metadata": {}, 175 | "outputs": [], 176 | "source": [ 177 | "console.log(docs[0].pageContent)" 178 | ] 179 | }, 180 | { 181 | "cell_type": "code", 182 | "execution_count": null, 183 | "metadata": {}, 184 | "outputs": [], 185 | "source": [ 186 | "import { load } from \"dotenv\";\n", 187 | "const env = await load();" 188 | ] 189 | }, 190 | { 191 | "cell_type": "code", 192 | "execution_count": null, 193 | "metadata": {}, 194 | "outputs": [], 195 | "source": [ 196 | "import { SerpAPILoader } from \"langchain/document_loaders/web/serpapi\";\n", 197 | "\n", 198 | "const apiKey = env[\"SERP_KEY\"]\n", 199 | "const question = \"什么 github copliot\"\n", 200 | "const loader = new SerpAPILoader({ q: question, apiKey });\n", 201 | "const docs = await loader.load();" 202 | ] 203 | }, 204 | { 205 | "cell_type": "code", 206 | "execution_count": null, 207 | "metadata": {}, 208 | "outputs": [], 209 | "source": [ 210 | "console.log(docs[1].pageContent)" 211 | ] 212 | } 213 | ], 214 | "metadata": { 215 | "kernelspec": { 216 | "display_name": "Deno", 217 | "language": "typescript", 218 | "name": "deno" 219 | }, 220 | "language_info": { 221 | "file_extension": ".ts", 222 | "mimetype": "text/x.typescript", 223 | "name": "typescript", 224 | "nb_converter": "script", 225 | "pygments_lexer": "typescript", 226 | "version": "5.3.3" 227 | } 228 | }, 229 | "nbformat": 4, 230 | "nbformat_minor": 4 231 | } 232 | -------------------------------------------------------------------------------- /memory-2.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": null, 6 | "id": "99d4e05f-8ab3-423e-826f-510c1d8d9c7e", 7 | "metadata": {}, 8 | "outputs": [], 9 | "source": [ 10 | "import { load } from \"dotenv\";\n", 11 | "const env = await load();\n", 12 | "\n", 13 | "const process = {\n", 14 | " env\n", 15 | "}" 16 | ] 17 | }, 18 | { 19 | "cell_type": "code", 20 | "execution_count": null, 21 | "id": "bd0df1a9-372e-42c3-b6e5-c899197ba7eb", 22 | "metadata": {}, 23 | "outputs": [], 24 | "source": [ 25 | "import { ChatOpenAI } from 
\"@langchain/openai\";\n", 26 | "import { BufferMemory } from \"langchain/memory\";\n", 27 | "import { ConversationChain } from \"langchain/chains\";\n", 28 | "\n", 29 | "\n", 30 | "const chatModel = new ChatOpenAI();\n", 31 | "const memory = new BufferMemory();\n", 32 | "const chain = new ConversationChain({ llm: chatModel, memory: memory, verbose: true });\n", 33 | "const res1 = await chain.call({ input: \"我是小明\" });" 34 | ] 35 | }, 36 | { 37 | "cell_type": "code", 38 | "execution_count": null, 39 | "id": "2c57bf9c-9ce9-411c-b7b6-cc55d6d8743c", 40 | "metadata": {}, 41 | "outputs": [], 42 | "source": [ 43 | "res1" 44 | ] 45 | }, 46 | { 47 | "cell_type": "code", 48 | "execution_count": null, 49 | "id": "493ac179-0863-4677-a69f-346a43436ea7", 50 | "metadata": {}, 51 | "outputs": [], 52 | "source": [ 53 | "const res2 = await chain.call({ input: \"我叫什么?\" });" 54 | ] 55 | }, 56 | { 57 | "cell_type": "code", 58 | "execution_count": null, 59 | "id": "5d6253e8-40fb-4772-b781-168298085b5a", 60 | "metadata": {}, 61 | "outputs": [], 62 | "source": [ 63 | "res2" 64 | ] 65 | }, 66 | { 67 | "cell_type": "code", 68 | "execution_count": null, 69 | "id": "6b8077dc-9a9a-48c2-a13c-a88f2a94a76c", 70 | "metadata": {}, 71 | "outputs": [], 72 | "source": [ 73 | "import { ChatOpenAI } from \"@langchain/openai\";\n", 74 | "import { BufferWindowMemory } from \"langchain/memory\";\n", 75 | "import { ConversationChain } from \"langchain/chains\";\n", 76 | "\n", 77 | "const model = new ChatOpenAI();\n", 78 | "const memory = new BufferWindowMemory({ k: 1 });\n", 79 | "const chain = new ConversationChain({ llm: model, memory: memory });" 80 | ] 81 | }, 82 | { 83 | "cell_type": "code", 84 | "execution_count": null, 85 | "id": "30e70614-888b-431e-ab0b-9407cd670d51", 86 | "metadata": {}, 87 | "outputs": [], 88 | "source": [ 89 | "import { ConversationSummaryMemory } from \"langchain/memory\";\n", 90 | "import { PromptTemplate } from \"@langchain/core/prompts\";\n", 91 | "\n", 92 | "const memory = new ConversationSummaryMemory({\n", 93 | " memoryKey: \"summary\",\n", 94 | " llm: new ChatOpenAI({\n", 95 | " verbose: true,\n", 96 | " }),\n", 97 | " });\n", 98 | "\n", 99 | "const model = new ChatOpenAI();\n", 100 | "const prompt = PromptTemplate.fromTemplate(`\n", 101 | "你是一个乐于助人的助手。尽你所能回答所有问题。\n", 102 | "\n", 103 | "这是聊天记录的摘要:\n", 104 | "{summary}\n", 105 | "Human: {input}\n", 106 | "AI:`);\n", 107 | "const chain = new ConversationChain({ llm: model, prompt, memory, verbose: true });" 108 | ] 109 | }, 110 | { 111 | "cell_type": "code", 112 | "execution_count": null, 113 | "id": "21221a69-02c2-4bac-b27b-56ffcd015740", 114 | "metadata": {}, 115 | "outputs": [], 116 | "source": [ 117 | "const res1 = await chain.call({ input: \"我是小明\" });" 118 | ] 119 | }, 120 | { 121 | "cell_type": "code", 122 | "execution_count": null, 123 | "id": "53d89513-faf5-4093-929b-76bc3a38a59d", 124 | "metadata": {}, 125 | "outputs": [], 126 | "source": [ 127 | "const res2 = await chain.call({ input: \"我叫什么?\" });" 128 | ] 129 | }, 130 | { 131 | "cell_type": "code", 132 | "execution_count": null, 133 | "id": "b988d5dd-bf44-4d52-8380-f179059694f6", 134 | "metadata": {}, 135 | "outputs": [], 136 | "source": [ 137 | "import { ChatOpenAI } from \"@langchain/openai\";\n", 138 | "import { ConversationSummaryBufferMemory } from \"langchain/memory\";\n", 139 | "import { ConversationChain } from \"langchain/chains\";\n", 140 | "\n", 141 | "const model = new ChatOpenAI();\n", 142 | "const memory = new ConversationSummaryBufferMemory({\n", 143 | " llm: new 
ChatOpenAI(),\n", 144 | " maxTokenLimit: 200\n", 145 | "});\n", 146 | "const chain = new ConversationChain({ llm: model, memory: memory, verbose: true });\n" 147 | ] 148 | }, 149 | { 150 | "cell_type": "code", 151 | "execution_count": null, 152 | "id": "c856db57-7ca2-4005-8160-54e0bf61c4b3", 153 | "metadata": {}, 154 | "outputs": [], 155 | "source": [ 156 | "const res1 = await chain.call({ input: \"我是小明\" });" 157 | ] 158 | }, 159 | { 160 | "cell_type": "code", 161 | "execution_count": null, 162 | "id": "14948a02-2c4d-4269-9a81-aab2af9e3495", 163 | "metadata": {}, 164 | "outputs": [], 165 | "source": [ 166 | "const res2 = await chain.call({ input: \"我叫什么?\" });" 167 | ] 168 | }, 169 | { 170 | "cell_type": "code", 171 | "execution_count": null, 172 | "id": "9b750490-ee19-48c4-99b9-8d511d16ea5f", 173 | "metadata": {}, 174 | "outputs": [], 175 | "source": [ 176 | "import { ChatOpenAI } from \"@langchain/openai\";\n", 177 | "import { EntityMemory, ENTITY_MEMORY_CONVERSATION_TEMPLATE } from \"langchain/memory\";\n", 178 | "import { ConversationChain } from \"langchain/chains\";\n", 179 | "\n", 180 | "const model = new ChatOpenAI();\n", 181 | "const memory = new EntityMemory({\n", 182 | " llm: new ChatOpenAI({\n", 183 | " verbose: true \n", 184 | " }),\n", 185 | " chatHistoryKey: \"history\",\n", 186 | " entitiesKey: \"entities\"\n", 187 | "});\n", 188 | "const chain = new ConversationChain({ \n", 189 | " llm: model, \n", 190 | " prompt: ENTITY_MEMORY_CONVERSATION_TEMPLATE,\n", 191 | " memory: memory, \n", 192 | " verbose: true \n", 193 | "});\n" 194 | ] 195 | }, 196 | { 197 | "cell_type": "code", 198 | "execution_count": null, 199 | "id": "79a8928d-c265-4f8d-b7be-ca770aa94ad9", 200 | "metadata": {}, 201 | "outputs": [], 202 | "source": [ 203 | "const res1 = await chain.call({ input: \"我叫小明,今年 18 岁\" });" 204 | ] 205 | }, 206 | { 207 | "cell_type": "code", 208 | "execution_count": null, 209 | "id": "d2d5d7ff-5e8b-4957-86a3-90680ead06be", 210 | "metadata": {}, 211 | "outputs": [], 212 | "source": [ 213 | "const res2 = await chain.call({ input: \"ABC 是一家互联网公司,主要是售卖方便面的公司\" });" 214 | ] 215 | }, 216 | { 217 | "cell_type": "code", 218 | "execution_count": null, 219 | "id": "7f215e7a-9d5d-4ebf-bfd5-386bf659dbd2", 220 | "metadata": {}, 221 | "outputs": [], 222 | "source": [ 223 | "const res3 = await chain.call({ input: \"介绍小明\" });" 224 | ] 225 | }, 226 | { 227 | "cell_type": "code", 228 | "execution_count": null, 229 | "id": "1e52d2ed-cb69-44f5-afd6-f1ac490af1b8", 230 | "metadata": {}, 231 | "outputs": [], 232 | "source": [ 233 | "const res3 = await chain.call({ input: \"介绍小明和 ABC\" });" 234 | ] 235 | }, 236 | { 237 | "cell_type": "code", 238 | "execution_count": null, 239 | "id": "7caf9f39-eed8-4662-af39-1f630350ef23", 240 | "metadata": {}, 241 | "outputs": [], 242 | "source": [ 243 | "const test1 = await memory.loadMemoryVariables({\n", 244 | " input: \"介绍小明和 ABC\"\n", 245 | "})" 246 | ] 247 | }, 248 | { 249 | "cell_type": "code", 250 | "execution_count": null, 251 | "id": "fac64626-1cb4-457a-ad56-a17ecf58d23c", 252 | "metadata": {}, 253 | "outputs": [], 254 | "source": [] 255 | } 256 | ], 257 | "metadata": { 258 | "kernelspec": { 259 | "display_name": "Deno", 260 | "language": "typescript", 261 | "name": "deno" 262 | }, 263 | "language_info": { 264 | "file_extension": ".ts", 265 | "mimetype": "text/x.typescript", 266 | "name": "typescript", 267 | "nb_converter": "script", 268 | "pygments_lexer": "typescript", 269 | "version": "5.3.3" 270 | } 271 | }, 272 | "nbformat": 4, 273 | "nbformat_minor": 5 
274 | } 275 | -------------------------------------------------------------------------------- /memory-3.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": null, 6 | "id": "2fbb379b-b1b9-4dae-8b28-21a3ed27f8d0", 7 | "metadata": {}, 8 | "outputs": [], 9 | "source": [ 10 | "import { load } from \"dotenv\";\n", 11 | "const env = await load();\n", 12 | "\n", 13 | "const process = {\n", 14 | " env\n", 15 | "}" 16 | ] 17 | }, 18 | { 19 | "cell_type": "code", 20 | "execution_count": null, 21 | "id": "a6c0f656-a1f8-4741-aeeb-28566c0b8f8e", 22 | "metadata": {}, 23 | "outputs": [], 24 | "source": [ 25 | "import { ChatOpenAI } from \"@langchain/openai\";\n", 26 | "import { RunnableSequence } from \"@langchain/core/runnables\";\n", 27 | "import { RunnablePassthrough } from \"@langchain/core/runnables\";\n", 28 | "import { StringOutputParser } from \"@langchain/core/output_parsers\";\n", 29 | "import { BufferMemory } from \"langchain/memory\";\n", 30 | "import { ChatPromptTemplate } from \"@langchain/core/prompts\";" 31 | ] 32 | }, 33 | { 34 | "cell_type": "code", 35 | "execution_count": null, 36 | "id": "4f05384f-e7fc-465f-a7aa-4a2779ad424a", 37 | "metadata": {}, 38 | "outputs": [], 39 | "source": [ 40 | "const chatModel = new ChatOpenAI({\n", 41 | " verbose:true\n", 42 | "});\n", 43 | "const memory = new BufferMemory();\n", 44 | "\n", 45 | "const TEMPLATE = `\n", 46 | "你是一个乐于助人的 ai 助手。尽你所能回答所有问题。\n", 47 | "\n", 48 | "这是跟人类沟通的聊天历史:\n", 49 | "{history}\n", 50 | "\n", 51 | "据此回答人类的问题:\n", 52 | "{input}\n", 53 | "`\n", 54 | "const prompt = ChatPromptTemplate.fromTemplate(TEMPLATE);" 55 | ] 56 | }, 57 | { 58 | "cell_type": "code", 59 | "execution_count": null, 60 | "id": "dd5d7b4c-6691-4e38-9f19-fa7d60616247", 61 | "metadata": {}, 62 | "outputs": [], 63 | "source": [ 64 | "let tempInput = \"\"\n", 65 | "\n", 66 | "const chain = RunnableSequence.from([\n", 67 | " {\n", 68 | " input: new RunnablePassthrough(),\n", 69 | " memoryObject: async (input) => {\n", 70 | " const history = await memory.loadMemoryVariables({\n", 71 | " input\n", 72 | " })\n", 73 | " tempInput = input\n", 74 | " return history\n", 75 | " }\n", 76 | " },\n", 77 | " RunnablePassthrough.assign({\n", 78 | " history: (input) => input.memoryObject.history\n", 79 | " }),\n", 80 | " prompt,\n", 81 | " new RunnablePassthrough({\n", 82 | " func: (input) => console.log(input)\n", 83 | " }),\n", 84 | " chatModel,\n", 85 | " new StringOutputParser(),\n", 86 | " new RunnablePassthrough({\n", 87 | " func: async (output) => {\n", 88 | " await memory.saveContext({\n", 89 | " input: tempInput\n", 90 | " }, {\n", 91 | " output\n", 92 | " })\n", 93 | " }\n", 94 | " }),\n", 95 | "]);" 96 | ] 97 | }, 98 | { 99 | "cell_type": "code", 100 | "execution_count": null, 101 | "id": "d7a38687-3609-4e07-84e7-8edff1be14f4", 102 | "metadata": {}, 103 | "outputs": [], 104 | "source": [ 105 | "await chain.invoke(\"你好, 我叫小明\")" 106 | ] 107 | }, 108 | { 109 | "cell_type": "code", 110 | "execution_count": null, 111 | "id": "0e1f9e21-f7a0-4a74-8fa7-2e45ce71a32f", 112 | "metadata": {}, 113 | "outputs": [], 114 | "source": [ 115 | "await chain.invoke(\"我叫什么?\")" 116 | ] 117 | }, 118 | { 119 | "cell_type": "markdown", 120 | "id": "3b0f54de-55a1-4e9b-92e6-605c93b5861e", 121 | "metadata": {}, 122 | "source": [ 123 | "## 实现自定义的 chat history" 124 | ] 125 | }, 126 | { 127 | "cell_type": "code", 128 | "execution_count": null, 129 | "id": 
"82fb5f88-b880-40cb-8c4a-3ff1eb0a97ed", 130 | "metadata": {}, 131 | "outputs": [], 132 | "source": [ 133 | "import { JSONChatHistory } from \"./JSONChatHistory/index.ts\"\n", 134 | "import { AIMessage, HumanMessage } from \"@langchain/core/messages\";\n", 135 | "\n", 136 | "const history = new JSONChatHistory({\n", 137 | " dir: \"chat_data\",\n", 138 | " sessionId: \"test\"\n", 139 | "})\n", 140 | "\n", 141 | "\n", 142 | "await history.addMessages([\n", 143 | " new HumanMessage(\"Hi, 我叫小明\"),\n", 144 | " new AIMessage(\"你好\"),\n", 145 | "]);\n", 146 | "\n", 147 | "const messages = await history.getMessages();\n", 148 | "console.log(messages)" 149 | ] 150 | }, 151 | { 152 | "cell_type": "code", 153 | "execution_count": null, 154 | "id": "b883403c-f99a-4ff4-b7bc-2f64068b0286", 155 | "metadata": {}, 156 | "outputs": [], 157 | "source": [ 158 | "import { ChatOpenAI } from \"@langchain/openai\";\n", 159 | "import { BufferMemory } from \"langchain/memory\";\n", 160 | "import { ConversationChain } from \"langchain/chains\";\n", 161 | "\n", 162 | "\n", 163 | "const chatModel = new ChatOpenAI();\n", 164 | "const memory = new BufferMemory({\n", 165 | " chatHistory: history\n", 166 | "});\n", 167 | "const chain = new ConversationChain({ llm: chatModel, memory: memory });\n", 168 | "const res1 = await chain.call({ input: \"我叫什么?\" });" 169 | ] 170 | }, 171 | { 172 | "cell_type": "code", 173 | "execution_count": null, 174 | "id": "590b0825-2753-4773-a739-ae0a777d7460", 175 | "metadata": {}, 176 | "outputs": [], 177 | "source": [ 178 | "res1" 179 | ] 180 | }, 181 | { 182 | "cell_type": "code", 183 | "execution_count": null, 184 | "id": "1198a30d-2f2f-4ee2-9569-498071904a9a", 185 | "metadata": {}, 186 | "outputs": [], 187 | "source": [ 188 | "const messages = await history.getMessages()" 189 | ] 190 | }, 191 | { 192 | "cell_type": "code", 193 | "execution_count": null, 194 | "id": "eb695ac8-a744-4685-914e-aad6bdb87be8", 195 | "metadata": {}, 196 | "outputs": [], 197 | "source": [ 198 | "messages" 199 | ] 200 | } 201 | ], 202 | "metadata": { 203 | "kernelspec": { 204 | "display_name": "Deno", 205 | "language": "typescript", 206 | "name": "deno" 207 | }, 208 | "language_info": { 209 | "file_extension": ".ts", 210 | "mimetype": "text/x.typescript", 211 | "name": "typescript", 212 | "nb_converter": "script", 213 | "pygments_lexer": "typescript", 214 | "version": "5.3.3" 215 | } 216 | }, 217 | "nbformat": 4, 218 | "nbformat_minor": 5 219 | } 220 | -------------------------------------------------------------------------------- /memory.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "## ChatMessageHistory" 8 | ] 9 | }, 10 | { 11 | "cell_type": "code", 12 | "execution_count": null, 13 | "metadata": {}, 14 | "outputs": [], 15 | "source": [ 16 | "import { ChatMessageHistory } from \"langchain/stores/message/in_memory\";\n", 17 | "import { HumanMessage, AIMessage } from \"@langchain/core/messages\";\n", 18 | "\n", 19 | "const history = new ChatMessageHistory();\n", 20 | "await history.addMessage(new HumanMessage(\"hi\"));\n", 21 | "await history.addMessage(new AIMessage(\"What can I do for you?\"));\n", 22 | "\n", 23 | "const messages = await history.getMessages();\n", 24 | "\n", 25 | "console.log(messages);\n" 26 | ] 27 | }, 28 | { 29 | "cell_type": "markdown", 30 | "metadata": {}, 31 | "source": [ 32 | "## 手动维护 chat history" 33 | ] 34 | }, 35 | { 36 | "cell_type": "code", 37 | 
"execution_count": null, 38 | "metadata": {}, 39 | "outputs": [], 40 | "source": [ 41 | "import { load } from \"dotenv\";\n", 42 | "const env = await load();\n", 43 | "\n", 44 | "const process = {\n", 45 | " env\n", 46 | "}" 47 | ] 48 | }, 49 | { 50 | "cell_type": "code", 51 | "execution_count": null, 52 | "metadata": {}, 53 | "outputs": [], 54 | "source": [ 55 | "import { ChatPromptTemplate, MessagesPlaceholder } from \"@langchain/core/prompts\";\n", 56 | "import { ChatOpenAI } from \"@langchain/openai\";\n", 57 | "\n", 58 | "const chatModel = new ChatOpenAI();\n", 59 | "const prompt = ChatPromptTemplate.fromMessages([\n", 60 | " [\"system\", `You are a helpful assistant. Answer all questions to the best of your ability.\n", 61 | " You are talkative and provides lots of specific details from its context. \n", 62 | " If the you does not know the answer to a question, it truthfully says you do not know.`],\n", 63 | " new MessagesPlaceholder(\"history_message\"),\n", 64 | "]);\n", 65 | "\n", 66 | "const chain = prompt.pipe(chatModel);" 67 | ] 68 | }, 69 | { 70 | "cell_type": "code", 71 | "execution_count": null, 72 | "metadata": {}, 73 | "outputs": [], 74 | "source": [ 75 | "import { ChatMessageHistory } from \"langchain/stores/message/in_memory\";\n", 76 | "import { HumanMessage, AIMessage } from \"@langchain/core/messages\";\n", 77 | "\n", 78 | "const history = new ChatMessageHistory();\n", 79 | "await history.addMessage(new HumanMessage(\"hi, my name is Kai\"));\n", 80 | "\n", 81 | "const res1 = await chain.invoke({\n", 82 | " history_message: await history.getMessages()\n", 83 | "})\n" 84 | ] 85 | }, 86 | { 87 | "cell_type": "code", 88 | "execution_count": null, 89 | "metadata": {}, 90 | "outputs": [], 91 | "source": [ 92 | "res1" 93 | ] 94 | }, 95 | { 96 | "cell_type": "code", 97 | "execution_count": null, 98 | "metadata": {}, 99 | "outputs": [], 100 | "source": [ 101 | "await history.addMessage(res1)\n", 102 | "await history.addMessage(new HumanMessage(\"What is my name?\"));" 103 | ] 104 | }, 105 | { 106 | "cell_type": "code", 107 | "execution_count": null, 108 | "metadata": {}, 109 | "outputs": [], 110 | "source": [ 111 | "await history.getMessages()" 112 | ] 113 | }, 114 | { 115 | "cell_type": "code", 116 | "execution_count": null, 117 | "metadata": {}, 118 | "outputs": [], 119 | "source": [ 120 | "const res2 = await chain.invoke({\n", 121 | " history_message: await history.getMessages()\n", 122 | "})" 123 | ] 124 | }, 125 | { 126 | "cell_type": "code", 127 | "execution_count": null, 128 | "metadata": {}, 129 | "outputs": [], 130 | "source": [ 131 | "res2" 132 | ] 133 | }, 134 | { 135 | "cell_type": "code", 136 | "execution_count": null, 137 | "metadata": {}, 138 | "outputs": [], 139 | "source": [ 140 | "console.log(res2.content)" 141 | ] 142 | }, 143 | { 144 | "cell_type": "markdown", 145 | "metadata": {}, 146 | "source": [ 147 | "## 自动维护 chat history" 148 | ] 149 | }, 150 | { 151 | "cell_type": "code", 152 | "execution_count": null, 153 | "metadata": {}, 154 | "outputs": [], 155 | "source": [ 156 | "import { RunnableWithMessageHistory } from \"@langchain/core/runnables\";\n", 157 | "import { ChatPromptTemplate, MessagesPlaceholder } from \"@langchain/core/prompts\";\n", 158 | "import { ChatOpenAI } from \"@langchain/openai\";\n", 159 | "import { ChatMessageHistory } from \"langchain/stores/message/in_memory\";\n", 160 | "\n", 161 | "const chatModel = new ChatOpenAI();\n", 162 | "const prompt = ChatPromptTemplate.fromMessages([\n", 163 | " [\"system\", \"You are a helpful 
assistant. Answer all questions to the best of your ability.\"],\n", 164 | " new MessagesPlaceholder(\"history_message\"),\n", 165 | " [\"human\",\"{input}\"]\n", 166 | "]);\n", 167 | "\n", 168 | "const history = new ChatMessageHistory();\n", 169 | "const chain = prompt.pipe(chatModel)\n", 170 | "\n", 171 | "const chainWithHistory = new RunnableWithMessageHistory({\n", 172 | " runnable: chain,\n", 173 | " getMessageHistory: (_sessionId) => history,\n", 174 | " inputMessagesKey: \"input\",\n", 175 | " historyMessagesKey: \"history_message\",\n", 176 | "});" 177 | ] 178 | }, 179 | { 180 | "cell_type": "code", 181 | "execution_count": null, 182 | "metadata": {}, 183 | "outputs": [], 184 | "source": [ 185 | "const res1 = await chainWithHistory.invoke({\n", 186 | " input: \"hi, my name is Kai\"\n", 187 | "},{\n", 188 | " configurable: { sessionId: \"none\" }\n", 189 | "})" 190 | ] 191 | }, 192 | { 193 | "cell_type": "code", 194 | "execution_count": null, 195 | "metadata": {}, 196 | "outputs": [], 197 | "source": [ 198 | "res1" 199 | ] 200 | }, 201 | { 202 | "cell_type": "code", 203 | "execution_count": null, 204 | "metadata": {}, 205 | "outputs": [], 206 | "source": [ 207 | "const res2 = await chainWithHistory.invoke({\n", 208 | " input: \"我的名字叫什么?\"\n", 209 | "},{\n", 210 | " configurable: { sessionId: \"none\" }\n", 211 | "})" 212 | ] 213 | }, 214 | { 215 | "cell_type": "code", 216 | "execution_count": null, 217 | "metadata": {}, 218 | "outputs": [], 219 | "source": [ 220 | "res2" 221 | ] 222 | }, 223 | { 224 | "cell_type": "code", 225 | "execution_count": null, 226 | "metadata": {}, 227 | "outputs": [], 228 | "source": [ 229 | "await history.getMessages()" 230 | ] 231 | }, 232 | { 233 | "cell_type": "markdown", 234 | "metadata": {}, 235 | "source": [ 236 | "## 自动生成 chat history 摘要" 237 | ] 238 | }, 239 | { 240 | "cell_type": "code", 241 | "execution_count": null, 242 | "metadata": {}, 243 | "outputs": [], 244 | "source": [ 245 | "import { RunnableWithMessageHistory } from \"@langchain/core/runnables\";\n", 246 | "import { ChatPromptTemplate, MessagesPlaceholder } from \"@langchain/core/prompts\";\n", 247 | "import { ChatOpenAI } from \"@langchain/openai\";\n", 248 | "import { ChatMessageHistory } from \"langchain/stores/message/in_memory\";\n", 249 | "import { RunnableSequence } from \"@langchain/core/runnables\";\n", 250 | "import { RunnablePassthrough } from \"@langchain/core/runnables\";\n", 251 | "import { StringOutputParser } from \"@langchain/core/output_parsers\";\n", 252 | "import { getBufferString } from \"@langchain/core/messages\";" 253 | ] 254 | }, 255 | { 256 | "cell_type": "code", 257 | "execution_count": null, 258 | "metadata": {}, 259 | "outputs": [], 260 | "source": [ 261 | "const summaryModel = new ChatOpenAI();\n", 262 | "const summaryPrompt = ChatPromptTemplate.fromTemplate(`\n", 263 | "Progressively summarize the lines of conversation provided, adding onto the previous summary returning a new summary\n", 264 | "\n", 265 | "Current summary:\n", 266 | "{summary}\n", 267 | "\n", 268 | "New lines of conversation:\n", 269 | "{new_lines}\n", 270 | "\n", 271 | "New summary:\n", 272 | "`); \n", 273 | "\n", 274 | "const summaryChain = RunnableSequence.from([\n", 275 | " summaryPrompt,\n", 276 | " summaryModel,\n", 277 | " new StringOutputParser(),\n", 278 | "])\n" 279 | ] 280 | }, 281 | { 282 | "cell_type": "code", 283 | "execution_count": null, 284 | "metadata": {}, 285 | "outputs": [], 286 | "source": [ 287 | "const newSummary = await summaryChain.invoke({\n", 288 | " \"\",\n", 
289 | " new_lines: \"I'm 18\"\n", 290 | "})" 291 | ] 292 | }, 293 | { 294 | "cell_type": "code", 295 | "execution_count": null, 296 | "metadata": {}, 297 | "outputs": [], 298 | "source": [ 299 | "console.log(res)" 300 | ] 301 | }, 302 | { 303 | "cell_type": "code", 304 | "execution_count": null, 305 | "metadata": {}, 306 | "outputs": [], 307 | "source": [ 308 | "await summaryChain.invoke({\n", 309 | " summary: res,\n", 310 | " new_lines: \"I'm male\"\n", 311 | "})" 312 | ] 313 | }, 314 | { 315 | "cell_type": "code", 316 | "execution_count": null, 317 | "metadata": {}, 318 | "outputs": [], 319 | "source": [ 320 | "const chatModel = new ChatOpenAI();\n", 321 | "const chatPrompt = ChatPromptTemplate.fromMessages([\n", 322 | " [\"system\", `You are a helpful assistant. Answer all questions to the best of your ability.\n", 323 | "\n", 324 | " Here is the chat history summary:\n", 325 | " {history_summary}\n", 326 | " `],\n", 327 | " [\"human\",\"{input}\"]\n", 328 | "]);\n", 329 | "let summary = \"\"\n", 330 | "const history = new ChatMessageHistory();\n", 331 | "\n", 332 | "const chatChain = RunnableSequence.from([\n", 333 | " {\n", 334 | " input: new RunnablePassthrough({\n", 335 | " func: (input) => history.addUserMessage(input)\n", 336 | " })\n", 337 | " },\n", 338 | " RunnablePassthrough.assign({\n", 339 | " history_summary: () => summary\n", 340 | " }),\n", 341 | " chatPrompt,\n", 342 | " chatModel,\n", 343 | " new StringOutputParser(),\n", 344 | " new RunnablePassthrough({\n", 345 | " func: async (input) => {\n", 346 | " history.addAIChatMessage(input)\n", 347 | " const messages = await history.getMessages()\n", 348 | " const new_lines = getBufferString(messages)\n", 349 | " const newSummary = await summaryChain.invoke({\n", 350 | " summary,\n", 351 | " new_lines\n", 352 | " })\n", 353 | " console.log(summary, input, messages, new_lines, newSummary)\n", 354 | " history.clear()\n", 355 | " summary = newSummary \n", 356 | " }\n", 357 | " })\n", 358 | "])\n" 359 | ] 360 | }, 361 | { 362 | "cell_type": "code", 363 | "execution_count": null, 364 | "metadata": {}, 365 | "outputs": [], 366 | "source": [ 367 | "await chatChain.invoke(\"我现在饿了\")" 368 | ] 369 | }, 370 | { 371 | "cell_type": "code", 372 | "execution_count": null, 373 | "metadata": {}, 374 | "outputs": [], 375 | "source": [ 376 | "console.log(summary)" 377 | ] 378 | }, 379 | { 380 | "cell_type": "code", 381 | "execution_count": null, 382 | "metadata": {}, 383 | "outputs": [], 384 | "source": [ 385 | "await chatChain.invoke(\"我今天想吃方便面\")" 386 | ] 387 | }, 388 | { 389 | "cell_type": "code", 390 | "execution_count": null, 391 | "metadata": {}, 392 | "outputs": [], 393 | "source": [ 394 | "console.log(summary)" 395 | ] 396 | }, 397 | { 398 | "cell_type": "code", 399 | "execution_count": null, 400 | "metadata": {}, 401 | "outputs": [], 402 | "source": [ 403 | "import { RunnableMap } from \"@langchain/core/runnables\"\n", 404 | "\n", 405 | "const mapChain = RunnableMap.from({\n", 406 | " a: () => \"a\",\n", 407 | " b: () => \"b\"\n", 408 | "})\n", 409 | "\n", 410 | "const res = await mapChain.invoke()" 411 | ] 412 | }, 413 | { 414 | "cell_type": "code", 415 | "execution_count": null, 416 | "metadata": {}, 417 | "outputs": [], 418 | "source": [ 419 | "res" 420 | ] 421 | } 422 | ], 423 | "metadata": { 424 | "kernelspec": { 425 | "display_name": "Deno", 426 | "language": "typescript", 427 | "name": "deno" 428 | }, 429 | "language_info": { 430 | "file_extension": ".ts", 431 | "mimetype": "text/x.typescript", 432 | "name": "typescript", 433 | 
"nb_converter": "script", 434 | "pygments_lexer": "typescript", 435 | "version": "5.3.3" 436 | } 437 | }, 438 | "nbformat": 4, 439 | "nbformat_minor": 4 440 | } 441 | -------------------------------------------------------------------------------- /node/.env.example: -------------------------------------------------------------------------------- 1 | AZURE_OPENAI_API_KEY=abc 2 | AZURE_OPENAI_API_VERSION=2023-07-01-preview 3 | AZURE_OPENAI_API_DEPLOYMENT_NAME=abc 4 | AZURE_OPENAI_API_INSTANCE_NAME=abc 5 | AZURE_OPENAI_API_EMBEDDINGS_DEPLOYMENT_NAME=abc 6 | GITHUB_TOKEN=abc 7 | SERP_KEY=abc 8 | 9 | SMI_KEY=abc 10 | TAVILY_KEY=abc -------------------------------------------------------------------------------- /node/LLMChainExtractor.ts: -------------------------------------------------------------------------------- 1 | import { FaissStore } from "@langchain/community/vectorstores/faiss"; 2 | import { OpenAIEmbeddings, ChatOpenAI } from "@langchain/openai"; 3 | import "dotenv/config"; 4 | import { LLMChainExtractor } from "langchain/retrievers/document_compressors/chain_extract"; 5 | import { ContextualCompressionRetriever } from "langchain/retrievers/contextual_compression"; 6 | 7 | process.env.LANGCHAIN_VERBOSE = "true"; 8 | 9 | async function run() { 10 | const directory = "../db/kongyiji"; 11 | const embeddings = new OpenAIEmbeddings(); 12 | const vectorstore = await FaissStore.load(directory, embeddings); 13 | 14 | const model = new ChatOpenAI(); 15 | const compressor = LLMChainExtractor.fromLLM(model); 16 | 17 | const retriever = new ContextualCompressionRetriever({ 18 | baseCompressor: compressor, 19 | baseRetriever: vectorstore.asRetriever(2), 20 | }); 21 | const res = await retriever.invoke("茴香豆是做什么用的"); 22 | console.log(res); 23 | } 24 | 25 | run(); 26 | -------------------------------------------------------------------------------- /node/ScoreThresholdRetriever.ts: -------------------------------------------------------------------------------- 1 | import { FaissStore } from "@langchain/community/vectorstores/faiss"; 2 | import { OpenAIEmbeddings } from "@langchain/openai"; 3 | import "dotenv/config"; 4 | import { ScoreThresholdRetriever } from "langchain/retrievers/score_threshold"; 5 | 6 | process.env.LANGCHAIN_VERBOSE = "true"; 7 | 8 | async function run() { 9 | const directory = "../db/kongyiji"; 10 | const embeddings = new OpenAIEmbeddings(); 11 | const vectorstore = await FaissStore.load(directory, embeddings); 12 | 13 | const retriever = ScoreThresholdRetriever.fromVectorStore(vectorstore, { 14 | minSimilarityScore: 0.45, 15 | maxK: 5, 16 | kIncrement: 1, 17 | }); 18 | const res = await retriever.invoke("茴香豆是做什么用的"); 19 | console.log(res); 20 | } 21 | 22 | run(); 23 | -------------------------------------------------------------------------------- /node/agent-react-rmb.ts: -------------------------------------------------------------------------------- 1 | import { ChatOpenAI } from "@langchain/openai"; 2 | import { SerpAPI } from "@langchain/community/tools/serpapi"; 3 | import "dotenv/config"; 4 | import { AgentExecutor, createReactAgent } from "langchain/agents"; 5 | import { pull } from "langchain/hub"; 6 | import type { PromptTemplate } from "@langchain/core/prompts"; 7 | import { Calculator } from "@langchain/community/tools/calculator"; 8 | 9 | process.env.LANGCHAIN_TRACING_V2 = "true"; 10 | 11 | async function main() { 12 | const tools = [new SerpAPI(process.env.SERP_KEY), new Calculator()]; 13 | 14 | const prompt = await pull("hwchase17/react"); 15 | const llm = 
new ChatOpenAI({ 16 | temperature: 0, 17 | }); 18 | 19 | const agent = await createReactAgent({ 20 | llm, 21 | tools, 22 | prompt, 23 | }); 24 | 25 | const agentExecutor = new AgentExecutor({ 26 | agent, 27 | tools, 28 | }); 29 | 30 | const result = await agentExecutor.invoke({ 31 | input: "我有 17 美元,现在相当于多少人民币?", 32 | }); 33 | 34 | console.log(result); 35 | } 36 | 37 | main(); 38 | -------------------------------------------------------------------------------- /node/agent-tool-customized.ts: -------------------------------------------------------------------------------- 1 | import { ChatPromptTemplate } from "@langchain/core/prompts"; 2 | import { ChatOpenAI } from "@langchain/openai"; 3 | import { createStuffDocumentsChain } from "langchain/chains/combine_documents"; 4 | import { OpenAIEmbeddings } from "@langchain/openai"; 5 | import path from "path"; 6 | import { FaissStore } from "@langchain/community/vectorstores/faiss"; 7 | import { createRetrievalChain } from "langchain/chains/retrieval"; 8 | import { DynamicTool, DynamicStructuredTool } from "@langchain/core/tools"; 9 | import { pull } from "langchain/hub"; 10 | import type { PromptTemplate } from "@langchain/core/prompts"; 11 | import { AgentExecutor, createOpenAIToolsAgent, createReactAgent } from "langchain/agents"; 12 | import { Calculator } from "@langchain/community/tools/calculator"; 13 | import { z } from "zod"; 14 | import "dotenv/config"; 15 | 16 | process.env.LANGCHAIN_TRACING_V2 = "true"; 17 | 18 | async function loadVectorStore() { 19 | const directory = path.join(__dirname, "../db/qiu"); 20 | const embeddings = new OpenAIEmbeddings(); 21 | const vectorStore = await FaissStore.load(directory, embeddings); 22 | 23 | return vectorStore; 24 | } 25 | 26 | async function getRetrieverChain() { 27 | const prompt = ChatPromptTemplate.fromTemplate(`将以下问题仅基于提供的上下文进行回答: 28 | 上下文: 29 | {context} 30 | 31 | 问题:{input}`); 32 | const llm = new ChatOpenAI(); 33 | 34 | const documentChain = await createStuffDocumentsChain({ 35 | llm, 36 | prompt, 37 | }); 38 | 39 | const vectorStore = await loadVectorStore(); 40 | const retriever = vectorStore.asRetriever(); 41 | 42 | const retrievalChain = await createRetrievalChain({ 43 | combineDocsChain: documentChain, 44 | retriever, 45 | }); 46 | 47 | return retrievalChain; 48 | } 49 | 50 | async function createReactAgentWithTool(tools) { 51 | const prompt = await pull("hwchase17/react"); 52 | const llm = new ChatOpenAI({ 53 | temperature: 0, 54 | }); 55 | 56 | const agent = await createReactAgent({ 57 | llm, 58 | tools, 59 | prompt, 60 | }); 61 | 62 | const agentExecutor = new AgentExecutor({ 63 | agent, 64 | tools, 65 | }); 66 | 67 | return agentExecutor; 68 | } 69 | 70 | async function createToolAgentWithTool(tools) { 71 | const prompt = await pull("hwchase17/openai-tools-agent"); 72 | 73 | const llm = new ChatOpenAI({ 74 | temperature: 0, 75 | }); 76 | const agent = await createOpenAIToolsAgent({ 77 | llm, 78 | tools, 79 | prompt, 80 | }); 81 | 82 | const agentExecutor = new AgentExecutor({ 83 | agent, 84 | tools, 85 | }); 86 | return agentExecutor; 87 | } 88 | 89 | async function testAgent() { 90 | const retrieverChain = await getRetrieverChain(); 91 | const retrieverTool = new DynamicTool({ 92 | name: "get-qiu-answer", 93 | func: async (input: string) => { 94 | const res = await retrieverChain.invoke({ input }); 95 | return res.answer; 96 | }, 97 | description: "获取小说 《球状闪电》相关问题的答案", 98 | }); 99 | 100 | const stringReverseTool = new DynamicTool({ 101 | name: "string-reverser", 102 | 
description: "reverses a string. input should be the string you want to reverse.", 103 | func: async (input: string) => input.split("").reverse().join(""), 104 | }); 105 | 106 | const dateDiffTool = new DynamicStructuredTool({ 107 | name: "date-difference-calculator", 108 | description: "计算两个日期之间的天数差", 109 | schema: z.object({ 110 | date1: z.string().describe("第一个日期,以YYYY-MM-DD格式表示"), 111 | date2: z.string().describe("第二个日期,以YYYY-MM-DD格式表示"), 112 | }), 113 | func: async ({ date1, date2 }) => { 114 | const d1 = new Date(date1); 115 | const d2 = new Date(date2); 116 | const difference = Math.abs(d2.getTime() - d1.getTime()); 117 | const days = Math.ceil(difference / (1000 * 60 * 60 * 24)); 118 | return days.toString(); 119 | }, 120 | }); 121 | 122 | const tools = [retrieverTool, dateDiffTool, new Calculator()]; 123 | // const agents = await createReactAgentWithTool(tools); 124 | const agents = await createToolAgentWithTool(tools); 125 | 126 | // const res = await agents.invoke({ 127 | // input: "小说球状闪电中量子玫瑰的情节", 128 | // }); 129 | 130 | // const res = await agents.invoke({ 131 | // input: "我有 17 个苹果,小明的苹果比我的三倍少 10 个,小明有多少个苹果?", 132 | // }); 133 | 134 | // const res = await agents.invoke({ 135 | // input: "今年是 2024 年,今年 5.1 和 10.1 之间有多少天?", 136 | // }); 137 | 138 | // console.log(res); 139 | } 140 | 141 | // testAgent(); 142 | 143 | async function testAgentRoute() { 144 | const retrieverChain = await getRetrieverChain(); 145 | const retrieverTool = new DynamicTool({ 146 | name: "get-qiu-answer", 147 | func: async (input: string) => { 148 | const res = await retrieverChain.invoke({ input }); 149 | return res.answer; 150 | }, 151 | description: "获取小说 《球状闪电》相关问题的答案", 152 | returnDirect: true, 153 | }); 154 | 155 | const tools = [retrieverTool, new Calculator()]; 156 | const agents = await createReactAgentWithTool(tools); 157 | 158 | const res = await agents.invoke({ 159 | input: "用一句话,介绍小说球状闪电中,跟量子玫瑰有关的情节", 160 | }); 161 | console.log(res); 162 | } 163 | 164 | // testAgentRoute(); 165 | -------------------------------------------------------------------------------- /node/agent-tool.ts: -------------------------------------------------------------------------------- 1 | import { ChatOpenAI } from "@langchain/openai"; 2 | import { SerpAPI } from "@langchain/community/tools/serpapi"; 3 | import "dotenv/config"; 4 | import { AgentExecutor } from "langchain/agents"; 5 | import { pull } from "langchain/hub"; 6 | import { createOpenAIToolsAgent } from "langchain/agents"; 7 | import { ChatPromptTemplate } from "@langchain/core/prompts"; 8 | import { Calculator } from "@langchain/community/tools/calculator"; 9 | 10 | process.env.LANGCHAIN_TRACING_V2 = "true"; 11 | 12 | async function main() { 13 | const tools = [new SerpAPI(process.env.SERP_KEY), new Calculator()]; 14 | 15 | const prompt = await pull("hwchase17/openai-tools-agent"); 16 | 17 | const llm = new ChatOpenAI({ 18 | temperature: 0, 19 | }); 20 | const agent = await createOpenAIToolsAgent({ 21 | llm, 22 | tools, 23 | prompt, 24 | }); 25 | 26 | const agentExecutor = new AgentExecutor({ 27 | agent, 28 | tools, 29 | }); 30 | 31 | const result = await agentExecutor.invoke({ 32 | input: 33 | "我有 10000 人民币,可以购买多少微软股票,注意微软股票一般是以美元计价,需要考虑汇率问题", 34 | }); 35 | 36 | console.log(result); 37 | } 38 | 39 | main(); 40 | -------------------------------------------------------------------------------- /node/gua/gua.json: -------------------------------------------------------------------------------- 1 | { 2 | "乾乾": { 3 | "number": "1", 4 | "name": "乾卦", 5 | 
"des": "乾为天(乾卦)自强不息", 6 | "sentence": "困龙得水好运交,不由喜气上眉梢,一切谋望皆如意,向后时运渐渐高" 7 | }, 8 | "坤坤": { 9 | "number": "2", 10 | "name": "坤卦", 11 | "des": "坤为地(坤卦)厚德载物", 12 | "sentence": "肥羊失群入山岗,饿虎逢之把口张,适口充肠心欢喜,卦若占之大吉昌" 13 | }, 14 | "震坎": { 15 | "number": "3", 16 | "name": "屯卦", 17 | "des": "水雷屯(屯卦)起始维艰", 18 | "sentence": "风刮乱丝不见头,颠三倒四犯忧愁,慢从款来左顺遂,急促反惹不自由" 19 | }, 20 | "坎艮": { 21 | "number": "4", 22 | "name": "蒙卦", 23 | "des": "山水蒙(蒙卦)启蒙奋发", 24 | "sentence": "卦中爻象犯小耗,君子占之运不高,婚姻合伙有琐碎,做事必然受苦劳" 25 | }, 26 | "乾坎": { 27 | "number": "5", 28 | "name": "需卦", 29 | "des": "水天需(需卦)守正待机", 30 | "sentence": "明珠土埋日久深,无光无亮到如今,忽然大风吹土去,自然显露有重新" 31 | }, 32 | "坎乾": { 33 | "number": "6", 34 | "name": "讼卦", 35 | "des": "天水讼(讼卦)慎争戒讼", 36 | "sentence": "心中有事事难做,恰是二人争路走,雨下俱是要占先,谁肯让谁走一步" 37 | }, 38 | "坎坤": { 39 | "number": "7", 40 | "name": "师卦", 41 | "des": "地水师(师卦)行险而顺", 42 | "sentence": "将帅领旨去出征,骑着烈马拉硬弓,百步穿杨去得准,箭中金钱喜气生" 43 | }, 44 | "坤坎": { 45 | "number": "8", 46 | "name": "比卦", 47 | "des": "水地比(比卦)诚信团结", 48 | "sentence": "顺风行船撒起帆,上天又助一蓬风,不用费力逍遥去,任意而行大亨通" 49 | }, 50 | "乾巽": { 51 | "number": "9", 52 | "name": "小畜卦", 53 | "des": "风天小畜(小畜卦)蓄养待进", 54 | "sentence": "苗逢旱天尽焦梢,水想云浓雨不浇,农人仰面长吁气,是从款来莫心高" 55 | }, 56 | "兑乾": { 57 | "number": "10", 58 | "name": "履卦", 59 | "des": "天泽履(履卦)脚踏实地", 60 | "sentence": "凤凰落在西岐山,长鸣几声出圣贤,天降文王开基业,富贵荣华八百年" 61 | }, 62 | "乾坤": { 63 | "number": "11", 64 | "name": "泰卦", 65 | "des": "地天泰(泰卦)应时而变", 66 | "sentence": "学文满腹入场闱,三元及第得意回,从今解去愁和闷,喜庆平地一声雷" 67 | }, 68 | "坤乾": { 69 | "number": "12", 70 | "name": "否卦", 71 | "des": "天地否(否卦)不交不通", 72 | "sentence": "虎落陷坑不堪言,进前容易退后难,谋望不遂自己便,疾病口舌事牵连" 73 | }, 74 | "离乾": { 75 | "number": "13", 76 | "name": "同人卦", 77 | "des": "天火同人(同人卦)上下和同", 78 | "sentence": "心中有事犯猜疑,谋望从前不着实,幸遇明人来指引,诸般忧闷自消之" 79 | }, 80 | "乾离": { 81 | "number": "14", 82 | "name": "大有卦", 83 | "des": "火天大有(大有卦)顺天依时", 84 | "sentence": "砍树摸雀作事牢,是非口舌自然消,婚姻合伙不费力,若问走失未逃脱" 85 | }, 86 | "艮坤": { 87 | "number": "15", 88 | "name": "谦卦", 89 | "des": "地山谦(谦卦)内高外低", 90 | "sentence": "天赐贫人一封金,不争不抢两平分,彼此分得金到手,一切谋望皆遂心" 91 | }, 92 | "坤震": { 93 | "number": "16", 94 | "name": "豫卦", 95 | "des": "雷地豫(豫卦)顺时依势", 96 | "sentence": "太公插下杏黄旗,收妖为徒归西岐,自此青龙得了位,一旦谋望百事宜" 97 | }, 98 | "震兑": { 99 | "number": "17", 100 | "name": "随卦", 101 | "des": "泽雷随(随卦)随时变通", 102 | "sentence": "泥里步踏这几年,推车靠崖在眼前,目下就该再使力,扒上崖去发财源" 103 | }, 104 | "巽艮": { 105 | "number": "18", 106 | "name": "蛊卦", 107 | "des": "山风蛊(蛊卦)振疲起衰", 108 | "sentence": "卦中爻象如推磨,顺当为福反为祸,心中有益且迟迟,凡事尽从忙处错" 109 | }, 110 | "兑坤": { 111 | "number": "19", 112 | "name": "临卦", 113 | "des": "地泽临(临卦)教民保民", 114 | "sentence": "君王无道民倒悬,常想拨云见青天,幸逢明主施仁政,重又安居乐自然" 115 | }, 116 | "坤巽": { 117 | "number": "20", 118 | "name": "观卦", 119 | "des": "风地观(观卦)观下瞻上", 120 | "sentence": "卦遇蓬花旱逢河,生意买卖利息多,婚姻自有人来助,出门永不受折磨" 121 | }, 122 | "震离": { 123 | "number": "21", 124 | "name": "噬嗑卦", 125 | "des": "火雷噬嗑(噬嗑卦)刚柔相济", 126 | "sentence": "运拙如同身受饥,幸得送饭又送食,适口充腹心欢喜,忧愁从此渐消移" 127 | }, 128 | "离艮": { 129 | "number": "22", 130 | "name": "贲卦", 131 | "des": "山火贲(贲卦)饰外扬质", 132 | "sentence": "近来运转瑞气周,窈窕淑女君子求。钟鼓乐之大吉庆,占者逢之喜临头" 133 | }, 134 | "坤艮": { 135 | "number": "23", 136 | "name": "剥卦", 137 | "des": "山地剥(剥卦)顺势而止", 138 | "sentence": "鹊遇天晚宿林中,不知林内先有鹰,虽然同处心生恶,卦若逢之是非轻" 139 | }, 140 | "震坤": { 141 | "number": "24", 142 | "name": "复卦", 143 | "des": "地雷复(复卦)寓动于顺", 144 | "sentence": "马氏太公不相合,世人占之忧疑多,恩人无义反为怨,是非平地起风波" 145 | }, 146 | "震乾": { 147 | "number": "25", 148 | "name": "无妄卦", 149 | "des": "天雷无妄(无妄卦)无妄而得", 150 | "sentence": "飞鸟失机落笼中,纵然奋飞不能腾,目下只宜守本分,妄想扒高万不能" 151 | }, 152 | "乾艮": { 153 | "number": "26", 154 | "name": "大畜卦", 155 | "des": 
"山天大畜(大畜卦)止而不止", 156 | "sentence": "忧愁常锁两眉头,千头万绪挂心间,从今以后防开阵,任意行而不相干" 157 | }, 158 | "震艮": { 159 | "number": "27", 160 | "name": "颐卦", 161 | "des": "山雷颐(颐卦)纯正以养", 162 | "sentence": "太公独钓渭水河,手执丝杆忧愁多,时来又遇文王访,自此永不受折磨" 163 | }, 164 | "巽兑": { 165 | "number": "28", 166 | "name": "大过卦", 167 | "des": "泽风大过(大过卦)非常行动", 168 | "sentence": "夜晚梦里梦金银,醒来仍不见一文,目下只宜求本分,思想络是空劳神" 169 | }, 170 | "坎坎": { 171 | "number": "29", 172 | "name": "坎卦", 173 | "des": "坎为水(坎卦)行险用险", 174 | "sentence": "一轮明月照水中,只见影儿不见踪,愚夫当财下去取,摸来摸去一场空" 175 | }, 176 | "离离": { 177 | "number": "30", 178 | "name": "离卦", 179 | "des": "离为火(离卦)附和依托", 180 | "sentence": "官人来占主高升,庄农人家产业增,生意买卖利息厚,匠艺占之大亨通" 181 | }, 182 | "艮兑": { 183 | "number": "31", 184 | "name": "咸卦", 185 | "des": "泽山咸(咸卦)相互感应", 186 | "sentence": "运去黄金失色,时来棒槌发芽,月令极好无差,且喜心宽意大" 187 | }, 188 | "巽震": { 189 | "number": "32", 190 | "name": "恒卦", 191 | "des": "雷风恒(恒卦)恒心有成", 192 | "sentence": "渔翁寻鱼运气好,鱼来撞网跑不了,别人使本挣不来,谁想一到就凑合" 193 | }, 194 | "艮乾": { 195 | "number": "33", 196 | "name": "遁卦", 197 | "des": "天山遁(遁卦)遁世救世", 198 | "sentence": "浓云蔽日不光明,劝君且莫出远行,婚姻求财皆不利,提防口舌到门庭" 199 | }, 200 | "乾震": { 201 | "number": "34", 202 | "name": "大壮卦", 203 | "des": "雷天大壮(大壮卦)壮勿妄动", 204 | "sentence": "卦占工师得大木,眼前该着走上路,时来运转多顺当,有事自管放心宽" 205 | }, 206 | "坤离": { 207 | "number": "35", 208 | "name": "晋卦", 209 | "des": "火地晋(晋卦)求进发展", 210 | "sentence": "锄地锄去苗里草,谁想财帛将人找,一锄锄出银子来,这个运气也算好" 211 | }, 212 | "离坤": { 213 | "number": "36", 214 | "name": "明夷卦", 215 | "des": "地火明夷(明夷卦)晦而转明", 216 | "sentence": "时乖运拙走不着,急忙过河拆了桥,恩人无义反为怨,凡事无功枉受劳" 217 | }, 218 | "离巽": { 219 | "number": "37", 220 | "name": "家人卦", 221 | "des": "风火家人(家人卦)诚威治业", 222 | "sentence": "一朵鲜花镜中开,看着极好取不来,劝君休把镜花恋,卦若逢之主可怪" 223 | }, 224 | "兑离": { 225 | "number": "38", 226 | "name": "睽卦", 227 | "des": "火泽睽(睽卦)异中求同", 228 | "sentence": "此卦占来运气歹,如同太公作买卖,贩猪牛快贩羊迟,猪羊齐贩断了宰" 229 | }, 230 | "艮坎": { 231 | "number": "39", 232 | "name": "蹇卦", 233 | "des": "水山蹇(蹇卦)险阻在前", 234 | "sentence": "大雨倾地雪满天,路上行人苦又寒,拖泥带水费尽力,事不遂心且耐烦" 235 | }, 236 | "坎震": { 237 | "number": "40", 238 | "name": "解卦", 239 | "des": "雷水解(解卦)柔道致治", 240 | "sentence": "目下月令如过关,千辛万苦受熬煎,时来恰相有人救,任意所为不相干" 241 | }, 242 | "兑艮": { 243 | "number": "41", 244 | "name": "损卦", 245 | "des": "山泽损(损卦)损益制衡", 246 | "sentence": "时运不至费心多,比作推车受折磨,山路崎岖吊下耳,左插右按按不着" 247 | }, 248 | "震巽": { 249 | "number": "42", 250 | "name": "益卦", 251 | "des": "风雷益(益卦)损上益下", 252 | "sentence": "时来运转吉气发,多年枯木又开花,枝叶重生多茂盛,几人见了几人夸" 253 | }, 254 | "乾兑": { 255 | "number": "43", 256 | "name": "夬卦", 257 | "des": "泽天夬(夬卦)决而能和", 258 | "sentence": "蜘蛛脱网赛天军,粘住游蜂翅翎毛,幸有大风吹破网,脱离灾难又逍遥" 259 | }, 260 | "巽乾": { 261 | "number": "44", 262 | "name": "姤卦", 263 | "des": "天风姤(姤卦)天下有风", 264 | "sentence": "他乡遇友喜气欢,须知运气福重添,自今交了顺当运,向后管保不相干" 265 | }, 266 | "坤兑": { 267 | "number": "45", 268 | "name": "萃卦", 269 | "des": "泽地萃(萃卦)荟萃聚集", 270 | "sentence": "游鱼戏水被网惊,跳过龙门身化龙,三尺杨柳垂金线,万朵桃花显你能" 271 | }, 272 | "巽坤": { 273 | "number": "46", 274 | "name": "升卦", 275 | "des": "地泽临(临卦)柔顺谦虚", 276 | "sentence": "士人来占必得名,生意买卖也兴隆,匠艺逢之交易好,农间庄稼亦收成" 277 | }, 278 | "坎兑": { 279 | "number": "47", 280 | "name": "困卦", 281 | "des": "泽水困(困卦)困境求通", 282 | "sentence": "时运不来好伤怀,撮上押去把梯抬,一筒虫翼无到手,转了上去下不来" 283 | }, 284 | "巽坎": { 285 | "number": "48", 286 | "name": "井卦", 287 | "des": "水风井(井卦)求贤若渴", 288 | "sentence": "枯井破费已多年,一朝流泉出来鲜,资生济渴人称羡,时来运转喜自然" 289 | }, 290 | "离兑": { 291 | "number": "49", 292 | "name": "革卦", 293 | "des": "泽火革(革卦)顺天应人", 294 | "sentence": "苗逢旱天渐渐衰,幸得天恩降雨来,忧去喜来能变化,求谋干事遂心怀" 295 | }, 296 | "巽离": { 297 | "number": "50", 298 | "name": "鼎卦", 299 | "des": "火风鼎(鼎卦)稳重图变", 300 | "sentence": 
"莺鹜蛤蜊落沙滩,蛤蜊莺鹜两翅扇,渔人进前双得利,失走行人却自在" 301 | }, 302 | "震震": { 303 | "number": "51", 304 | "name": "震卦", 305 | "des": "震为雷(震卦)临危不乱", 306 | "sentence": "一口金钟在淤泥,人人拿着当玩石,忽然一日钟悬起,响亮一声天下知" 307 | }, 308 | "艮艮": { 309 | "number": "52", 310 | "name": "艮卦", 311 | "des": "艮为山(艮卦)动静适时", 312 | "sentence": "财帛常打心头走,可惜眼前难到手,不如意时且忍耐,逢着闲事休开口" 313 | }, 314 | "艮巽": { 315 | "number": "53", 316 | "name": "渐卦", 317 | "des": "风山渐(渐卦)渐进蓄德", 318 | "sentence": "俊鸟幸得出笼中,脱离灾难显威风,一朝得意福力至,东西南北任意行" 319 | }, 320 | "兑震": { 321 | "number": "54", 322 | "name": "归妹卦", 323 | "des": "雷泽归妹(归妹卦)立家兴业", 324 | "sentence": "求鱼须当向水中,树上求之不顺情,受尽爬揭难随意,劳而无功运平平" 325 | }, 326 | "离震": { 327 | "number": "55", 328 | "name": "丰卦", 329 | "des": "雷火丰(丰卦)日中则斜", 330 | "sentence": "古镜昏暗好几年,一朝磨明似月圆,君子谋事逢此卦,时来运转喜自然" 331 | }, 332 | "艮离": { 333 | "number": "56", 334 | "name": "旅卦", 335 | "des": "火山旅(旅卦)依义顺时", 336 | "sentence": "飞鸟树上垒窝巢,小人使计举火烧,君占此卦为不吉,一切谋望枉徒劳" 337 | }, 338 | "巽巽": { 339 | "number": "57", 340 | "name": "巽卦", 341 | "des": "巽为风(巽卦)谦逊受益", 342 | "sentence": "一叶孤舟落沙滩,有篙无水进退难,时逢大雨江湖溢,不用费力任往返" 343 | }, 344 | "兑兑": { 345 | "number": "58", 346 | "name": "兑卦", 347 | "des": "兑为泽(兑卦)刚内柔外", 348 | "sentence": "这个卦象真可取,觉着做事不费力,休要错过这机关,事事觉得随心意" 349 | }, 350 | "坎巽": { 351 | "number": "59", 352 | "name": "涣卦", 353 | "des": "风水涣(涣卦)拯救涣散", 354 | "sentence": "隔河望见一锭金,欲取岸宽水又深,指望资财难到手,昼思夜想枉费心" 355 | }, 356 | "兑坎": { 357 | "number": "60", 358 | "name": "节卦", 359 | "des": "水泽节(节卦)万物有节", 360 | "sentence": "时来运转喜气生,登台封神姜太公,到此诸神皆退位,纵然有祸不成凶" 361 | }, 362 | "兑巽": { 363 | "number": "61", 364 | "name": "中孚卦", 365 | "des": "风泽中孚(中孚卦)诚信立身", 366 | "sentence": "路上行人色匆匆,急忙无桥过薄冰,小心谨慎过得去,一步错了落水中" 367 | }, 368 | "艮震": { 369 | "number": "62", 370 | "name": "小过卦", 371 | "des": "雷山小过(小过卦)行动有度", 372 | "sentence": "行人路过独木桥,心内惶恐眼里瞧,爽利保你过得去,慢行一定不安牢" 373 | }, 374 | "离坎": { 375 | "number": "63", 376 | "name": "既济卦", 377 | "des": "水火既济(既济卦)盛极将衰", 378 | "sentence": "金榜以上题姓名,不负当年苦用功,人逢此卦名吉庆,一切谋望大亨通" 379 | }, 380 | "坎离": { 381 | "number": "64", 382 | "name": "未济卦", 383 | "des": "火水未济(未济卦)事业未竟", 384 | "sentence": "离地着人几丈深,是防偷营劫寨人,后封太岁为凶煞,时加谨慎祸不侵" 385 | } 386 | } -------------------------------------------------------------------------------- /node/gua/index.ts: -------------------------------------------------------------------------------- 1 | import { readFileSync } from "fs"; 2 | import path from "path"; 3 | import { ChatPromptTemplate, MessagesPlaceholder } from "@langchain/core/prompts"; 4 | import { ChatMessageHistory } from "langchain/stores/message/in_memory"; 5 | import { RunnableWithMessageHistory } from "@langchain/core/runnables"; 6 | import { ChatOpenAI } from "@langchain/openai"; 7 | import { StringOutputParser } from "@langchain/core/output_parsers"; 8 | import readline from "readline"; 9 | import util from "util"; 10 | import "dotenv/config"; 11 | 12 | const guaInfoBuffer = readFileSync(path.join(__dirname, "./gua.json")); 13 | const guaInfo = JSON.parse(guaInfoBuffer.toString()); 14 | 15 | const yaoName = ["初爻", "二爻", "三爻", "四爻", "五爻", "六爻"]; 16 | 17 | const guaDict = { 18 | 阳阳阳: "乾", 19 | 阴阴阴: "坤", 20 | 阴阳阳: "兑", 21 | 阳阴阳: "震", 22 | 阳阳阴: "巽", 23 | 阴阳阴: "坎", 24 | 阳阴阴: "艮", 25 | 阴阴阳: "离", 26 | }; 27 | 28 | function generateGua(): string[] { 29 | let yaoCount = 0; 30 | const messageList = []; 31 | 32 | const genYao = () => { 33 | const coinRes = Array.from({ length: 3 }, () => (Math.random() > 0.5 ? 1 : 0)); 34 | const yinYang = coinRes.reduce((a, b) => a + b, 0) > 1.5 ? "阳" : "阴"; 35 | const message = `${yaoName[yaoCount]} 为 ${coinRes 36 | .map((i) => (i > 0.5 ? 
"字" : "背")) 37 | .join("")} 为 ${yinYang}`; 38 | 39 | return { 40 | yinYang, 41 | message, 42 | }; 43 | }; 44 | 45 | const firstGuaYinYang = Array.from({ length: 3 }, () => { 46 | const { yinYang, message } = genYao(); 47 | yaoCount++; 48 | 49 | messageList.push(message); 50 | return yinYang; 51 | }); 52 | const firstGua = guaDict[firstGuaYinYang.join("")]; 53 | messageList.push(`您的首卦为 ${firstGua}`); 54 | 55 | const secondGuaYinYang = Array.from({ length: 3 }, () => { 56 | const { yinYang, message } = genYao(); 57 | yaoCount++; 58 | 59 | messageList.push(message); 60 | return yinYang; 61 | }); 62 | const secondGua = guaDict[secondGuaYinYang.join("")]; 63 | messageList.push(`您的次卦为 ${secondGua}`); 64 | 65 | const gua = secondGua + firstGua; 66 | const guaDesc = guaInfo[gua]; 67 | 68 | const guaRes = ` 69 | 六爻结果: ${gua} 70 | 卦名为:${guaDesc.name} 71 | ${guaDesc.des} 72 | 卦辞为:${guaDesc.sentence} 73 | `; 74 | 75 | messageList.push(guaRes); 76 | 77 | return messageList; 78 | } 79 | 80 | generateGua(); 81 | async function main() { 82 | const messageList = generateGua(); 83 | 84 | const history = new ChatMessageHistory(); 85 | const guaMessage = messageList.map((message): ["ai", string] => ["ai", message]); 86 | 87 | const prompt = await ChatPromptTemplate.fromMessages([ 88 | [ 89 | "system", 90 | `你是一位出自中华六爻世家的卜卦专家,你的任务是根据卜卦者的问题和得到的卦象,为他们提供有益的建议。 91 | 你的解答应基于卦象的理解,同时也要尽可能地展现出乐观和积极的态度,引导卜卦者朝着积极的方向发展。 92 | 你的语言应该具有仙风道骨、雅致高贵的气质,以此来展现你的卜卦专家身份。`, 93 | ], 94 | ...guaMessage, 95 | new MessagesPlaceholder("history_message"), 96 | ["human", "{input}"], 97 | ]); 98 | 99 | const llm = new ChatOpenAI(); 100 | const chain = prompt.pipe(llm).pipe(new StringOutputParser()); 101 | const chainWithHistory = new RunnableWithMessageHistory({ 102 | runnable: chain, 103 | getMessageHistory: (_sessionId) => history, 104 | inputMessagesKey: "input", 105 | historyMessagesKey: "history_message", 106 | }); 107 | 108 | const rl = readline.createInterface({ 109 | input: process.stdin, 110 | output: process.stdout, 111 | }); 112 | 113 | const question = util.promisify(rl.question).bind(rl); 114 | 115 | const input = await question("告诉我你的疑问: "); 116 | 117 | let index = 0; 118 | const printMessagesPromise = new Promise((resolve) => { 119 | const intervalId = setInterval(() => { 120 | if (index < messageList.length) { 121 | console.log(messageList[index]); 122 | index++; 123 | } else { 124 | clearInterval(intervalId); 125 | resolve(); 126 | } 127 | }, 1000); 128 | }); 129 | 130 | const llmResPromise = chainWithHistory.invoke( 131 | { input: "用户的问题是:" + input }, 132 | { configurable: { sessionId: "no-used" } } 133 | ); 134 | 135 | const [_, firstRes] = await Promise.all([printMessagesPromise, llmResPromise]); 136 | 137 | console.log(firstRes); 138 | 139 | async function chat() { 140 | const input = await question("User: "); 141 | 142 | if (input.toLowerCase() === "exit") { 143 | rl.close(); 144 | return; 145 | } 146 | 147 | const response = await chainWithHistory.invoke( 148 | { input }, 149 | { configurable: { sessionId: "no-used" } } 150 | ); 151 | 152 | console.log("AI: ", response); 153 | chat(); 154 | } 155 | 156 | chat(); 157 | } 158 | 159 | main(); 160 | 161 | /** 162 | 告诉我你的疑问: 今晚吃不吃火锅 163 | 初爻 为 背字字 为 阳 164 | 二爻 为 字背背 为 阴 165 | 三爻 为 背背背 为 阴 166 | 您的首卦为 艮 167 | 四爻 为 字背背 为 阴 168 | 五爻 为 字背字 为 阳 169 | 六爻 为 字背背 为 阴 170 | 您的次卦为 坎 171 | 172 | 六爻结果: 坎艮 173 | 卦名为:蒙卦 174 | 山水蒙(蒙卦)启蒙奋发 175 | 卦辞为:卦中爻象犯小耗,君子占之运不高,婚姻合伙有琐碎,做事必然受苦劳 176 | 177 | 
蒙卦是启蒙奋发的象征,卦中爻象犯小耗,君子占之运不高。以此卦象来看,吃火锅可谓是一种“小耗”,而且可能会带来一些琐碎的麻烦,如消化不良、辛辣刺激等。因此建议您今晚内省养生,以清淡饮食为主,待身心调整良好,再考虑享受火锅之乐也未迟。犹如卦中所示,启蒙奋发,需先立足于身体的康健。俗话说,身体是革命的本钱,希望您能够以健康为重,更能够在未来的日子里,拥有更好的精神状态去面对生活。 178 | User: 我的首卦意味着什么 179 | AI: 您的首卦是艮卦,艮为山,足艮其德,心行其义,佛道之士能居其位无非也。 180 | 181 | 艮卦代表了停止和立定,是一种稳重、安静、内向的状态。艮卦需要人们保持清晰的头脑,以稳定的决心去实现目标。在处事方面,艮卦建议我们要有耐心,要克制冲动,遵循一种稳步向前的道路。 182 | 183 | 艮卦也提醒我们,任何事情都不能一味地求快,要懂得适时的停止,稳重、安静的考虑对策,不急躁、不冲动,这样才能获得最终的成功。尤其是在面临困难、疑惑的时候,更应静下心来细细思考,以艮卦的智慧指引前行之路。 184 | User: 那我的 五爻 是什么解读 185 | AI: 咱们的五爻为阳爻,卦象上仍属于艮坎之蒙卦。在六爻中,五爻常常被视为君位,具有决策和引领的作用。阳爻象征着刚劲有力、主动进取的态势。 186 | 187 | 五爻在这里,即提示您在面对选择时,要采取主动和积极的态度,走出自己的舒适区,创新和尝试新的可能性。同时也建议您以稳健之心来面对生活中各种挑战,坚持自己的信念,积极应对,勇往直前。 188 | 189 | 即使可能面临困境,也有成就的机会。如咱们所卜之蒙卦,虽为困境困顿,但启蒙奋发的情景却逐渐浮现,这是一种因困境中求新生的勇敢。因此在您面临选择时,也许正需坚韧之心与创新之意,走出困境,在疑云中寻找属于自己的明星。 190 | User: exit 191 | */ 192 | 193 | /** 194 | 告诉我你的疑问: 今天吃啥 195 | 初爻 为 背字背 为 阴 196 | 二爻 为 字背字 为 阳 197 | 三爻 为 背背背 为 阴 198 | 您的首卦为 坎 199 | 四爻 为 字字字 为 阳 200 | 五爻 为 背背背 为 阴 201 | 六爻 为 字字背 为 阳 202 | 您的次卦为 震 203 | 204 | 六爻结果: 震坎 205 | 卦名为:屯卦 206 | 水雷屯(屯卦)起始维艰 207 | 卦辞为:风刮乱丝不见头,颠三倒四犯忧愁,慢从款来左顺遂,急促反惹不自由 208 | 209 | 卜卦者问:今日临餐之挑选。 210 | 211 | 屯卦,象征新生、开始,同时也难以避免的困境。据此卦,不妨尝试新的食物,只需略加思索,在尝试和新鲜感中寻找正確的味道。然而,也要提醒卜卦者,新兴之事需慎重,不可急躁,务必做好准备,以防不测。 212 | 213 | 吾建议:你今日之餐,或可选择一些素未尝鲜的食材,亦或是尝试一款未曾品尝过的菜品,从中寻找新的口感乐趣。但同时也要注意营养均衡,健康饮食。此乃良好的开始,在尝试中汲取新知,并将其化为生活的一部分。 214 | */ 215 | -------------------------------------------------------------------------------- /node/load-kong-faiss.ts: -------------------------------------------------------------------------------- 1 | import { FaissStore } from "@langchain/community/vectorstores/faiss"; 2 | import { OpenAIEmbeddings } from "@langchain/openai"; 3 | import "faiss-node"; 4 | import "dotenv/config"; 5 | 6 | async function run() { 7 | const directory = "../db/kongyiji"; 8 | const embeddings = new OpenAIEmbeddings(); 9 | const vectorstore = await FaissStore.load(directory, embeddings); 10 | 11 | const retriever = vectorstore.asRetriever(2); 12 | const res = await retriever.invoke("茴香豆是做什么用的"); 13 | 14 | console.log(res); 15 | } 16 | 17 | run(); 18 | -------------------------------------------------------------------------------- /node/mbti/index.ts: -------------------------------------------------------------------------------- 1 | import { readFileSync } from "fs"; 2 | import path from "path"; 3 | import { ChatPromptTemplate, MessagesPlaceholder } from "@langchain/core/prompts"; 4 | import { ChatOpenAI } from "@langchain/openai"; 5 | import { RunnableSequence } from "@langchain/core/runnables"; 6 | import { StringOutputParser } from "@langchain/core/output_parsers"; 7 | import { z } from "zod"; 8 | import "dotenv/config"; 9 | import { DynamicStructuredTool } from "@langchain/core/tools"; 10 | import { AgentExecutor, createOpenAIToolsAgent, createReactAgent } from "langchain/agents"; 11 | import { ChatMessageHistory } from "langchain/stores/message/in_memory"; 12 | import { RunnableWithMessageHistory } from "@langchain/core/runnables"; 13 | import readline from "readline"; 14 | import { Calculator } from "@langchain/community/tools/calculator"; 15 | 16 | process.env.LANGCHAIN_TRACING_V2 = "true"; 17 | 18 | const mbtiInfoBuffer = readFileSync(path.join(__dirname, "./mbti-info.json")); 19 | const mbtiInfo = JSON.parse(mbtiInfoBuffer.toString()); 20 | const mbtiList = [ 21 | "ISTJ", 22 | "ISFJ", 23 | "INFJ", 24 | "INTJ", 25 | "ISTP", 26 | "ISFP", 27 | "INFP", 28 | "INTP", 29 | "ESTP", 30 | "ESFP", 31 | "ENFP", 32 | "ENTP", 33 | "ESTJ", 34 | "ESFJ", 35 | "ENFJ", 36 | "ENTJ", 37 | ] as [string, 
...string[]]; 38 | 39 | async function getMBTIChatChain() { 40 | const prompt = ChatPromptTemplate.fromMessages([ 41 | [ 42 | "system", 43 | "你是一个共情能力非常强的心理医生,并且很了解MBTI(迈尔斯-布里格斯性格类型指标)的各种人格类型,你的任务是根据来访者的 MBTI 和问题,给出针对性的情感支持,你的回答要富有感情、有深度和充足的情感支持,引导来访者乐观积极面对问题", 44 | ], 45 | ["human", "用户的 MBTI 类型是{type}, 这个类型的特点是{info}, 他的问题是{question}"], 46 | ]); 47 | 48 | const model = new ChatOpenAI(); 49 | const mbtiChain = RunnableSequence.from([prompt, model, new StringOutputParser()]); 50 | 51 | return mbtiChain; 52 | } 53 | 54 | async function getAgent() { 55 | const mbtiChatChain = await getMBTIChatChain(); 56 | 57 | const mbtiTool = new DynamicStructuredTool({ 58 | name: "get-mbti-chat", 59 | schema: z.object({ 60 | type: z.enum(mbtiList).describe("用户的 MBTI 类型"), 61 | question: z.string().describe("用户的问题"), 62 | }), 63 | func: async ({ type, question }) => { 64 | const info = mbtiInfo[type.toLowerCase()]; // keys in mbti-info.json are lowercase 65 | 66 | const res = await mbtiChatChain.invoke({ type, question, info }); 67 | return res; 68 | }, 69 | description: "根据用户的问题和 MBTI 类型,回答用户的问题", 70 | }); 71 | 72 | const tools = [mbtiTool]; 73 | 74 | const agentPrompt = await ChatPromptTemplate.fromMessages([ 75 | [ 76 | "system", 77 | "你是一个用户接待的 agent,通过自然语言询问用户的 MBTI 类型和问题,直到你有足够的信息调用 get-mbti-chat 来回答用户的问题", 78 | ], 79 | new MessagesPlaceholder("history_message"), 80 | ["human", "{input}"], 81 | new MessagesPlaceholder("agent_scratchpad"), 82 | ]); 83 | 84 | const llm = new ChatOpenAI({ 85 | temperature: 0.4, 86 | }); 87 | const agent = await createOpenAIToolsAgent({ 88 | llm, 89 | tools, 90 | prompt: agentPrompt, 91 | }); 92 | 93 | const agentExecutor = new AgentExecutor({ 94 | agent, 95 | tools, 96 | }); 97 | 98 | const messageHistory = new ChatMessageHistory(); 99 | 100 | const agentWithChatHistory = new RunnableWithMessageHistory({ 101 | runnable: agentExecutor, 102 | getMessageHistory: () => messageHistory, 103 | inputMessagesKey: "input", 104 | historyMessagesKey: "history_message", 105 | }); 106 | 107 | return agentWithChatHistory; 108 | } 109 | 110 | async function main() { 111 | const agent = await getAgent(); 112 | 113 | const rl = readline.createInterface({ 114 | input: process.stdin, 115 | output: process.stdout, 116 | }); 117 | 118 | function chat() { 119 | rl.question("User: ", async (input) => { 120 | if (input.toLowerCase() === "exit") { 121 | rl.close(); 122 | return; 123 | } 124 | 125 | const response = await agent.invoke( 126 | { 127 | input, 128 | }, 129 | { 130 | configurable: { 131 | sessionId: "no-used", 132 | }, 133 | } 134 | ); 135 | 136 | console.log("Agent: ", response.output); 137 | 138 | chat(); 139 | }); 140 | } 141 | 142 | console.log("请输入问题。 输入 exit 退出聊天。"); 143 | chat(); 144 | } 145 | 146 | main(); 147 | 148 | /** 149 | * 请输入问题。 输入 exit 退出聊天。 150 | User: 我应该做点啥? 151 | Agent: 当然,我很乐意帮助你。但是,我需要更多的信息才能给出最佳建议。你可以告诉我你的 MBTI 类型吗?这将帮助我更好地理解你的个性和偏好。 152 | User: 我也不知道我的类型,我是偏内向和没有计划的 153 | Agent: 了解了,你的描述可能对应于几种 MBTI 类型。你是否喜欢深入思考和理解事物,还是更倾向于实际操作和直观理解?你在做决定时更依赖逻辑分析还是情感和价值观?这些信息可以帮助我更准确地确定你的 MBTI 类型。 154 | User: 我希望深入思考和实际操作 逻辑分析 155 | Agent: 根据你的描述,你可能是 INTJ 或者 INTP 类型。这两种类型的人都是内向、直观、思考型的人,但主要的区别在于 J 类型的人通常更喜欢有计划和结构,而 P 类型的人更喜欢灵活和开放。你刚才提到你没有计划,所以我猜你可能是 INTP 类型。你是否认同这个判断? 156 | User: 我应该是更加有计划和结构的吧 157 | Agent: 好的,那么你可能是 INTJ 类型的人。让我们继续你的问题,你想要做些什么?你可以提供一些具体的上下文吗?例如,你是在寻找新的职业道路,还是想找一些业余爱好?
158 | User: 我在纠结今晚吃不吃火锅 159 | > 160 | > 我在纠结Agent: 根据你的 INTJ 性格类型,你是一个善于策划和执行的人,同时具有强烈的洞察力。你可以一步步地思考和规划生活的各种细节,这也包括你的饮食选择。 161 | 162 | 火锅是一种社交的餐饮方式,可以提供大量的膳食选择,从各种肉类、蔬菜、豆制品到各种小吃,几乎可以满足所有人的口味。这种丰富的选择也正符合你作为 INTJ 类型的性格特点,即多元而独特。 163 | 164 | 如果你正在寻找一种放松自己,调剂生活的方式,或者想和朋友们享受一个美妙的晚餐,那么今晚吃火锅绝对是一个不错的选择。同时,请记住,生活中的决定就像你面前的火锅一样多元和丰富,你有能力作出最适合自己的选择。 165 | 166 | 所以,如果你喜欢火锅,并且觉得今晚吃火锅可以让你感到愉快,那么就去吃吧!希望你今晚过得愉快! 167 | */ 168 | -------------------------------------------------------------------------------- /node/mbti/mbti-info.json: -------------------------------------------------------------------------------- 1 | { 2 | "istj": "严肃、安静、藉由集中心志与全力投入、及可被信赖获致成功。行事务实、有序、实际、逻辑、真实及可信赖十分留意且乐于任何事(工作、居家、生活均有良好组织及有序。负责任。照设定成效来作出决策且不畏阻挠与闲言会坚定为之。重视传统与忠诚。传统性的思考者或经理。", 3 | "isfj": "安静、和善、负责任且有良心。行事尽责投入。安定性高,常居项目工作或团体之安定力量。愿投入、吃苦及力求精确。兴趣通常不在于科技方面。对细节事务有耐心。忠诚、考虑周到、知性且会关切他人感受。致力于创构有序及和谐的工作与家庭环境。", 4 | "infj": "因为坚忍、创意及必须达成的意图而能成功。会在工作中投注最大的努力。默默强力的、诚挚的及用心的关切他人。因坚守原则而受敬重。提出造福大众利益的明确远景而为人所尊敬与追随。追求创见、关系及物质财物的意义及关联。想了解什么能激励别人及对他人具洞察力。光明正大且坚信其价值观。有组织且果断地履行其愿景。", 5 | "intj": "具强大动力与本意来达成目的与创意—固执顽固者。有宏大的愿景且能快速在众多外界事件中找出有意义的模范。对所承负职务,具良好能力于策划工作并完成。具怀疑心、挑剔性、独立性、果决,对专业水准及绩效要求高。", 6 | "istp": "冷静旁观者—安静、预留余地、弹性及会以无偏见的好奇心与未预期原始的幽默观察与分析。有兴趣于探索原因及效果,技术事件是为何及如何运作且使用逻辑的原理组构事实、重视效能。擅长于掌握问题核心及找出解决方式。分析成事的缘由且能实时由大量资料中找出实际问题的核心。", 7 | "isfp": "羞怯的、安宁和善地、敏感的、亲切的、且行事谦虚。喜于避开争论,不对他人强加已见或价值观。无意于领导却常是忠诚的追随者。办事不急躁,安于现状无意于以过度的急切或努力破坏现况,且非成果导向。喜欢有自有的空间及照自订的时程办事。", 8 | "infp": "安静观察者,具理想性与对其价值观及重要之人具忠诚心。希外在生活形态与内在价值观相吻合。具好奇心且很快能看出机会所在。常担负开发创意的触媒者。除非价值观受侵犯,行事会具弹性、适应力高且承受力强。具想了解及发展他人潜能的企图。想作太多且作事全神贯注。对所处境遇及拥有不太在意。具适应力、有弹性除非价值观受到威胁。", 9 | "intp": "安静、自持、弹性及具适应力。特别喜爱追求理论与科学事理。习于以逻辑及分析来解决问题—问题解决者。最有兴趣于创意事务及特定工作,对聚会与闲聊无大兴趣。追求可发挥个人强烈兴趣的生涯。追求发展对有兴趣事务之逻辑解释。", 10 | "estp": "擅长现场实时解决问题—解决问题者。喜欢办事并乐于其中及过程。倾向于喜好技术事务及运动,交结同好友人。具适应性、容忍度、务实性;投注心力于会很快具成效工作。不喜欢冗长概念的解释及理论。最专精于可操作、处理、分解或组合的真实事务。", 11 | "esfp": "外向、和善、接受性、乐于分享喜乐予他人。喜欢与他人一起行动且促成事件发生,在学习时亦然。知晓事件未来的发展并会热列参与。最擅长于人际相处能力及具备完备常识,很有弹性能立即适应他人与环境。对生命、人、物质享受的热爱者。", 12 | "enfp": "充满热忱、活力充沛、聪明的、富想象力的,视生命充满机会但期能得自他人肯定与支持。几乎能达成所有有兴趣的事。对难题很快就有对策并能对有困难的人施予援手。依赖能改善的能力而无须预作规划准备。为达目的常能找出强制自己为之的理由。即兴执行者。", 13 | "entp": "反应快、聪明、长于多样事务。具激励伙伴、敏捷及直言讳专长。会为了有趣对问题的两面加予争辩。对解决新及挑战性的问题富有策略,但会轻忽或厌烦经常的任务与细节。兴趣多元,易倾向于转移至新生的兴趣。对所想要的会有技巧地找出逻辑的理由。长于看清础他人,有智能去解决新或有挑战的问题。", 14 | "estj": "务实、真实、事实倾向,具企业或技术天份。不喜欢抽象理论;最喜欢学习可立即运用事理。喜好组织与管理活动且专注以最有效率方式行事以达致成效。具决断力、关注细节且很快作出决策—优秀行政者。会忽略他人感受。喜作领导者或企业主管。", 15 | "esfj": "诚挚、爱说话、合作性高、受欢迎、光明正大的—天生的合作者及活跃的组织成员。重和谐且长于创造和谐。常作对他人有益事务。给予鼓励及称许会有更佳工作成效。最有兴趣于会直接及有形影响人们生活的事务。喜欢与他人共事去精确且准时地完成工作。", 16 | "enfj": "热忱、易感应及负责任的--具能鼓励他人的领导风格。对别人所想或希求会表达真正关切且切实用心去处理。能怡然且技巧性地带领团体讨论或演示文稿提案。爱交际、受欢迎及富同情心。对称许及批评很在意。喜欢带引别人且能使别人或团体发挥潜能。", 17 | "entj": "坦诚、具决策力的活动领导者。长于发展与实施广泛的系统以解决组织的问题。专精于具内涵与智能的谈话如对公众演讲。乐于经常吸收新知且能广开信息管道。易生过度自信,会强于表达自已创见。喜于长程策划及目标设定。" 18 | } -------------------------------------------------------------------------------- /node/multiQueryRetriever.ts: -------------------------------------------------------------------------------- 1 | import { FaissStore } from "@langchain/community/vectorstores/faiss"; 2 | import { OpenAIEmbeddings, ChatOpenAI } from "@langchain/openai"; 3 | import { MultiQueryRetriever } from "langchain/retrievers/multi_query"; 4 | import "faiss-node"; 5 | import "dotenv/config"; 6 | 7 | async function run() { 8 | const directory = "../db/kongyiji"; 9 | const embeddings = new OpenAIEmbeddings(); 10 | const vectorstore = await FaissStore.load(directory, embeddings); 11 | 12 | const model = new ChatOpenAI(); 13 | const retriever = MultiQueryRetriever.fromLLM({ 14 | llm: model, 15 | retriever: 
vectorstore.asRetriever(3), 16 | queryCount: 3, 17 | verbose: true, 18 | }); 19 | const res = await retriever.invoke("茴香豆是做什么用的"); 20 | 21 | console.log(res); 22 | } 23 | 24 | run(); 25 | -------------------------------------------------------------------------------- /node/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "test-app-node", 3 | "private": true, 4 | "version": "0.0.0", 5 | "scripts": { 6 | "prepare-kong-faiss": "ts-node prepare-kong-faiss.ts", 7 | "load-kong-faiss": "ts-node load-kong-faiss.ts", 8 | "multiQueryRetriever": "ts-node multiQueryRetriever.ts", 9 | "LLMChainExtractor": "ts-node LLMChainExtractor.ts", 10 | "ScoreThresholdRetriever": "ts-node ScoreThresholdRetriever.ts", 11 | "prepare-qiu": "ts-node ./rag/prepare-qiu.ts", 12 | "rag-2": "ts-node ./rag/index.ts", 13 | "rag-server": "ts-node ./rag/server.ts", 14 | "rag-client": "ts-node ./rag/client.ts" 15 | }, 16 | "dependencies": { 17 | "dotenv": "^16.4.5", 18 | "express": "^4.19.2", 19 | "faiss-node": "^0.5.1", 20 | "langchain": "^0.1.30", 21 | "ts-node": "^10.9.2", 22 | "typescript": "^5.4.3" 23 | } 24 | } -------------------------------------------------------------------------------- /node/prepare-kong-faiss.ts: -------------------------------------------------------------------------------- 1 | import { TextLoader } from "langchain/document_loaders/fs/text"; 2 | import { RecursiveCharacterTextSplitter } from "langchain/text_splitter"; 3 | import "dotenv/config"; 4 | import { FaissStore } from "@langchain/community/vectorstores/faiss"; 5 | import { OpenAIEmbeddings } from "@langchain/openai"; 6 | 7 | const run = async () => { 8 | const loader = new TextLoader("../data/kong.txt"); 9 | const docs = await loader.load(); 10 | 11 | const splitter = new RecursiveCharacterTextSplitter({ 12 | chunkSize: 100, 13 | chunkOverlap: 20, 14 | }); 15 | 16 | const splitDocs = await splitter.splitDocuments(docs); 17 | 18 | const embeddings = new OpenAIEmbeddings(); 19 | const vectorStore = await FaissStore.fromDocuments(splitDocs, embeddings); 20 | 21 | const directory = "../db/kongyiji"; 22 | await vectorStore.save(directory); 23 | }; 24 | 25 | run(); 26 | -------------------------------------------------------------------------------- /node/rag/client.ts: -------------------------------------------------------------------------------- 1 | const port = 8080; 2 | 3 | async function fetchStream() { 4 | const response = await fetch(`http://localhost:${port}`, { 5 | method: "POST", 6 | headers: { 7 | "content-type": "application/json", 8 | }, 9 | body: JSON.stringify({ 10 | question: "什么是球状闪电", 11 | session_id: "test-server", 12 | }), 13 | }); 14 | const reader = response.body.getReader(); 15 | const decoder = new TextDecoder(); 16 | 17 | while (true) { 18 | const { done, value } = await reader.read(); 19 | if (done) break; 20 | console.log(decoder.decode(value)); 21 | } 22 | 23 | console.log("Stream has ended"); 24 | } 25 | 26 | fetchStream(); 27 | -------------------------------------------------------------------------------- /node/rag/index.ts: -------------------------------------------------------------------------------- 1 | import { FaissStore } from "@langchain/community/vectorstores/faiss"; 2 | import { OpenAIEmbeddings } from "@langchain/openai"; 3 | import "dotenv/config"; 4 | import path from "path"; 5 | import { JSONChatHistory } from "../../JSONChatHistory/index"; 6 | import { ChatPromptTemplate, MessagesPlaceholder } from "@langchain/core/prompts"; 7 | 
import { 8 | RunnableSequence, 9 | RunnablePassthrough, 10 | RunnableWithMessageHistory, 11 | Runnable, 12 | } from "@langchain/core/runnables"; 13 | import { ChatOpenAI } from "@langchain/openai"; 14 | import { StringOutputParser } from "@langchain/core/output_parsers"; 15 | import { HumanMessage, AIMessage } from "@langchain/core/messages"; 16 | import { Document } from "@langchain/core/documents"; 17 | 18 | async function loadVectorStore() { 19 | const directory = path.join(__dirname, "../../db/qiu"); 20 | const embeddings = new OpenAIEmbeddings(); 21 | const vectorStore = await FaissStore.load(directory, embeddings); 22 | 23 | return vectorStore; 24 | } 25 | 26 | async function getRephraseChain() { 27 | const rephraseChainPrompt = ChatPromptTemplate.fromMessages([ 28 | [ 29 | "system", 30 | "给定以下对话和一个后续问题,请将后续问题重述为一个独立的问题。请注意,重述的问题应该包含足够的信息,使得没有看过对话历史的人也能理解。", 31 | ], 32 | new MessagesPlaceholder("history"), 33 | ["human", "将以下问题重述为一个独立的问题:\n{question}"], 34 | ]); 35 | 36 | const rephraseChain = RunnableSequence.from([ 37 | rephraseChainPrompt, 38 | new ChatOpenAI({ 39 | temperature: 0.4, 40 | }), 41 | new StringOutputParser(), 42 | ]); 43 | 44 | return rephraseChain; 45 | } 46 | 47 | async function testRephraseChain() { 48 | const historyMessages = [new HumanMessage("你好,我叫小明"), new AIMessage("你好小明")]; 49 | const rephraseChain = await getRephraseChain(); 50 | 51 | const question = "你觉得我的名字怎么样?"; 52 | const standaloneQuestion = await rephraseChain.invoke({ history: historyMessages, question }); 53 | 54 | console.log(standaloneQuestion); 55 | } 56 | 57 | export async function getRagChain(): Promise { 58 | const vectorStore = await loadVectorStore(); 59 | const retriever = vectorStore.asRetriever(2); 60 | 61 | const convertDocsToString = (documents: Document[]): string => { 62 | return documents.map((document) => document.pageContent).join("\n"); 63 | }; 64 | const contextRetrieverChain = RunnableSequence.from([ 65 | (input) => input.standalone_question, 66 | retriever, 67 | convertDocsToString, 68 | ]); 69 | 70 | const SYSTEM_TEMPLATE = ` 71 | 你是一个熟读刘慈欣的《球状闪电》的终极原着党,精通根据作品原文详细解释和回答问题,你在回答时会引用作品原文。 72 | 并且回答时仅根据原文,尽可能回答用户问题,如果原文中没有相关内容,你可以回答“原文中没有相关内容”, 73 | 74 | 以下是原文中跟用户回答相关的内容: 75 | {context} 76 | `; 77 | 78 | const prompt = ChatPromptTemplate.fromMessages([ 79 | ["system", SYSTEM_TEMPLATE], 80 | new MessagesPlaceholder("history"), 81 | ["human", "现在,你需要基于原文,回答以下问题:\n{standalone_question}`"], 82 | ]); 83 | const model = new ChatOpenAI(); 84 | const rephraseChain = await getRephraseChain(); 85 | 86 | const ragChain = RunnableSequence.from([ 87 | RunnablePassthrough.assign({ 88 | standalone_question: rephraseChain, 89 | }), 90 | RunnablePassthrough.assign({ 91 | context: contextRetrieverChain, 92 | }), 93 | prompt, 94 | model, 95 | new StringOutputParser(), 96 | ]); 97 | 98 | const chatHistoryDir = path.join(__dirname, "../../chat_data"); 99 | 100 | const ragChainWithHistory = new RunnableWithMessageHistory({ 101 | runnable: ragChain, 102 | getMessageHistory: (sessionId) => new JSONChatHistory({ sessionId, dir: chatHistoryDir }), 103 | historyMessagesKey: "history", 104 | inputMessagesKey: "question", 105 | }); 106 | 107 | return ragChainWithHistory; 108 | } 109 | 110 | async function run() { 111 | const ragChain = await getRagChain(); 112 | 113 | const res = await ragChain.invoke( 114 | { 115 | // question: "什么是球状闪电?", 116 | question: "这个现象在文中有什么故事", 117 | }, 118 | { 119 | configurable: { sessionId: "test-history" }, 120 | } 121 | ); 122 | 123 | console.log(res); 124 | } 125 | 126 | // 
run(); 127 | 128 | // testRephraseChain(); 129 | -------------------------------------------------------------------------------- /node/rag/prepare-qiu.ts: -------------------------------------------------------------------------------- 1 | import { TextLoader } from "langchain/document_loaders/fs/text"; 2 | import { RecursiveCharacterTextSplitter } from "langchain/text_splitter"; 3 | import "dotenv/config"; 4 | import { FaissStore } from "@langchain/community/vectorstores/faiss"; 5 | import { OpenAIEmbeddings } from "@langchain/openai"; 6 | import path from "path"; 7 | 8 | const run = async () => { 9 | const baseDir = __dirname; 10 | 11 | const loader = new TextLoader(path.join(baseDir, "../../data/qiu.txt")); 12 | const docs = await loader.load(); 13 | 14 | const splitter = new RecursiveCharacterTextSplitter({ 15 | chunkSize: 500, 16 | chunkOverlap: 100, 17 | }); 18 | 19 | const splitDocs = await splitter.splitDocuments(docs); 20 | 21 | const embeddings = new OpenAIEmbeddings(); 22 | const vectorStore = await FaissStore.fromDocuments(splitDocs, embeddings); 23 | 24 | await vectorStore.save(path.join(baseDir, "../../db/qiu")); 25 | }; 26 | 27 | run(); 28 | -------------------------------------------------------------------------------- /node/rag/server.ts: -------------------------------------------------------------------------------- 1 | import express from "express"; 2 | import { getRagChain } from "."; 3 | 4 | const app = express(); 5 | const port = 8080; 6 | 7 | app.use(express.json()); 8 | 9 | app.post("/", async (req, res) => { 10 | const ragChain = await getRagChain(); 11 | const body = req.body; 12 | const result = await ragChain.stream( 13 | { 14 | question: body.question, 15 | }, 16 | { configurable: { sessionId: body.session_id } } 17 | ); 18 | 19 | res.set("Content-Type", "text/plain"); 20 | for await (const chunk of result) { 21 | res.write(chunk); 22 | } 23 | res.end(); 24 | }); 25 | 26 | app.listen(port, () => { 27 | console.log(`Server is running on port ${port}`); 28 | }); 29 | -------------------------------------------------------------------------------- /node/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compileOnSave": false, 3 | "compilerOptions": { 4 | "moduleResolution": "node", 5 | "strict": false, 6 | "esModuleInterop": true, 7 | "target": "ES6" 8 | }, 9 | "exclude": ["node_modules"] 10 | } 11 | -------------------------------------------------------------------------------- /output-parser.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": null, 6 | "metadata": {}, 7 | "outputs": [], 8 | "source": [ 9 | "import { load } from \"dotenv\";\n", 10 | "const env = await load();\n", 11 | "\n", 12 | "const process = {\n", 13 | " env\n", 14 | "}" 15 | ] 16 | }, 17 | { 18 | "cell_type": "code", 19 | "execution_count": null, 20 | "metadata": {}, 21 | "outputs": [], 22 | "source": [ 23 | "import { ChatOpenAI } from \"@langchain/openai\";\n", 24 | "import { HumanMessage } from \"@langchain/core/messages\";\n", 25 | "\n", 26 | "const model = new ChatOpenAI();\n", 27 | "\n", 28 | "await model.invoke([\n", 29 | " new HumanMessage(\"Tell me a joke\")\n", 30 | "])" 31 | ] 32 | }, 33 | { 34 | "cell_type": "code", 35 | "execution_count": null, 36 | "metadata": {}, 37 | "outputs": [], 38 | "source": [ 39 | "import { StringOutputParser } from \"@langchain/core/output_parsers\";\n", 40 | "\n", 41 | "const parser = 
new StringOutputParser();\n", 42 | "const model = new ChatOpenAI();\n", 43 | "\n", 44 | "const chain = model.pipe(parser)\n", 45 | "\n", 46 | "await chain.invoke([\n", 47 | " new HumanMessage(\"Tell me a joke\")\n", 48 | "])" 49 | ] 50 | }, 51 | { 52 | "cell_type": "code", 53 | "execution_count": null, 54 | "metadata": {}, 55 | "outputs": [], 56 | "source": [ 57 | "import { StructuredOutputParser } from \"langchain/output_parsers\";\n", 58 | "import { PromptTemplate } from \"@langchain/core/prompts\";\n", 59 | "\n", 60 | "const parser = StructuredOutputParser.fromNamesAndDescriptions({\n", 61 | " answer: \"用户问题的答案\",\n", 62 | " evidence: \"你回答用户问题所依据的证据\",\n", 63 | " confidence: \"问题答案的可信度评分,格式是百分数\",\n", 64 | "});\n", 65 | "\n", 66 | "console.log(parser.getFormatInstructions())" 67 | ] 68 | }, 69 | { 70 | "cell_type": "code", 71 | "execution_count": null, 72 | "metadata": {}, 73 | "outputs": [], 74 | "source": [ 75 | "const prompt = PromptTemplate.fromTemplate(\"尽可能的回答用户的问题 \\n{instructions} \\n{question}\")\n", 76 | "const model = new ChatOpenAI();\n", 77 | "\n", 78 | "const chain = prompt.pipe(model).pipe(parser)" 79 | ] 80 | }, 81 | { 82 | "cell_type": "code", 83 | "execution_count": null, 84 | "metadata": {}, 85 | "outputs": [], 86 | "source": [ 87 | "const res = await chain.invoke({\n", 88 | " question: \"蒙娜丽莎的作者是谁?是什么时候绘制的\",\n", 89 | " instructions: parser.getFormatInstructions()\n", 90 | "})\n", 91 | " \n", 92 | "console.log(res)" 93 | ] 94 | }, 95 | { 96 | "cell_type": "code", 97 | "execution_count": null, 98 | "metadata": {}, 99 | "outputs": [], 100 | "source": [ 101 | "import { CommaSeparatedListOutputParser } from \"@langchain/core/output_parsers\";\n", 102 | "\n", 103 | "const parser = new CommaSeparatedListOutputParser();\n", 104 | "\n", 105 | "console.log(parser.getFormatInstructions())" 106 | ] 107 | }, 108 | { 109 | "cell_type": "code", 110 | "execution_count": null, 111 | "metadata": {}, 112 | "outputs": [], 113 | "source": [ 114 | "const model = new ChatOpenAI();\n", 115 | "const prompt = PromptTemplate.fromTemplate(\"列出3个 {country} 的著名的互联网公司.\\n{instructions}\")\n", 116 | " \n", 117 | "const chain = prompt.pipe(model).pipe(parser)\n", 118 | "\n", 119 | "const response = await chain.invoke({\n", 120 | " country: \"America\",\n", 121 | " instructions: parser.getFormatInstructions(),\n", 122 | "});\n", 123 | "\n", 124 | "response" 125 | ] 126 | }, 127 | { 128 | "cell_type": "code", 129 | "execution_count": null, 130 | "metadata": {}, 131 | "outputs": [], 132 | "source": [ 133 | "import { z } from \"zod\";\n", 134 | "import { StructuredOutputParser, OutputFixingParser } from \"langchain/output_parsers\";\n", 135 | "import { PromptTemplate } from \"@langchain/core/prompts\";\n", 136 | "import { ChatOpenAI } from \"@langchain/openai\";\n", 137 | "import { HumanMessage } from \"@langchain/core/messages\";\n", 138 | "\n", 139 | "const schema = z.object({\n", 140 | " answer: z.string().describe(\"用户问题的答案\"),\n", 141 | " confidence: z.number().min(0).max(100).describe(\"问题答案的可信度评分,满分 100\")\n", 142 | "});" 143 | ] 144 | }, 145 | { 146 | "cell_type": "code", 147 | "execution_count": null, 148 | "metadata": {}, 149 | "outputs": [], 150 | "source": [ 151 | "const parser = StructuredOutputParser.fromZodSchema(schema);\n", 152 | "const prompt = PromptTemplate.fromTemplate(\"尽可能的回答用户的问题 \\n{instructions} \\n{question}\")\n", 153 | "const model = new ChatOpenAI();\n", 154 | "\n", 155 | "const chain = prompt.pipe(model).pipe(parser)\n", 156 | "const res = await chain.invoke({\n", 157 | "
question: \"蒙娜丽莎的作者是谁?是什么时候绘制的\",\n", 158 | " instructions: parser.getFormatInstructions()\n", 159 | "})\n", 160 | " \n", 161 | "console.log(res)" 162 | ] 163 | }, 164 | { 165 | "cell_type": "code", 166 | "execution_count": null, 167 | "metadata": {}, 168 | "outputs": [], 169 | "source": [ 170 | "const wrongOutput = {\n", 171 | " \"answer\": \"蒙娜丽莎的作者是达芬奇,大约在16世纪初期(1503年至1506年之间)开始绘制。\",\n", 172 | " \"sources\": \"90%\" \n", 173 | "};\n", 174 | "\n", 175 | "const fixParser = OutputFixingParser.fromLLM(model, parser);\n", 176 | "const output = await fixParser.parse(JSON.stringify(wrongOutput));\n" 177 | ] 178 | }, 179 | { 180 | "cell_type": "code", 181 | "execution_count": null, 182 | "metadata": {}, 183 | "outputs": [], 184 | "source": [ 185 | "output" 186 | ] 187 | }, 188 | { 189 | "cell_type": "code", 190 | "execution_count": null, 191 | "metadata": {}, 192 | "outputs": [], 193 | "source": [ 194 | "const wrongOutput = {\n", 195 | " \"answer\": \"蒙娜丽莎的作者是达芬奇,大约在16世纪初期(1503年至1506年之间)开始绘制。\",\n", 196 | " \"sources\": \"-1\" \n", 197 | "};\n", 198 | "\n", 199 | "const fixParser = OutputFixingParser.fromLLM(model, parser);\n", 200 | "const output = await fixParser.parse(JSON.stringify(wrongOutput));\n", 201 | "\n", 202 | "output" 203 | ] 204 | }, 205 | { 206 | "cell_type": "code", 207 | "execution_count": null, 208 | "metadata": {}, 209 | "outputs": [], 210 | "source": [ 211 | "console.log(fixParser.getFormatInstructions())" 212 | ] 213 | } 214 | ], 215 | "metadata": { 216 | "kernelspec": { 217 | "display_name": "Deno", 218 | "language": "typescript", 219 | "name": "deno" 220 | }, 221 | "language_info": { 222 | "file_extension": ".ts", 223 | "mimetype": "text/x.typescript", 224 | "name": "typescript", 225 | "nb_converter": "script", 226 | "pygments_lexer": "typescript", 227 | "version": "5.3.3" 228 | } 229 | }, 230 | "nbformat": 4, 231 | "nbformat_minor": 4 232 | } 233 | -------------------------------------------------------------------------------- /prompt-template.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": null, 6 | "metadata": {}, 7 | "outputs": [], 8 | "source": [ 9 | "import { PromptTemplate } from \"@langchain/core/prompts\";\n", 10 | "\n", 11 | "const greetingPrompt = new PromptTemplate({\n", 12 | " inputVariables: [],\n", 13 | " template: \"hello world\",\n", 14 | "});\n", 15 | "const formattedGreetingPrompt = await greetingPrompt.format();\n", 16 | "\n", 17 | "console.log(formattedGreetingPrompt);" 18 | ] 19 | }, 20 | { 21 | "cell_type": "code", 22 | "execution_count": null, 23 | "metadata": { 24 | "scrolled": true 25 | }, 26 | "outputs": [], 27 | "source": [ 28 | "const personalizedGreetingPrompt = new PromptTemplate({\n", 29 | " inputVariables: [\"name\"],\n", 30 | " template: \"hello,{name} {{test}}\",\n", 31 | "});\n", 32 | "const formattedPersonalizedGreeting = await personalizedGreetingPrompt.format({\n", 33 | " name: \"Kai\",\n", 34 | "});\n", 35 | "\n", 36 | "console.log(formattedPersonalizedGreeting);" 37 | ] 38 | }, 39 | { 40 | "cell_type": "code", 41 | "execution_count": null, 42 | "metadata": {}, 43 | "outputs": [], 44 | "source": [ 45 | "const multiVariableGreetingPrompt = new PromptTemplate({\n", 46 | " inputVariables: [\"timeOfDay\", \"name\"],\n", 47 | " template: \"good {timeOfDay}, {name}\",\n", 48 | "});\n", 49 | "const formattedMultiVariableGreeting = await multiVariableGreetingPrompt.format({\n", 50 | " timeOfDay: \"morning\",\n", 51 | " name: 
\"Kai\",\n", 52 | "});\n", 53 | "\n", 54 | "console.log(formattedMultiVariableGreeting);\n", 55 | "// good morning, Kai\n" 56 | ] 57 | }, 58 | { 59 | "cell_type": "code", 60 | "execution_count": null, 61 | "metadata": {}, 62 | "outputs": [], 63 | "source": [ 64 | "const multiVariableGreetingPrompt = new PromptTemplate({\n", 65 | " inputVariables: [\"timeOfDay\", \"name\"],\n", 66 | " template: \"good {timeOfDay}, {name} {{test}}\",\n", 67 | "});\n", 68 | "const formattedMultiVariableGreeting = await multiVariableGreetingPrompt.format({\n", 69 | " timeOfDay: \"morning\",\n", 70 | " name: \"Kai\",\n", 71 | "});\n", 72 | "\n", 73 | "console.log(formattedMultiVariableGreeting);\n", 74 | "// good morning, Kai {test}\n" 75 | ] 76 | }, 77 | { 78 | "cell_type": "code", 79 | "execution_count": null, 80 | "metadata": {}, 81 | "outputs": [], 82 | "source": [ 83 | "const autoInferTemplate = PromptTemplate.fromTemplate(\"good {timeOfDay}, {name}\");\n", 84 | "console.log(autoInferTemplate.inputVariables);\n", 85 | "// ['timeOfDay', 'name']\n", 86 | "\n", 87 | "const formattedAutoInferTemplate = await autoInferTemplate.format({\n", 88 | " timeOfDay: \"morning\",\n", 89 | " name: \"Kai\",\n", 90 | "});\n", 91 | "console.log(formattedAutoInferTemplate)\n", 92 | "// good morning, Kai" 93 | ] 94 | }, 95 | { 96 | "cell_type": "code", 97 | "execution_count": null, 98 | "metadata": {}, 99 | "outputs": [], 100 | "source": [ 101 | "const initialPrompt = new PromptTemplate({\n", 102 | " template: \"这是一个{type},它是{item}。\",\n", 103 | " inputVariables: [\"type\", \"item\"],\n", 104 | "});\n", 105 | "\n", 106 | "\n", 107 | "const partialedPrompt = await initialPrompt.partial({\n", 108 | " type: \"工具\",\n", 109 | "});\n", 110 | "\n", 111 | "const formattedPrompt = await partialedPrompt.format({\n", 112 | " item: \"锤子\",\n", 113 | "});\n", 114 | "\n", 115 | "console.log(formattedPrompt);\n", 116 | "// 这是一个工具,它是锤子。\n", 117 | "\n", 118 | "const formattedPrompt2 = await partialedPrompt.format({\n", 119 | " item: \"改锥\",\n", 120 | "});\n", 121 | "\n", 122 | "console.log(formattedPrompt2)\n", 123 | "// 这是一个工具,它是改锥。" 124 | ] 125 | }, 126 | { 127 | "cell_type": "code", 128 | "execution_count": null, 129 | "metadata": {}, 130 | "outputs": [], 131 | "source": [ 132 | "const getCurrentDateStr = () => {\n", 133 | " return new Date().toLocaleDateString();\n", 134 | "};\n", 135 | "\n", 136 | "const promptWithDate = new PromptTemplate({\n", 137 | " template: \"今天是{date},{activity}。\",\n", 138 | " inputVariables: [\"date\", \"activity\"],\n", 139 | "});\n", 140 | "\n", 141 | "const partialedPromptWithDate = await promptWithDate.partial({\n", 142 | " date: getCurrentDateStr,\n", 143 | "});\n", 144 | "\n", 145 | "const formattedPromptWithDate = await partialedPromptWithDate.format({\n", 146 | " activity: \"我们去爬山\",\n", 147 | "});\n", 148 | "\n", 149 | "console.log(formattedPromptWithDate);\n", 150 | "// 输出: 今天是2023/7/13,我们去爬山。\n" 151 | ] 152 | }, 153 | { 154 | "cell_type": "code", 155 | "execution_count": null, 156 | "metadata": {}, 157 | "outputs": [], 158 | "source": [ 159 | "const getCurrentDateStr = () => {\n", 160 | " return new Date().toLocaleDateString();\n", 161 | "};\n", 162 | "\n", 163 | "function generateGreeting(timeOfDay) {\n", 164 | " return () => {\n", 165 | " const date = getCurrentDateStr()\n", 166 | " switch (timeOfDay) {\n", 167 | " case 'morning':\n", 168 | " return date + ' 早上好';\n", 169 | " case 'afternoon':\n", 170 | " return date + ' 下午好';\n", 171 | " case 'evening':\n", 172 | " return date + ' 晚上好';\n", 173 | " 
default:\n", 174 | " return date + ' 你好';\n", 175 | " }\n", 176 | " };\n", 177 | "}\n", 178 | "\n", 179 | "const prompt = new PromptTemplate({\n", 180 | " template: \"{greeting}!\",\n", 181 | " inputVariables: [\"greeting\"],\n", 182 | "});\n", 183 | "\n", 184 | "const currentTimeOfDay = 'afternoon';\n", 185 | "const partialPrompt = await prompt.partial({\n", 186 | " greeting: generateGreeting(currentTimeOfDay),\n", 187 | "});\n", 188 | "\n", 189 | "const formattedPrompt = await partialPrompt.format();\n", 190 | "\n", 191 | "console.log(formattedPrompt);\n", 192 | "// 输出: 3/21/2024 下午好!\n" 193 | ] 194 | }, 195 | { 196 | "cell_type": "code", 197 | "execution_count": null, 198 | "metadata": {}, 199 | "outputs": [], 200 | "source": [ 201 | "import { SystemMessagePromptTemplate } from \"@langchain/core/prompts\";\n", 202 | "\n", 203 | "const translateInstructionTemplate = SystemMessagePromptTemplate.fromTemplate(\"你是一个专业的翻译员,你的任务是将文本从{source_lang}翻译成{target_lang}。\");" 204 | ] 205 | }, 206 | { 207 | "cell_type": "code", 208 | "execution_count": null, 209 | "metadata": {}, 210 | "outputs": [], 211 | "source": [ 212 | "import { HumanMessagePromptTemplate } from \"@langchain/core/prompts\";\n", 213 | "\n", 214 | "const userQuestionTemplate = HumanMessagePromptTemplate.fromTemplate(\"请翻译这句话:{text}\")\n" 215 | ] 216 | }, 217 | { 218 | "cell_type": "code", 219 | "execution_count": null, 220 | "metadata": {}, 221 | "outputs": [], 222 | "source": [ 223 | "import { ChatPromptTemplate } from \"@langchain/core/prompts\";\n", 224 | "\n", 225 | "const chatPrompt = ChatPromptTemplate.fromMessages([\n", 226 | " translateInstructionTemplate,\n", 227 | " userQuestionTemplate,\n", 228 | "]);" 229 | ] 230 | }, 231 | { 232 | "cell_type": "code", 233 | "execution_count": null, 234 | "metadata": {}, 235 | "outputs": [], 236 | "source": [ 237 | "const formattedChatPrompt = await chatPrompt.formatMessages({\n", 238 | " source_lang: \"中文\",\n", 239 | " target_lang: \"法语\",\n", 240 | " text: \"你好,世界\",\n", 241 | "});\n", 242 | "formattedChatPrompt" 243 | ] 244 | }, 245 | { 246 | "cell_type": "code", 247 | "execution_count": null, 248 | "metadata": {}, 249 | "outputs": [], 250 | "source": [ 251 | "const systemTemplate = \"你是一个专业的翻译员,你的任务是将文本从{source_lang}翻译成{target_lang}。\";\n", 252 | "const humanTemplate = \"请翻译这句话:{text}\";\n", 253 | "\n", 254 | "const chatPrompt = ChatPromptTemplate.fromMessages([\n", 255 | " [\"system\", systemTemplate],\n", 256 | " [\"human\", humanTemplate],\n", 257 | "]);" 258 | ] 259 | }, 260 | { 261 | "cell_type": "code", 262 | "execution_count": null, 263 | "metadata": {}, 264 | "outputs": [], 265 | "source": [ 266 | "import { load } from \"dotenv\";\n", 267 | "import { ChatOpenAI } from \"@langchain/openai\";\n", 268 | "import { StringOutputParser } from \"@langchain/core/output_parsers\";\n", 269 | "\n", 270 | "const env = await load();\n", 271 | "const process = {\n", 272 | " env\n", 273 | "}\n", 274 | "\n", 275 | "const chatModel = new ChatOpenAI();\n", 276 | "const outputPraser = new StringOutputParser();" 277 | ] 278 | }, 279 | { 280 | "cell_type": "code", 281 | "execution_count": null, 282 | "metadata": {}, 283 | "outputs": [], 284 | "source": [ 285 | "const chain = chatPrompt.pipe(chatModel).pipe(outputPraser);\n", 286 | "\n", 287 | "await chain.invoke({\n", 288 | " source_lang: \"中文\",\n", 289 | " target_lang: \"法语\",\n", 290 | " text: \"你好,世界\",\n", 291 | "})" 292 | ] 293 | }, 294 | { 295 | "cell_type": "code", 296 | "execution_count": null, 297 | "metadata": {}, 298 | "outputs": [], 
299 | "source": [ 300 | "import {\n", 301 | " PromptTemplate,\n", 302 | " PipelinePromptTemplate,\n", 303 | "} from \"@langchain/core/prompts\";\n", 304 | "\n", 305 | "const getCurrentDateStr = () => {\n", 306 | " return new Date().toLocaleDateString();\n", 307 | "};\n", 308 | "\n", 309 | "const fullPrompt = PromptTemplate.fromTemplate(`\n", 310 | "你是一个智能管家,今天是 {date},你的主人的信息是{info}, \n", 311 | "根据上下文,完成主人的需求\n", 312 | "{task}`);\n", 313 | "\n", 314 | "const datePrompt = PromptTemplate.fromTemplate(\"{date},现在是 {period}\")\n", 315 | "const periodPrompt = await datePrompt.partial({\n", 316 | " date: getCurrentDateStr\n", 317 | "})\n", 318 | "\n", 319 | "const infoPrompt = PromptTemplate.fromTemplate(\"姓名是 {name}, 性别是 {gender}\");\n", 320 | "\n", 321 | "const taskPrompt = PromptTemplate.fromTemplate(`\n", 322 | "我想吃 {period} 的 {food}。 \n", 323 | "再重复一遍我的信息 {info}`);\n", 324 | "\n", 325 | "const composedPrompt = new PipelinePromptTemplate({\n", 326 | " pipelinePrompts: [\n", 327 | " {\n", 328 | " name: \"date\",\n", 329 | " prompt: periodPrompt,\n", 330 | " },\n", 331 | " {\n", 332 | " name: \"info\",\n", 333 | " prompt: infoPrompt,\n", 334 | " },\n", 335 | " {\n", 336 | " name: \"task\",\n", 337 | " prompt: taskPrompt,\n", 338 | " },\n", 339 | " ],\n", 340 | " finalPrompt: fullPrompt,\n", 341 | "});\n", 342 | "\n", 343 | "const formattedPrompt = await composedPrompt.format({\n", 344 | " period: \"早上\",\n", 345 | " name: \"张三\",\n", 346 | " gender: \"male\",\n", 347 | " food: \"lemon\"\n", 348 | "});\n", 349 | "\n", 350 | "console.log(formattedPrompt)" 351 | ] 352 | } 353 | ], 354 | "metadata": { 355 | "kernelspec": { 356 | "display_name": "Deno", 357 | "language": "typescript", 358 | "name": "deno" 359 | }, 360 | "language_info": { 361 | "file_extension": ".ts", 362 | "mimetype": "text/x.typescript", 363 | "name": "typescript", 364 | "nb_converter": "script", 365 | "pygments_lexer": "typescript", 366 | "version": "5.3.3" 367 | } 368 | }, 369 | "nbformat": 4, 370 | "nbformat_minor": 4 371 | } 372 | -------------------------------------------------------------------------------- /rag.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": null, 6 | "metadata": {}, 7 | "outputs": [], 8 | "source": [ 9 | "import { load } from \"dotenv\";\n", 10 | "const env = await load();\n", 11 | "\n", 12 | "const process = {\n", 13 | " env\n", 14 | "}" 15 | ] 16 | }, 17 | { 18 | "cell_type": "code", 19 | "execution_count": null, 20 | "metadata": {}, 21 | "outputs": [], 22 | "source": [ 23 | "import { TextLoader } from \"langchain/document_loaders/fs/text\";\n", 24 | "const loader = new TextLoader(\"data/qiu.txt\");\n", 25 | "const docs = await loader.load();" 26 | ] 27 | }, 28 | { 29 | "cell_type": "code", 30 | "execution_count": null, 31 | "metadata": {}, 32 | "outputs": [], 33 | "source": [ 34 | "import { RecursiveCharacterTextSplitter } from \"langchain/text_splitter\";\n", 35 | "\n", 36 | "const splitter = new RecursiveCharacterTextSplitter({\n", 37 | " chunkSize: 500,\n", 38 | " chunkOverlap: 100,\n", 39 | " });\n", 40 | "\n", 41 | "const splitDocs = await splitter.splitDocuments(docs);\n" 42 | ] 43 | }, 44 | { 45 | "cell_type": "code", 46 | "execution_count": null, 47 | "metadata": {}, 48 | "outputs": [], 49 | "source": [ 50 | "console.log(splitDocs[4])" 51 | ] 52 | }, 53 | { 54 | "cell_type": "code", 55 | "execution_count": null, 56 | "metadata": {}, 57 | "outputs": [], 58 | "source": [ 59 | 
"console.log(splitDocs[4].pageContent)" 60 | ] 61 | }, 62 | { 63 | "cell_type": "code", 64 | "execution_count": null, 65 | "metadata": {}, 66 | "outputs": [], 67 | "source": [ 68 | "import { OpenAIEmbeddings } from \"@langchain/openai\";\n", 69 | "\n", 70 | "\n", 71 | "const embeddings = new OpenAIEmbeddings();" 72 | ] 73 | }, 74 | { 75 | "cell_type": "code", 76 | "execution_count": null, 77 | "metadata": {}, 78 | "outputs": [], 79 | "source": [ 80 | "import { MemoryVectorStore } from \"langchain/vectorstores/memory\";\n", 81 | "\n", 82 | "const vectorstore = new MemoryVectorStore(embeddings);\n", 83 | "await vectorstore.addDocuments(splitDocs);" 84 | ] 85 | }, 86 | { 87 | "cell_type": "code", 88 | "execution_count": null, 89 | "metadata": {}, 90 | "outputs": [], 91 | "source": [ 92 | "const retriever = vectorstore.asRetriever(2)" 93 | ] 94 | }, 95 | { 96 | "cell_type": "code", 97 | "execution_count": null, 98 | "metadata": {}, 99 | "outputs": [], 100 | "source": [ 101 | "const res = await retriever.invoke(\"原文中,谁提出了宏原子的假设?并详细介绍给我宏原子假设的理论\")" 102 | ] 103 | }, 104 | { 105 | "cell_type": "code", 106 | "execution_count": null, 107 | "metadata": {}, 108 | "outputs": [], 109 | "source": [ 110 | "res" 111 | ] 112 | }, 113 | { 114 | "cell_type": "code", 115 | "execution_count": null, 116 | "metadata": {}, 117 | "outputs": [], 118 | "source": [ 119 | "import { RunnableSequence } from \"@langchain/core/runnables\";\n", 120 | "import { Document } from \"@langchain/core/documents\";\n", 121 | "\n", 122 | "const convertDocsToString = (documents: Document[]): string => {\n", 123 | " return documents.map((document) => document.pageContent).join(\"\\n\")\n", 124 | " }\n", 125 | "const contextRetriverChain = RunnableSequence.from([\n", 126 | " (input) => input.question,\n", 127 | " retriever,\n", 128 | " convertDocsToString\n", 129 | "])" 130 | ] 131 | }, 132 | { 133 | "cell_type": "code", 134 | "execution_count": null, 135 | "metadata": {}, 136 | "outputs": [], 137 | "source": [ 138 | "const result = await contextRetriverChain.invoke({ question: \"原文中,谁提出了宏原子的假设?并详细介绍给我宏原子假设的理论\"})\n", 139 | "\n", 140 | "console.log(result)" 141 | ] 142 | }, 143 | { 144 | "cell_type": "code", 145 | "execution_count": null, 146 | "metadata": {}, 147 | "outputs": [], 148 | "source": [ 149 | "import { ChatPromptTemplate } from \"@langchain/core/prompts\";\n", 150 | "\n", 151 | "const TEMPLATE = `\n", 152 | "你是一个熟读刘慈欣的《球状闪电》的终极原著党,精通根据作品原文详细解释和回答问题,你在回答时会引用作品原文。\n", 153 | "并且回答时仅根据原文,尽可能回答用户问题,如果原文中没有相关内容,你可以回答“原文中没有相关内容”,\n", 154 | "\n", 155 | "以下是原文中跟用户回答相关的内容:\n", 156 | "{context}\n", 157 | "\n", 158 | "现在,你需要基于原文,回答以下问题:\n", 159 | "{question}`;\n", 160 | "\n", 161 | "const prompt = ChatPromptTemplate.fromTemplate(\n", 162 | " TEMPLATE\n", 163 | ");" 164 | ] 165 | }, 166 | { 167 | "cell_type": "code", 168 | "execution_count": null, 169 | "metadata": {}, 170 | "outputs": [], 171 | "source": [ 172 | "import { ChatOpenAI } from \"@langchain/openai\";\n", 173 | "\n", 174 | "const model = new ChatOpenAI();" 175 | ] 176 | }, 177 | { 178 | "cell_type": "code", 179 | "execution_count": null, 180 | "metadata": {}, 181 | "outputs": [], 182 | "source": [ 183 | "import { StringOutputParser } from \"@langchain/core/output_parsers\";\n", 184 | "\n", 185 | "const ragChain = RunnableSequence.from([\n", 186 | " {\n", 187 | " context: contextRetriverChain,\n", 188 | " question: (input) => input.question,\n", 189 | " },\n", 190 | " prompt,\n", 191 | " model,\n", 192 | " new StringOutputParser()\n", 193 | "])" 194 | ] 195 | }, 196 | { 197 | 
"cell_type": "code", 198 | "execution_count": null, 199 | "metadata": {}, 200 | "outputs": [], 201 | "source": [ 202 | "const answer = await ragChain.invoke({\n", 203 | " question: \"什么是球状闪电\"\n", 204 | " });\n", 205 | " \n", 206 | " console.log(answer);" 207 | ] 208 | }, 209 | { 210 | "cell_type": "code", 211 | "execution_count": null, 212 | "metadata": {}, 213 | "outputs": [], 214 | "source": [ 215 | "const answer = await ragChain.invoke({\n", 216 | " question: \"详细描述原文中有什么跟直升机相关的场景\"\n", 217 | " });\n", 218 | "\n", 219 | "console.log(answer);" 220 | ] 221 | } 222 | ], 223 | "metadata": { 224 | "kernelspec": { 225 | "display_name": "Deno", 226 | "language": "typescript", 227 | "name": "deno" 228 | }, 229 | "language_info": { 230 | "file_extension": ".ts", 231 | "mimetype": "text/x.typescript", 232 | "name": "typescript", 233 | "nb_converter": "script", 234 | "pygments_lexer": "typescript", 235 | "version": "5.3.3" 236 | } 237 | }, 238 | "nbformat": 4, 239 | "nbformat_minor": 4 240 | } 241 | -------------------------------------------------------------------------------- /splitter.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": null, 6 | "metadata": {}, 7 | "outputs": [], 8 | "source": [ 9 | "import { TextLoader } from \"langchain/document_loaders/fs/text\";\n", 10 | "const loader = new TextLoader(\"data/kong.txt\");\n", 11 | "const docs = await loader.load();\n" 12 | ] 13 | }, 14 | { 15 | "cell_type": "code", 16 | "execution_count": null, 17 | "metadata": {}, 18 | "outputs": [], 19 | "source": [ 20 | "import { RecursiveCharacterTextSplitter } from \"langchain/text_splitter\";\n", 21 | "\n", 22 | "const splitter = new RecursiveCharacterTextSplitter({\n", 23 | " chunkSize: 64,\n", 24 | " chunkOverlap: 0,\n", 25 | " });\n", 26 | "\n", 27 | "const splitDocs = await splitter.splitDocuments(docs);" 28 | ] 29 | }, 30 | { 31 | "cell_type": "code", 32 | "execution_count": null, 33 | "metadata": {}, 34 | "outputs": [], 35 | "source": [ 36 | "const splitter = new RecursiveCharacterTextSplitter({\n", 37 | " chunkSize: 64,\n", 38 | " chunkOverlap: 16,\n", 39 | " });\n", 40 | "\n", 41 | "const splitDocs = await splitter.splitDocuments(docs);" 42 | ] 43 | }, 44 | { 45 | "cell_type": "code", 46 | "execution_count": null, 47 | "metadata": {}, 48 | "outputs": [], 49 | "source": [ 50 | "splitDocs" 51 | ] 52 | }, 53 | { 54 | "cell_type": "code", 55 | "execution_count": null, 56 | "metadata": {}, 57 | "outputs": [], 58 | "source": [ 59 | "import { SupportedTextSplitterLanguages } from \"langchain/text_splitter\";\n", 60 | "\n", 61 | "console.log(SupportedTextSplitterLanguages); " 62 | ] 63 | }, 64 | { 65 | "cell_type": "code", 66 | "execution_count": null, 67 | "metadata": {}, 68 | "outputs": [], 69 | "source": [ 70 | "import { RecursiveCharacterTextSplitter } from \"langchain/text_splitter\";\n", 71 | "\n", 72 | "const js = `\n", 73 | "function myFunction(name,job){\n", 74 | "\tconsole.log(\"Welcome \" + name + \", the \" + job);\n", 75 | "}\n", 76 | "\n", 77 | "myFunction('Harry Potter','Wizard')\n", 78 | "\n", 79 | "function forFunction(){\n", 80 | "\tfor (let i=0; i<5; i++){\n", 81 | " console.log(\"这个数字是\" + i)\n", 82 | "\t}\n", 83 | "}\n", 84 | "\n", 85 | "forFunction()\n", 86 | "`;\n", 87 | "\n", 88 | "const splitter = RecursiveCharacterTextSplitter.fromLanguage(\"js\", {\n", 89 | " chunkSize: 64,\n", 90 | " chunkOverlap: 0,\n", 91 | "});\n", 92 | "const jsOutput = await 
splitter.createDocuments([js]);" 93 | ] 94 | }, 95 | { 96 | "cell_type": "code", 97 | "execution_count": null, 98 | "metadata": {}, 99 | "outputs": [], 100 | "source": [ 101 | "jsOutput" 102 | ] 103 | }, 104 | { 105 | "cell_type": "code", 106 | "execution_count": null, 107 | "metadata": {}, 108 | "outputs": [], 109 | "source": [ 110 | "import { TokenTextSplitter } from \"langchain/text_splitter\";\n", 111 | "\n", 112 | "const text = \"I stand before you today the representative of a family in grief, in a country in mourning before a world in shock.\";\n", 113 | "\n", 114 | "const splitter = new TokenTextSplitter({\n", 115 | " encodingName: \"gpt2\",\n", 116 | " chunkSize: 10,\n", 117 | " chunkOverlap: 0,\n", 118 | "});\n", 119 | "\n", 120 | "const docs = await splitter.createDocuments([text]);" 121 | ] 122 | }, 123 | { 124 | "cell_type": "code", 125 | "execution_count": null, 126 | "metadata": {}, 127 | "outputs": [], 128 | "source": [ 129 | "docs" 130 | ] 131 | } 132 | ], 133 | "metadata": { 134 | "kernelspec": { 135 | "display_name": "Deno", 136 | "language": "typescript", 137 | "name": "deno" 138 | }, 139 | "language_info": { 140 | "file_extension": ".ts", 141 | "mimetype": "text/x.typescript", 142 | "name": "typescript", 143 | "nb_converter": "script", 144 | "pygments_lexer": "typescript", 145 | "version": "5.3.3" 146 | } 147 | }, 148 | "nbformat": 4, 149 | "nbformat_minor": 4 150 | } 151 | -------------------------------------------------------------------------------- /test/data/05-versions-space.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/RealKai42/langchainjs-juejin/49b8764d37f76e02861bc3d8b01508fe4805ca64/test/data/05-versions-space.pdf -------------------------------------------------------------------------------- /tool-lesson.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": null, 6 | "metadata": {}, 7 | "outputs": [], 8 | "source": [ 9 | "import { load } from \"dotenv\";\n", 10 | "const env = await load();" 11 | ] 12 | }, 13 | { 14 | "cell_type": "code", 15 | "execution_count": null, 16 | "metadata": {}, 17 | "outputs": [], 18 | "source": [ 19 | "import OpenAI from \"openai\";\n", 20 | "\n", 21 | "const openai = new OpenAI({\n", 22 | " apiKey: env[\"AZURE_OPENAI_API_KEY\"],\n", 23 | " baseURL: `https://${env[\"AZURE_OPENAI_API_INSTANCE_NAME\"]}.openai.azure.com/openai/deployments/${env[\"AZURE_OPENAI_API_DEPLOYMENT_NAME\"]}`,\n", 24 | " defaultQuery: { 'api-version': env[\"AZURE_OPENAI_API_VERSION\"] },\n", 25 | " defaultHeaders: { 'api-key': env[\"AZURE_OPENAI_API_KEY\"] },\n", 26 | "});" 27 | ] 28 | }, 29 | { 30 | "cell_type": "code", 31 | "execution_count": null, 32 | "metadata": {}, 33 | "outputs": [], 34 | "source": [ 35 | "const result = await openai.chat.completions.create({\n", 36 | " // model: 'gpt-3.5-turbo',\n", 37 | " model: env[\"AZURE_OPENAI_API_DEPLOYMENT_NAME\"],\n", 38 | " messages: [{ role: 'user', content: 'Say hello!' 
}],\n", 39 | " });\n", 40 | " console.log(result.choices[0]!.message?.content);" 41 | ] 42 | }, 43 | { 44 | "cell_type": "code", 45 | "execution_count": null, 46 | "metadata": {}, 47 | "outputs": [], 48 | "source": [ 49 | "function getCurrentWeather({ location, unit=\"fahrenheit\"}){\n", 50 | " const weather_info = {\n", 51 | " \"location\": location,\n", 52 | " \"temperature\": \"72\",\n", 53 | " \"unit\": unit,\n", 54 | " \"forecast\": [\"sunny\", \"windy\"],\n", 55 | " }\n", 56 | " return JSON.stringify(weather_info);\n", 57 | "}" 58 | ] 59 | }, 60 | { 61 | "cell_type": "code", 62 | "execution_count": null, 63 | "metadata": {}, 64 | "outputs": [], 65 | "source": [ 66 | "const tools = [\n", 67 | " {\n", 68 | " type: \"function\",\n", 69 | " function: {\n", 70 | " name: \"getCurrentWeather\",\n", 71 | " description: \"Get the current weather in a given location\",\n", 72 | " parameters: {\n", 73 | " type: \"object\",\n", 74 | " properties: {\n", 75 | " location: {\n", 76 | " type: \"string\",\n", 77 | " description: \"The city and state, e.g. San Francisco, CA\",\n", 78 | " },\n", 79 | " unit: { \n", 80 | " type: \"string\", \n", 81 | " enum: [\"celsius\", \"fahrenheit\"],\n", 82 | " description: \"The unit of temperature\"\n", 83 | " },\n", 84 | " },\n", 85 | " required: [\"location\", \"unit\"],\n", 86 | " },\n", 87 | " },\n", 88 | " }\n", 89 | "]" 90 | ] 91 | }, 92 | { 93 | "cell_type": "code", 94 | "execution_count": null, 95 | "metadata": {}, 96 | "outputs": [], 97 | "source": [ 98 | " const messages = [\n", 99 | " {\n", 100 | " \"role\": \"user\",\n", 101 | " // \"content\": \"北京的天气怎么样\"\n", 102 | " \"content\": \"What's the weather like in Redmond?\"\n", 103 | "\n", 104 | " }\n", 105 | "]\n", 106 | "\n", 107 | "const result = await openai.chat.completions.create({\n", 108 | " // model: 'gpt-3.5-turbo',\n", 109 | " model: env[\"AZURE_OPENAI_API_DEPLOYMENT_NAME\"],\n", 110 | " messages,\n", 111 | " tools\n", 112 | " });\n", 113 | " console.log(result);" 114 | ] 115 | }, 116 | { 117 | "cell_type": "code", 118 | "execution_count": null, 119 | "metadata": {}, 120 | "outputs": [], 121 | "source": [ 122 | "result.choices[0]" 123 | ] 124 | }, 125 | { 126 | "cell_type": "code", 127 | "execution_count": null, 128 | "metadata": {}, 129 | "outputs": [], 130 | "source": [ 131 | "\n", 132 | "const messages = [\n", 133 | " {\n", 134 | " \"role\": \"user\",\n", 135 | " \"content\": \"你好\"\n", 136 | " }\n", 137 | "]\n", 138 | "\n", 139 | "const result = await openai.chat.completions.create({\n", 140 | " // model: 'gpt-3.5-turbo',\n", 141 | " model: env[\"AZURE_OPENAI_API_DEPLOYMENT_NAME\"],\n", 142 | " messages,\n", 143 | " tools,\n", 144 | " tool_choice: {\n", 145 | " type: \"function\",\n", 146 | " function: {\n", 147 | " name: \"getCurrentWeather\"\n", 148 | " }\n", 149 | " }\n", 150 | " });" 151 | ] 152 | }, 153 | { 154 | "cell_type": "code", 155 | "execution_count": null, 156 | "metadata": {}, 157 | "outputs": [], 158 | "source": [ 159 | "\n", 160 | "const functions = {\n", 161 | " \"getCurrentWeather\": getCurrentWeather\n", 162 | " }\n", 163 | "\n", 164 | "const functionInfo = result.choices[0].message.tool_calls[0].function\n", 165 | "const functionName = functionInfo.name;\n", 166 | "const functionParams = functionInfo.arguments\n", 167 | "\n", 168 | "const functionResult = functions[functionName](functionParams);\n", 169 | "\n", 170 | "console.log(functionResult);" 171 | ] 172 | }, 173 | { 174 | "cell_type": "code", 175 | "execution_count": null, 176 | "metadata": {}, 177 | "outputs": [], 
178 | "source": [ 179 | "function getCurrentTime({ format = \"iso\" } = {}) {\n", 180 | " let currentTime;\n", 181 | " switch (format) {\n", 182 | " case \"iso\":\n", 183 | " currentTime = new Date().toISOString();\n", 184 | " break;\n", 185 | " case \"locale\":\n", 186 | " currentTime = new Date().toLocaleString();\n", 187 | " break;\n", 188 | " default:\n", 189 | " currentTime = new Date().toString();\n", 190 | " break;\n", 191 | " }\n", 192 | " return currentTime;\n", 193 | "}\n", 194 | "\n", 195 | "const tools = [\n", 196 | " {\n", 197 | " type: \"function\",\n", 198 | " function: {\n", 199 | " name: \"getCurrentTime\",\n", 200 | " description: \"Get the current time in a given format\",\n", 201 | " parameters: {\n", 202 | " type: \"object\",\n", 203 | " properties: {\n", 204 | " format: {\n", 205 | " type: \"string\",\n", 206 | " enum: [\"iso\", \"locale\", \"string\"],\n", 207 | " description: \"The format of the time, e.g. iso, locale, string\",\n", 208 | " },\n", 209 | " },\n", 210 | " required: [],\n", 211 | " },\n", 212 | " },\n", 213 | " },\n", 214 | " {\n", 215 | " type: \"function\",\n", 216 | " function: {\n", 217 | " name: \"getCurrentWeather\",\n", 218 | " description: \"Get the current weather in a given location\",\n", 219 | " parameters: {\n", 220 | " type: \"object\",\n", 221 | " properties: {\n", 222 | " location: {\n", 223 | " type: \"string\",\n", 224 | " description: \"The city and state, e.g. San Francisco, CA\",\n", 225 | " },\n", 226 | " unit: { type: \"string\", enum: [\"celsius\", \"fahrenheit\"] },\n", 227 | " },\n", 228 | " required: [\"location\", \"unit\"],\n", 229 | " },\n", 230 | " },\n", 231 | " }\n", 232 | "];" 233 | ] 234 | }, 235 | { 236 | "cell_type": "code", 237 | "execution_count": null, 238 | "metadata": {}, 239 | "outputs": [], 240 | "source": [ 241 | "\n", 242 | "const messages = [\n", 243 | " { role: \"user\", content: \" 上海 新疆 这三个城市的天气如何?\" },\n", 244 | "]\n", 245 | "\n", 246 | "const result = await openai.chat.completions.create({\n", 247 | " // model: 'gpt-3.5-turbo',\n", 248 | " model: env[\"AZURE_OPENAI_API_DEPLOYMENT_NAME\"],\n", 249 | " messages,\n", 250 | " tools\n", 251 | " });\n" 252 | ] 253 | }, 254 | { 255 | "cell_type": "code", 256 | "execution_count": null, 257 | "metadata": {}, 258 | "outputs": [], 259 | "source": [ 260 | "result.choices[0]" 261 | ] 262 | }, 263 | { 264 | "cell_type": "code", 265 | "execution_count": null, 266 | "metadata": {}, 267 | "outputs": [], 268 | "source": [ 269 | "\n", 270 | "messages.push(result.choices[0].message)\n", 271 | "\n", 272 | "const functions = {\n", 273 | " \"getCurrentWeather\": getCurrentWeather\n", 274 | " }\n", 275 | "\n", 276 | "const cell = result.choices[0].message.tool_calls[0]\n", 277 | "const functionInfo = cell.function\n", 278 | "const functionName = functionInfo.name;\n", 279 | "const functionParams = functionInfo.arguments\n", 280 | "const functionResult = functions[functionName](functionParams);\n", 281 | "\n", 282 | "console.log(messages);\n", 283 | "messages.push({\n", 284 | " tool_call_id: cell.id,\n", 285 | " role: \"tool\",\n", 286 | " name: functionName,\n", 287 | " content: functionResult,\n", 288 | "}); \n", 289 | "\n", 290 | "const response = await openai.chat.completions.create({\n", 291 | " // model: 'gpt-3.5-turbo',\n", 292 | " model: env[\"AZURE_OPENAI_API_DEPLOYMENT_NAME\"],\n", 293 | " messages,\n", 294 | "});\n", 295 | "console.log(response);" 296 | ] 297 | }, 298 | { 299 | "cell_type": "code", 300 | "execution_count": null, 301 | "metadata": {}, 302 | 
"outputs": [], 303 | "source": [ 304 | "response.choices[0].message" 305 | ] 306 | }, 307 | { 308 | "cell_type": "code", 309 | "execution_count": null, 310 | "metadata": {}, 311 | "outputs": [], 312 | "source": [ 313 | "messages" 314 | ] 315 | } 316 | ], 317 | "metadata": { 318 | "kernelspec": { 319 | "display_name": "Deno", 320 | "language": "typescript", 321 | "name": "deno" 322 | }, 323 | "language_info": { 324 | "file_extension": ".ts", 325 | "mimetype": "text/x.typescript", 326 | "name": "typescript", 327 | "nb_converter": "script", 328 | "pygments_lexer": "typescript", 329 | "version": "5.3.3" 330 | } 331 | }, 332 | "nbformat": 4, 333 | "nbformat_minor": 2 334 | } 335 | -------------------------------------------------------------------------------- /vector-store.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": null, 6 | "metadata": {}, 7 | "outputs": [], 8 | "source": [ 9 | "import { load } from \"dotenv\";\n", 10 | "const env = await load();\n", 11 | "\n", 12 | "const process = {\n", 13 | " env\n", 14 | "}" 15 | ] 16 | }, 17 | { 18 | "cell_type": "code", 19 | "execution_count": null, 20 | "metadata": {}, 21 | "outputs": [], 22 | "source": [ 23 | "import { TextLoader } from \"langchain/document_loaders/fs/text\";\n", 24 | "import { RecursiveCharacterTextSplitter } from \"langchain/text_splitter\";\n", 25 | "const loader = new TextLoader(\"data/kong.txt\");\n", 26 | "const docs = await loader.load();\n", 27 | "\n", 28 | "const splitter = new RecursiveCharacterTextSplitter({\n", 29 | " chunkSize: 100,\n", 30 | " chunkOverlap: 20,\n", 31 | " });\n", 32 | "\n", 33 | "const splitDocs = await splitter.splitDocuments(docs);" 34 | ] 35 | }, 36 | { 37 | "cell_type": "code", 38 | "execution_count": null, 39 | "metadata": {}, 40 | "outputs": [], 41 | "source": [ 42 | "splitDocs" 43 | ] 44 | }, 45 | { 46 | "cell_type": "code", 47 | "execution_count": null, 48 | "metadata": {}, 49 | "outputs": [], 50 | "source": [ 51 | "console.log(splitDocs[0])" 52 | ] 53 | }, 54 | { 55 | "cell_type": "code", 56 | "execution_count": null, 57 | "metadata": {}, 58 | "outputs": [], 59 | "source": [ 60 | "import { OpenAIEmbeddings } from \"@langchain/openai\";\n", 61 | "const embeddings = new OpenAIEmbeddings()\n", 62 | "\n", 63 | "const res = await embeddings.embedQuery(splitDocs[0].pageContent)" 64 | ] 65 | }, 66 | { 67 | "cell_type": "code", 68 | "execution_count": null, 69 | "metadata": {}, 70 | "outputs": [], 71 | "source": [ 72 | "res" 73 | ] 74 | }, 75 | { 76 | "cell_type": "code", 77 | "execution_count": null, 78 | "metadata": {}, 79 | "outputs": [], 80 | "source": [ 81 | "import { MemoryVectorStore } from \"langchain/vectorstores/memory\";\n", 82 | "\n", 83 | "const vectorstore = new MemoryVectorStore(embeddings);\n", 84 | "await vectorstore.addDocuments(splitDocs);" 85 | ] 86 | }, 87 | { 88 | "cell_type": "code", 89 | "execution_count": null, 90 | "metadata": {}, 91 | "outputs": [], 92 | "source": [ 93 | "const retriever = vectorstore.asRetriever(2)" 94 | ] 95 | }, 96 | { 97 | "cell_type": "code", 98 | "execution_count": null, 99 | "metadata": {}, 100 | "outputs": [], 101 | "source": [ 102 | "const res = await retriever.invoke(\"茴香豆是做什么用的\")" 103 | ] 104 | }, 105 | { 106 | "cell_type": "code", 107 | "execution_count": null, 108 | "metadata": {}, 109 | "outputs": [], 110 | "source": [ 111 | "res" 112 | ] 113 | }, 114 | { 115 | "cell_type": "code", 116 | "execution_count": null, 117 | 
"metadata": {}, 118 | "outputs": [], 119 | "source": [ 120 | "const res = await retriever.invoke(\"下酒菜一般是什么?\")\n", 121 | "res" 122 | ] 123 | }, 124 | { 125 | "cell_type": "code", 126 | "execution_count": null, 127 | "metadata": {}, 128 | "outputs": [], 129 | "source": [ 130 | "const res = await retriever.invoke(\"孔乙己用什么谋生?\")\n", 131 | "res" 132 | ] 133 | }, 134 | { 135 | "cell_type": "code", 136 | "execution_count": null, 137 | "metadata": {}, 138 | "outputs": [], 139 | "source": [] 140 | } 141 | ], 142 | "metadata": { 143 | "kernelspec": { 144 | "display_name": "Deno", 145 | "language": "typescript", 146 | "name": "deno" 147 | }, 148 | "language_info": { 149 | "file_extension": ".ts", 150 | "mimetype": "text/x.typescript", 151 | "name": "typescript", 152 | "nb_converter": "script", 153 | "pygments_lexer": "typescript", 154 | "version": "5.3.3" 155 | } 156 | }, 157 | "nbformat": 4, 158 | "nbformat_minor": 4 159 | } 160 | --------------------------------------------------------------------------------