├── .github ├── FUNDING.yml └── workflows │ └── swift.yml ├── .gitignore ├── .swiftpm └── xcode │ ├── package.xcworkspace │ └── xcshareddata │ │ └── IDEWorkspaceChecks.plist │ └── xcshareddata │ └── xcschemes │ ├── LangChain.xcscheme │ ├── LangChainTests.xcscheme │ └── langchain-swift-Package.xcscheme ├── LICENSE ├── Package.resolved ├── Package.swift ├── README.md ├── Sources └── LangChain │ ├── LangChain.swift │ ├── agents │ └── Agent.swift │ ├── cache │ └── Cache.swift │ ├── callbacks │ ├── BaseCallbackHandler.swift │ ├── StdOutCallbackHandler.swift │ └── TraceCallbackHandler.swift │ ├── chains │ ├── BaseChain.swift │ ├── DNChain.swift │ ├── LLMChain.swift │ ├── SequentialChain.swift │ ├── SimpleSequentialChain.swift │ ├── TransformChain.swift │ ├── qa │ │ ├── BaseCombineDocumentsChain.swift │ │ ├── BaseConversationalRetrievalChain.swift │ │ ├── ConversationalRetrievalChain.swift │ │ └── StuffDocumentsChain.swift │ └── router │ │ ├── LLMRouterChain.swift │ │ └── MultiRouteChain.swift │ ├── document_loaders │ ├── AudioLoader.swift │ ├── BaseLoader.swift │ ├── BilibiliLoader.swift │ ├── HtmlLoader.swift │ ├── ImageOCRLoader.swift │ ├── NotionLoader.swift │ ├── PDFLoader.swift │ ├── RSSLoader.swift │ ├── TextLoader.swift │ └── YoutubeLoader.swift │ ├── embeddings │ ├── Distilbert.swift │ ├── Embeddings.swift │ ├── OllamaEmbeddings.swift │ └── OpenAIEmbeddings.swift │ ├── llms │ ├── Baidu.swift │ ├── ChatGLM.swift │ ├── ChatOllama.swift │ ├── ChatOpenAI.swift │ ├── Dalle.swift │ ├── Gemini.swift │ ├── HuggingFace.swift │ ├── LLM.swift │ ├── LMStudio.swift │ ├── Llama2.swift │ ├── Local.swift │ ├── Ollama.swift │ └── OpenAI.swift │ ├── memory │ ├── Base.swift │ ├── Chat.swift │ └── ReadOnlySharedMemory.swift │ ├── parser │ ├── BaseOutputParser.swift │ ├── DateOutputParser.swift │ ├── EnumOutputParser.swift │ ├── ListOutputParser.swift │ ├── MRKLOutputParser.swift │ ├── ObjectOutputParser.swift │ ├── RouterOutputParser.swift │ └── SimpleJsonOutputParser.swift │ ├── prompts │ ├── MultiPromptRouter.swift │ ├── PromptTemplate.swift │ └── mrkl │ │ └── MrklPrompt.swift │ ├── retriever │ ├── BaseRetriever.swift │ ├── MultiVectorRetriever.swift │ ├── ParentDocumentRetriever.swift │ ├── PubmedRetriever.swift │ └── WikipediaRetriever.swift │ ├── schema │ ├── BaseStore.swift │ ├── InMemoryStore.swift │ ├── LocalFileStore.swift │ └── Schema.swift │ ├── tools │ ├── BaseTool.swift │ ├── Dummy.swift │ ├── GetLocationTool.swift │ ├── InvalidTool.swift │ ├── JavascriptREPLTool.swift │ ├── Serper.swift │ ├── TTSTool.swift │ └── WeatherTool.swift │ ├── utilities │ ├── GoogleSerperAPIWrapper.swift │ ├── HFInferenceApi.swift │ ├── LC.swift │ ├── LlamaAPIWrapper.swift │ ├── OpenWeatherAPIWrapper.swift │ ├── TextSplitter.swift │ ├── ThreadManager.swift │ ├── anotheropenai │ │ └── OpenAITTSAPIWrapper.swift │ ├── baidu │ │ └── BaiduClient.swift │ ├── bilibili │ │ ├── BilibiliClient.swift │ │ ├── BilibiliCredential.swift │ │ └── BilibiliVideo.swift │ ├── chatglm │ │ ├── ChatGLMAPIWrapper.swift │ │ └── ChatGLMModel.swift │ ├── dalle │ │ └── DalleImage.swift │ ├── jwt │ │ ├── Cryptor.swift │ │ ├── Extensions │ │ │ └── String.swift │ │ └── JWT.swift │ ├── pubmed │ │ ├── PubmedAPIWrapper.swift │ │ └── PubmedPage.swift │ ├── report │ │ ├── ReportKey.swift │ │ └── TraceManager.swift │ ├── wikipedia │ │ ├── WikipediaAPIWrapper.swift │ │ └── WikipediaPage.swift │ └── youtube │ │ ├── Transcript.swift │ │ ├── TranscriptList.swift │ │ ├── TranscriptListFetcher.swift │ │ ├── YoutubeHackClient.swift │ │ └── YoutubeInfo.swift │ 
└── vectorstores │ ├── SimilaritySearchKit.swift │ ├── Supabase.swift │ ├── VectorStore.swift │ └── supabase │ └── supabase.sql ├── Tests └── LangChainTests │ ├── Base64.swift │ ├── BySplit.swift │ └── langchain_swiftTests.swift ├── techstack.md └── techstack.yml
--------------------------------------------------------------------------------
/.github/FUNDING.yml:
--------------------------------------------------------------------------------
1 | custom: ['paypal.me/buhe1986']
2 | 
--------------------------------------------------------------------------------
/.github/workflows/swift.yml:
--------------------------------------------------------------------------------
1 | # This workflow will build a Swift project
2 | # For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-swift
3 | 
4 | name: Swift
5 | 
6 | on:
7 |   push:
8 |     branches: [ "main" ]
9 |   pull_request:
10 |     branches: [ "main" ]
11 | 
12 | jobs:
13 |   build:
14 | 
15 |     runs-on: macOS-latest
16 | 
17 |     steps:
18 |     - uses: actions/checkout@v4
19 |     - uses: swift-actions/setup-swift@v1
20 |     - name: Build
21 |       run: swift build -v
22 |     - name: Run tests
23 |       run: swift test -v
24 | 
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | .DS_Store
2 | /.build
3 | /Packages
4 | xcuserdata/
5 | DerivedData/
6 | .swiftpm/configuration/registries.json
7 | .swiftpm/xcode/package.xcworkspace/contents.xcworkspacedata
8 | .netrc
9 | 
--------------------------------------------------------------------------------
/.swiftpm/xcode/package.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
3 | <plist version="1.0">
4 | <dict>
5 | 	<key>IDEDidComputeMac32BitWarning</key>
6 | 	<true/>
7 | </dict>
8 | </plist>
9 | 
--------------------------------------------------------------------------------
/.swiftpm/xcode/xcshareddata/xcschemes/LangChain.xcscheme:
--------------------------------------------------------------------------------
(Xcode scheme XML not preserved in this text dump.)
--------------------------------------------------------------------------------
/.swiftpm/xcode/xcshareddata/xcschemes/LangChainTests.xcscheme:
--------------------------------------------------------------------------------
(Xcode scheme XML not preserved in this text dump.)
--------------------------------------------------------------------------------
/.swiftpm/xcode/xcshareddata/xcschemes/langchain-swift-Package.xcscheme:
--------------------------------------------------------------------------------
(Xcode scheme XML not preserved in this text dump.)
--------------------------------------------------------------------------------
/Package.swift:
--------------------------------------------------------------------------------
1 | // swift-tools-version: 5.9.0
2 | // The swift-tools-version declares the minimum version of Swift required to build this package.
3 | 
4 | import PackageDescription
5 | 
6 | let package = Package(
7 |     name: "langchain-swift",
8 |     platforms: [
9 |         .iOS(.v15),
10 |         .macOS(.v12),
11 |         .watchOS(.v8),
12 |         .visionOS(.v1)
13 |     ],
14 |     products: [
15 |         // Products define the executables and libraries a package produces, making them visible to other packages.
16 | .library( 17 | name: "LangChain", 18 | targets: ["LangChain"]), 19 | ], 20 | dependencies: [ 21 | .package(url: "https://github.com/buhe/openai-kit", .upToNextMajor(from: "1.8.5")), 22 | .package(url: "https://github.com/supabase-community/supabase-swift", .upToNextMajor(from: "0.2.1")), 23 | .package(url: "https://github.com/SwiftyJSON/SwiftyJSON", .upToNextMajor(from: "5.0.1")), 24 | .package(url: "https://github.com/drmohundro/SWXMLHash", .upToNextMajor(from: "7.0.2")), 25 | .package(url: "https://github.com/scinfu/SwiftSoup", .upToNextMajor(from: "2.6.1")), 26 | .package(url: "https://github.com/juyan/swift-filestore", .upToNextMajor(from: "0.5.0")), 27 | .package(url: "https://github.com/buhe/similarity-search-kit", from: "0.0.16"), 28 | .package(url: "https://github.com/google/generative-ai-swift", .upToNextMajor(from: "0.4.4")), 29 | .package(url: "https://github.com/buhe/SwiftyNotion", .upToNextMajor(from: "0.1.5")), 30 | .package(url: "https://github.com/nmdias/FeedKit", .upToNextMajor(from: "9.1.2")), 31 | ], 32 | targets: [ 33 | // Targets are the basic building blocks of a package, defining a module or a test suite. 34 | // Targets can depend on other targets in this package and products from dependencies. 35 | .target( 36 | name: "LangChain", 37 | dependencies: [ 38 | .product(name: "OpenAIKit", package: "openai-kit"), 39 | .product(name: "Supabase", package: "supabase-swift"), 40 | .product(name: "SwiftyJSON", package: "SwiftyJSON"), 41 | .product(name: "SWXMLHash", package: "SWXMLHash"), 42 | .product(name: "SwiftSoup", package: "SwiftSoup"), 43 | .product(name: "SwiftFileStore", package: "swift-filestore"), 44 | .product(name: "SimilaritySearchKit", package: "similarity-search-kit", condition: .when(platforms: [.macOS, .iOS, .visionOS])), 45 | // .product(name: "SimilaritySearchKitDistilbert", package: "similarity-search-kit", condition: .when(platforms: [.macOS, .iOS, .visionOS])), 46 | .product(name: "GoogleGenerativeAI", package: "generative-ai-swift"), 47 | .product(name: "SwiftyNotion", package: "SwiftyNotion"), 48 | .product(name: "FeedKit", package: "FeedKit"), 49 | ] 50 | 51 | ), 52 | .testTarget( 53 | name: "LangChainTests", 54 | dependencies: ["LangChain"]), 55 | ] 56 | ) 57 | -------------------------------------------------------------------------------- /Sources/LangChain/LangChain.swift: -------------------------------------------------------------------------------- 1 | // 2 | // File.swift 3 | // 4 | // 5 | // Created by 顾艳华 on 2023/6/11. 
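// A consuming package would pull the "LangChain" product in through SwiftPM roughly as in the
// sketch below; the repository URL and version are illustrative placeholders, not taken from
// the manifest above:
//
//     dependencies: [
//         .package(url: "https://github.com/buhe/langchain-swift", from: "0.0.0"),
//     ],
//     targets: [
//         .target(name: "MyApp", dependencies: [
//             .product(name: "LangChain", package: "langchain-swift"),
//         ]),
//     ]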
6 | // 7 | 8 | import Foundation 9 | import CommonCrypto 10 | 11 | enum LangChainError: Error { 12 | case LoaderError(String) 13 | case ChainError 14 | case ToolError 15 | } 16 | 17 | extension Data{ 18 | public func sha256() -> String{ 19 | return hexStringFromData(input: digest(input: self as NSData)) 20 | } 21 | 22 | private func digest(input : NSData) -> NSData { 23 | let digestLength = Int(CC_SHA256_DIGEST_LENGTH) 24 | var hash = [UInt8](repeating: 0, count: digestLength) 25 | CC_SHA256(input.bytes, UInt32(input.length), &hash) 26 | return NSData(bytes: hash, length: digestLength) 27 | } 28 | 29 | private func hexStringFromData(input: NSData) -> String { 30 | var bytes = [UInt8](repeating: 0, count: input.length) 31 | input.getBytes(&bytes, length: input.length) 32 | 33 | var hexString = "" 34 | for byte in bytes { 35 | hexString += String(format:"%02x", UInt8(byte)) 36 | } 37 | 38 | return hexString 39 | } 40 | } 41 | 42 | public extension String { 43 | func sha256() -> String{ 44 | if let stringData = self.data(using: String.Encoding.utf8) { 45 | return stringData.sha256() 46 | } 47 | return "" 48 | } 49 | } 50 | -------------------------------------------------------------------------------- /Sources/LangChain/cache/Cache.swift: -------------------------------------------------------------------------------- 1 | // 2 | // File.swift 3 | // 4 | // 5 | // Created by 顾艳华 on 2023/10/30. 6 | // 7 | 8 | import Foundation 9 | import SwiftFileStore 10 | 11 | public class BaseCache { 12 | public init() {} 13 | public func lookup(prompt: String) async -> LLMResult? { 14 | nil 15 | } 16 | public func update(prompt: String, return_val: LLMResult) async { 17 | 18 | } 19 | //For test? 20 | public func clear() { 21 | 22 | } 23 | } 24 | 25 | public class InMemoryCache: BaseCache { 26 | 27 | var memery: [String: LLMResult] = [:] 28 | public override func lookup(prompt: String) async -> LLMResult? { 29 | print("🍰 Get \(prompt) from cache") 30 | return memery[prompt] 31 | } 32 | public override func update(prompt: String, return_val: LLMResult) async { 33 | print("🍰 Update \(prompt)") 34 | memery[prompt] = return_val 35 | } 36 | public override func clear() { 37 | memery = [:] 38 | } 39 | } 40 | struct LLMCache: Codable, JSONDataRepresentable { 41 | let key: String 42 | let value: String 43 | } 44 | public class FileCache: BaseCache { 45 | let objectStore: FileObjectStore? 46 | 47 | public override init() { 48 | do { 49 | self.objectStore = try FileObjectStore.create() 50 | } catch { 51 | self.objectStore = nil 52 | } 53 | } 54 | public override func lookup(prompt: String) async -> LLMResult? { 55 | // print("🍰 Get \(prompt) from file") 56 | do { 57 | if let data = prompt.data(using: .utf8) { 58 | let base64 = data.base64EncodedString() 59 | 60 | let cache = try await objectStore!.read(key: base64.sha256(), namespace: "llm_cache", objectType: LLMCache.self) 61 | if let c = cache { 62 | return LLMResult(llm_output: c.value) 63 | } 64 | } 65 | return nil 66 | } catch { 67 | print("FileCache get failed") 68 | return nil 69 | } 70 | 71 | 72 | } 73 | public override func update(prompt: String, return_val: LLMResult) async { 74 | // print("🍰 Update \(prompt) at file") 75 | do { 76 | if let data = prompt.data(using: .utf8) { 77 | let base64 = data.base64EncodedString() 78 | let cache = LLMCache(key: prompt, value: return_val.llm_output!) 
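                // Both lookup(prompt:) and update(prompt:return_val:) derive the same store key:
                // the SHA-256 hex digest (via the Data extension in LangChain.swift above) of the
                // base64-encoded prompt. The write below therefore targets exactly the entry that a
                // later lookup for the same prompt reads from the "llm_cache" namespace.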
79 | try await objectStore!.write(key: base64.sha256(), namespace: "llm_cache", object: cache) 80 | } 81 | } catch { 82 | print("FileCache set failed") 83 | } 84 | } 85 | public override func clear() { 86 | 87 | } 88 | } 89 | -------------------------------------------------------------------------------- /Sources/LangChain/callbacks/BaseCallbackHandler.swift: -------------------------------------------------------------------------------- 1 | // 2 | // File.swift 3 | // 4 | // 5 | // Created by 顾艳华 on 2023/9/6. 6 | // 7 | 8 | import Foundation 9 | open class BaseCallbackHandler: LLMManagerMixin, ChainManagerMixin, CallbackManagerMixin, ToolManagerMixin, LoaderManagerMixin { 10 | 11 | public init() { 12 | 13 | } 14 | // Loader 15 | open func on_loader_start(type: String, metadata: [String : String]) throws { 16 | 17 | } 18 | 19 | open func on_loader_error(type: String, cause: String, metadata: [String : String]) throws { 20 | 21 | } 22 | 23 | open func on_loader_end(type: String, metadata: [String : String]) throws { 24 | 25 | } 26 | 27 | // Agent 28 | open func on_agent_start(prompt: String, metadata: [String : String]) throws { 29 | 30 | } 31 | 32 | open func on_llm_error(error: Error, metadata: [String: String]) throws { 33 | 34 | } 35 | 36 | open func on_llm_start(prompt: String, metadata: [String: String]) throws { 37 | 38 | } 39 | 40 | // Manage callback 41 | open func on_chain_start(prompts: String, metadata: [String: String]) throws { 42 | 43 | } 44 | 45 | open func on_tool_start(tool: BaseTool, input: String, metadata: [String: String]) throws { 46 | 47 | } 48 | 49 | // Chain callback 50 | open func on_chain_end(output: String, metadata: [String: String]) throws { 51 | 52 | } 53 | 54 | open func on_chain_error(error: Error, metadata: [String: String]) throws { 55 | 56 | } 57 | 58 | open func on_agent_action(action: AgentAction, metadata: [String: String]) throws { 59 | 60 | } 61 | 62 | open func on_agent_finish(action: AgentFinish, metadata: [String: String]) throws { 63 | 64 | } 65 | 66 | 67 | // LLM callback 68 | open func on_llm_new_token(metadata: [String: String]) { 69 | 70 | } 71 | 72 | open func on_llm_end(output: String, metadata: [String: String]) throws { 73 | 74 | } 75 | 76 | // Tool callback 77 | open func on_tool_end(tool: BaseTool, output: String, metadata: [String: String]) throws { 78 | 79 | } 80 | 81 | open func on_tool_error(error: Error, metadata: [String: String]) throws { 82 | 83 | } 84 | } 85 | public protocol LoaderManagerMixin { 86 | func on_loader_start(type: String, metadata: [String: String]) throws 87 | 88 | func on_loader_error(type: String, cause: String, metadata: [String: String]) throws 89 | 90 | func on_loader_end(type: String, metadata: [String: String]) throws 91 | } 92 | public protocol LLMManagerMixin { 93 | func on_llm_new_token(metadata: [String: String]) 94 | 95 | func on_llm_end(output: String, metadata: [String: String]) throws 96 | 97 | func on_llm_error(error: Error, metadata: [String: String]) throws 98 | } 99 | 100 | public protocol ChainManagerMixin { 101 | func on_chain_end(output: String, metadata: [String: String]) throws 102 | 103 | func on_chain_error(error: Error, metadata: [String: String]) throws 104 | 105 | func on_agent_action(action: AgentAction, metadata: [String: String]) throws 106 | 107 | func on_agent_finish(action: AgentFinish, metadata: [String: String]) throws 108 | } 109 | 110 | public protocol CallbackManagerMixin { 111 | func on_chain_start(prompts: String, metadata: [String: String]) throws 112 | 113 | func 
on_tool_start(tool: BaseTool, input: String, metadata: [String: String]) throws 114 | 115 | func on_llm_start(prompt: String, metadata: [String: String]) throws 116 | 117 | func on_agent_start(prompt: String, metadata: [String: String]) throws 118 | } 119 | 120 | public protocol ToolManagerMixin { 121 | func on_tool_end(tool: BaseTool, output: String, metadata: [String: String]) throws 122 | 123 | func on_tool_error(error: Error, metadata: [String: String]) throws 124 | } 125 | -------------------------------------------------------------------------------- /Sources/LangChain/callbacks/StdOutCallbackHandler.swift: -------------------------------------------------------------------------------- 1 | // 2 | // File.swift 3 | // 4 | // 5 | // Created by 顾艳华 on 2023/9/6. 6 | // 7 | 8 | import Foundation 9 | 10 | public class StdOutCallbackHandler: BaseCallbackHandler { 11 | public override init() { 12 | } 13 | public override func on_chain_end(output: String, metadata: [String: String]) { 14 | print("💁🏻‍♂️", "[DEBUG] Finished chain, output is '\(output)'.") 15 | } 16 | 17 | public override func on_chain_start(prompts: String, metadata: [String: String]) { 18 | print("💁🏻‍♂️", "[DEBUG] Entering new {class_name} chain. with '\(prompts)'..") 19 | } 20 | 21 | public override func on_chain_error(error: Error, metadata: [String: String]) throws { 22 | print("💁🏻‍♂️", "[DEBUG] Catch chain error: '\(error.localizedDescription)'") 23 | } 24 | 25 | public override func on_llm_end(output: String, metadata: [String: String]) throws { 26 | print("💁🏻‍♂️", "[DEBUG] Finished LLM, output is '\(output)'.") 27 | } 28 | 29 | public override func on_tool_start(tool: BaseTool, input: String, metadata: [String: String]) throws { 30 | print("💁🏻‍♂️", "[DEBUG] Entering Tool of \(tool.name()) ,desc: \(tool.description()) with '\(input)'..") 31 | } 32 | 33 | public override func on_tool_end(tool: BaseTool, output: String, metadata: [String: String]) throws { 34 | print("💁🏻‍♂️", "[DEBUG] Finished Tool of \(tool.name()) ,desc: \(tool.description()), output is '\(output)'.") 35 | } 36 | 37 | public override func on_agent_start(prompt: String, metadata: [String : String]) throws { 38 | print("💁🏻‍♂️", "[DEBUG] Entering new Agent. with '\(prompt)'..") 39 | } 40 | 41 | public override func on_agent_action(action: AgentAction, metadata: [String: String]) throws { 42 | print("💁🏻‍♂️", "[DEBUG] Agent step is \(action.action), log: '\(action.log)'.") 43 | } 44 | 45 | public override func on_agent_finish(action: AgentFinish, metadata: [String: String]) throws { 46 | print("💁🏻‍♂️", "[DEBUG] Agent finish: \(action.final)") 47 | } 48 | 49 | public override func on_llm_start(prompt: String, metadata: [String: String]) throws { 50 | print("💁🏻‍♂️", "[DEBUG] Entering new LLM. 
with '\(prompt)'..")
51 |     }
52 | 
53 |     public override func on_llm_error(error: Error, metadata: [String: String]) throws {
54 |         print("💁🏻‍♂️", "[DEBUG] Catch LLM error: '\(error.localizedDescription)'")
55 |     }
56 | 
57 |     public override func on_loader_start(type: String, metadata: [String : String]) throws {
58 |         print("💁🏻‍♂️", "[DEBUG] Entering new \(type) loader")
59 |     }
60 | 
61 |     public override func on_loader_error(type: String, cause: String, metadata: [String : String]) throws {
62 |         print("💁🏻‍♂️", "[DEBUG] Catch \(type) loader error: '\(cause)'")
63 |     }
64 | 
65 |     public override func on_loader_end(type: String, metadata: [String : String]) throws {
66 |         print("💁🏻‍♂️", "[DEBUG] Finished loader of \(type)")
67 |     }
68 | }
69 | 
--------------------------------------------------------------------------------
/Sources/LangChain/chains/BaseChain.swift:
--------------------------------------------------------------------------------
1 | //
2 | //  File.swift
3 | //
4 | //
5 | //  Created by 顾艳华 on 2023/6/19.
6 | //
7 | 
8 | import Foundation
9 | 
10 | public class DefaultChain {
11 |     static let CHAIN_REQ_ID_KEY = "chain_req_id"
12 |     static let CHAIN_COST_KEY = "cost"
13 |     public init(memory: BaseMemory? = nil, outputKey: String, inputKey: String, callbacks: [BaseCallbackHandler] = []) {
14 |         self.memory = memory
15 |         self.outputKey = outputKey
16 |         self.inputKey = inputKey
17 |         var cbs: [BaseCallbackHandler] = callbacks
18 |         if LC.addTraceCallbak() && !cbs.contains(where: { item in item is TraceCallbackHandler}) {
19 |             cbs.append(TraceCallbackHandler())
20 |         }
21 |         // assert(cbs.count == 1)
22 |         self.callbacks = cbs
23 |     }
24 |     let memory: BaseMemory?
25 |     let inputKey: String
26 |     let outputKey: String
27 |     let callbacks: [BaseCallbackHandler]
28 |     public func _call(args: String) async -> (LLMResult?, Parsed) {
29 |         print("call base.")
30 |         return (LLMResult(), Parsed.unimplemented)
31 |     }
32 | 
33 |     func callEnd(output: String, reqId: String, cost: Double) {
34 |         for callback in self.callbacks {
35 |             do {
36 |                 try callback.on_chain_end(output: output, metadata: [DefaultChain.CHAIN_REQ_ID_KEY: reqId, DefaultChain.CHAIN_COST_KEY: "\(cost)"])
37 |             } catch {
38 |                 print("call chain end callback error: \(error)")
39 |             }
40 |         }
41 |     }
42 | 
43 |     func callStart(prompt: String, reqId: String) {
44 |         for callback in self.callbacks {
45 |             do {
46 |                 try callback.on_chain_start(prompts: prompt, metadata: [DefaultChain.CHAIN_REQ_ID_KEY: reqId])
47 |             } catch {
48 |                 print("call chain start callback error: \(error)")
49 |             }
50 |         }
51 |     }
52 | 
53 |     func callCatch(error: Error, reqId: String, cost: Double) {
54 |         for callback in self.callbacks {
55 |             do {
56 |                 try callback.on_chain_error(error: error, metadata: [DefaultChain.CHAIN_REQ_ID_KEY: reqId, DefaultChain.CHAIN_COST_KEY: "\(cost)"])
57 |             } catch {
58 |                 print("call chain error callback error: \(error)")
59 |             }
60 |         }
61 |     }
62 | 
63 |     // This interface already returns an 'LLMResult', ensuring the 'run' method can support a streaming style.
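    // A custom handler is a plain subclass of BaseCallbackHandler (defined above); every hook is
    // optional. A minimal sketch, passed to any chain or loader through its `callbacks:` parameter:
    //
    //     class LoggingHandler: BaseCallbackHandler {
    //         override func on_chain_start(prompts: String, metadata: [String: String]) throws {
    //             print("chain started with: \(prompts)")
    //         }
    //         override func on_chain_end(output: String, metadata: [String: String]) throws {
    //             print("chain finished with: \(output)")
    //         }
    //     }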
64 | public func run(args: String) async -> Parsed { 65 | let _ = prep_inputs(inputs: [inputKey: args]) 66 | // = Langchain's run + __call__ 67 | let reqId = UUID().uuidString 68 | var cost = 0.0 69 | let now = Date.now.timeIntervalSince1970 70 | 71 | callStart(prompt: args, reqId: reqId) 72 | let outputs = await self._call(args: args) 73 | if let llmResult = outputs.0 { 74 | cost = Date.now.timeIntervalSince1970 - now 75 | //call end trace 76 | // if !outputs.0.stream { 77 | callEnd(output: llmResult.llm_output!, reqId: reqId, cost: cost) 78 | // } else { 79 | // callEnd(output: "[LLM is streamable]", reqId: reqId, cost: cost) 80 | // } 81 | let _ = prep_outputs(inputs: [inputKey: args], outputs: [self.outputKey: llmResult.llm_output!]) 82 | return outputs.1 83 | } else { 84 | callCatch(error: LangChainError.ChainError, reqId: reqId, cost: cost) 85 | return Parsed.error 86 | } 87 | } 88 | 89 | func prep_outputs(inputs: [String: String], outputs: [String: String]) -> [String: String] { 90 | if self.memory != nil { 91 | self.memory!.save_context(inputs: inputs, outputs: outputs) 92 | } 93 | var m = inputs 94 | outputs.forEach { (key, value) in 95 | m[key] = value 96 | } 97 | return m 98 | } 99 | 100 | func prep_inputs(inputs: [String: String]) -> [String: String] { 101 | if self.memory != nil { 102 | var external_context = Dictionary(uniqueKeysWithValues: self.memory!.load_memory_variables(inputs: inputs).map {(key, value) in return (key, value.joined(separator: "\n"))}) 103 | // print("ctx: \(external_context)") 104 | inputs.forEach { (key, value) in 105 | external_context[key] = value 106 | } 107 | return external_context 108 | } else { 109 | return inputs 110 | } 111 | } 112 | } 113 | -------------------------------------------------------------------------------- /Sources/LangChain/chains/DNChain.swift: -------------------------------------------------------------------------------- 1 | // 2 | // File.swift 3 | // 4 | // 5 | // Created by 顾艳华 on 2023/8/9. 6 | // 7 | 8 | import Foundation 9 | public class DNChain: DefaultChain { 10 | public override init(memory: BaseMemory? = nil, outputKey: String = "output", inputKey: String = "input", callbacks: [BaseCallbackHandler] = []) { 11 | super.init(memory: memory, outputKey: outputKey, inputKey: inputKey, callbacks: callbacks) 12 | } 13 | public override func _call(args: String) async -> (LLMResult?, Parsed) { 14 | // print("Do nothing.") 15 | return (LLMResult(), Parsed.nothing) 16 | } 17 | 18 | } 19 | -------------------------------------------------------------------------------- /Sources/LangChain/chains/LLMChain.swift: -------------------------------------------------------------------------------- 1 | // 2 | // File.swift 3 | // 4 | // 5 | // Created by 顾艳华 on 2023/6/19. 6 | // 7 | 8 | import Foundation 9 | 10 | public class LLMChain: DefaultChain { 11 | let llm: LLM 12 | let prompt: PromptTemplate? 13 | let parser: BaseOutputParser? 14 | let stop: [String] 15 | 16 | public init(llm: LLM, prompt: PromptTemplate? = nil, parser: BaseOutputParser? = nil, stop: [String] = [], memory: BaseMemory? = nil, outputKey: String = "output", inputKey: String = "input", callbacks: [BaseCallbackHandler] = []) { 17 | self.llm = llm 18 | self.prompt = prompt 19 | self.parser = parser 20 | self.stop = stop 21 | super.init(memory: memory, outputKey: outputKey, inputKey: inputKey, callbacks: callbacks) 22 | } 23 | func create_outputs(output: LLMResult?) 
-> Parsed { 24 | if let output = output { 25 | if let parser = self.parser { 26 | return parser.parse(text: output.llm_output!) 27 | } else { 28 | return Parsed.str(output.llm_output!) 29 | } 30 | } else { 31 | return Parsed.error 32 | } 33 | } 34 | public override func _call(args: String) async -> (LLMResult?, Parsed) { 35 | // ["\\nObservation: ", "\\n\\tObservation: "] 36 | 37 | let llmResult = await generate(input_list: [inputKey: args]) 38 | 39 | return (llmResult, create_outputs(output: llmResult)) 40 | } 41 | func prep_prompts(input_list: [String: String]) -> String { 42 | if let prompt = self.prompt { 43 | return prompt.format(args: input_list) 44 | } else { 45 | return input_list.first!.value 46 | } 47 | } 48 | func generate(input_list: [String: String]) async -> LLMResult? { 49 | let input_prompt = prep_prompts(input_list: input_list) 50 | do { 51 | //call llm 52 | let llmResult = await self.llm.generate(text: input_prompt, stops: stop) 53 | try await llmResult?.setOutput() 54 | return llmResult 55 | } catch { 56 | print("LLM chain generate \(error.localizedDescription)") 57 | return nil 58 | } 59 | } 60 | 61 | public func apply(input_list: [String: String]) async -> Parsed { 62 | let response = await generate(input_list: input_list) 63 | return create_outputs(output: response) 64 | } 65 | 66 | public func plan(input: String, agent_scratchpad: String) async -> Parsed { 67 | return await apply(input_list: ["question": input, "thought": agent_scratchpad]) 68 | } 69 | 70 | public func predict(args: [String: String] ) async -> String? { 71 | let inputAndContext = prep_inputs(inputs: args) 72 | let outputs = await self.generate(input_list: inputAndContext) 73 | if let o = outputs { 74 | let _ = prep_outputs(inputs: args, outputs: [self.outputKey: o.llm_output!]) 75 | return o.llm_output! 76 | } else { 77 | return nil 78 | } 79 | } 80 | } 81 | -------------------------------------------------------------------------------- /Sources/LangChain/chains/SequentialChain.swift: -------------------------------------------------------------------------------- 1 | // 2 | // File.swift 3 | // 4 | // 5 | // Created by 顾艳华 on 2023/9/6. 6 | // 7 | 8 | import Foundation 9 | public class SequentialChain: DefaultChain { 10 | let chains: [DefaultChain] 11 | public init(chains: [DefaultChain], memory: BaseMemory? = nil, outputKey: String = "output", inputKey: String = "input", callbacks: [BaseCallbackHandler] = []) { 12 | self.chains = chains 13 | super.init(memory: memory, outputKey: outputKey, inputKey: inputKey, callbacks: callbacks) 14 | } 15 | public func predict(args: String) async throws -> [String: String] { 16 | var result: [String: String] = [:] 17 | var input: LLMResult? = LLMResult(llm_output: args) 18 | for chain in self.chains { 19 | // assert(chain.outputKey != nil, "chain.outputKey must not be nil") 20 | if input != nil { 21 | input = await chain._call(args: input!.llm_output!).0 22 | result.updateValue(input!.llm_output!, forKey: chain.outputKey) 23 | } else { 24 | print("A chain of SequentialChain fail") 25 | } 26 | } 27 | return result 28 | } 29 | } 30 | -------------------------------------------------------------------------------- /Sources/LangChain/chains/SimpleSequentialChain.swift: -------------------------------------------------------------------------------- 1 | // 2 | // File.swift 3 | // 4 | // 5 | // Created by 顾艳华 on 2023/9/6. 
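// A usage sketch for the LLMChain defined above. The concrete LLM is left as a parameter because
// no LLM initializer appears in this excerpt; the prompt text is illustrative.
func summarize(_ text: String, with llm: LLM) async -> String? {
    let prompt = PromptTemplate(input_variables: ["text"],
                                partial_variable: [:],
                                template: "Summarize the following in one sentence:\n{text}")
    let chain = LLMChain(llm: llm, prompt: prompt)
    return await chain.predict(args: ["text": text])
}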
6 | // 7 | 8 | import Foundation 9 | 10 | public class SimpleSequentialChain: DefaultChain { 11 | let chains: [DefaultChain] 12 | public init(chains: [DefaultChain], memory: BaseMemory? = nil, outputKey: String = "output", inputKey: String = "input", callbacks: [BaseCallbackHandler] = []) { 13 | self.chains = chains 14 | super.init(memory: memory, outputKey: outputKey, inputKey: inputKey, callbacks: callbacks) 15 | } 16 | public override func _call(args: String) async -> (LLMResult?, Parsed) { 17 | var result: LLMResult? = LLMResult(llm_output: args) 18 | for chain in self.chains { 19 | if result != nil { 20 | result = await chain._call(args: result!.llm_output!).0 21 | } else { 22 | print("A chain of SimpleSequentialChain fail") 23 | } 24 | } 25 | return (result, Parsed.nothing) 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /Sources/LangChain/chains/TransformChain.swift: -------------------------------------------------------------------------------- 1 | // 2 | // File.swift 3 | // 4 | // 5 | // Created by 顾艳华 on 2023/9/6. 6 | // 7 | 8 | import Foundation 9 | 10 | public class TransformChain: DefaultChain { 11 | public init(fn: @escaping (_: String) -> LLMResult?, memory: BaseMemory? = nil, outputKey: String = "output", inputKey: String = "input", callbacks: [BaseCallbackHandler] = []) { 12 | self.fn = fn 13 | super.init(memory: memory, outputKey: outputKey, inputKey: inputKey, callbacks: callbacks) 14 | } 15 | let fn: (_ args: String) async -> LLMResult? 16 | public override func _call(args: String) async -> (LLMResult?, Parsed) { 17 | return (await fn(args), Parsed.nothing) 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /Sources/LangChain/chains/qa/BaseCombineDocumentsChain.swift: -------------------------------------------------------------------------------- 1 | // 2 | // File.swift 3 | // 4 | // 5 | // Created by 顾艳华 on 2023/11/4. 6 | // 7 | 8 | import Foundation 9 | 10 | public class BaseCombineDocumentsChain: DefaultChain { 11 | public func predict(args: [String: String] ) async -> String? { 12 | let output = await self.combine_docs(docs: args["docs"]!, question: args["question"]!) 13 | return output 14 | } 15 | 16 | public func combine_docs(docs: String, question: String) async -> String? { 17 | "" 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /Sources/LangChain/chains/qa/BaseConversationalRetrievalChain.swift: -------------------------------------------------------------------------------- 1 | // 2 | // File.swift 3 | // 4 | // 5 | // Created by 顾艳华 on 2023/11/3. 6 | // 7 | 8 | import Foundation 9 | public class BaseConversationalRetrievalChain: DefaultChain { 10 | 11 | static let _template = """ 12 | Given the following conversation and a follow up question, rephrase the follow up question to be a standalone question, in its original language. 
13 | 14 | Chat History: 15 | {chat_history} 16 | Follow Up Input: {question} 17 | Standalone question: 18 | """ 19 | static let CONDENSE_QUESTION_PROMPT = PromptTemplate(input_variables: ["chat_history", "question"], partial_variable: [:], template: _template) 20 | 21 | let combineChain: BaseCombineDocumentsChain 22 | let condense_question_chain: LLMChain 23 | 24 | init(llm: LLM) { 25 | self.combineChain = StuffDocumentsChain(llm: llm) 26 | self.condense_question_chain = LLMChain(llm: llm, prompt: BaseConversationalRetrievalChain.CONDENSE_QUESTION_PROMPT) 27 | super.init(outputKey: "", inputKey: "") 28 | } 29 | public func get_docs(question: String) async -> String { 30 | "" 31 | } 32 | 33 | public func predict(args: [String: String] ) async -> (String, String?)? { 34 | let new_question = await self.condense_question_chain.predict(args: args) 35 | if let new_question = new_question { 36 | let output = await combineChain.predict(args: ["docs": await self.get_docs(question: new_question), "question": new_question]) 37 | if let text = output { 38 | let pattern = "Helpful\\s*Answer\\s*:[\\s]*(.*)[\\s]*Dependent\\s*text\\s*:[\\s]*(.*)" 39 | let regex = try! NSRegularExpression(pattern: pattern) 40 | 41 | if let match = regex.firstMatch(in: text, options: [], range: NSRange(location: 0, length: text.utf16.count)) { 42 | 43 | let firstCaptureGroup = Range(match.range(at: 1), in: text).map { String(text[$0]) } 44 | // print(firstCaptureGroup!) 45 | 46 | 47 | let secondCaptureGroup = Range(match.range(at: 2), in: text).map { String(text[$0]) } 48 | return (firstCaptureGroup!, secondCaptureGroup!) 49 | } else { 50 | return (text.replacingOccurrences(of: "Helpful Answer:", with: ""), nil) 51 | } 52 | } 53 | } 54 | return nil 55 | } 56 | 57 | 58 | public static func get_chat_history(chat_history: [(String, String)]) -> String { 59 | var buffer = "" 60 | for dialogue_turn in chat_history { 61 | let human = "Human: " + dialogue_turn.0 62 | let ai = "Assistant: " + dialogue_turn.1 63 | buffer += "\n\(human)\n\(ai)" 64 | } 65 | return buffer 66 | // chat_history.joined(separator: "\n") 67 | } 68 | } 69 | -------------------------------------------------------------------------------- /Sources/LangChain/chains/qa/ConversationalRetrievalChain.swift: -------------------------------------------------------------------------------- 1 | // 2 | // File.swift 3 | // 4 | // 5 | // Created by 顾艳华 on 2023/11/3. 6 | // 7 | 8 | import Foundation 9 | public class ConversationalRetrievalChain: BaseConversationalRetrievalChain { 10 | let retriver: BaseRetriever 11 | public init(retriver: BaseRetriever, llm: LLM) { 12 | self.retriver = retriver 13 | super.init(llm: llm) 14 | } 15 | 16 | public override func get_docs(question: String) async -> String { 17 | let docs = await retriver.get_relevant_documents(query: question) 18 | let docsStr = docs.map{$0.page_content}.joined(separator: "\n\n").prefix(50000) 19 | print("🦙>>Collect docs: \(docsStr.prefix(10))... \(docsStr.count) count") 20 | return "\(docsStr)" 21 | } 22 | } 23 | -------------------------------------------------------------------------------- /Sources/LangChain/chains/qa/StuffDocumentsChain.swift: -------------------------------------------------------------------------------- 1 | // 2 | // File.swift 3 | // 4 | // 5 | // Created by 顾艳华 on 2023/11/4. 
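// A usage sketch for the retrieval QA chain above, assuming an existing BaseRetriever and LLM
// instance (neither is constructed in this excerpt). The `retriver:` label is spelled exactly as
// in the initializer above.
func answer(question: String, history: [(String, String)],
            retriever: BaseRetriever, llm: LLM) async -> (String, String?)? {
    let chain = ConversationalRetrievalChain(retriver: retriever, llm: llm)
    let chatHistory = BaseConversationalRetrievalChain.get_chat_history(chat_history: history)
    return await chain.predict(args: ["question": question, "chat_history": chatHistory])
}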
6 | // 7 | 8 | import Foundation 9 | 10 | public class StuffDocumentsChain: BaseCombineDocumentsChain { 11 | 12 | static let prompt_template = """ 13 | Use the following pieces of context to answer the question at the end, use context language. If you don't know the answer, just say that you don't know, don't try to make up an answer, And give the original text on which the answer is based. 14 | Note the answer must be in the same language as the question 15 | Note that the original text must be in its original language 16 | Note that the original text must be in the original document. 17 | Note that the original text retains its formatting, including line breaks, etc., even in the middle of sentences. 18 | 19 | Use the following format: 20 | Context: pieces of context to answer 21 | Question: the input question you must answer 22 | Helpful Answer: the final answer to the original input question 23 | Dependent text: the original text on which the answer depends 24 | 25 | Begin! 26 | 27 | Context: {context} 28 | Question: {question} 29 | """ 30 | static let PROMPT = PromptTemplate(input_variables: ["context", "question"], partial_variable: [:], template: prompt_template) 31 | 32 | let llm_chain: LLMChain 33 | init(llm: LLM) { 34 | self.llm_chain = LLMChain(llm: llm, prompt: StuffDocumentsChain.PROMPT) 35 | super.init(outputKey: "input", inputKey: "output") 36 | } 37 | public override func combine_docs(docs: String, question: String) async -> String? { 38 | return await llm_chain.predict(args: ["question": question, "context": docs]) 39 | } 40 | 41 | } 42 | -------------------------------------------------------------------------------- /Sources/LangChain/chains/router/LLMRouterChain.swift: -------------------------------------------------------------------------------- 1 | // 2 | // File.swift 3 | // 4 | // 5 | // Created by 顾艳华 on 2023/8/7. 6 | // 7 | 8 | import Foundation 9 | 10 | public struct Route { 11 | let destination: String 12 | let next_inputs: String 13 | } 14 | 15 | public class LLMRouterChain: DefaultChain { 16 | let llmChain: LLMChain 17 | 18 | public init(llmChain: LLMChain, memory: BaseMemory? = nil, outputKey: String = "output",inputKey: String = "input", callbacks: [BaseCallbackHandler] = []) { 19 | self.llmChain = llmChain 20 | super.init(memory: memory, outputKey: outputKey, inputKey: inputKey, callbacks: callbacks) 21 | } 22 | 23 | public func route(args: String) async -> Route { 24 | let parsed = await llmChain.run(args: args) 25 | // check and route 26 | switch parsed { 27 | case .dict(let d): 28 | return Route(destination: d["destination"]!, next_inputs: d["next_inputs"]!) 29 | default: 30 | return Route(destination: "", next_inputs: "") 31 | } 32 | 33 | } 34 | 35 | } 36 | -------------------------------------------------------------------------------- /Sources/LangChain/chains/router/MultiRouteChain.swift: -------------------------------------------------------------------------------- 1 | // 2 | // File.swift 3 | // 4 | // 5 | // Created by 顾艳华 on 2023/8/7. 6 | // 7 | 8 | import Foundation 9 | public class MultiRouteChain: DefaultChain { 10 | let router_chain: LLMRouterChain 11 | 12 | let destination_chains: [String: DefaultChain] 13 | 14 | let default_chain: DefaultChain 15 | 16 | public init(router_chain: LLMRouterChain, destination_chains: [String : DefaultChain], default_chain: DefaultChain, memory: BaseMemory? 
= nil, outputKey: String = "output", inputKey: String = "input", callbacks: [BaseCallbackHandler] = []) { 17 | self.router_chain = router_chain 18 | self.destination_chains = destination_chains 19 | self.default_chain = default_chain 20 | super.init(memory: memory, outputKey: outputKey, inputKey: inputKey, callbacks: callbacks) 21 | } 22 | 23 | // call route 24 | public override func _call(args: String) async -> (LLMResult?, Parsed) { 25 | // print("call route.") 26 | // _run_manager = run_manager or CallbackManagerForChainRun.get_noop_manager() 27 | // callbacks = _run_manager.get_child() 28 | // route = self.router_chain.route(inputs, callbacks=callbacks) 29 | // 30 | // _run_manager.on_text( 31 | // str(route.destination) + ": " + str(route.next_inputs), verbose=self.verbose 32 | // ) 33 | // if not route.destination: 34 | // return self.default_chain(route.next_inputs, callbacks=callbacks) 35 | // elif route.destination in self.destination_chains: 36 | // return self.destination_chains[route.destination]( 37 | // route.next_inputs, callbacks=callbacks 38 | // ) 39 | // elif self.silent_errors: 40 | // return self.default_chain(route.next_inputs, callbacks=callbacks) 41 | // else: 42 | // raise ValueError( 43 | // f"Received invalid destination chain name '{route.destination}'" 44 | // ) 45 | let route = await self.router_chain.route(args: args) 46 | if destination_chains.keys.contains(route.destination) { 47 | return await destination_chains[route.destination]!._call(args: route.next_inputs) 48 | } else { 49 | return await default_chain._call(args: route.next_inputs) 50 | } 51 | } 52 | } 53 | -------------------------------------------------------------------------------- /Sources/LangChain/document_loaders/AudioLoader.swift: -------------------------------------------------------------------------------- 1 | // 2 | // File.swift 3 | // 4 | // 5 | // Created by 顾艳华 on 2023/9/3. 6 | // 7 | 8 | #if os(macOS) || os(iOS) || os(visionOS) 9 | import Foundation 10 | import NIOPosix 11 | import AsyncHTTPClient 12 | import OpenAIKit 13 | import AVFoundation 14 | 15 | public class AudioLoader: BaseLoader { 16 | static let SEG_SIZE = 60 17 | let audio: URL 18 | let fileName: String 19 | 20 | public init(audio: URL, fileName: String, callbacks: [BaseCallbackHandler] = []) { 21 | self.audio = audio 22 | self.fileName = fileName 23 | super.init(callbacks: callbacks) 24 | } 25 | 26 | public override func _load() async throws -> [Document] { 27 | var docs: [Document] = [] 28 | 29 | let asset: AVAsset = AVAsset(url: audio) 30 | // Get the length of the audio file asset 31 | let duration = CMTimeGetSeconds(asset.duration) 32 | // Determine how many segments we want 33 | let numOfSegments = Int(ceil(duration / 60) - 1) 34 | // For each segment, we need to split it up 35 | 36 | let eventLoopGroup = ThreadManager.thread 37 | 38 | let httpClient = HTTPClient(eventLoopGroupProvider: .shared(eventLoopGroup)) 39 | 40 | let env = LC.loadEnv() 41 | 42 | if let apiKey = env["OPENAI_API_KEY"] { 43 | let baseUrl = env["OPENAI_API_BASE"] ?? "api.openai.com" 44 | 45 | let configuration = Configuration(apiKey: apiKey, api: API(scheme: .https, host: baseUrl)) 46 | 47 | let openAIClient = OpenAIKit.Client(httpClient: httpClient, configuration: configuration) 48 | defer { 49 | // it's important to shutdown the httpClient after all requests are done, even if one failed. See: https://github.com/swift-server/async-http-client 50 | try? 
httpClient.syncShutdown() 51 | } 52 | // do { 53 | // let data = try Data(contentsOf: audio) 54 | // let completion = try! await openAIClient.audio.transcribe(file: data, fileName: "\(fileName)", mimeType: .m4a) 55 | // let doc = Document(page_content: completion.text, metadata: ["fileName": "\(fileName)", "mimeType": "m4a"]) 56 | // docs.append(doc) 57 | // } catch { 58 | // print("Unable to load data: \(error)") 59 | // } 60 | for index in 0...numOfSegments { 61 | if let url = try! await splitAudio(asset: asset, segment: index) { 62 | do { 63 | let data = try Data(contentsOf: url) 64 | let completion = try! await openAIClient.audio.transcribe(file: data, fileName: "\(fileName)_\(index).m4a", mimeType: .m4a) 65 | let doc = Document(page_content: completion.text, metadata: ["fileName": "\(fileName)_\(index)", "mimeType": "m4a"]) 66 | docs.append(doc) 67 | } catch { 68 | print("Unable to load data: \(error)") 69 | throw LangChainError.LoaderError("Unable to load data: \(error)") 70 | } 71 | 72 | } else { 73 | throw LangChainError.LoaderError("Not split audio") 74 | } 75 | } 76 | return docs 77 | } else { 78 | print("Please set openai api key.") 79 | return [] 80 | } 81 | 82 | } 83 | 84 | func splitAudio(asset: AVAsset, segment: Int) async throws -> URL? { 85 | // Create a new AVAssetExportSession 86 | let exporter = AVAssetExportSession(asset: asset, presetName: AVAssetExportPresetAppleM4A)! 87 | // Set the output file type to m4a 88 | exporter.outputFileType = AVFileType.m4a 89 | // Create our time range for exporting 90 | let startTime = CMTimeMake(value: Int64(AudioLoader.SEG_SIZE * segment), timescale: 1) 91 | let endTime = CMTimeMake(value: Int64(AudioLoader.SEG_SIZE * (segment + 1)), timescale: 1) 92 | // Set the time range for our export session 93 | exporter.timeRange = CMTimeRangeFromTimeToTime(start: startTime, end: endTime) 94 | // Set the output file path 95 | exporter.outputURL = FileManager.default.temporaryDirectory.appendingPathComponent("\(segment)-\(UUID().uuidString).m4a", isDirectory: false) 96 | // Do the actual exporting 97 | return try await withCheckedThrowingContinuation { continuation in 98 | exporter.exportAsynchronously(completionHandler: { 99 | switch exporter.status { 100 | case AVAssetExportSession.Status.failed: 101 | print("Export failed. \(exporter.error!.localizedDescription)") 102 | continuation.resume(returning: nil) 103 | default: 104 | print("Export complete.") 105 | let audio = exporter.outputURL! 106 | continuation.resume(returning: audio) 107 | 108 | } 109 | }) 110 | } 111 | 112 | } 113 | 114 | override func type() -> String { 115 | "Audio" 116 | } 117 | } 118 | #endif 119 | -------------------------------------------------------------------------------- /Sources/LangChain/document_loaders/BaseLoader.swift: -------------------------------------------------------------------------------- 1 | // 2 | // File.swift 3 | // 4 | // 5 | // Created by 顾艳华 on 2023/6/24. 
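// A usage sketch for the AudioLoader above (macOS/iOS/visionOS only; it reads OPENAI_API_KEY from
// the environment). load() is inherited from BaseLoader, which follows just below.
func transcribe(_ fileURL: URL) async -> String {
    let docs = await AudioLoader(audio: fileURL, fileName: fileURL.lastPathComponent).load()
    return docs.map { $0.page_content }.joined(separator: "\n")
}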
6 | // 7 | 8 | import Foundation 9 | public struct Document: Equatable { 10 | public init(page_content: String, metadata: [String : String]) { 11 | self.page_content = page_content 12 | self.metadata = metadata 13 | } 14 | public let page_content: String 15 | public var metadata: [String: String] 16 | public static func == (lhs: Document, rhs: Document) -> Bool { 17 | return lhs.page_content == rhs.page_content 18 | } 19 | } 20 | public class BaseLoader { 21 | 22 | static let LOADER_TYPE_KEY = "loader_type" 23 | static let LOADER_REQ_ID = "loader_req_id" 24 | static let LOADER_COST_KEY = "cost" 25 | 26 | let callbacks: [BaseCallbackHandler] 27 | init(callbacks: [BaseCallbackHandler] = []) { 28 | var cbs: [BaseCallbackHandler] = callbacks 29 | if LC.addTraceCallbak() && !cbs.contains(where: { item in item is TraceCallbackHandler}) { 30 | cbs.append(TraceCallbackHandler()) 31 | } 32 | // assert(cbs.count == 1) 33 | self.callbacks = cbs 34 | } 35 | func callStart(type: String, reqId: String) { 36 | do { 37 | for callback in callbacks { 38 | try callback.on_loader_start(type: type, metadata: [BaseLoader.LOADER_REQ_ID: reqId, BaseLoader.LOADER_TYPE_KEY: type]) 39 | } 40 | } catch { 41 | 42 | } 43 | } 44 | 45 | func callEnd(type: String, reqId: String, cost: Double) { 46 | do { 47 | for callback in callbacks { 48 | try callback.on_loader_end(type: type, metadata: [BaseLoader.LOADER_REQ_ID: reqId, BaseLoader.LOADER_COST_KEY: "\(cost)", BaseLoader.LOADER_TYPE_KEY: type]) 49 | } 50 | } catch { 51 | 52 | } 53 | } 54 | 55 | func callError(type: String, reqId: String, cause: String) { 56 | do { 57 | for callback in callbacks { 58 | try callback.on_loader_error(type: type, cause: cause, metadata: [BaseLoader.LOADER_REQ_ID: reqId, BaseLoader.LOADER_TYPE_KEY: type]) 59 | } 60 | } catch { 61 | 62 | } 63 | } 64 | 65 | public func load() async -> [Document] { 66 | let type = type() 67 | let reqId = UUID().uuidString 68 | var cost = 0.0 69 | let now = Date.now.timeIntervalSince1970 70 | do { 71 | callStart(type: type, reqId: reqId) 72 | let docs = try await _load() 73 | cost = Date.now.timeIntervalSince1970 - now 74 | callEnd(type: type, reqId: reqId, cost: cost) 75 | return docs 76 | } catch LangChainError.LoaderError(let cause) { 77 | print("Catch langchain loader error \(cause)") 78 | callError(type: type, reqId: reqId, cause: cause) 79 | return [] 80 | } catch { 81 | print("Catch other error \(error)") 82 | return [] 83 | } 84 | } 85 | 86 | func _load() async throws -> [Document] { 87 | [] 88 | } 89 | 90 | func type() -> String { 91 | "Base" 92 | } 93 | } 94 | //class BaseLoader(ABC): 95 | // """Interface for loading documents. 96 | // 97 | // Implementations should implement the lazy-loading method using generators 98 | // to avoid loading all documents into memory at once. 99 | // 100 | // The `load` method will remain as is for backwards compatibility, but its 101 | // implementation should be just `list(self.lazy_load())`. 102 | // """ 103 | // 104 | // # Sub-classes should implement this method 105 | // # as return list(self.lazy_load()). 106 | // # This method returns a List which is materialized in memory. 
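// (The commented-out Python above and below mirrors the upstream Python LangChain BaseLoader
// interface that this Swift class ports.)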
107 | // @abstractmethod 108 | // def load(self) -> List[Document]: 109 | // """Load data into document objects.""" 110 | // 111 | // def load_and_split( 112 | // self, text_splitter: Optional[TextSplitter] = None 113 | // ) -> List[Document]: 114 | // """Load documents and split into chunks.""" 115 | // if text_splitter is None: 116 | // _text_splitter: TextSplitter = RecursiveCharacterTextSplitter() 117 | // else: 118 | // _text_splitter = text_splitter 119 | // docs = self.load() 120 | // return _text_splitter.split_documents(docs) 121 | // 122 | // # Attention: This method will be upgraded into an abstractmethod once it's 123 | // # implemented in all the existing subclasses. 124 | // def lazy_load( 125 | // self, 126 | // ) -> Iterator[Document]: 127 | // """A lazy loader for document content.""" 128 | // raise NotImplementedError( 129 | // f"{self.__class__.__name__} does not implement lazy_load()" 130 | // ) 131 | -------------------------------------------------------------------------------- /Sources/LangChain/document_loaders/BilibiliLoader.swift: -------------------------------------------------------------------------------- 1 | // 2 | // File.swift 3 | // 4 | // 5 | // Created by 顾艳华 on 2023/7/29. 6 | // 7 | 8 | import Foundation 9 | 10 | public class BilibiliLoader: BaseLoader { 11 | let videoId: String 12 | 13 | public init(videoId: String, callbacks: [BaseCallbackHandler] = []) { 14 | self.videoId = videoId 15 | super.init(callbacks: callbacks) 16 | } 17 | 18 | public override func _load() async throws -> [Document] { 19 | let env = LC.loadEnv() 20 | 21 | if let session = env["BILIBILI_SESSION"], let jct = env["BILIBILI_JCT"] { 22 | let client = BilibiliClient(credential: BilibiliCredential(sessin: session, jct: jct)) 23 | let info = await client.fetchVideoInfo(bvid: videoId) 24 | if info == nil { 25 | throw LangChainError.LoaderError("Subtitle not exist") 26 | } 27 | return [Document(page_content: info!.subtitle, metadata: [ 28 | "title": info!.title, 29 | "desc": info!.desc, 30 | "thumbnail": info!.thumbnail.replacingOccurrences(of: "http", with: "https") 31 | ])] 32 | } else { 33 | print("BILIBILI_SESSION or BILIBILI_JCT not set.") 34 | return [] 35 | } 36 | } 37 | 38 | override func type() -> String { 39 | "Bilibili" 40 | } 41 | } 42 | -------------------------------------------------------------------------------- /Sources/LangChain/document_loaders/HtmlLoader.swift: -------------------------------------------------------------------------------- 1 | // 2 | // File.swift 3 | // 4 | // 5 | // Created by 顾艳华 on 2023/7/19. 
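// Every loader above follows the same pattern: subclass BaseLoader, implement _load() and type(),
// and let callers go through the inherited load(), which wraps _load() with the loader callbacks.
// A minimal sketch of such a subclass (in-module only, since _load() and type() are internal; the
// StringLoader name is hypothetical):
public class StringLoader: BaseLoader {
    let text: String
    public init(text: String, callbacks: [BaseCallbackHandler] = []) {
        self.text = text
        super.init(callbacks: callbacks)
    }
    public override func _load() async throws -> [Document] {
        [Document(page_content: text, metadata: [:])]
    }
    override func type() -> String { "String" }
}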
6 | // 7 | import SwiftSoup 8 | import Foundation 9 | 10 | public class HtmlLoader: BaseLoader { 11 | let html: String 12 | let url: String 13 | public init(html: String, url: String, callbacks: [BaseCallbackHandler] = []) { 14 | self.html = html 15 | self.url = url 16 | super.init(callbacks: callbacks) 17 | } 18 | 19 | public override func _load() async throws -> [Document] { 20 | do { 21 | let doc: SwiftSoup.Document = try SwiftSoup.parse(html) 22 | let text = try doc.text() 23 | let title = findTitle(doc: doc) 24 | let thumbnail = findImage(text: html, doc: doc) 25 | let metadata: [String: String] = ["url": url, "title": title, "thumbnail": thumbnail] 26 | return [Document(page_content: text, metadata: metadata)] 27 | } catch Exception.Error( _, let message) { 28 | print("Get body error " + message) 29 | throw LangChainError.LoaderError("Parse html fail with \(message)") 30 | } catch { 31 | print("Get body error \(error)") 32 | throw LangChainError.LoaderError("Parse html fail with \(error)") 33 | } 34 | } 35 | func findTitle(doc: SwiftSoup.Document) -> String { 36 | var title = "" 37 | do { 38 | //try get html -> header -> String { 53 | // First, try get html -> header -> String { 79 | "Html" 80 | } 81 | } 82 | -------------------------------------------------------------------------------- /Sources/LangChain/document_loaders/ImageOCRLoader.swift: -------------------------------------------------------------------------------- 1 | // 2 | // File.swift 3 | // 4 | // 5 | // Created by 顾艳华 on 2023/8/5. 6 | // 7 | 8 | import Foundation 9 | //import UIKit 10 | import AsyncHTTPClient 11 | import NIOPosix 12 | /* 13 | Enroll baidu cloud, and access https://console.bce.baidu.com/iam/#/iam/accesslist Get 14 | BAIDU_OCR_AK=xxx 15 | BAIDU_OCR_SK=xxx 16 | */ 17 | public class ImageOCRLoader: BaseLoader { 18 | let image: Data 19 | 20 | public init(image: Data, callbacks: [BaseCallbackHandler] = []) { 21 | self.image = image 22 | super.init(callbacks: callbacks) 23 | } 24 | 25 | public override func _load() async throws -> [Document] { 26 | let eventLoopGroup = ThreadManager.thread 27 | let httpClient = HTTPClient(eventLoopGroupProvider: .shared(eventLoopGroup)) 28 | defer { 29 | // it's important to shutdown the httpClient after all requests are done, even if one failed. See: https://github.com/swift-server/async-http-client 30 | try? httpClient.syncShutdown() 31 | } 32 | var text = "" 33 | let env = LC.loadEnv() 34 | if let ak = env["BAIDU_OCR_AK"], 35 | let sk = env["BAIDU_OCR_SK"]{ 36 | let ocr = await BaiduClient.ocrImage(ak: ak, sk: sk, httpClient: httpClient, image: image) 37 | if ocr!["error_msg"].string != nil { 38 | throw LangChainError.LoaderError(ocr!["error_msg"].stringValue) 39 | } else { 40 | let words = ocr!["words_result"].arrayValue.map{$0["words"].stringValue} 41 | text = words.joined(separator: " ") 42 | return [Document(page_content: text, metadata: [:])] 43 | } 44 | } else { 45 | return [] 46 | } 47 | } 48 | 49 | override func type() -> String { 50 | "BaiduOCR" 51 | } 52 | } 53 | -------------------------------------------------------------------------------- /Sources/LangChain/document_loaders/NotionLoader.swift: -------------------------------------------------------------------------------- 1 | // 2 | // File.swift 3 | // 4 | // 5 | // Created by 顾艳华 on 2/7/24. 
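// A usage sketch for the HtmlLoader above: pass the raw HTML together with its source URL; the
// single returned Document carries the page text plus url/title/thumbnail metadata.
func loadPage(html: String, url: String) async -> Document? {
    await HtmlLoader(html: html, url: url).load().first
}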
6 | // 7 | 8 | import Foundation 9 | import SwiftyNotion 10 | 11 | public class NotionLoader: BaseLoader { 12 | public override init(callbacks: [BaseCallbackHandler] = []) { 13 | super.init(callbacks: callbacks) 14 | } 15 | fileprivate func appendText(prefix: String, _ content: inout String, _ t: NotionRichText) { 16 | content.append(prefix + t.plainText) 17 | content.append("\n") 18 | } 19 | 20 | fileprivate func forText(prefix: String, _ text: [NotionRichText], _ content: inout String) { 21 | for t in text { 22 | appendText(prefix: prefix, &content, t) 23 | } 24 | } 25 | 26 | fileprivate func buildBlocks(_ notion: NotionAPIGateway, withId: String, title: String) async throws -> [Document]{ 27 | let blocks = try await notion.retrieveBlockChildren(withId: withId) 28 | var foundDoc = false 29 | var content = "" 30 | var docs = [Document]() 31 | for block in blocks { 32 | if block.type == .childPage { 33 | let blockId = block.id.replacingOccurrences(of: "-", with: "") 34 | let title = block.childPage!.title 35 | let children = try await buildBlocks(notion, withId: blockId, title: title) 36 | docs.append(contentsOf: children) 37 | } else { 38 | //child 39 | foundDoc = true 40 | if let c = block.paragraph { 41 | forText(prefix: "", c.text, &content) 42 | } 43 | if let c = block.code { 44 | forText(prefix: "\(c.language) Code: " ,c.text, &content) 45 | } 46 | if let c = block.heading1 { 47 | forText(prefix: "# ",c.text, &content) 48 | } 49 | if let c = block.heading2 { 50 | forText(prefix: "## ",c.text, &content) 51 | } 52 | if let c = block.heading3 { 53 | forText(prefix: "### ",c.text, &content) 54 | } 55 | if let c = block.toggle { 56 | forText(prefix: "> ",c.text, &content) 57 | } 58 | if let c = block.toDo { 59 | forText(prefix: "[ ] ",c.text, &content) 60 | } 61 | if let c = block.numberedListItem { 62 | forText(prefix: "- ",c.text, &content) 63 | } 64 | if let c = block.bulletedListItem { 65 | forText(prefix: "- ",c.text, &content) 66 | } 67 | } 68 | } 69 | if foundDoc { 70 | docs.append(Document(page_content: content, metadata: ["title": title])) 71 | } 72 | return docs 73 | } 74 | 75 | public override func _load() async throws -> [Document] { 76 | let env = LC.loadEnv() 77 | 78 | if let apiKey = env["NOTION_API_KEY"], let rootId = env["NOTION_ROOT_NODE_ID"] { 79 | let notion = NotionAPIGateway(secretKey: apiKey) 80 | let pageId = rootId 81 | let title = try await notion.retrievePage(withId: pageId) 82 | let docs = try await buildBlocks(notion, withId: pageId, title: title.properties["title"]?.title?.first?.plainText ?? "") 83 | // 84 | // print("🥰\(docs)") 85 | // print("🍰\(docs.count)") 86 | return docs 87 | } else { 88 | print("NOTION_API_KEY or NOTION_ROOT_NODE_ID not set.") 89 | return [] 90 | } 91 | } 92 | } 93 | -------------------------------------------------------------------------------- /Sources/LangChain/document_loaders/PDFLoader.swift: -------------------------------------------------------------------------------- 1 | // 2 | // File.swift 3 | // 4 | // 5 | // Created by 顾艳华 on 2023/7/28. 
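// A usage sketch for the NotionLoader above: configuration comes entirely from the environment
// (NOTION_API_KEY and NOTION_ROOT_NODE_ID), so construction takes no arguments.
func loadNotionPages() async -> [Document] {
    await NotionLoader().load()
}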
6 | // 7 | 8 | import Foundation 9 | 10 | #if os(macOS) || os(iOS) || os(visionOS) 11 | import PDFKit 12 | 13 | 14 | public class PDFLoader: BaseLoader { 15 | let file_path: URL 16 | 17 | public init(file_path: URL, callbacks: [BaseCallbackHandler] = []) { 18 | self.file_path = file_path 19 | super.init(callbacks: callbacks) 20 | } 21 | 22 | public override func _load() async throws -> [Document] { 23 | // let nameAndExt = self.file_path.split(separator: ".") 24 | // let name = "\(nameAndExt[0])" 25 | // let ext = "\(nameAndExt[1])" 26 | // if let url = Bundle.main.url(forResource: name, withExtension: ext) { 27 | if let pdfDocument = PDFDocument(url: file_path) { 28 | var extractedText = "" 29 | let metadata = ["source": file_path.absoluteString] 30 | for pageIndex in 0 ..< pdfDocument.pageCount { 31 | if let pdfPage = pdfDocument.page(at: pageIndex) { 32 | if let pageContent = pdfPage.attributedString { 33 | let pageString = pageContent.string 34 | extractedText += "\n\(pageString)" 35 | // print("💼\(pageContent)") 36 | // print("🖥️\(pageString)") 37 | } 38 | } 39 | } 40 | 41 | // print(extractedText) 42 | return [Document(page_content: extractedText, metadata: metadata)] 43 | } else{ 44 | throw LangChainError.LoaderError("Parse PDF file fail.") 45 | } 46 | // } else { 47 | // throw LangChainError.LoaderError("PDF not exist") 48 | // } 49 | } 50 | 51 | override func type() -> String { 52 | "PDF" 53 | } 54 | } 55 | #endif 56 | -------------------------------------------------------------------------------- /Sources/LangChain/document_loaders/RSSLoader.swift: -------------------------------------------------------------------------------- 1 | // 2 | // File.swift 3 | // 4 | // 5 | // Created by 顾艳华 on 2/10/24. 6 | // 7 | 8 | import Foundation 9 | import FeedKit 10 | 11 | public class RSSLoader: BaseLoader { 12 | let url: String 13 | 14 | public init(url: String, callbacks: [BaseCallbackHandler] = []) { 15 | self.url = url 16 | super.init(callbacks: callbacks) 17 | } 18 | public override func _load() async throws -> [Document] { 19 | let feedURL = URL(string: url)! 20 | let parser = FeedParser(URL: feedURL) 21 | let result = parser.parse() 22 | switch result { 23 | case .success(let feed): 24 | 25 | // Grab the parsed feed directly as an optional rss, atom or json feed object 26 | switch feed { 27 | case let .atom(feed): 28 | var content = [Document]() 29 | for f in feed.entries ?? [] { 30 | content.append(Document(page_content: f.title ?? "", metadata: [:])) 31 | } 32 | return content 33 | case let .rss(feed): 34 | var content = [Document]() 35 | for f in feed.items ?? [] { 36 | content.append(Document(page_content: f.title ?? "", metadata: [:])) 37 | } 38 | return content 39 | case let .json(feed): 40 | var content = [Document]() 41 | for f in feed.items ?? [] { 42 | content.append(Document(page_content: f.title ?? "", metadata: [:])) 43 | } 44 | return content 45 | } 46 | 47 | 48 | case .failure(let error): 49 | print(error) 50 | return [] 51 | } 52 | 53 | } 54 | } 55 | -------------------------------------------------------------------------------- /Sources/LangChain/document_loaders/TextLoader.swift: -------------------------------------------------------------------------------- 1 | // 2 | // File.swift 3 | // 4 | // 5 | // Created by 顾艳华 on 2023/6/24. 
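//
// Usage sketch (illustrative only; "notes.txt" is a hypothetical bundled
// resource). Note that the loader resolves the path through Bundle.main, so
// the file must ship inside the app bundle rather than live at an arbitrary
// filesystem location.
//
//     let docs = try await TextLoader(file_path: "notes.txt")._load()
//     print(docs.first?.page_content ?? "")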
6 | // 7 | 8 | import Foundation 9 | 10 | public class TextLoader: BaseLoader { 11 | let file_path: String 12 | 13 | public init(file_path: String, callbacks: [BaseCallbackHandler] = []) { 14 | self.file_path = file_path 15 | super.init(callbacks: callbacks) 16 | } 17 | public override func _load() async throws -> [Document] { 18 | let nameAndExt = self.file_path.split(separator: ".") 19 | let name = "\(nameAndExt[0])" 20 | let ext = "\(nameAndExt[1])" 21 | var text = "" 22 | if let res = Bundle.main.path(forResource: name, ofType: ext){ 23 | text = try String(contentsOfFile: res) 24 | let metadata = ["source": self.file_path] 25 | return [Document(page_content: text, metadata: metadata)] 26 | } else { 27 | throw LangChainError.LoaderError("Text fail not exist") 28 | } 29 | } 30 | 31 | override func type() -> String { 32 | "Text" 33 | } 34 | } 35 | //class TextLoader(BaseLoader): 36 | // """Load text files. 37 | // 38 | // 39 | // Args: 40 | // file_path: Path to the file to load. 41 | // 42 | // encoding: File encoding to use. If `None`, the file will be loaded 43 | // with the default system encoding. 44 | // 45 | // autodetect_encoding: Whether to try to autodetect the file encoding 46 | // if the specified encoding fails. 47 | // """ 48 | // 49 | // def __init__( 50 | // self, 51 | // file_path: str, 52 | // encoding: Optional[str] = None, 53 | // autodetect_encoding: bool = False, 54 | // ): 55 | // """Initialize with file path.""" 56 | // self.file_path = file_path 57 | // self.encoding = encoding 58 | // self.autodetect_encoding = autodetect_encoding 59 | // 60 | // def load(self) -> List[Document]: 61 | // """Load from file path.""" 62 | // text = "" 63 | // try: 64 | // with open(self.file_path, encoding=self.encoding) as f: 65 | // text = f.read() 66 | // except UnicodeDecodeError as e: 67 | // if self.autodetect_encoding: 68 | // detected_encodings = detect_file_encodings(self.file_path) 69 | // for encoding in detected_encodings: 70 | // logger.debug("Trying encoding: ", encoding.encoding) 71 | // try: 72 | // with open(self.file_path, encoding=encoding.encoding) as f: 73 | // text = f.read() 74 | // break 75 | // except UnicodeDecodeError: 76 | // continue 77 | // else: 78 | // raise RuntimeError(f"Error loading {self.file_path}") from e 79 | // except Exception as e: 80 | // raise RuntimeError(f"Error loading {self.file_path}") from e 81 | // 82 | // metadata = {"source": self.file_path} 83 | // return [Document(page_content=text, metadata=metadata)] 84 | -------------------------------------------------------------------------------- /Sources/LangChain/document_loaders/YoutubeLoader.swift: -------------------------------------------------------------------------------- 1 | // 2 | // File.swift 3 | // 4 | // 5 | // Created by 顾艳华 on 2023/6/29. 6 | // 7 | 8 | import Foundation 9 | import AsyncHTTPClient 10 | import Foundation 11 | import NIOPosix 12 | 13 | 14 | public class YoutubeLoader: BaseLoader { 15 | let video_id: String 16 | let language: String 17 | public init(video_id: String, language: String, callbacks: [BaseCallbackHandler] = []) { 18 | self.video_id = video_id 19 | self.language = language 20 | super.init(callbacks: callbacks) 21 | } 22 | public override func _load() async throws -> [Document] { 23 | 24 | let eventLoopGroup = ThreadManager.thread 25 | 26 | let httpClient = HTTPClient(eventLoopGroupProvider: .shared(eventLoopGroup)) 27 | defer { 28 | // it's important to shutdown the httpClient after all requests are done, even if one failed. 
See: https://github.com/swift-server/async-http-client 29 | try? httpClient.syncShutdown() 30 | } 31 | 32 | let info = await YoutubeHackClient.info(video_id: video_id, httpClient: httpClient) 33 | let metadata = ["source": self.video_id, 34 | "title": info!.title, 35 | "desc": info!.description, 36 | "thumbnail": info!.thumbnail] 37 | var transcript_list = await YoutubeHackClient.list_transcripts(video_id: self.video_id, httpClient: httpClient) 38 | if transcript_list == nil { 39 | throw LangChainError.LoaderError("Subtitle not exist") 40 | } 41 | if transcript_list!.generated_transcripts.isEmpty && transcript_list!.manually_created_transcripts.isEmpty { 42 | // return [Document(page_content: "Content is empty.", metadata: metadata)] 43 | throw LangChainError.LoaderError("Subtitle not exist") 44 | } 45 | var transcript = transcript_list!.find_transcript(language_codes: [self.language]) 46 | if transcript == nil { 47 | let en_transcript = transcript_list!.manually_created_transcripts.first!.value 48 | transcript = en_transcript.translate(language_code: self.language) 49 | } 50 | let transcript_pieces = await transcript!.fetch() 51 | 52 | let text = transcript_pieces!.map {$0["text"]!}.joined(separator: " ") 53 | 54 | return [Document(page_content: text, metadata: metadata)] 55 | 56 | } 57 | 58 | override func type() -> String { 59 | "Youtube" 60 | } 61 | } 62 | -------------------------------------------------------------------------------- /Sources/LangChain/embeddings/Distilbert.swift: -------------------------------------------------------------------------------- 1 | //// 2 | //// File.swift 3 | //// 4 | //// 5 | //// Created by 顾艳华 on 2/11/24. 6 | //// 7 | // 8 | //import Foundation 9 | //import SimilaritySearchKitDistilbert 10 | // 11 | //@available(macOS 13.0, *) 12 | //public struct Distilbert: Embeddings { 13 | // let n = DistilbertEmbeddings() 14 | // public init() { 15 | // 16 | // } 17 | // 18 | // 19 | // public func embedQuery(text: String) async -> [Float] { 20 | // await n.encode(sentence: text)! 21 | // } 22 | //} 23 | -------------------------------------------------------------------------------- /Sources/LangChain/embeddings/Embeddings.swift: -------------------------------------------------------------------------------- 1 | // 2 | // File.swift 3 | // 4 | // 5 | // Created by 顾艳华 on 2023/6/12. 6 | // 7 | 8 | import Foundation 9 | public protocol Embeddings { 10 | // Interface for embedding models. 11 | 12 | // func embedDocuments(texts: [String]) -> [[Float]] 13 | 14 | func embedQuery(text: String) async -> [Float] 15 | } 16 | -------------------------------------------------------------------------------- /Sources/LangChain/embeddings/OllamaEmbeddings.swift: -------------------------------------------------------------------------------- 1 | // 2 | // OllamaEmbeddings.swift 3 | // 4 | // Created by Rene Hexel on 20/4/2024. 5 | // 6 | import Foundation 7 | import AsyncHTTPClient 8 | 9 | extension Ollama: Embeddings { 10 | /// Ollama embedding request. 11 | struct EmbeddingRequest: Codable { 12 | let model: String 13 | let prompt: String 14 | } 15 | /// Ollama embedding structure. 16 | struct Embedding: Codable { 17 | let embedding: [Float] 18 | } 19 | /// Create embeddings for a given text. 20 | /// 21 | /// This function sends a text to the Ollama API and returns the resulting embeddings. 22 | /// 23 | /// - Parameter text: The text to create embeddings for. 24 | /// - Returns: An array of embeddings for the given text. 
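/// - Note: Usage sketch (the model name is an assumption for illustration;
///   any embedding-capable Ollama model would do, and `Ollama` is assumed to
///   accept `model:` in its initialiser, as `ChatOllama` does):
///
///       let embedder = Ollama(model: "nomic-embed-text")
///       let vector = await embedder.embedQuery(text: "Hello, world")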
25 | public func embedQuery(text: String) async -> [Float] { 26 | do { 27 | return try await getEmbeddings(for: text) 28 | } catch { 29 | return [] 30 | } 31 | } 32 | /// Get the embeddings vector for a given text. 33 | /// 34 | /// This function sends a text to the Ollama API and returns the resulting embeddings. 35 | /// 36 | /// - Parameter text: The text to create embeddings vector for. 37 | /// - Returns: An array of embeddings for the given text. 38 | public func getEmbeddings(for text: String) async throws -> [Float] { 39 | let embeddingRequest = EmbeddingRequest(model: model, prompt: text) 40 | guard let data = try await sendJSON(request: embeddingRequest, endpoint: "embeddings") else { 41 | return [] 42 | } 43 | let apiResponse = try JSONDecoder().decode(Embedding.self, from: data) 44 | return apiResponse.embedding 45 | } 46 | } 47 | -------------------------------------------------------------------------------- /Sources/LangChain/embeddings/OpenAIEmbeddings.swift: -------------------------------------------------------------------------------- 1 | // 2 | // File.swift 3 | // 4 | // 5 | // Created by 顾艳华 on 2023/6/12. 6 | // 7 | 8 | import Foundation 9 | import NIOPosix 10 | import AsyncHTTPClient 11 | import OpenAIKit 12 | 13 | public struct OpenAIEmbeddings: Embeddings { 14 | let session: URLSession 15 | public init(session: URLSession = URLSession(configuration: .default)) { 16 | self.session = session 17 | } 18 | 19 | // public func embedDocuments(texts: [String]) -> [[Float]] { 20 | // [] 21 | // } 22 | 23 | public func embedQuery(text: String) async -> [Float] { 24 | 25 | let env = LC.loadEnv() 26 | 27 | if let apiKey = env["OPENAI_API_KEY"] { 28 | let baseUrl = env["OPENAI_API_BASE"] ?? "api.openai.com" 29 | 30 | let configuration = Configuration(apiKey: apiKey, api: API(scheme: .https, host: baseUrl)) 31 | 32 | let openAIClient = OpenAIKit.Client(session: session, configuration: configuration) 33 | 34 | do { 35 | let embedding = try await openAIClient.embeddings.create(input: text) 36 | 37 | // print(embedding.data[0].embedding) 38 | return embedding.data[0].embedding 39 | } catch { 40 | return [] 41 | } 42 | } else { 43 | print("Please set openai api key.") 44 | return [] 45 | } 46 | 47 | 48 | } 49 | 50 | 51 | } 52 | -------------------------------------------------------------------------------- /Sources/LangChain/llms/Baidu.swift: -------------------------------------------------------------------------------- 1 | // 2 | // File.swift 3 | // 4 | // 5 | // Created by 顾艳华 on 2023/9/4. 6 | // 7 | 8 | import Foundation 9 | import NIOPosix 10 | import AsyncHTTPClient 11 | // Create ai app on https://console.bce.baidu.com/qianfan/ais/console/applicationConsole/application 12 | // And get app ak sk 13 | public class Baidu: LLM { 14 | let temperature: Double 15 | 16 | public init(temperature: Double = 0.8, callbacks: [BaseCallbackHandler] = [], cache: BaseCache? = nil) { 17 | self.temperature = temperature 18 | super.init(callbacks: callbacks, cache: cache) 19 | } 20 | 21 | public override func _send(text: String, stops: [String] = []) async throws -> LLMResult { 22 | let eventLoopGroup = ThreadManager.thread 23 | let httpClient = HTTPClient(eventLoopGroupProvider: .shared(eventLoopGroup)) 24 | defer { 25 | // it's important to shutdown the httpClient after all requests are done, even if one failed. See: https://github.com/swift-server/async-http-client 26 | try? 
httpClient.syncShutdown() 27 | } 28 | let env = LC.loadEnv() 29 | if let ak = env["BAIDU_LLM_AK"], 30 | let sk = env["BAIDU_LLM_SK"]{ 31 | return LLMResult(llm_output: try await BaiduClient.llmSync(ak: ak, sk: sk, httpClient: httpClient, text: text, temperature: temperature)) 32 | } else { 33 | print("Please set baidu llm ak sk.") 34 | return LLMResult(llm_output: "Please set baidu llm ak sk.") 35 | } 36 | } 37 | 38 | } 39 | -------------------------------------------------------------------------------- /Sources/LangChain/llms/ChatGLM.swift: -------------------------------------------------------------------------------- 1 | // 2 | // File.swift 3 | // 4 | // 5 | // Created by 顾艳华 on 2023/8/30. 6 | // 7 | 8 | import Foundation 9 | 10 | public class ChatGLM: LLM { 11 | public override func _send(text: String, stops: [String]) async throws -> LLMResult { 12 | return LLMResult(llm_output: try await api.call(text: text)) 13 | } 14 | 15 | let api: ChatGLMAPIWrapper 16 | 17 | public init(model: ChatGLMModel = ChatGLMModel.chatglm_std, temperature: Double = 0.0) { 18 | api = ChatGLMAPIWrapper(model: model, temperature: temperature) 19 | } 20 | 21 | } 22 | -------------------------------------------------------------------------------- /Sources/LangChain/llms/ChatOllama.swift: -------------------------------------------------------------------------------- 1 | // 2 | // ChatOllama.swift 3 | // 4 | // Created by Rene Hexel on 21/4/2024. 5 | // 6 | 7 | import Foundation 8 | import OpenAIKit 9 | 10 | /// Ollama class for chat functionality. 11 | /// 12 | /// This class interfaces with the Ollama chat API. 13 | public class ChatOllama: Ollama { 14 | /// The chat history. 15 | /// 16 | /// This array contains the chat history 17 | /// of the conversation so far. 18 | public var history = [ChatGLMMessage]() 19 | 20 | /// Create a new Ollama chat instance. 21 | /// 22 | /// This initialiser creates a new Ollama chat instance with the given parameters. 23 | /// 24 | /// - Parameters: 25 | /// - baseURL: The base URL for the Ollama API. 26 | /// - model: The model to use for the chat instance. 27 | /// - options: Additional options for the chat instance. 28 | /// - timeout: The request timeout in seconds. 29 | /// - callbacks: The callback handlers to use. 30 | /// - cache: The cache to use. 31 | public override init(baseURL: String? = nil, model: String? = nil, options: [String : String]? = nil, timeout: Int = 3600, callbacks: [BaseCallbackHandler] = [], cache: BaseCache? = nil) { 32 | super.init(baseURL: baseURL, model: model, options: options, timeout: timeout, callbacks: callbacks, cache: cache) 33 | } 34 | 35 | /// Send a text to the Ollama API. 36 | /// 37 | /// This function implements the main interaction with the Ollama API 38 | /// through its `chat` API. 39 | /// 40 | /// - Parameters: 41 | /// - text: The text to send to the Ollama API. 42 | /// - stops: An array of strings that, if present in the response, will stop the generation. 
43 | /// - Returns: 44 | public override func _send(text: String, stops: [String] = []) async throws -> LLMResult { 45 | let message = ChatGLMMessage(role: "user", content: text) 46 | history.append(message) 47 | let chatRequest = ChatRequest(model: model, options: modelOptions, format: "json", stream: false, messages: history) 48 | guard let data = try await sendJSON(request: chatRequest, endpoint: "chat") else { 49 | return LLMResult() 50 | } 51 | let llmResponse = try JSONDecoder().decode(ChatResponse.self, from: data) 52 | history.append(llmResponse.message) 53 | return LLMResult(llm_output: llmResponse.message.content) 54 | } 55 | } 56 | 57 | public extension ChatOllama { 58 | /// Generate the next message in a chat with a provided model. 59 | /// 60 | /// This is a streaming endpoint, so there can be a series of responses. 61 | /// Streaming can be disabled using "stream": false. 62 | struct ChatRequest: Codable, Sendable { 63 | public let model: String 64 | public let options: [String: String]? 65 | public let format: String 66 | public let stream: Bool 67 | public let messages: [ChatGLMMessage] 68 | } 69 | /// Ollama response to a `ChatRequest`. 70 | /// 71 | /// This response object includes the next message in a chat conversation. 72 | /// The final response object will include statistics and additional data from the request. 73 | struct ChatResponse: Codable, Sendable { 74 | public let message: ChatGLMMessage 75 | public let model: String 76 | public let done: Bool 77 | public let totalDuration: Int? 78 | public let loadDuration: Int? 79 | public let promptEvalDuration: Int? 80 | public let evalDuration: Int? 81 | public let promptEvalCount: Int? 82 | public let evalCount: Int? 83 | 84 | /// Return the message content. 85 | public var content: String { message.content } 86 | 87 | /// JSON coding keys for the `ChatResponse` struct. 88 | enum CodingKeys: String, CodingKey { 89 | case message 90 | case model 91 | case done 92 | case totalDuration = "total_duration" 93 | case loadDuration = "load_duration" 94 | case promptEvalDuration = "prompt_eval_duration" 95 | case evalDuration = "eval_duration" 96 | case promptEvalCount = "prompt_eval_count" 97 | case evalCount = "eval_count" 98 | } 99 | } 100 | } 101 | -------------------------------------------------------------------------------- /Sources/LangChain/llms/ChatOpenAI.swift: -------------------------------------------------------------------------------- 1 | // 2 | // File.swift 3 | // 4 | // 5 | // Created by 顾艳华 on 2023/8/31. 6 | // 7 | 8 | import Foundation 9 | import NIOPosix 10 | import AsyncHTTPClient 11 | import OpenAIKit 12 | 13 | public class ChatOpenAI: LLM { 14 | let temperature: Double 15 | let model: ModelID 16 | let httpClient: HTTPClient? 17 | let urlSession: URLSession? 18 | public init(httpClient: HTTPClient? = nil, urlSession: URLSession? = nil, temperature: Double = 0.0, model: ModelID = Model.GPT3.gpt3_5Turbo16K, callbacks: [BaseCallbackHandler] = [], cache: BaseCache? = nil) { 19 | self.httpClient = httpClient 20 | self.urlSession = urlSession 21 | self.temperature = temperature 22 | self.model = model 23 | super.init(callbacks: callbacks, cache: cache) 24 | } 25 | public override func _send(text: String, stops: [String] = []) async throws -> LLMResult { 26 | let env = LC.loadEnv() 27 | 28 | if let apiKey = env["OPENAI_API_KEY"] { 29 | let baseUrl = env["OPENAI_API_BASE"] ?? 
"api.openai.com" 30 | 31 | let configuration = Configuration(apiKey: apiKey, api: API(scheme: .https, host: baseUrl)) 32 | 33 | #if os(macOS) || os(iOS) || os(visionOS) 34 | assert(httpClient != nil, "Http client is not nil") 35 | let openAIClient = OpenAIKit.Client(httpClient: httpClient!, configuration: configuration) 36 | #else 37 | assert(urlSession != nil, "URL Session is not nil") 38 | let openAIClient = OpenAIKit.Client(session: urlSession!, configuration: configuration) 39 | #endif 40 | let buffer = try await openAIClient.chats.stream(model: model, messages: [.user(content: text)], temperature: temperature) 41 | return OpenAIResult(generation: buffer) 42 | } else { 43 | print("Please set openai api key.") 44 | return LLMResult() 45 | } 46 | } 47 | } 48 | -------------------------------------------------------------------------------- /Sources/LangChain/llms/Dalle.swift: -------------------------------------------------------------------------------- 1 | // 2 | // File.swift 3 | // 4 | // 5 | // Created by 顾艳华 on 2023/8/22. 6 | // 7 | 8 | import Foundation 9 | import NIOPosix 10 | import AsyncHTTPClient 11 | import OpenAIKit 12 | 13 | public class Dalle: LLM { 14 | let size: DalleImage.Size 15 | public init(size: DalleImage.Size, callbacks: [BaseCallbackHandler] = [], cache: BaseCache? = nil) { 16 | self.size = size 17 | super.init(callbacks: callbacks, cache: cache) 18 | } 19 | 20 | public override func _send(text: String, stops: [String] = []) async throws -> LLMResult { 21 | let env = LC.loadEnv() 22 | 23 | if let apiKey = env["OPENAI_API_KEY"] { 24 | let baseUrl = env["OPENAI_API_BASE"] ?? "api.openai.com" 25 | let eventLoopGroup = ThreadManager.thread 26 | 27 | let httpClient = HTTPClient(eventLoopGroupProvider: .shared(eventLoopGroup)) 28 | defer { 29 | // it's important to shutdown the httpClient after all requests are done, even if one failed. See: https://github.com/swift-server/async-http-client 30 | try? httpClient.syncShutdown() 31 | } 32 | let configuration = Configuration(apiKey: apiKey, api: API(scheme: .https, host: baseUrl)) 33 | 34 | let openAIClient = OpenAIKit.Client(httpClient: httpClient, configuration: configuration) 35 | let reps = try await openAIClient.images.create(prompt: text, size: dalleTo(size: size)) 36 | return LLMResult(llm_output: reps.data.first!.url) 37 | } else { 38 | print("Please set openai api key.") 39 | return LLMResult(llm_output: "Please set openai api key.") 40 | } 41 | 42 | } 43 | } 44 | -------------------------------------------------------------------------------- /Sources/LangChain/llms/Gemini.swift: -------------------------------------------------------------------------------- 1 | // 2 | // File.swift 3 | // 4 | // 5 | // Created by 顾艳华 on 12/25/23. 
6 | // 7 | 8 | import Foundation 9 | import GoogleGenerativeAI 10 | 11 | public class Gemini: LLM { 12 | override func _send(text: String, stops: [String]) async throws -> LLMResult { 13 | let env = LC.loadEnv() 14 | 15 | if let apiKey = env["GOOGLEAI_API_KEY"] { 16 | let model = GenerativeModel(name: "gemini-pro", apiKey: apiKey) 17 | let response = try await model.generateContent(text) 18 | return LLMResult(llm_output: response.text) 19 | } else { 20 | print("Please set googleai api key.") 21 | return LLMResult(llm_output: "Please set googleai api key.") 22 | } 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /Sources/LangChain/llms/HuggingFace.swift: -------------------------------------------------------------------------------- 1 | // 2 | // File.swift 3 | // 4 | // 5 | // Created by 顾艳华 on 2023/7/27. 6 | // 7 | 8 | import Foundation 9 | public class HuggingFace: LLM { 10 | let repo: String 11 | let task: String 12 | 13 | public init(repo: String, task: String = "text-generation", callbacks: [BaseCallbackHandler] = [], cache: BaseCache? = nil) { 14 | self.repo = repo 15 | self.task = task 16 | super.init(callbacks: callbacks, cache: cache) 17 | } 18 | 19 | public override func _send(text: String, stops: [String] = []) async throws -> LLMResult { 20 | let wrapper = HFInferenceApi(repo: repo, task: task) 21 | let response = try await wrapper.inference(text: text) 22 | let result = response[0]["generated_text"].stringValue 23 | var result2 = String(result[text.endIndex...]) 24 | print("inf result:\(result2)") 25 | 26 | if !stops.isEmpty { 27 | result2 = result2.components(separatedBy: stops[0])[0] 28 | } 29 | return LLMResult(llm_output: result2) 30 | } 31 | 32 | } 33 | -------------------------------------------------------------------------------- /Sources/LangChain/llms/LLM.swift: -------------------------------------------------------------------------------- 1 | // 2 | // File.swift 3 | // 4 | // 5 | // Created by 顾艳华 on 2023/6/10. 6 | // 7 | 8 | import Foundation 9 | 10 | public class LLM { 11 | static let LLM_REQ_ID_KEY = "llm_req_id" 12 | static let LLM_COST_KEY = "cost" 13 | public init(callbacks: [BaseCallbackHandler] = [], cache: BaseCache? = nil) { 14 | var cbs: [BaseCallbackHandler] = callbacks 15 | if LC.addTraceCallbak() && !cbs.contains(where: { item in item is TraceCallbackHandler}) { 16 | cbs.append(TraceCallbackHandler()) 17 | } 18 | // assert(cbs.count == 1) 19 | self.callbacks = cbs 20 | self.cache = cache 21 | } 22 | let callbacks: [BaseCallbackHandler] 23 | let cache: BaseCache? 24 | 25 | public func generate(text: String, stops: [String] = []) async -> LLMResult? 
{ 26 | let reqId = UUID().uuidString 27 | var cost = 0.0 28 | let now = Date.now.timeIntervalSince1970 29 | callStart(prompt: text, reqId: reqId) 30 | do { 31 | if let cache = self.cache { 32 | if let llmResult = await cache.lookup(prompt: text) { 33 | callEnd(output: llmResult.llm_output!, reqId: reqId, cost: 0) 34 | return llmResult 35 | } 36 | } 37 | let llmResult = try await _send(text: text, stops: stops) 38 | if let cache = self.cache { 39 | if llmResult.llm_output != nil { 40 | await cache.update(prompt: text, return_val: llmResult) 41 | } 42 | } 43 | cost = Date.now.timeIntervalSince1970 - now 44 | if !llmResult.stream { 45 | callEnd(output: llmResult.llm_output!, reqId: reqId, cost: cost) 46 | } else { 47 | callEnd(output: "[LLM is streamable]", reqId: reqId, cost: cost) 48 | } 49 | return llmResult 50 | } catch { 51 | callCatch(error: error, reqId: reqId, cost: cost) 52 | print("LLM generate \(error.localizedDescription)") 53 | return nil 54 | } 55 | 56 | } 57 | 58 | 59 | func callEnd(output: String, reqId: String, cost: Double) { 60 | for callback in self.callbacks { 61 | do { 62 | try callback.on_llm_end(output: output, metadata: [LLM.LLM_REQ_ID_KEY: reqId, LLM.LLM_COST_KEY: "\(cost)"]) 63 | } catch { 64 | print("call LLM end callback errer: \(error)") 65 | } 66 | } 67 | } 68 | 69 | func callStart(prompt: String, reqId: String) { 70 | for callback in self.callbacks { 71 | do { 72 | try callback.on_llm_start(prompt: prompt, metadata: [LLM.LLM_REQ_ID_KEY: reqId]) 73 | } catch { 74 | print("call LLM start callback errer: \(error)") 75 | } 76 | } 77 | } 78 | 79 | func callCatch(error: Error, reqId: String, cost: Double) { 80 | for callback in self.callbacks { 81 | do { 82 | try callback.on_llm_error(error: error, metadata: [LLM.LLM_REQ_ID_KEY: reqId, LLM.LLM_COST_KEY: "\(cost)"]) 83 | } catch { 84 | print("call LLM start callback errer: \(error)") 85 | } 86 | } 87 | } 88 | 89 | func _send(text: String, stops: [String]) async throws -> LLMResult { 90 | LLMResult() 91 | } 92 | } 93 | -------------------------------------------------------------------------------- /Sources/LangChain/llms/LMStudio.swift: -------------------------------------------------------------------------------- 1 | // 2 | // File.swift 3 | // 4 | // 5 | // Created by 顾艳华 on 1/17/24. 6 | // 7 | 8 | // 9 | // File.swift 10 | // 11 | // 12 | // Created by 顾艳华 on 2023/6/10. 13 | // 14 | 15 | import Foundation 16 | import NIOPosix 17 | import AsyncHTTPClient 18 | import OpenAIKit 19 | //{ 20 | // "id": "chatcmpl-8wiceqlk0jhyverlvd028", 21 | // "object": "chat.completion", 22 | // "created": 1705470413, 23 | // "model": "/Users/guyanhua/.cache/lm-studio/models/TheBloke/Mythalion-13B-GGUF/mythalion-13b.Q4_0.gguf", 24 | // "choices": [ 25 | // { 26 | // "index": 0, 27 | // "message": { 28 | // "role": "assistant", 29 | // "content": " Hello!" 30 | // }, 31 | // "finish_reason": "stop" 32 | // } 33 | // ], 34 | // "usage": { 35 | // "prompt_tokens": 13, 36 | // "completion_tokens": 2, 37 | // "total_tokens": 15 38 | // } 39 | //} 40 | struct LMStudioResponceChoices: Codable { 41 | let index: Int 42 | let message: ChatGLMMessage 43 | } 44 | struct LMStudioResponce: Codable { 45 | let choices: [LMStudioResponceChoices] 46 | } 47 | public class LMStudio: LLM { 48 | 49 | let temperature: Double 50 | 51 | public init(temperature: Double = 0.0, callbacks: [BaseCallbackHandler] = [], cache: BaseCache? 
= nil) { 52 | self.temperature = temperature 53 | super.init(callbacks: callbacks, cache: cache) 54 | } 55 | 56 | public override func _send(text: String, stops: [String] = []) async throws -> LLMResult { 57 | let env = LC.loadEnv() 58 | let baseUrl = env["LMSTUDIO_URL"] ?? "localhost:1234" 59 | let eventLoopGroup = ThreadManager.thread 60 | 61 | let httpClient = HTTPClient(eventLoopGroupProvider: .shared(eventLoopGroup)) 62 | defer { 63 | // it's important to shutdown the httpClient after all requests are done, even if one failed. See: https://github.com/swift-server/async-http-client 64 | try? httpClient.syncShutdown() 65 | } 66 | let url = "http://\(baseUrl)/v1/chat/completions" 67 | var request = HTTPClientRequest(url: url) 68 | request.method = .POST 69 | request.headers.add(name: "Content-Type", value: "application/json") 70 | request.headers.add(name: "Accept", value: "application/json") 71 | let requestBody = try! JSONEncoder().encode(BaiduLLMRequest(temperature: temperature, messages: [ChatGLMMessage(role: "user", content: text)])) 72 | request.body = .bytes(requestBody) 73 | let response = try await httpClient.execute(request, timeout: .seconds(1800)) 74 | if response.status == .ok { 75 | let str = String(buffer: try await response.body.collect(upTo: 1024 * 1024)) 76 | let data = str.data(using: .utf8)! 77 | let llmResponse = try! JSONDecoder().decode(LMStudioResponce.self, from: data) 78 | return LLMResult(llm_output: llmResponse.choices.first!.message.content) 79 | } else { 80 | // handle remote error 81 | print("http code is not 200.") 82 | return LLMResult() 83 | } 84 | } 85 | 86 | 87 | } 88 | -------------------------------------------------------------------------------- /Sources/LangChain/llms/Llama2.swift: -------------------------------------------------------------------------------- 1 | // 2 | // File.swift 3 | // 4 | // 5 | // Created by 顾艳华 on 2023/9/7. 6 | // 7 | 8 | 9 | 10 | // https://www.llama-api.com/account/api-token 11 | import Foundation 12 | import NIOPosix 13 | import AsyncHTTPClient 14 | import OpenAIKit 15 | 16 | public class Llama2: LLM { 17 | 18 | let temperature: Double 19 | 20 | public init(temperature: Double = 0.0) { 21 | self.temperature = temperature 22 | } 23 | 24 | public override func _send(text: String, stops: [String] = []) async -> LLMResult { 25 | let env = LC.loadEnv() 26 | 27 | if let apiKey = env["LLAMA2_API_KEY"] { 28 | let responce = await LlamaAPIWrapper().execute(text: text, key: apiKey, temperature: self.temperature, max_tokens: 2048, topP: 1.0, n: 1, stops: []) 29 | return LLMResult(llm_output: responce) 30 | } else { 31 | print("Please set llama2 api key.") 32 | return LLMResult(llm_output: "Please set llama2 api key.") 33 | } 34 | 35 | } 36 | 37 | 38 | } 39 | 40 | 41 | -------------------------------------------------------------------------------- /Sources/LangChain/llms/Local.swift: -------------------------------------------------------------------------------- 1 | //// 2 | //// File.swift 3 | //// 4 | //// 5 | //// Created by 顾艳华 on 1/22/24. 6 | //// 7 | //import llmfarm_core 8 | //import Foundation 9 | // 10 | //public class Local: LLM { 11 | // let modelPath: String 12 | // let useMetal: Bool 13 | // let inference: ModelInference 14 | // 15 | // public init(inference: ModelInference, modelPath: String, useMetal: Bool = false, callbacks: [BaseCallbackHandler] = [], cache: BaseCache? 
= nil) { 16 | // self.inference = inference 17 | // self.modelPath = modelPath 18 | // self.useMetal = useMetal 19 | // super.init(callbacks: callbacks, cache: cache) 20 | // } 21 | // public override func _send(text: String, stops: [String] = []) async throws -> LLMResult { 22 | // let ai = AI(_modelPath: self.modelPath, _chatName: "chat") 23 | // var params:ModelAndContextParams = .default 24 | // params.use_metal = useMetal 25 | // params.promptFormat = .Custom 26 | // params.custom_prompt_format = "{{prompt}}" 27 | // try? ai.loadModel(inference, contextParams: params) 28 | // let output = try? ai.model.predict(text, mainCallback) 29 | //// print("🚗\(output)") 30 | // total_output = 0 31 | // return LLMResult(llm_output: output) 32 | // } 33 | // 34 | // let maxOutputLength = 256 35 | // var total_output = 0 36 | // 37 | // func mainCallback(_ str: String, _ time: Double) -> Bool { 38 | // print("\(str)",terminator: "") 39 | // total_output += str.count 40 | // if(total_output>maxOutputLength){ 41 | // return true 42 | // } 43 | // return false 44 | // } 45 | //} 46 | // 47 | -------------------------------------------------------------------------------- /Sources/LangChain/llms/OpenAI.swift: -------------------------------------------------------------------------------- 1 | // 2 | // File.swift 3 | // 4 | // 5 | // Created by 顾艳华 on 2023/6/10. 6 | // 7 | 8 | import Foundation 9 | import NIOPosix 10 | import AsyncHTTPClient 11 | import OpenAIKit 12 | 13 | public class OpenAI: LLM { 14 | 15 | let temperature: Double 16 | let model: ModelID 17 | 18 | public init(temperature: Double = 0.0, model: ModelID = Model.GPT3.gpt3_5Turbo16K, callbacks: [BaseCallbackHandler] = [], cache: BaseCache? = nil) { 19 | self.temperature = temperature 20 | self.model = model 21 | super.init(callbacks: callbacks, cache: cache) 22 | } 23 | 24 | public override func _send(text: String, stops: [String] = []) async throws -> LLMResult { 25 | let env = LC.loadEnv() 26 | 27 | if let apiKey = env["OPENAI_API_KEY"] { 28 | let baseUrl = env["OPENAI_API_BASE"] ?? "api.openai.com" 29 | let configuration = Configuration(apiKey: apiKey, api: API(scheme: .https, host: baseUrl)) 30 | #if os(macOS) || os(iOS) || os(visionOS) 31 | let eventLoopGroup = ThreadManager.thread 32 | 33 | let httpClient = HTTPClient(eventLoopGroupProvider: .shared(eventLoopGroup)) 34 | defer { 35 | // it's important to shutdown the httpClient after all requests are done, even if one failed. See: https://github.com/swift-server/async-http-client 36 | try? httpClient.syncShutdown() 37 | } 38 | 39 | let openAIClient = OpenAIKit.Client(httpClient: httpClient, configuration: configuration) 40 | #else 41 | let urlSession = URLSession(configuration: .default) 42 | let openAIClient = OpenAIKit.Client(session: urlSession, configuration: configuration) 43 | #endif 44 | let completion = try await openAIClient.chats.create(model: model, messages: [.user(content: text)], temperature: temperature, stops: stops) 45 | return LLMResult(llm_output: completion.choices.first!.message.content) 46 | } else { 47 | print("Please set openai api key.") 48 | return LLMResult(llm_output: "Please set openai api key.") 49 | } 50 | 51 | } 52 | 53 | 54 | } 55 | -------------------------------------------------------------------------------- /Sources/LangChain/memory/Base.swift: -------------------------------------------------------------------------------- 1 | // 2 | // File.swift 3 | // 4 | // 5 | // Created by 顾艳华 on 2023/6/22. 
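//
// Minimal conformance sketch for the BaseMemory protocol declared below
// (illustrative only; a real implementation would keep history, as
// ConversationBufferWindowMemory in Chat.swift does):
//
//     struct NoopMemory: BaseMemory {
//         func load_memory_variables(inputs: [String: Any]) -> [String: [String]] { [:] }
//         func save_context(inputs: [String: String], outputs: [String: String]) { }
//         func clear() { }
//     }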
6 | // 7 | 8 | import Foundation 9 | 10 | public struct BaseMessage { 11 | let content: String 12 | let type: String 13 | } 14 | public protocol BaseMemory { 15 | func load_memory_variables(inputs: [String: Any]) -> [String: [String]] 16 | 17 | func save_context(inputs: [String: String], outputs: [String: String]) 18 | 19 | func clear() 20 | } 21 | 22 | 23 | 24 | public class BaseChatMessageHistory { 25 | public func add_user_message(message: String) { 26 | self.add_message(message: BaseMessage(content: message, type: "human")) 27 | } 28 | 29 | public func add_ai_message(message: String) { 30 | self.add_message(message: BaseMessage(content: message, type: "ai")) 31 | } 32 | 33 | public func add_message(message: BaseMessage) { 34 | 35 | } 36 | 37 | public func clear() { 38 | 39 | } 40 | } 41 | //class BaseMemory(Serializable, ABC): 42 | // """Base interface for memory in chains.""" 43 | // 44 | // class Config: 45 | // """Configuration for this pydantic object.""" 46 | // 47 | // arbitrary_types_allowed = True 48 | // 49 | // @property 50 | // @abstractmethod 51 | // def memory_variables(self) -> List[str]: 52 | // """Input keys this memory class will load dynamically.""" 53 | // 54 | // @abstractmethod 55 | // def load_memory_variables(self, inputs: Dict[str, Any]) -> Dict[str, Any]: 56 | // """Return key-value pairs given the text input to the chain. 57 | // 58 | // If None, return all memories 59 | // """ 60 | // 61 | // @abstractmethod 62 | // def save_context(self, inputs: Dict[str, Any], outputs: Dict[str, str]) -> None: 63 | // """Save the context of this model run to memory.""" 64 | // 65 | // @abstractmethod 66 | // def clear(self) -> None: 67 | // """Clear memory contents.""" 68 | // 69 | // 70 | //class BaseChatMessageHistory(ABC): 71 | // """Base interface for chat message history 72 | // See `ChatMessageHistory` for default implementation. 73 | // """ 74 | // 75 | // """ 76 | // Example: 77 | // .. 
code-block:: python 78 | // 79 | // class FileChatMessageHistory(BaseChatMessageHistory): 80 | // storage_path: str 81 | // session_id: str 82 | // 83 | // @property 84 | // def messages(self): 85 | // with open(os.path.join(storage_path, session_id), 'r:utf-8') as f: 86 | // messages = json.loads(f.read()) 87 | // return messages_from_dict(messages) 88 | // 89 | // def add_message(self, message: BaseMessage) -> None: 90 | // messages = self.messages.append(_message_to_dict(message)) 91 | // with open(os.path.join(storage_path, session_id), 'w') as f: 92 | // json.dump(f, messages) 93 | // 94 | // def clear(self): 95 | // with open(os.path.join(storage_path, session_id), 'w') as f: 96 | // f.write("[]") 97 | // """ 98 | // 99 | // messages: List[BaseMessage] 100 | // 101 | // def add_user_message(self, message: str) -> None: 102 | // """Add a user message to the store""" 103 | // self.add_message(HumanMessage(content=message)) 104 | // 105 | // def add_ai_message(self, message: str) -> None: 106 | // """Add an AI message to the store""" 107 | // self.add_message(AIMessage(content=message)) 108 | // 109 | // def add_message(self, message: BaseMessage) -> None: 110 | // """Add a self-created message to the store""" 111 | // raise NotImplementedError 112 | // 113 | // @abstractmethod 114 | // def clear(self) -> None: 115 | // """Remove all messages from the store""" 116 | -------------------------------------------------------------------------------- /Sources/LangChain/memory/Chat.swift: -------------------------------------------------------------------------------- 1 | // 2 | // File.swift 3 | // 4 | // 5 | // Created by 顾艳华 on 2023/6/22. 6 | // 7 | 8 | import Foundation 9 | public class BaseChatMemory: BaseMemory { 10 | let chat_memory: ChatMessageHistory = ChatMessageHistory() 11 | 12 | public func load_memory_variables(inputs: [String : Any]) -> [String : [String]] { 13 | [:] 14 | } 15 | 16 | public func save_context(inputs: [String: String], outputs: [String: String]) { 17 | for (_, input_str) in inputs { 18 | self.chat_memory.add_user_message(message: input_str) 19 | } 20 | for (_, output_str) in outputs { 21 | self.chat_memory.add_ai_message(message: output_str) 22 | } 23 | } 24 | 25 | public func clear() { 26 | 27 | } 28 | 29 | 30 | } 31 | 32 | public class ConversationBufferWindowMemory: BaseChatMemory { 33 | let memory_key = "history" 34 | let k: Int 35 | public init(k: Int = 2) { 36 | self.k = k 37 | } 38 | public override func load_memory_variables(inputs: [String: Any]) -> [String: [String]] { 39 | // Return history buffer. 
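// Keep only the `k` most recent messages and render each one as
// "type: content", returned under `memory_key` ("history") for prompt building.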
40 | 41 | let buffer = self.chat_memory.messages.suffix(k) 42 | 43 | let bufferString = buffer.map{ "\($0.type): \($0.content)" } 44 | return [self.memory_key: bufferString] 45 | } 46 | } 47 | 48 | public class ChatMessageHistory: BaseChatMessageHistory { 49 | public var messages: [BaseMessage] = [] 50 | 51 | public override func add_message(message: BaseMessage) { 52 | // """Add a self-created message to the store""" 53 | self.messages.append(message) 54 | } 55 | 56 | public override func clear(){ 57 | self.messages = [] 58 | } 59 | } 60 | // 61 | //class ChatMessageHistory(BaseChatMessageHistory, BaseModel): 62 | // messages: List[BaseMessage] = [] 63 | // 64 | // def add_message(self, message: BaseMessage) -> None: 65 | // """Add a self-created message to the store""" 66 | // self.messages.append(message) 67 | // 68 | // def clear(self) -> None: 69 | // self.messages = [] 70 | 71 | //class BaseChatMemory(BaseMemory, ABC): 72 | // chat_memory: BaseChatMessageHistory = Field(default_factory=ChatMessageHistory) 73 | // output_key: Optional[str] = None 74 | // input_key: Optional[str] = None 75 | // return_messages: bool = False 76 | // 77 | // def _get_input_output( 78 | // self, inputs: Dict[str, Any], outputs: Dict[str, str] 79 | // ) -> Tuple[str, str]: 80 | // if self.input_key is None: 81 | // prompt_input_key = get_prompt_input_key(inputs, self.memory_variables) 82 | // else: 83 | // prompt_input_key = self.input_key 84 | // if self.output_key is None: 85 | // if len(outputs) != 1: 86 | // raise ValueError(f"One output key expected, got {outputs.keys()}") 87 | // output_key = list(outputs.keys())[0] 88 | // else: 89 | // output_key = self.output_key 90 | // return inputs[prompt_input_key], outputs[output_key] 91 | // 92 | // def save_context(self, inputs: Dict[str, Any], outputs: Dict[str, str]) -> None: 93 | // """Save context from this conversation to buffer.""" 94 | // input_str, output_str = self._get_input_output(inputs, outputs) 95 | // self.chat_memory.add_user_message(input_str) 96 | // self.chat_memory.add_ai_message(output_str) 97 | // 98 | // def clear(self) -> None: 99 | // """Clear memory contents.""" 100 | // self.chat_memory.clear() 101 | // 102 | // 103 | // class ChatMessageHistory(BaseChatMessageHistory, BaseModel): 104 | // messages: List[BaseMessage] = [] 105 | // 106 | // def add_message(self, message: BaseMessage) -> None: 107 | // """Add a self-created message to the store""" 108 | // self.messages.append(message) 109 | // 110 | // def clear(self) -> None: 111 | // self.messages = [] 112 | -------------------------------------------------------------------------------- /Sources/LangChain/memory/ReadOnlySharedMemory.swift: -------------------------------------------------------------------------------- 1 | // 2 | // File.swift 3 | // 4 | // 5 | // Created by 顾艳华 on 2023/7/31. 
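//
// Usage sketch (illustrative only): wrap an existing memory so other chains
// can read the shared history but cannot modify it; reads delegate to the
// wrapped memory, while save_context and clear below are intentional no-ops.
//
//     let shared = ConversationBufferWindowMemory(k: 4)
//     let readOnly = ReadOnlySharedMemory(base: shared)
//     let history = readOnly.load_memory_variables(inputs: [:])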
6 | // 7 | 8 | import Foundation 9 | public struct ReadOnlySharedMemory: BaseMemory { 10 | 11 | let base: BaseMemory 12 | public init(base: BaseMemory) { 13 | self.base = base 14 | } 15 | 16 | public func load_memory_variables(inputs: [String : Any]) -> [String : [String]] { 17 | base.load_memory_variables(inputs: inputs) 18 | } 19 | 20 | public func save_context(inputs: [String : String], outputs: [String : String]) { 21 | 22 | } 23 | 24 | public func clear() { 25 | 26 | } 27 | 28 | 29 | } 30 | -------------------------------------------------------------------------------- /Sources/LangChain/parser/BaseOutputParser.swift: -------------------------------------------------------------------------------- 1 | // 2 | // File.swift 3 | // 4 | // 5 | // Created by 顾艳华 on 2023/6/21. 6 | // 7 | 8 | import Foundation 9 | import SwiftyJSON 10 | 11 | public struct AgentAction{ 12 | public let action: String 13 | public let input: String 14 | public let log: String 15 | public init(action: String, input: String, log: String) { 16 | self.action = action 17 | self.input = input 18 | self.log = log 19 | } 20 | } 21 | public struct AgentFinish { 22 | public let final: String 23 | public init(final: String) { 24 | self.final = final 25 | } 26 | } 27 | 28 | public enum Parsed { 29 | case action(AgentAction) 30 | case finish(AgentFinish) 31 | case error 32 | case unimplemented 33 | case nothing 34 | case str(String) 35 | case list([String]) 36 | case json(JSON) 37 | case dict([String: String]) 38 | case object(Codable) 39 | case enumType(Any) 40 | case date(Date) 41 | } 42 | public protocol BaseOutputParser { 43 | func parse(text: String) -> Parsed 44 | } 45 | 46 | public struct StrOutputParser: BaseOutputParser { 47 | public init() {} 48 | public func parse(text: String) -> Parsed { 49 | Parsed.str(text) 50 | } 51 | 52 | 53 | } 54 | -------------------------------------------------------------------------------- /Sources/LangChain/parser/DateOutputParser.swift: -------------------------------------------------------------------------------- 1 | // 2 | // File.swift 3 | // 4 | // 5 | // Created by 顾艳华 on 2023/10/25. 6 | // 7 | 8 | import Foundation 9 | 10 | public struct DateOutputParser: BaseOutputParser { 11 | public init() { 12 | } 13 | 14 | let format = "yyyy MM dd" 15 | static func _generate_end_date() -> Date { 16 | let currentDate = Date() 17 | var dateComponent = DateComponents() 18 | 19 | dateComponent.year = 10 20 | 21 | let futureDate = Calendar.current.date(byAdding: dateComponent, to: currentDate) 22 | return futureDate! 23 | } 24 | func _generate_random_datetime_strings( 25 | pattern: String, n: Int = 3, start_date: Date = Date.now, end_date: Date = _generate_end_date() 26 | ) -> String { 27 | let formatter = DateFormatter() 28 | formatter.dateFormat = pattern 29 | let startString = formatter.string(from: start_date) 30 | return startString 31 | } 32 | public func parse(text: String) -> Parsed { 33 | let dateFormatter = DateFormatter() 34 | dateFormatter.dateFormat = self.format 35 | let dateFromString = dateFormatter.date(from: text) 36 | if dateFromString != nil { 37 | return Parsed.date(dateFromString!) 38 | } else { 39 | return Parsed.error 40 | } 41 | } 42 | let PYDANTIC_FORMAT_INSTRUCTIONS = """ 43 | The output should be formatted as a date string below. 44 | 45 | %@ 46 | 47 | The output must be remove all other content and only keep the date string. 
48 | 49 | Provide an output example such as: 50 | 51 | %@ 52 | Question: 53 | """ 54 | public func get_format_instructions() -> String { 55 | String(format: PYDANTIC_FORMAT_INSTRUCTIONS, self.format, _generate_random_datetime_strings(pattern: self.format)) 56 | } 57 | } 58 | -------------------------------------------------------------------------------- /Sources/LangChain/parser/EnumOutputParser.swift: -------------------------------------------------------------------------------- 1 | // 2 | // File.swift 3 | // 4 | // 5 | // Created by 顾艳华 on 2023/9/8. 6 | // 7 | 8 | import Foundation 9 | 10 | public struct EnumOutputParser : BaseOutputParser where T: RawRepresentable ,T: CaseIterable, T.RawValue == String { 11 | public init(enumType: T.Type) { 12 | self.enumType = enumType 13 | } 14 | 15 | let enumType: T.Type 16 | public func parse(text: String) -> Parsed { 17 | if let e = T(rawValue: text){ 18 | return Parsed.enumType(e) 19 | } else { 20 | return Parsed.str("Parse fail.") 21 | } 22 | } 23 | 24 | public func get_format_instructions() -> String { 25 | var all: [String] = [] 26 | for value in T.allCases { 27 | all.append(value.rawValue) 28 | } 29 | return String(format: "Select one of the following options: {%@}, The output is simply the value %@", all.joined(separator: ", "), all.joined(separator: " or ")) 30 | } 31 | 32 | } 33 | -------------------------------------------------------------------------------- /Sources/LangChain/parser/ListOutputParser.swift: -------------------------------------------------------------------------------- 1 | // 2 | // File.swift 3 | // 4 | // 5 | // Created by 顾艳华 on 2023/8/4. 6 | // 7 | 8 | import Foundation 9 | public struct ListOutputParser: BaseOutputParser { 10 | public func parse(text: String) -> Parsed { 11 | Parsed.list(text.components(separatedBy: ",")) 12 | } 13 | 14 | 15 | } 16 | -------------------------------------------------------------------------------- /Sources/LangChain/parser/MRKLOutputParser.swift: -------------------------------------------------------------------------------- 1 | // 2 | // File.swift 3 | // 4 | // 5 | // Created by 顾艳华 on 2023/6/21. 6 | // 7 | 8 | import Foundation 9 | 10 | public struct MRKLOutputParser: BaseOutputParser { 11 | public init() {} 12 | public func parse(text: String) -> Parsed { 13 | print(text.uppercased()) 14 | if text.uppercased().contains(FINAL_ANSWER_ACTION) { 15 | return Parsed.finish(AgentFinish(final: text)) 16 | } 17 | let pattern = "Action\\s*:[\\s]*(.*)[\\s]*Action\\s*Input\\s*:[\\s]*(.*)" 18 | let regex = try! NSRegularExpression(pattern: pattern) 19 | 20 | if let match = regex.firstMatch(in: text, options: [], range: NSRange(location: 0, length: text.utf16.count)) { 21 | 22 | let firstCaptureGroup = Range(match.range(at: 1), in: text).map { String(text[$0]) } 23 | // print(firstCaptureGroup!) 24 | 25 | 26 | let secondCaptureGroup = Range(match.range(at: 2), in: text).map { String(text[$0]) } 27 | // print(secondCaptureGroup!) 28 | return Parsed.action(AgentAction(action: firstCaptureGroup!, input: secondCaptureGroup!, log: text)) 29 | } else { 30 | return Parsed.error 31 | } 32 | } 33 | } 34 | -------------------------------------------------------------------------------- /Sources/LangChain/parser/ObjectOutputParser.swift: -------------------------------------------------------------------------------- 1 | // 2 | // File.swift 3 | // 4 | // 5 | // Created by 顾艳华 on 2023/8/28. 
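//
// Usage sketch (illustrative only; `Book` is a made-up Codable type). The
// parser is generic over the Codable type of its `demo` value: the demo drives
// get_format_instructions(), and parse(text:) decodes the model's JSON reply
// back into that type.
//
//     struct Book: Codable { var title: String; var pages: Int }
//     var parser = ObjectOutputParser(demo: Book(title: "t", pages: 1))
//     let formatHint = parser.get_format_instructions()   // append this to the prompt
//     if case let .object(value) = parser.parse(text: "{\"title\":\"Dune\",\"pages\":412}"),
//        let book = value as? Book {
//         print(book.title, book.pages)
//     }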
6 | // 7 | 8 | import Foundation 9 | 10 | public struct ObjectOutputParser: BaseOutputParser { 11 | var schema = "" 12 | public init(demo: T) { 13 | self.demo = demo 14 | } 15 | 16 | let demo: T 17 | let PYDANTIC_FORMAT_INSTRUCTIONS = """ 18 | The output should be formatted as a JSON instance that conforms to the JSON schema below. 19 | 20 | As an example, for the schema {title: String,content: String,unit: {num: Int,},} 21 | the object {"content":"b","title":"a","unit":{"num":1}} is a well-formatted instance of the schema. The object {{"properties": {"content":"b","title":"a","unit":{"num":1}}}} is not well-formatted. 22 | 23 | Here is the output schema: 24 | %@ 25 | """ 26 | 27 | public func parse(text: String) -> Parsed { 28 | let r = try! JSONDecoder().decode(T.self, from: text.data(using: .utf8)!) 29 | return Parsed.object(r) 30 | } 31 | fileprivate func isPrimitive(_ t: String) -> Bool { 32 | return t == "Int" || t == "String" || t == "Double" || t == "Float" || t == "Bool" 33 | } 34 | 35 | mutating func printStruct(structObject: Any) { 36 | let mirror = Mirror(reflecting: structObject) 37 | for (name, value) in mirror.children { 38 | // guard let name = name else { continue } 39 | let t = "\(type(of: value))" 40 | // print("type: \(t)") 41 | if isPrimitive(t) { 42 | let s = "\(name!): \(t)" 43 | schema += "\(s)," 44 | // print(s) 45 | } else if t.starts(with: "Array<") { 46 | // let s = "\(name): [" 47 | schema += "[" 48 | printStruct(structObject: value) 49 | schema += "]," 50 | } else { 51 | if let name = name { 52 | let s = "\(name): {" 53 | schema += "\(s)" 54 | } else { 55 | schema += "{" 56 | } 57 | 58 | // print(s) 59 | 60 | printStruct(structObject: value) 61 | schema += "}," 62 | // print("}") 63 | } 64 | } 65 | } 66 | public mutating func get_format_instructions() -> String { 67 | // print("{") 68 | schema += "{" 69 | printStruct(structObject: demo) 70 | // print("}") 71 | schema += "}" 72 | // print("schema: \(schema)") 73 | let i = String(format: PYDANTIC_FORMAT_INSTRUCTIONS, schema) 74 | //reset 75 | schema = "" 76 | return i 77 | } 78 | 79 | } 80 | -------------------------------------------------------------------------------- /Sources/LangChain/parser/RouterOutputParser.swift: -------------------------------------------------------------------------------- 1 | // 2 | // File.swift 3 | // 4 | // 5 | // Created by 顾艳华 on 2023/8/7. 6 | // 7 | 8 | import Foundation 9 | import SwiftyJSON 10 | 11 | public struct RouterOutputParser: BaseOutputParser { 12 | let default_destination = "DEFAULT" 13 | // next_inputs_type: Type = str 14 | // let next_inputs_inner_key = "input" 15 | public init() { 16 | 17 | } 18 | public func parse(text: String) -> Parsed { 19 | // "```(json)?(.*)```" 20 | print("router text: \(text)") 21 | if let jsonText = findJSON(text: text) { 22 | // let expected_keys = ["destination", "next_inputs"] 23 | let json = JSON(jsonText.data(using: .utf8)!) 24 | return Parsed.dict(["destination": json["destination"].stringValue, "next_inputs": json["next_inputs"].stringValue]) 25 | } else { 26 | return .error 27 | } 28 | // try: 29 | // expected_keys = ["destination", "next_inputs"] 30 | // parsed = parse_and_check_json_markdown(text, expected_keys) 31 | // if not isinstance(parsed["destination"], str): 32 | // raise ValueError("Expected 'destination' to be a string.") 33 | // if not isinstance(parsed["next_inputs"], self.next_inputs_type): 34 | // raise ValueError( 35 | // f"Expected 'next_inputs' to be {self.next_inputs_type}." 
36 | // ) 37 | // parsed["next_inputs"] = {self.next_inputs_inner_key: parsed["next_inputs"]} 38 | // if ( 39 | // parsed["destination"].strip().lower() 40 | // == self.default_destination.lower() 41 | // ): 42 | // parsed["destination"] = None 43 | // else: 44 | // parsed["destination"] = parsed["destination"].strip() 45 | // return parsed 46 | // except Exception as e: 47 | // raise OutputParserException( 48 | // f"Parsing text\n{text}\n raised following error:\n{e}" 49 | // ) 50 | 51 | 52 | } 53 | 54 | func findJSON(text: String) -> String? { 55 | // let pattern = "```(json)?(.*)```" 56 | // 57 | // do { 58 | //// print(text) 59 | // let regex = try NSRegularExpression(pattern: pattern, options: .caseInsensitive) 60 | // let matches = regex.matches(in: text, options: [], range: NSRange(location: 0, length: text.utf16.count)) 61 | // if matches.isEmpty { 62 | // return nil 63 | // } else { 64 | // return String(text[Range(matches.first!.range, in: text)!]) 65 | // } 66 | // } catch { 67 | // print("Error: \(error.localizedDescription)") 68 | // return nil 69 | // } 70 | text 71 | } 72 | } 73 | -------------------------------------------------------------------------------- /Sources/LangChain/parser/SimpleJsonOutputParser.swift: -------------------------------------------------------------------------------- 1 | // 2 | // File.swift 3 | // 4 | // 5 | // Created by 顾艳华 on 2023/8/4. 6 | // 7 | 8 | import Foundation 9 | import SwiftyJSON 10 | 11 | public struct SimpleJsonOutputParser: BaseOutputParser { 12 | public func parse(text: String) -> Parsed { 13 | do { 14 | return Parsed.json(try JSON(data: text.data(using: .utf8)!)) 15 | } catch { 16 | print("Parse json error: \(text)") 17 | return Parsed.error 18 | } 19 | } 20 | 21 | 22 | } 23 | -------------------------------------------------------------------------------- /Sources/LangChain/prompts/MultiPromptRouter.swift: -------------------------------------------------------------------------------- 1 | // 2 | // File.swift 3 | // 4 | // 5 | // Created by 顾艳华 on 2023/8/7. 6 | // 7 | 8 | import Foundation 9 | 10 | public struct MultiPromptRouter { 11 | public static func formatDestinations(destinations: String) -> String { 12 | """ 13 | Given a raw text input to a language model select the model prompt best suited for 14 | the input. You will be given the names of the available prompts and a description of 15 | what the prompt is best suited for. You may also revise the original input if you 16 | think that revising it will ultimately lead to a better response from the language 17 | model. 18 | 19 | << FORMATTING >> 20 | Return a JSON object formatted to look like: 21 | { 22 | "destination": string \\ name of the prompt to use or "DEFAULT" 23 | "next_inputs": string \\ a potentially modified version of the original input 24 | } 25 | 26 | REMEMBER: "destination" MUST be one of the candidate prompt names specified below OR \ 27 | it can be "DEFAULT" if the input is not well suited for any of the candidate prompts. 28 | REMEMBER: "next_inputs" can just be the original input if you don't think any \ 29 | modifications are needed. 
30 | 31 | << CANDIDATE PROMPTS >> 32 | \(destinations) 33 | 34 | << INPUT >> 35 | {input} 36 | 37 | << OUTPUT >> 38 | """ 39 | } 40 | 41 | // public static func formatInput(rawString: String, input: String) -> String { 42 | // let newString = rawString.replacingOccurrences(of: "%input", with: "%@") 43 | // return String(format: newString, input) 44 | // } 45 | } 46 | -------------------------------------------------------------------------------- /Sources/LangChain/prompts/PromptTemplate.swift: -------------------------------------------------------------------------------- 1 | // 2 | // File.swift 3 | // 4 | // 5 | // Created by 顾艳华 on 2023/6/16. 6 | // 7 | 8 | import Foundation 9 | 10 | public class PromptTemplate { 11 | // Schema to represent a prompt for an LLM. 12 | public init(input_variables: [String], partial_variable: [String : String], template: String) { 13 | self.input_variables = input_variables 14 | self.partial_variable = partial_variable 15 | self.template = template 16 | } 17 | 18 | public let input_variables: [String] 19 | public let partial_variable: [String: String] 20 | // A list of the names of the variables the prompt template expects. 21 | 22 | public let template: String 23 | // The prompt template. 24 | public func format(args: [String: String]) -> String { 25 | var templateCopy = template 26 | for (k, v) in partial_variable { 27 | let replace = "{\(k)}" 28 | templateCopy = templateCopy.replacingOccurrences(of: replace, with: v) 29 | } 30 | // assert(args.count == input_variables.count) 31 | // var argsCopy = args 32 | for k in input_variables { 33 | let replace = "{\(k)}" 34 | let input = args[k] 35 | if input != nil { 36 | templateCopy = templateCopy.replacingOccurrences(of: replace, with: input!) 37 | } 38 | } 39 | return templateCopy 40 | } 41 | 42 | public static func from_template(input_variables: [String], partial_variable: [String : String], template: String) -> PromptTemplate { 43 | PromptTemplate(input_variables: input_variables, partial_variable: partial_variable, template: template) 44 | } 45 | } 46 | 47 | -------------------------------------------------------------------------------- /Sources/LangChain/prompts/mrkl/MrklPrompt.swift: -------------------------------------------------------------------------------- 1 | // 2 | // File.swift 3 | // 4 | // 5 | // Created by 顾艳华 on 2023/6/16. 6 | // 7 | 8 | import Foundation 9 | //# flake8: noqa 10 | public let PREFIX = """ 11 | Answer the following questions as best you can. You have access to the following tools: 12 | """ 13 | public let FORMAT_INSTRUCTIONS = """ 14 | Use the following format: 15 | 16 | Question: the input question you must answer 17 | Thought: you should always think about what to do 18 | Action: the action to take, should be one of [%@] 19 | Action Input: the input to the action 20 | Observation: the result of the action 21 | ... (this Thought/Action/Action Input/Observation can repeat N times) 22 | Thought: I now know the final answer 23 | Final Answer: the final answer to the original input question 24 | """ 25 | public let SUFFIX = """ 26 | Begin! 27 | 28 | Question: {question} 29 | Thought: {thought} 30 | """ 31 | 32 | public let FINAL_ANSWER_ACTION = "FINAL ANSWER" 33 | -------------------------------------------------------------------------------- /Sources/LangChain/retriever/BaseRetriever.swift: -------------------------------------------------------------------------------- 1 | // 2 | // File.swift 3 | // 4 | // 5 | // Created by 顾艳华 on 2023/11/1. 
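// Note: BaseRetriever is the abstract base class for retrievers. Subclasses override
// _get_relevant_documents(query:); callers go through get_relevant_documents(query:),
// which wraps the override and returns an empty array if it throws.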
6 | // 7 | 8 | import Foundation 9 | 10 | public class BaseRetriever { 11 | public func _get_relevant_documents(query: String) async throws -> [Document] { 12 | [] 13 | } 14 | 15 | public func get_relevant_documents(query: String) async -> [Document] { 16 | do { 17 | return try await self._get_relevant_documents(query: query) 18 | } catch { 19 | print("get_relevant_documents error \(error.localizedDescription)") 20 | return [] 21 | } 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /Sources/LangChain/retriever/MultiVectorRetriever.swift: -------------------------------------------------------------------------------- 1 | // 2 | // File.swift 3 | // 4 | // 5 | // Created by 顾艳华 on 2023/11/17. 6 | // 7 | 8 | import Foundation 9 | public class MultiVectorRetriever: BaseRetriever { 10 | let vectorstore: VectorStore 11 | let docstore: BaseStore 12 | let id_key = "doc_id" 13 | 14 | public init(vectorstore: VectorStore, docstore: BaseStore) { 15 | self.vectorstore = vectorstore 16 | self.docstore = docstore 17 | } 18 | 19 | public override func _get_relevant_documents(query: String) async throws -> [Document] { 20 | let sub_docs = await self.vectorstore.similaritySearch(query: query, k: 2) 21 | var ids: [String] = [] 22 | for d in sub_docs { 23 | ids.append(d.metadata[self.id_key]!) 24 | } 25 | let docs = await self.docstore.mget(keys: ids) 26 | return docs.map{Document(page_content: $0, metadata: [:])} 27 | } 28 | 29 | // def _get_relevant_documents( 30 | // self, query: str, *, run_manager: CallbackManagerForRetrieverRun 31 | // ) -> List[Document]: 32 | // """Get documents relevant to a query. 33 | // Args: 34 | // query: String to find relevant documents for 35 | // run_manager: The callbacks handler to use 36 | // Returns: 37 | // List of relevant documents 38 | // """ 39 | // sub_docs = self.vectorstore.similarity_search(query, **self.search_kwargs) 40 | // # We do this to maintain the order of the ids that are returned 41 | // ids = [] 42 | // for d in sub_docs: 43 | // if d.metadata[self.id_key] not in ids: 44 | // ids.append(d.metadata[self.id_key]) 45 | // docs = self.docstore.mget(ids) 46 | // return [d for d in docs if d is not None] 47 | } 48 | -------------------------------------------------------------------------------- /Sources/LangChain/retriever/PubmedRetriever.swift: -------------------------------------------------------------------------------- 1 | // 2 | // File.swift 3 | // 4 | // 5 | // Created by 顾艳华 on 2023/11/2. 6 | // 7 | 8 | import Foundation 9 | public class PubmedRetriever: BaseRetriever { 10 | let client = PubmedAPIWrapper() 11 | 12 | public override func _get_relevant_documents(query: String) async throws -> [Document] { 13 | try await client.load(query: query) 14 | } 15 | 16 | public override init() { 17 | 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /Sources/LangChain/retriever/WikipediaRetriever.swift: -------------------------------------------------------------------------------- 1 | // 2 | // File.swift 3 | // 4 | // 5 | // Created by 顾艳华 on 2023/11/2. 
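// Note: WikipediaRetriever delegates to WikipediaAPIWrapper.load(query:), which
// searches Wikipedia and returns the matching pages as Documents.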
6 | // 7 | 8 | import Foundation 9 | public class WikipediaRetriever: BaseRetriever { 10 | let client = WikipediaAPIWrapper() 11 | 12 | public override func _get_relevant_documents(query: String) async throws -> [Document] { 13 | try await client.load(query: query) 14 | } 15 | 16 | public override init() { 17 | 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /Sources/LangChain/schema/BaseStore.swift: -------------------------------------------------------------------------------- 1 | // 2 | // File.swift 3 | // 4 | // 5 | // Created by 顾艳华 on 2023/11/17. 6 | // 7 | 8 | import Foundation 9 | public class BaseStore { 10 | public func mget(keys: [String]) async -> [String] { 11 | [] 12 | } 13 | 14 | public func mset(kvpairs: [(String, String)]) async { 15 | 16 | } 17 | 18 | public func mdelete(keys: [String]) async { 19 | 20 | } 21 | 22 | public func keys(prefix: String? = nil) async -> [String] { 23 | [] 24 | } 25 | } 26 | -------------------------------------------------------------------------------- /Sources/LangChain/schema/InMemoryStore.swift: -------------------------------------------------------------------------------- 1 | // 2 | // File.swift 3 | // 4 | // 5 | // Created by 顾艳华 on 2023/11/17. 6 | // 7 | 8 | import Foundation 9 | public class InMemoryStore: BaseStore { 10 | var store:[String: String] = [:] 11 | public override init() { 12 | super.init() 13 | } 14 | public override func mget(keys: [String]) async -> [String] { 15 | var values: [String] = [] 16 | for k in keys { 17 | let v = self.store[k] 18 | if v != nil { 19 | values.append(v!) 20 | } 21 | } 22 | return values 23 | } 24 | 25 | public override func mset(kvpairs: [(String, String)]) async { 26 | for kv in kvpairs { 27 | self.store[kv.0] = kv.1 28 | } 29 | } 30 | 31 | public override func mdelete(keys: [String]) async { 32 | for k in keys { 33 | self.store.removeValue(forKey: k) 34 | } 35 | } 36 | 37 | public override func keys(prefix: String? = nil) async -> [String] { 38 | if prefix == nil { 39 | return Array(self.store.keys) 40 | } else { 41 | var matched: [String] = [] 42 | for k in self.store.keys { 43 | if k.hasPrefix(prefix!) { 44 | matched.append(k) 45 | } 46 | } 47 | return matched 48 | } 49 | } 50 | } 51 | -------------------------------------------------------------------------------- /Sources/LangChain/schema/LocalFileStore.swift: -------------------------------------------------------------------------------- 1 | // 2 | // File.swift 3 | // 4 | // 5 | // Created by 顾艳华 on 2023/11/19. 6 | // 7 | 8 | import Foundation 9 | import SwiftFileStore 10 | struct StoreEntry: Codable, JSONDataRepresentable { 11 | let key: String 12 | let value: String 13 | } 14 | public class LocalFileStore: BaseStore { 15 | let objectStore: FileObjectStore? 16 | var STORE_NS = "store" 17 | public init(prefix: String? 
= nil) { 18 | if let p = prefix { 19 | STORE_NS = STORE_NS + p 20 | } 21 | do { 22 | self.objectStore = try FileObjectStore.create() 23 | } catch { 24 | self.objectStore = nil 25 | } 26 | } 27 | 28 | public override func mget(keys: [String]) async -> [String] { 29 | // print("🍰 Get \(keys) from \(STORE_NS)") 30 | var values: [String] = [] 31 | do { 32 | for key in keys { 33 | if let data = key.data(using: .utf8) { 34 | let base64 = data.base64EncodedString() 35 | 36 | let cache = try await objectStore!.read(key: base64.sha256(), namespace: STORE_NS, objectType: StoreEntry.self) 37 | if let c = cache { 38 | values.append(c.value) 39 | } 40 | } 41 | } 42 | } catch { 43 | print("FileStore get failed") 44 | } 45 | return values 46 | } 47 | 48 | public override func mset(kvpairs: [(String, String)]) async { 49 | // print("🍰 Update \(kvpairs.map{$0.0}) at \(STORE_NS)") 50 | do { 51 | for kv in kvpairs { 52 | if let data = kv.0.data(using: .utf8) { 53 | let base64 = data.base64EncodedString() 54 | // TODO workaround https://developer.apple.com/forums/thread/739394 55 | let v = kv.1.replacingOccurrences(of: "\0", with: "") 56 | let cache = StoreEntry(key: kv.0, value: v) 57 | try await objectStore!.write(key: base64.sha256(), namespace: STORE_NS, object: cache) 58 | } 59 | } 60 | } catch { 61 | print("FileStore set failed") 62 | } 63 | } 64 | 65 | public override func mdelete(keys: [String]) async { 66 | // print("🍰 Delete \(keys) at \(STORE_NS)") 67 | do { 68 | for key in keys { 69 | if let data = key.data(using: .utf8) { 70 | let base64 = data.base64EncodedString() 71 | try await objectStore!.remove(key: base64.sha256(), namespace: STORE_NS) 72 | } 73 | } 74 | } catch { 75 | print("FileStore set failed") 76 | } 77 | } 78 | 79 | public override func keys(prefix: String? = nil) async -> [String] { 80 | do { 81 | if prefix == nil { 82 | // print("🍰 Get all keys from \(STORE_NS)") 83 | return Array(try await self.allKeys()) 84 | } else { 85 | // print("🍰 Get keys \(prefix!) from \(STORE_NS)") 86 | var matched: [String] = [] 87 | for k in try await self.allKeys() { 88 | if k.hasPrefix(prefix!) { 89 | matched.append(k) 90 | } 91 | } 92 | return matched 93 | } 94 | } catch { 95 | print("FileStore get keys failed \(error.localizedDescription)") 96 | return [] 97 | } 98 | 99 | } 100 | 101 | func allKeys() async throws -> [String] { 102 | var allKeys: [String] = [] 103 | let allSHA = try await objectStore!.readAllKeys(namespace: STORE_NS) 104 | for sha in allSHA { 105 | // print("sha: \(sha)") 106 | if sha == ".DS_Store" { 107 | continue 108 | } 109 | let cache = try await objectStore!.read(key: sha, namespace: STORE_NS, objectType: StoreEntry.self) 110 | allKeys.append(cache!.key) 111 | } 112 | return allKeys 113 | } 114 | } 115 | -------------------------------------------------------------------------------- /Sources/LangChain/schema/Schema.swift: -------------------------------------------------------------------------------- 1 | // 2 | // File.swift 3 | // 4 | // 5 | // Created by 顾艳华 on 2023/8/31. 6 | // 7 | 8 | import Foundation 9 | import OpenAIKit 10 | // TODO - remove OpenAIKit 11 | 12 | public class LLMResult { 13 | init(llm_output: String? = nil, stream: Bool = false) { 14 | self.llm_output = llm_output 15 | self.stream = stream 16 | } 17 | 18 | public var llm_output: String? 19 | 20 | public var stream: Bool 21 | 22 | public func setOutput() async throws { 23 | 24 | } 25 | public func getGeneration() -> AsyncThrowingStream? 
{ 26 | nil 27 | } 28 | } 29 | 30 | public class OpenAIResult: LLMResult { 31 | public let generation: AsyncThrowingStream? 32 | 33 | init(generation: AsyncThrowingStream? = nil, llm_output: String? = nil) { 34 | self.generation = generation 35 | super.init(llm_output: llm_output, stream: generation != nil && llm_output == nil) 36 | } 37 | 38 | public override func setOutput() async throws { 39 | if stream { 40 | llm_output = "" 41 | for try await c in generation! { 42 | if let message = c.choices.first?.delta.content { 43 | llm_output! += message 44 | } 45 | } 46 | } 47 | } 48 | 49 | public override func getGeneration() -> AsyncThrowingStream { 50 | return AsyncThrowingStream { continuation in 51 | Task { 52 | do { 53 | for try await c in generation! { 54 | if let message = c.choices.first?.delta.content { 55 | continuation.yield(message) 56 | } 57 | } 58 | continuation.finish() 59 | } catch { 60 | continuation.finish(throwing: error) 61 | } 62 | } 63 | 64 | } 65 | } 66 | } 67 | -------------------------------------------------------------------------------- /Sources/LangChain/tools/BaseTool.swift: -------------------------------------------------------------------------------- 1 | // 2 | // File.swift 3 | // 4 | // 5 | // Created by 顾艳华 on 2023/6/16. 6 | // 7 | 8 | import Foundation 9 | 10 | public protocol Tool { 11 | // Interface LangChain tools must implement. 12 | 13 | func name() -> String 14 | // The unique name of the tool that clearly communicates its purpose. 15 | func description() -> String 16 | 17 | func _run(args: String) async throws -> String 18 | } 19 | public class BaseTool: NSObject, Tool { 20 | static let TOOL_REQ_ID = "tool_req_id" 21 | static let TOOL_COST_KEY = "cost" 22 | static let TOOL_NAME_KEY = "tool_name" 23 | let callbacks: [BaseCallbackHandler] 24 | init(callbacks: [BaseCallbackHandler] = []) { 25 | var cbs: [BaseCallbackHandler] = callbacks 26 | if LC.addTraceCallbak() && !cbs.contains(where: { item in item is TraceCallbackHandler}) { 27 | cbs.append(TraceCallbackHandler()) 28 | } 29 | // assert(cbs.count == 1) 30 | self.callbacks = cbs 31 | } 32 | func callStart(tool: BaseTool, input: String, reqId: String) { 33 | do { 34 | for callback in callbacks { 35 | try callback.on_tool_start(tool: tool, input: input, metadata: [BaseTool.TOOL_REQ_ID: reqId, BaseTool.TOOL_NAME_KEY: tool.name()]) 36 | } 37 | } catch { 38 | 39 | } 40 | } 41 | 42 | func callEnd(tool: BaseTool, output: String, reqId: String, cost: Double) { 43 | do { 44 | for callback in callbacks { 45 | try callback.on_tool_end(tool: tool, output: output, metadata: [BaseTool.TOOL_REQ_ID: reqId, BaseTool.TOOL_COST_KEY: "\(cost)", BaseTool.TOOL_NAME_KEY: tool.name()]) 46 | } 47 | } catch { 48 | 49 | } 50 | } 51 | 52 | public func name() -> String { 53 | "" 54 | } 55 | 56 | public func description() -> String { 57 | "" 58 | } 59 | 60 | public func _run(args: String) async throws -> String { 61 | "" 62 | } 63 | 64 | public func run(args: String) async throws -> String { 65 | let reqId = UUID().uuidString 66 | var cost = 0.0 67 | let now = Date.now.timeIntervalSince1970 68 | callStart(tool: self, input: args, reqId: reqId) 69 | let result = try await _run(args: args) 70 | cost = Date.now.timeIntervalSince1970 - now 71 | callEnd(tool: self, output: result, reqId: reqId, cost: cost) 72 | return result 73 | } 74 | 75 | } 76 | -------------------------------------------------------------------------------- /Sources/LangChain/tools/Dummy.swift: 
-------------------------------------------------------------------------------- 1 | // 2 | // File.swift 3 | // 4 | // 5 | // Created by 顾艳华 on 2023/6/23. 6 | // 7 | 8 | import Foundation 9 | 10 | public class Dummy: BaseTool { 11 | public override init(callbacks: [BaseCallbackHandler] = []) { 12 | super.init(callbacks: callbacks) 13 | } 14 | public override func name() -> String { 15 | "dummy" 16 | } 17 | 18 | public override func description() -> String { 19 | "Useful for test." 20 | } 21 | 22 | public override func _run(args: String) async throws -> String { 23 | "Dummy test" 24 | } 25 | 26 | 27 | } 28 | -------------------------------------------------------------------------------- /Sources/LangChain/tools/GetLocationTool.swift: -------------------------------------------------------------------------------- 1 | // 2 | // File.swift 3 | // 4 | // 5 | // Created by 顾艳华 on 2023/11/7. 6 | // 7 | 8 | import Foundation 9 | import CoreLocation 10 | // !! Add "Privacy - Location When In Use Usage Description" to Info.plist 11 | public class GetLocationTool: BaseTool, CLLocationManagerDelegate { 12 | 13 | let locationManager:CLLocationManager = CLLocationManager() 14 | var authorizationStatus: CLAuthorizationStatus? 15 | private var locationContinuation: CheckedContinuation? 16 | public override init(callbacks: [BaseCallbackHandler] = []) { 17 | super.init(callbacks: callbacks) 18 | // callback locationManagerDidChangeAuthorization 19 | locationManager.delegate = self 20 | } 21 | public override func name() -> String { 22 | "GetLocation" 23 | } 24 | 25 | public override func description() -> String { 26 | """ 27 | Tool of get current location. 28 | Input must be "here". 29 | Returns the current longitude and latitude, such as -78.4:38.5. 30 | """ 31 | } 32 | 33 | public override func _run(args: String) async throws -> String { 34 | 35 | locationManager.requestLocation() 36 | //wait 37 | return try await withCheckedThrowingContinuation { continuation in 38 | locationContinuation = continuation 39 | } 40 | 41 | } 42 | 43 | public func locationManager(_ manager: CLLocationManager, didUpdateLocations locations: [CLLocation]) { 44 | let currLocation:CLLocation = locations.last! 45 | let longitude = currLocation.coordinate.longitude 46 | let latitude = currLocation.coordinate.latitude 47 | // signal 48 | locationContinuation?.resume(returning: "\(longitude):\(latitude)") 49 | } 50 | 51 | public func locationManagerDidChangeAuthorization(_ manager: CLLocationManager) { 52 | switch manager.authorizationStatus { 53 | case .authorizedWhenInUse: // Location services are available. 54 | // Insert code here of what should happen when Location services are authorized 55 | // authorizationStatus = .authorizedWhenInUse 56 | // locationManager.requestLocation() 57 | break 58 | 59 | case .restricted: // Location services currently unavailable. 60 | // Insert code here of what should happen when Location services are NOT authorized 61 | authorizationStatus = .restricted 62 | break 63 | 64 | case .denied: // Location services currently unavailable. 65 | // Insert code here of what should happen when Location services are NOT authorized 66 | authorizationStatus = .denied 67 | break 68 | 69 | case .notDetermined: // Authorization not determined yet. 
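// First use: record the pending status and prompt for when-in-use permission;
// the coordinates themselves are delivered later via locationManager(_:didUpdateLocations:).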
70 | authorizationStatus = .notDetermined 71 | manager.requestWhenInUseAuthorization() 72 | break 73 | 74 | default: 75 | break 76 | } 77 | } 78 | 79 | public func locationManager(_ manager: CLLocationManager, didFailWithError error: Error) { 80 | print("error: \(error.localizedDescription)") 81 | locationContinuation?.resume(throwing: error) 82 | } 83 | } 84 | -------------------------------------------------------------------------------- /Sources/LangChain/tools/InvalidTool.swift: -------------------------------------------------------------------------------- 1 | // 2 | // File.swift 3 | // 4 | // 5 | // Created by 顾艳华 on 2023/6/23. 6 | // 7 | 8 | import Foundation 9 | 10 | 11 | 12 | public class InvalidTool: BaseTool { 13 | let tool_name: String 14 | 15 | public init(tool_name: String) { 16 | self.tool_name = tool_name 17 | } 18 | 19 | public override func name() -> String { 20 | "invalid_tool" 21 | } 22 | 23 | public override func description() -> String { 24 | "Called when tool name is invalid." 25 | } 26 | 27 | public override func _run(args: String) async throws -> String { 28 | "\(tool_name) is not a valid tool, try another one." 29 | } 30 | 31 | 32 | } 33 | -------------------------------------------------------------------------------- /Sources/LangChain/tools/JavascriptREPLTool.swift: -------------------------------------------------------------------------------- 1 | // 2 | // File.swift 3 | // 4 | // 5 | // Created by 顾艳华 on 2023/8/27. 6 | // 7 | 8 | import Foundation 9 | 10 | #if os(macOS) || os(iOS) || os(visionOS) 11 | import JavaScriptCore 12 | 13 | 14 | public class JavascriptREPLTool: BaseTool { 15 | var context: JSContext = JSContext() 16 | public override init(callbacks: [BaseCallbackHandler] = []) { 17 | super.init(callbacks: callbacks) 18 | } 19 | public override func name() -> String { 20 | "javascript_REPL" 21 | } 22 | 23 | public override func description() -> String { 24 | """ 25 | A javascript shell. Use this to execute javascript commands. 26 | Input should be a valid javascript command. 27 | If you want to see the output of a value, you should print it out 28 | with `console.log(...)`. 29 | """ 30 | } 31 | 32 | public override func _run(args: String) async throws -> String { 33 | let jsResult = context.evaluateScript(args) 34 | if jsResult != nil { 35 | return (jsResult?.toString())! 36 | } else { 37 | return "javascript eval error." 38 | } 39 | } 40 | 41 | 42 | } 43 | #endif 44 | -------------------------------------------------------------------------------- /Sources/LangChain/tools/Serper.swift: -------------------------------------------------------------------------------- 1 | // 2 | // File.swift 3 | // 4 | // 5 | // Created by 顾艳华 on 2023/6/26. 6 | // 7 | 8 | import Foundation 9 | public class Serper: BaseTool{ 10 | let client = GoogleSerperAPIWrapper() 11 | let gl: String 12 | let hl: String 13 | public init(gl: String = "us", hl: String = "en", callbacks: [BaseCallbackHandler] = []) { 14 | self.gl = gl 15 | self.hl = hl 16 | super.init(callbacks: callbacks) 17 | } 18 | public override func name() -> String { 19 | "Google Serper Results JSON" 20 | } 21 | 22 | public override func description() -> String { 23 | """ 24 | A low-cost Google Search API. 25 | Useful for when you need to answer questions about current events. 26 | Input should be a search query. 
Output is a JSON object of the query results 27 | """ 28 | } 29 | 30 | public override func _run(args: String) async throws -> String { 31 | let json = await client._google_serper_api_results(search_term: args, gl: self.gl, hl: self.hl) 32 | return json 33 | } 34 | 35 | 36 | } 37 | -------------------------------------------------------------------------------- /Sources/LangChain/tools/TTSTool.swift: -------------------------------------------------------------------------------- 1 | // 2 | // File.swift 3 | // 4 | // 5 | // Created by 顾艳华 on 2023/11/10. 6 | // 7 | 8 | import Foundation 9 | import AVFoundation 10 | 11 | public class TTSTool: BaseTool { 12 | var audioPlayer: AVAudioPlayer? 13 | public override init(callbacks: [BaseCallbackHandler] = []) { 14 | super.init(callbacks: callbacks) 15 | } 16 | public override func name() -> String { 17 | "TTS" 18 | } 19 | 20 | public override func description() -> String { 21 | """ 22 | useful for convert text into sound and play it, returning the sound file path 23 | """ 24 | } 25 | 26 | public override func _run(args: String) async throws -> String { 27 | let env = LC.loadEnv() 28 | 29 | if let apiKey = env["OPENAI_API_KEY"] { 30 | let baseUrl = env["OPENAI_API_BASE"] ?? "api.openai.com" 31 | let data = await OpenAITTSAPIWrapper().tts(text: args, key: apiKey, base: baseUrl) 32 | let paths = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask) 33 | 34 | guard let path = paths.first else { 35 | throw LangChainError.ToolError 36 | } 37 | 38 | let url = path.appendingPathComponent("tts-\(UUID().uuidString).mp3") 39 | do { 40 | try data?.write(to: url) 41 | audioPlayer = try AVAudioPlayer(contentsOf: url) 42 | audioPlayer?.play() 43 | return url.absoluteString 44 | } catch { 45 | throw LangChainError.ToolError 46 | } 47 | } else { 48 | throw LangChainError.ToolError 49 | } 50 | } 51 | } 52 | -------------------------------------------------------------------------------- /Sources/LangChain/tools/WeatherTool.swift: -------------------------------------------------------------------------------- 1 | // 2 | // File.swift 3 | // 4 | // 5 | // Created by 顾艳华 on 2023/6/23. 6 | // 7 | 8 | import Foundation 9 | //class WeatherTool(BaseTool): 10 | // name = "Weather" 11 | // description = "useful for When you want to know about the weather" 12 | // 13 | // def _run(self, query: str) -> str: 14 | // return "Sunny^_^" 15 | // 16 | // async def _arun(self, query: str) -> str: 17 | // """Use the tool asynchronously.""" 18 | // raise NotImplementedError("BingSearchRun does not support async") 19 | 20 | public class WeatherTool: BaseTool { 21 | 22 | public override init(callbacks: [BaseCallbackHandler] = []) { 23 | super.init(callbacks: callbacks) 24 | } 25 | public override func name() -> String { 26 | "Weather" 27 | } 28 | 29 | public override func description() -> String { 30 | """ 31 | useful for When you want to know about the weather 32 | Input must be longitude and latitude, such as -78.4:38.5. 
33 | """ 34 | } 35 | 36 | public override func _run(args: String) async throws -> String { 37 | let env = LC.loadEnv() 38 | 39 | if let apiKey = env["OPENWEATHER_API_KEY"] { 40 | do { 41 | let client = OpenWeatherAPIWrapper() 42 | let weather = try await client.search(query: args, apiKey: apiKey) 43 | if let weather = weather { 44 | return weather 45 | } else { 46 | throw LangChainError.ToolError 47 | } 48 | } catch { 49 | throw LangChainError.ToolError 50 | } 51 | } else { 52 | print("Please set open weather api key.") 53 | throw LangChainError.ToolError 54 | } 55 | } 56 | 57 | 58 | } 59 | -------------------------------------------------------------------------------- /Sources/LangChain/utilities/GoogleSerperAPIWrapper.swift: -------------------------------------------------------------------------------- 1 | // 2 | // File.swift 3 | // 4 | // 5 | // Created by 顾艳华 on 2023/6/26. 6 | // 7 | import AsyncHTTPClient 8 | import Foundation 9 | import NIOPosix 10 | 11 | struct GooglrRequest: Encodable { 12 | let k: Int 13 | let gl: String 14 | let hl: String 15 | let q: String 16 | } 17 | struct GoogleSerperAPIWrapper { 18 | // def _google_serper_api_results( 19 | // self, search_term: str, search_type: str = "search", **kwargs: Any 20 | // ) -> dict: 21 | // headers = { 22 | // "X-API-KEY": self.serper_api_key or "", 23 | // "Content-Type": "application/json", 24 | // } 25 | // params = { 26 | // "q": search_term, 27 | // **{key: value for key, value in kwargs.items() if value is not None}, 28 | // } 29 | // response = requests.post( 30 | // f"https://google.serper.dev/{search_type}", headers=headers, params=params 31 | // ) 32 | // response.raise_for_status() 33 | // search_results = response.json() 34 | // return search_results 35 | 36 | func _google_serper_api_results(search_term: String, search_type: String = "search", k: Int = 10, gl: String = "us", hl: String = "en") async -> String { 37 | let env = LC.loadEnv() 38 | let eventLoopGroup = ThreadManager.thread 39 | 40 | let httpClient = HTTPClient(eventLoopGroupProvider: .shared(eventLoopGroup)) 41 | defer { 42 | // it's important to shutdown the httpClient after all requests are done, even if one failed. See: https://github.com/swift-server/async-http-client 43 | try? httpClient.syncShutdown() 44 | } 45 | do { 46 | var request = HTTPClientRequest(url: "https://google.serper.dev/\(search_type)") 47 | request.method = .POST 48 | request.headers.add(name: "X-API-KEY", value: env["SERPER_API_KEY"]!) 49 | request.headers.add(name: "Content-Type", value: "application/json") 50 | let requestBody = try! JSONEncoder().encode(GooglrRequest(k: k, gl: gl, hl: hl, q: search_term)) 51 | request.body = .bytes(requestBody) 52 | 53 | let response = try await httpClient.execute(request, timeout: .seconds(30)) 54 | if response.status == .ok { 55 | return String(buffer: try await response.body.collect(upTo: 1024 * 1024)) 56 | } else { 57 | // handle remote error 58 | print("http code is not 200.") 59 | return "Bad requset." 60 | } 61 | } catch { 62 | // handle error 63 | print(error) 64 | return "Bad request." 65 | } 66 | } 67 | } 68 | -------------------------------------------------------------------------------- /Sources/LangChain/utilities/HFInferenceApi.swift: -------------------------------------------------------------------------------- 1 | // 2 | // File.swift 3 | // 4 | // 5 | // Created by 顾艳华 on 2023/7/27. 
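// Note: HFInferenceApi is a thin wrapper around the Hugging Face Inference API.
// inference(text:) POSTs the input to /pipeline/<task>/<repo> with the HF_API_KEY
// bearer token (wait_for_model enabled) and returns the response parsed as JSON.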
6 | // 7 | import AsyncHTTPClient 8 | import Foundation 9 | import NIOPosix 10 | import SwiftyJSON 11 | 12 | struct InferenceRequest: Encodable { 13 | let options = ["wait_for_model": true] 14 | let inputs: String 15 | } 16 | 17 | struct HFInferenceApi { 18 | let repo: String 19 | let task: String 20 | 21 | func inference(text: String) async throws -> JSON { 22 | let env = LC.loadEnv() 23 | let eventLoopGroup = ThreadManager.thread 24 | let httpClient = HTTPClient(eventLoopGroupProvider: .shared(eventLoopGroup)) 25 | defer { 26 | // it's important to shutdown the httpClient after all requests are done, even if one failed. See: https://github.com/swift-server/async-http-client 27 | try? httpClient.syncShutdown() 28 | } 29 | var request = HTTPClientRequest(url: "https://api-inference.huggingface.co/pipeline/\(task)/\(repo)") 30 | request.method = .POST 31 | request.headers.add(name: "Authorization", value: "Bearer \(env["HF_API_KEY"]!)") 32 | request.headers.add(name: "Content-Type", value: "application/json") 33 | let requestBody = try! JSONEncoder().encode(InferenceRequest(inputs: text)) 34 | request.body = .bytes(requestBody) 35 | let response = try await httpClient.execute(request, timeout: .seconds(30)) 36 | if response.status == .ok { 37 | let str = String(buffer: try await response.body.collect(upTo: 1024 * 1024)) 38 | return try JSON(data: str.data(using: .utf8)!) 39 | } else { 40 | // handle remote error 41 | print("http code is not 200.") 42 | return "Bad requset." 43 | } 44 | } 45 | } 46 | -------------------------------------------------------------------------------- /Sources/LangChain/utilities/LC.swift: -------------------------------------------------------------------------------- 1 | // 2 | // File.swift 3 | // 4 | // 5 | // Created by 顾艳华 on 2023/6/11. 6 | // 7 | 8 | import Foundation 9 | public struct LC { 10 | // static var printTrace = true 11 | static var printTrace = false 12 | static let ID_KEY = "TRACE_ID" 13 | static let SKIP_TRACE_KEY = "SKIP_TRACE" 14 | static let TRACE_ID = UUID().uuidString + "-" + UUID().uuidString 15 | static var env: [String: String] = [:] 16 | static var trace = false 17 | public static func initSet(_ env: [String: String]) { 18 | LC.env = env 19 | if printTrace { 20 | if env[LC.ID_KEY] == nil && (env[LC.SKIP_TRACE_KEY] == nil || env[LC.SKIP_TRACE_KEY] == "false") { 21 | print("⚠️ [WARING]", "\(LC.ID_KEY) not found, Please enter '\(LC.ID_KEY)=\(LC.TRACE_ID)' to trace LLM or enter '\(LC.SKIP_TRACE_KEY)=true' to skip trace at env.txt .") 22 | 23 | } else { 24 | if env[LC.ID_KEY] != nil { 25 | print("✅ [INFO]", "Found trace id: \(env[LC.ID_KEY]!) .") 26 | trace = true 27 | } 28 | 29 | if env[LC.SKIP_TRACE_KEY] == "true" { 30 | print("✅ [INFO]", "Skip trace.") 31 | trace = false 32 | } 33 | } 34 | printTrace = false 35 | } 36 | } 37 | static func addTraceCallbak() -> Bool { 38 | return trace 39 | } 40 | 41 | static func loadEnv() -> [String: String] { 42 | LC.env 43 | } 44 | } 45 | 46 | -------------------------------------------------------------------------------- /Sources/LangChain/utilities/LlamaAPIWrapper.swift: -------------------------------------------------------------------------------- 1 | // 2 | // File.swift 3 | // 4 | // 5 | // Created by 顾艳华 on 2023/11/2. 
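// Note: LlamaAPIWrapper calls the Llama API chat completions endpoint
// (api.llama-api.com) with a single user message and returns the content of the
// first choice, or nil if the request fails.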
6 | // 7 | import AsyncHTTPClient 8 | import Foundation 9 | import SwiftyJSON 10 | import NIOPosix 11 | import OpenAIKit 12 | 13 | struct LlamaRequest: Encodable { 14 | let temperature: Double 15 | let max_tokens: Int 16 | let topP: Double 17 | let n: Int 18 | let stops: [String] 19 | let messages: [Chat.Message] 20 | } 21 | 22 | 23 | struct LlamaAPIWrapper { 24 | 25 | func execute(text: String, key: String, temperature: Double, max_tokens: Int, topP: Double, n: Int, stops: [String] = []) async -> String? { 26 | let eventLoopGroup = ThreadManager.thread 27 | 28 | let httpClient = HTTPClient(eventLoopGroupProvider: .shared(eventLoopGroup)) 29 | defer { 30 | // it's important to shutdown the httpClient after all requests are done, even if one failed. See: https://github.com/swift-server/async-http-client 31 | try? httpClient.syncShutdown() 32 | } 33 | do { 34 | var request = HTTPClientRequest(url: "https://api.llama-api.com/chat/completions") 35 | request.method = .POST 36 | request.headers.add(name: "Authorization", value: "Bearer \(key)") 37 | request.headers.add(name: "Content-Type", value: "application/json") 38 | let requestBody = try! JSONEncoder().encode(LlamaRequest(temperature: temperature, max_tokens: max_tokens, topP: topP, n: n, stops: stops, messages: [.user(content: text)])) 39 | request.body = .bytes(requestBody) 40 | 41 | let response = try await httpClient.execute(request, timeout: .seconds(30)) 42 | if response.status == .ok { 43 | let str = String(buffer: try await response.body.collect(upTo: 1024 * 1024)) 44 | // print(str) 45 | let json = try JSON(data: str.data(using: .utf8)!) 46 | return json["choices"].arrayValue[0]["message"]["content"].stringValue 47 | } else { 48 | // handle remote error 49 | print("http code is not 200.") 50 | return nil 51 | } 52 | } catch { 53 | // handle error 54 | print(error.localizedDescription) 55 | return nil 56 | } 57 | } 58 | } 59 | -------------------------------------------------------------------------------- /Sources/LangChain/utilities/OpenWeatherAPIWrapper.swift: -------------------------------------------------------------------------------- 1 | // 2 | // File.swift 3 | // 4 | // 5 | // Created by 顾艳华 on 2023/11/8. 6 | // 7 | 8 | import AsyncHTTPClient 9 | import Foundation 10 | import SwiftyJSON 11 | import NIOPosix 12 | 13 | public struct OpenWeatherAPIWrapper { 14 | 15 | // { 16 | // "coord": { 17 | // "lon": 10.99, 18 | // "lat": 44.34 19 | // }, 20 | // "weather": [ 21 | // { 22 | // "id": 501, 23 | // "main": "Rain", 24 | // "description": "moderate rain", 25 | // "icon": "10d" 26 | // } 27 | // ], 28 | // "base": "stations", 29 | // "main": { 30 | // "temp": 298.48, 31 | // "feels_like": 298.74, 32 | // "temp_min": 297.56, 33 | // "temp_max": 300.05, 34 | // "pressure": 1015, 35 | // "humidity": 64, 36 | // "sea_level": 1015, 37 | // "grnd_level": 933 38 | // }, 39 | // "visibility": 10000, 40 | // "wind": { 41 | // "speed": 0.62, 42 | // "deg": 349, 43 | // "gust": 1.18 44 | // }, 45 | // "rain": { 46 | // "1h": 3.16 47 | // }, 48 | // "clouds": { 49 | // "all": 100 50 | // }, 51 | // "dt": 1661870592, 52 | // "sys": { 53 | // "type": 2, 54 | // "id": 2075663, 55 | // "country": "IT", 56 | // "sunrise": 1661834187, 57 | // "sunset": 1661882248 58 | // }, 59 | // "timezone": 7200, 60 | // "id": 3163858, 61 | // "name": "Zocca", 62 | // "cod": 200 63 | // } 64 | public init() { 65 | 66 | } 67 | func search(query: String, apiKey: String) async throws -> String? 
{ 68 | let coord = query.split(separator: ":") 69 | if coord.count != 2 { 70 | return nil 71 | } 72 | let eventLoopGroup = ThreadManager.thread 73 | let httpClient = HTTPClient(eventLoopGroupProvider: .shared(eventLoopGroup)) 74 | defer { 75 | // it's important to shutdown the httpClient after all requests are done, even if one failed. See: https://github.com/swift-server/async-http-client 76 | try? httpClient.syncShutdown() 77 | } 78 | 79 | let baseURL = "https://api.openweathermap.org/data/2.5/weather" 80 | var components = URLComponents(string: baseURL)! 81 | components.queryItems = [ 82 | URLQueryItem(name: "lon", value: "\(coord[0])"), 83 | URLQueryItem(name: "lat", value: "\(coord[1])"), 84 | URLQueryItem(name: "appid", value: apiKey), 85 | URLQueryItem(name: "units", value: "metric"), 86 | ] 87 | // print(components.url!.absoluteString) 88 | var request = HTTPClientRequest(url: components.url!.absoluteString) 89 | request.method = .GET 90 | 91 | let response = try await httpClient.execute(request, timeout: .seconds(30)) 92 | if response.status == .ok { 93 | let str = String(buffer: try await response.body.collect(upTo: 1024 * 1024)) 94 | // print(str) 95 | let json = try JSON(data: str.data(using: .utf8)!) 96 | 97 | return """ 98 | In \(json["coord"]["lon"].doubleValue):\(json["coord"]["lat"].doubleValue), the current weather is as follows:" 99 | \(json["weather"][0]["main"].stringValue) 100 | Detailed status: 101 | Wind speed: \(json["wind"]["speed"].doubleValue) m/s, direction: \(json["wind"]["deg"].doubleValue)°" 102 | Humidity: \(json["main"]["humidity"].doubleValue)% 103 | Temperature: 104 | - Current: \(json["main"]["temp"].doubleValue)°C 105 | - High: \(json["main"]["temp_max"].doubleValue)°C 106 | - Low: \(json["main"]["temp_min"].doubleValue)°C 107 | - Feels like: \(json["main"]["feels_like"].doubleValue)°C 108 | Rain: \(json["rain"]["1h"].doubleValue)% 109 | Cloud cover: \(json["clouds"]["all"].doubleValue)% 110 | """ 111 | } else { 112 | // handle remote error 113 | print("http code is not 200.") 114 | return nil 115 | } 116 | } 117 | } 118 | -------------------------------------------------------------------------------- /Sources/LangChain/utilities/ThreadManager.swift: -------------------------------------------------------------------------------- 1 | // 2 | // File.swift 3 | // 4 | // 5 | // Created by 顾艳华 on 2023/11/10. 6 | // 7 | 8 | import Foundation 9 | import NIOPosix 10 | import NIOCore 11 | 12 | struct ThreadManager { 13 | static let thread: MultiThreadedEventLoopGroup = MultiThreadedEventLoopGroup(numberOfThreads: System.coreCount) 14 | } 15 | -------------------------------------------------------------------------------- /Sources/LangChain/utilities/anotheropenai/OpenAITTSAPIWrapper.swift: -------------------------------------------------------------------------------- 1 | // 2 | // File.swift 3 | // 4 | // 5 | // Created by 顾艳华 on 2023/11/2. 
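// Note: OpenAITTSAPIWrapper POSTs the input text to the /v1/audio/speech endpoint
// on the configured base host and returns the raw audio bytes as Data (nil on failure).
// Voice, model, format and speed default to alloy / tts-1 / mp3 / 1.0.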
6 | // 7 | import AsyncHTTPClient 8 | import Foundation 9 | import SwiftyJSON 10 | import NIOPosix 11 | struct OpenAITTSRequest: Encodable { 12 | let model: String 13 | let input: String 14 | let voice: String 15 | let response_format: String 16 | let speed: String 17 | } 18 | enum Voice: String, Encodable { 19 | case alloy 20 | case echo 21 | case fable 22 | case onyx 23 | case nova 24 | case shimmer 25 | } 26 | enum TTSModel: String, Encodable{ 27 | case tts1hd = "tts-1-hd" 28 | case tts1 = "tts-1" 29 | } 30 | enum TTSFormat: String { 31 | case mp3 32 | case opus 33 | case aac 34 | case flac 35 | } 36 | struct OpenAITTSAPIWrapper { 37 | 38 | func tts(voice: Voice = .alloy, model: TTSModel = .tts1, format: TTSFormat = .mp3, speed: String = "1.0", text: String, key: String, base: String) async -> Data? { 39 | let eventLoopGroup = ThreadManager.thread 40 | 41 | let httpClient = HTTPClient(eventLoopGroupProvider: .shared(eventLoopGroup)) 42 | defer { 43 | // it's important to shutdown the httpClient after all requests are done, even if one failed. See: https://github.com/swift-server/async-http-client 44 | try? httpClient.syncShutdown() 45 | } 46 | do { 47 | var request = HTTPClientRequest(url: "https://\(base)/v1/audio/speech") 48 | request.method = .POST 49 | request.headers.add(name: "Authorization", value: "Bearer \(key)") 50 | request.headers.add(name: "Content-Type", value: "application/json") 51 | let requestBody = try! JSONEncoder().encode(OpenAITTSRequest(model: model.rawValue, input: text, voice: voice.rawValue, response_format: format.rawValue, speed: speed)) 52 | request.body = .bytes(requestBody) 53 | 54 | let response = try await httpClient.execute(request, timeout: .seconds(30)) 55 | if response.status == .ok { 56 | return Data(buffer: try await response.body.collect(upTo: 1024 * 10240)) 57 | } else { 58 | // handle remote error 59 | print("http code is not 200.") 60 | return nil 61 | } 62 | } catch { 63 | // handle error 64 | print(error.localizedDescription) 65 | return nil 66 | } 67 | } 68 | } 69 | -------------------------------------------------------------------------------- /Sources/LangChain/utilities/bilibili/BilibiliCredential.swift: -------------------------------------------------------------------------------- 1 | // 2 | // File.swift 3 | // 4 | // 5 | // Created by 顾艳华 on 2023/7/31. 6 | // 7 | 8 | import Foundation 9 | 10 | public struct BilibiliCredential { 11 | let sessin: String 12 | let jct: String 13 | 14 | public init(sessin: String, jct: String) { 15 | self.sessin = sessin 16 | self.jct = jct 17 | } 18 | } 19 | -------------------------------------------------------------------------------- /Sources/LangChain/utilities/bilibili/BilibiliVideo.swift: -------------------------------------------------------------------------------- 1 | // 2 | // File.swift 3 | // 4 | // 5 | // Created by 顾艳华 on 2023/7/31. 6 | // 7 | 8 | import Foundation 9 | public struct BilibiliVideo { 10 | let title: String 11 | let desc: String 12 | let subtitle: String 13 | let thumbnail: String 14 | } 15 | -------------------------------------------------------------------------------- /Sources/LangChain/utilities/chatglm/ChatGLMAPIWrapper.swift: -------------------------------------------------------------------------------- 1 | // 2 | // File.swift 3 | // 4 | // 5 | // Created by 顾艳华 on 2023/8/29. 6 | // 7 | 8 | import AsyncHTTPClient 9 | import Foundation 10 | import NIOPosix 11 | 12 | /// A ChatGLM message. 13 | /// 14 | /// This structure represents a message in a chat. 
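///
/// For example (illustrative), a user turn is built as
/// `ChatGLMMessage(role: "user", content: "Hello")`, which is exactly how
/// ChatGLMAPIWrapper.call(text:) assembles its prompt below.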
15 | public struct ChatGLMMessage: Codable, Sendable { 16 | /// The role of the entity sending the message, 17 | /// such as "user", "assistant", or "developer". 18 | public let role: String 19 | /// The content of the message. 20 | public let content: String 21 | } 22 | struct ChatGLMPayload: Codable { 23 | let prompt: [ChatGLMMessage] 24 | let temperature: Double 25 | } 26 | struct ChatGLMResponseDataUsage: Codable { 27 | let prompt_tokens: Int? 28 | let completion_tokens: Int? 29 | let total_tokens: Int 30 | } 31 | struct ChatGLMResponseData: Codable { 32 | let request_id: String 33 | let task_id: String 34 | let task_status: String 35 | let choices: [ChatGLMMessage] 36 | let usage: ChatGLMResponseDataUsage 37 | } 38 | struct ChatGLMResponse: Codable { 39 | let code: Int 40 | let msg: String 41 | let success: Bool 42 | let data: ChatGLMResponseData? 43 | } 44 | struct ChatGLMAPIWrapper { 45 | let model: ChatGLMModel 46 | let temperature: Double 47 | 48 | init(model: ChatGLMModel, temperature: Double) { 49 | self.model = model 50 | self.temperature = temperature 51 | } 52 | private func jwt(secret: String, id: String) -> String { 53 | let jwt = JWT(secret: secret) 54 | jwt.header = ["sign_type": "SIGN", "alg": "HS256"] 55 | jwt.payload = ["api_key": id, "timestamp": Int(Date.now.timeIntervalSince1970), "exp": Int(Date.now.timeIntervalSince1970) + 3600] 56 | return jwt.token! 57 | } 58 | func call(text: String) async throws -> String { 59 | let env = LC.loadEnv() 60 | if let apiKey = env["CHATGLM_API_KEY"] { 61 | let splited = apiKey.components(separatedBy: ".") 62 | let eventLoopGroup = ThreadManager.thread 63 | 64 | let httpClient = HTTPClient(eventLoopGroupProvider: .shared(eventLoopGroup)) 65 | 66 | var request = HTTPClientRequest(url: String(format: "https://open.bigmodel.cn/api/paas/v3/model-api/%@/invoke",model.rawValue)) 67 | request.method = .POST 68 | request.headers.add(name: "Content-Type", value: "application/json") 69 | request.headers.add(name: "Authorization", value: "Bearer " + jwt(secret: splited[1], id: splited[0])) 70 | let requestBody = try! JSONEncoder().encode(ChatGLMPayload(prompt: [ChatGLMMessage(role: "user", content: text)], temperature: 0.8)) 71 | request.body = .bytes(requestBody) 72 | defer { 73 | // it's important to shutdown the httpClient after all requests are done, even if one failed. See: https://github.com/swift-server/async-http-client 74 | try? httpClient.syncShutdown() 75 | } 76 | let response = try await httpClient.execute(request, timeout: .seconds(30)) 77 | if response.status == .ok { 78 | let string = String(buffer: try await response.body.collect(upTo: 1024 * 1024)) 79 | let reps = try! JSONDecoder().decode(ChatGLMResponse.self, from: string.data(using: .utf8)!) 80 | if let data = reps.data{ 81 | return data.choices.first!.content 82 | } else { 83 | return reps.msg 84 | } 85 | } else { 86 | // handle remote error 87 | print("http code is not 200.") 88 | return "Bad requset." 89 | } 90 | } else { 91 | print("Please set chatglm api key.") 92 | return "Please set chatglm api key." 93 | } 94 | } 95 | } 96 | -------------------------------------------------------------------------------- /Sources/LangChain/utilities/chatglm/ChatGLMModel.swift: -------------------------------------------------------------------------------- 1 | // 2 | // File.swift 3 | // 4 | // 5 | // Created by 顾艳华 on 2023/8/30. 
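// Note: ChatGLMModel lists the supported model identifiers; the raw value is
// interpolated into the bigmodel.cn invoke URL by ChatGLMAPIWrapper.call(text:).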
6 | // 7 | 8 | import Foundation 9 | public enum ChatGLMModel: String { 10 | case chatglm_pro 11 | case chatglm_std 12 | case chatglm_lite 13 | } 14 | -------------------------------------------------------------------------------- /Sources/LangChain/utilities/dalle/DalleImage.swift: -------------------------------------------------------------------------------- 1 | // 2 | // File.swift 3 | // 4 | // 5 | // Created by 顾艳华 on 2023/8/25. 6 | // 7 | 8 | import Foundation 9 | import OpenAIKit 10 | 11 | public struct DalleImage { 12 | } 13 | 14 | extension DalleImage: Decodable {} 15 | 16 | extension DalleImage { 17 | public enum Size: String { 18 | case twoFiftySix = "256x256" 19 | case fiveTwelve = "512x512" 20 | case tenTwentyFour = "1024x1024" 21 | } 22 | } 23 | 24 | extension DalleImage.Size: Codable {} 25 | 26 | func dalleTo(size: DalleImage.Size) -> Image.Size { 27 | switch size { 28 | case .fiveTwelve: 29 | return Image.Size.fiveTwelve 30 | case .tenTwentyFour: 31 | return Image.Size.tenTwentyFour 32 | case .twoFiftySix: 33 | return Image.Size.twoFiftySix 34 | } 35 | } 36 | -------------------------------------------------------------------------------- /Sources/LangChain/utilities/jwt/Cryptor.swift: -------------------------------------------------------------------------------- 1 | import Foundation 2 | import CommonCrypto 3 | 4 | class Cryptor { 5 | 6 | static func hmac(string: String, algorithm: Algorithm, key: String) -> String? { 7 | guard let key = key.data(using: .utf8) else { return nil } 8 | guard let string = string.data(using: .utf8) else { return nil } 9 | 10 | let context = UnsafeMutablePointer.allocate(capacity: 1) 11 | defer { context.deallocate() } 12 | 13 | key.withUnsafeBytes { (buffer: UnsafePointer) in 14 | CCHmacInit(context, algorithm.HMACAlgorithm, buffer, size_t(key.count)) 15 | } 16 | 17 | string.withUnsafeBytes { (buffer: UnsafePointer) in 18 | CCHmacUpdate(context, buffer, size_t(string.count)) 19 | } 20 | 21 | var hmac = Array(repeating: 0, count: Int(algorithm.digestLength)) 22 | CCHmacFinal(context, &hmac) 23 | 24 | return Data(hmac).base64URLEncodedString() 25 | } 26 | 27 | enum Algorithm { 28 | case MD5, SHA1, SHA224, SHA256, SHA384, SHA512 29 | 30 | var HMACAlgorithm: CCHmacAlgorithm { 31 | var result: Int = 0 32 | switch self { 33 | case .MD5: result = kCCHmacAlgMD5 34 | case .SHA1: result = kCCHmacAlgSHA1 35 | case .SHA224: result = kCCHmacAlgSHA224 36 | case .SHA256: result = kCCHmacAlgSHA256 37 | case .SHA384: result = kCCHmacAlgSHA384 38 | case .SHA512: result = kCCHmacAlgSHA512 39 | } 40 | return CCHmacAlgorithm(result) 41 | } 42 | 43 | var digestLength: Int { 44 | var result: Int32 = 0 45 | switch self { 46 | case .MD5: result = CC_MD5_DIGEST_LENGTH 47 | case .SHA1: result = CC_SHA1_DIGEST_LENGTH 48 | case .SHA224: result = CC_SHA224_DIGEST_LENGTH 49 | case .SHA256: result = CC_SHA256_DIGEST_LENGTH 50 | case .SHA384: result = CC_SHA384_DIGEST_LENGTH 51 | case .SHA512: result = CC_SHA512_DIGEST_LENGTH 52 | } 53 | return Int(result) 54 | } 55 | } 56 | } 57 | 58 | 59 | -------------------------------------------------------------------------------- /Sources/LangChain/utilities/jwt/Extensions/String.swift: -------------------------------------------------------------------------------- 1 | import Foundation 2 | /// Extension for making base64 representations of `Data` safe for 3 | /// transmitting via URL query parameters 4 | extension Data { 5 | 6 | /// Instantiates data by decoding a base64url string into base64 7 | /// 8 | /// - Parameter string: 
A base64url encoded string 9 | init?(base64URLEncoded string: String) { 10 | self.init(base64Encoded: string.toggleBase64URLSafe(on: false)) 11 | } 12 | 13 | /// Encodes the string into a base64url safe representation 14 | /// 15 | /// - Returns: A string that is base64 encoded but made safe for passing 16 | /// in as a query parameter into a URL string 17 | func base64URLEncodedString() -> String { 18 | return self.base64EncodedString().toggleBase64URLSafe(on: true) 19 | } 20 | 21 | } 22 | 23 | extension String { 24 | 25 | var base64String: String? { 26 | return data(using: .utf8)?.base64EncodedString() 27 | } 28 | 29 | var base64UrlString: String? { 30 | return data(using: .utf8)?.base64URLEncodedString() 31 | } 32 | 33 | /// Encodes or decodes into a base64url safe representation 34 | /// 35 | /// - Parameter on: Whether or not the string should be made safe for URL strings 36 | /// - Returns: if `on`, then a base64url string; if `off` then a base64 string 37 | func toggleBase64URLSafe(on: Bool) -> String { 38 | if on { 39 | // Make base64 string safe for passing into URL query params 40 | let base64url = self.replacingOccurrences(of: "/", with: "_") 41 | .replacingOccurrences(of: "+", with: "-") 42 | .replacingOccurrences(of: "=", with: "") 43 | return base64url 44 | } else { 45 | // Return to base64 encoding 46 | var base64 = self.replacingOccurrences(of: "_", with: "/") 47 | .replacingOccurrences(of: "-", with: "+") 48 | // Add any necessary padding with `=` 49 | if base64.count % 4 != 0 { 50 | base64.append(String(repeating: "=", count: 4 - base64.count % 4)) 51 | } 52 | return base64 53 | } 54 | } 55 | 56 | } 57 | -------------------------------------------------------------------------------- /Sources/LangChain/utilities/jwt/JWT.swift: -------------------------------------------------------------------------------- 1 | import Foundation 2 | 3 | public class JWT { 4 | 5 | public init(alg: JWT.Algorithm = .HS256, secret: String = "") { 6 | self.alg = alg 7 | self.header["alg"] = alg.rawValue 8 | self.header["typ"] = "JWT" 9 | self.secret = secret 10 | } 11 | 12 | public enum Algorithm: String { 13 | case HS256 14 | case HS384 15 | case HS512 16 | 17 | var forCryptor: Cryptor.Algorithm { 18 | switch self { 19 | case .HS256: return .SHA256 20 | case .HS384: return .SHA384 21 | case .HS512: return .SHA512 22 | } 23 | } 24 | } 25 | var alg: Algorithm 26 | 27 | public var header: [String: String] = [:] 28 | public var payload: [String: Any] = [:] 29 | public var secret: String 30 | 31 | public var subject: String? { return payload["sub"] as? String } 32 | public var identifier: String? { return payload["jti"] as? String } 33 | public var issuer: String? { return payload["iss"] as? String } 34 | 35 | public var notValidBefore: Date? { 36 | if let interval = payload["nbf"] as? TimeInterval { 37 | return Date(timeIntervalSince1970: interval) 38 | } 39 | return nil 40 | } 41 | public var issuedAt: Date? { 42 | if let interval = payload["iat"] as? TimeInterval { 43 | return Date(timeIntervalSince1970: interval) 44 | } 45 | return nil 46 | } 47 | public var expiresAt: Date? { 48 | if let interval = payload["exp"] as? TimeInterval { 49 | return Date(timeIntervalSince1970: interval) 50 | } 51 | return nil 52 | } 53 | 54 | public var isExpired: Bool? { 55 | guard let expireDate = expiresAt else { return nil } 56 | return expireDate.compare(Date()) != .orderedDescending ? true : false} 57 | 58 | public var token: String? 
{ 59 | do { 60 | let headerString = try JSONSerialization.data(withJSONObject: header, options: []).base64URLEncodedString() 61 | let payloadString = try JSONSerialization.data(withJSONObject: payload, options: []).base64URLEncodedString() 62 | 63 | let rawSign = "\(headerString).\(payloadString)" 64 | 65 | if let sign = Cryptor.hmac(string: rawSign, algorithm: alg.forCryptor, key: secret) { 66 | return "\(rawSign).\(sign)" 67 | } else { 68 | print("JWT: Can't compute sign.") 69 | return nil 70 | } 71 | } catch { 72 | print(error.localizedDescription) 73 | return nil 74 | } 75 | } 76 | } 77 | 78 | //MARK: - Token decoding 79 | extension JWT { 80 | public convenience init?(token: String) { 81 | let elements = token.split(separator: ".").map({String($0)}) 82 | guard 83 | elements.count == 3 else { 84 | print("JWT: Wrong format!") 85 | return nil 86 | } 87 | 88 | guard let headerData = Data(base64URLEncoded: elements[0]), 89 | let payloadData = Data(base64URLEncoded: elements[1]) else { 90 | print("JWT: Wrong format!") 91 | print("Failed to parse header/payload.") 92 | return nil 93 | } 94 | 95 | do { 96 | guard let header = try JSONSerialization.jsonObject(with: headerData, options: []) as? [String: String], 97 | let payload = try JSONSerialization.jsonObject(with: payloadData, options: []) as? [String: Any] 98 | else { 99 | print("JWT: Failed to parse header/payload.") 100 | return nil 101 | } 102 | 103 | guard let algString = header["alg"] else { 104 | print("JWT: Can't define algorithm.") 105 | return nil 106 | } 107 | guard let alg = Algorithm(rawValue: algString) else { 108 | print("JWT: Alghoritm doesn't support.") 109 | return nil 110 | } 111 | 112 | self.init(alg: alg, secret: "") 113 | self.header = header 114 | self.payload = payload 115 | 116 | } catch { 117 | print(error.localizedDescription) 118 | return nil 119 | } 120 | } 121 | } 122 | -------------------------------------------------------------------------------- /Sources/LangChain/utilities/pubmed/PubmedAPIWrapper.swift: -------------------------------------------------------------------------------- 1 | // 2 | // File.swift 3 | // 4 | // 5 | // Created by 顾艳华 on 2023/11/2. 6 | // 7 | import AsyncHTTPClient 8 | import Foundation 9 | import SwiftyJSON 10 | import NIOPosix 11 | 12 | struct PubmedAPIWrapper { 13 | func search(query: String) async throws -> [PubmedPage] { 14 | let eventLoopGroup = ThreadManager.thread 15 | let httpClient = HTTPClient(eventLoopGroupProvider: .shared(eventLoopGroup)) 16 | defer { 17 | // it's important to shutdown the httpClient after all requests are done, even if one failed. See: https://github.com/swift-server/async-http-client 18 | try? httpClient.syncShutdown() 19 | } 20 | 21 | let baseURL = "https://eutils.ncbi.nlm.nih.gov/entrez/eutils/esearch.fcgi" 22 | var components = URLComponents(string: baseURL)! 
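// Build the NCBI ESearch query: search the "pubmed" database for the term, ask for
// JSON, cap the result list at 5 ids, and keep the history (webenv) so the matching
// articles can be fetched afterwards by PubmedPage.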
23 | components.queryItems = [ 24 | URLQueryItem(name: "db", value: "pubmed"), 25 | URLQueryItem(name: "retmode", value: "json"), 26 | URLQueryItem(name: "term", value: query), 27 | URLQueryItem(name: "retmax", value: "5"), 28 | URLQueryItem(name: "usehistory", value: "y"), 29 | ] 30 | print(components.url!.absoluteString) 31 | var request = HTTPClientRequest(url: components.url!.absoluteString) 32 | request.method = .GET 33 | 34 | let response = try await httpClient.execute(request, timeout: .seconds(30)) 35 | if response.status == .ok { 36 | let str = String(buffer: try await response.body.collect(upTo: 1024 * 1024)) 37 | // print(str) 38 | let json = try JSON(data: str.data(using: .utf8)!) 39 | var pubmeds: [PubmedPage] = [] 40 | let webenv = json["esearchresult"]["webenv"].stringValue 41 | let searchResults = json["esearchresult"]["idlist"].arrayValue 42 | 43 | for uid in searchResults { 44 | pubmeds.append(PubmedPage(uid: uid.stringValue, webenv: webenv)) 45 | } 46 | return pubmeds 47 | } else { 48 | // handle remote error 49 | print("http code is not 200.") 50 | return [] 51 | } 52 | } 53 | 54 | func load(query: String) async throws -> [Document] { 55 | let pages = try await self.search(query: query) 56 | var docs: [Document] = [] 57 | for page in pages { 58 | let content = try await page.content() 59 | docs.append(Document(page_content: content, metadata: [:])) 60 | } 61 | return docs 62 | } 63 | } 64 | -------------------------------------------------------------------------------- /Sources/LangChain/utilities/pubmed/PubmedPage.swift: -------------------------------------------------------------------------------- 1 | // 2 | // File.swift 3 | // 4 | // 5 | // Created by 顾艳华 on 2023/11/3. 6 | // 7 | 8 | import AsyncHTTPClient 9 | import Foundation 10 | import SWXMLHash 11 | import NIOPosix 12 | 13 | struct PubmedPage { 14 | let uid: String 15 | let webenv: String 16 | 17 | func content() async throws -> String { 18 | let eventLoopGroup = ThreadManager.thread 19 | let httpClient = HTTPClient(eventLoopGroupProvider: .shared(eventLoopGroup)) 20 | defer { 21 | // it's important to shutdown the httpClient after all requests are done, even if one failed. See: https://github.com/swift-server/async-http-client 22 | try? httpClient.syncShutdown() 23 | } 24 | 25 | let baseURL = "https://eutils.ncbi.nlm.nih.gov/entrez/eutils/efetch.fcgi" 26 | var components = URLComponents(string: baseURL)! 27 | components.queryItems = [ 28 | URLQueryItem(name: "db", value: "pubmed"), 29 | URLQueryItem(name: "retmode", value: "xml"), 30 | URLQueryItem(name: "id", value: self.uid), 31 | URLQueryItem(name: "webenv", value: self.webenv), 32 | ] 33 | print(components.url!.absoluteString) 34 | var request = HTTPClientRequest(url: components.url!.absoluteString) 35 | request.method = .GET 36 | 37 | let response = try await httpClient.execute(request, timeout: .seconds(30)) 38 | if response.status == .ok { 39 | let str = String(buffer: try await response.body.collect(upTo: 1024 * 1024)) 40 | let xml = XMLHash.parse(str.data(using: .utf8)!) 
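// Walk the EFetch XML: prefer the journal article node, fall back to the book
// document node, then join every AbstractText entry into a single summary string.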
41 | var ar = xml["PubmedArticleSet"]["PubmedArticle"]["MedlineCitation"][ 42 | "Article" 43 | ] 44 | if ar.element == nil { 45 | ar = xml["PubmedArticleSet"]["PubmedBookArticle"]["BookDocument"] 46 | } 47 | let summaries = ar["Abstract"]["AbstractText"].all 48 | let summaryTexts = summaries.compactMap { $0.element?.text } 49 | if !summaryTexts.isEmpty { 50 | return summaryTexts.joined(separator: "\n") 51 | } else { 52 | return "" 53 | } 54 | } else { 55 | // handle remote error 56 | print("http code is not 200.") 57 | return "" 58 | } 59 | } 60 | } 61 | -------------------------------------------------------------------------------- /Sources/LangChain/utilities/report/ReportKey.swift: -------------------------------------------------------------------------------- 1 | // 2 | // File.swift 3 | // 4 | // 5 | // Created by 顾艳华 on 2023/9/13. 6 | // 7 | 8 | import Foundation 9 | 10 | struct ReportKey { 11 | static let STEP_START_KEY = "start" 12 | static let STEP_END_KEY = "end" 13 | static let STEP_ERROR_KEY = "error" 14 | 15 | static let TRUE = "true" 16 | static let FALSE = "false" 17 | } 18 | -------------------------------------------------------------------------------- /Sources/LangChain/utilities/report/TraceManager.swift: -------------------------------------------------------------------------------- 1 | // 2 | // File.swift 3 | // 4 | // 5 | // Created by 顾艳华 on 2023/9/11. 6 | // 7 | 8 | import AsyncHTTPClient 9 | import Foundation 10 | import NIOPosix 11 | 12 | struct Report: Codable { 13 | let appDisplayName: String? 14 | let reportId: String 15 | let type: String 16 | let message: String 17 | let metadata: [String: String] 18 | let createAt: Date 19 | } 20 | 21 | struct TraceManager { 22 | // var reports: [Report] = [] 23 | static let REPORT_URL = "http://192.168.31.60:8083/rest/agent" 24 | static var shared: TraceManager = TraceManager() 25 | 26 | mutating func insertReport(report: Report) async { 27 | // reports.append(report) 28 | // TODO: end or error - start time, remove start entry in memory 29 | await sendServer(report: report) 30 | } 31 | 32 | func sendServer(report: Report) async { 33 | // TODO: Http keep alive 34 | let eventLoopGroup = ThreadManager.thread 35 | 36 | let httpClient = HTTPClient(eventLoopGroupProvider: .shared(eventLoopGroup)) 37 | defer { 38 | // it's important to shutdown the httpClient after all requests are done, even if one failed. See: https://github.com/swift-server/async-http-client 39 | try? httpClient.syncShutdown() 40 | } 41 | do { 42 | var request = HTTPClientRequest(url: TraceManager.REPORT_URL) 43 | request.method = .POST 44 | request.headers.add(name: "Content-Type", value: "application/json") 45 | let requestBody = try! JSONEncoder().encode(report) 46 | request.body = .bytes(requestBody) 47 | 48 | let response = try await httpClient.execute(request, timeout: .seconds(30)) 49 | if response.status == .ok { 50 | let _ = String(buffer: try await response.body.collect(upTo: 1024 * 1024)) 51 | } else { 52 | // handle remote error 53 | print("http code is not 200.") 54 | } 55 | } catch { 56 | // handle error 57 | print(error) 58 | } 59 | } 60 | } 61 | 62 | extension Bundle { 63 | var appDisplayName: String? { 64 | return infoDictionary?["CFBundleExecutable"] as?
String 65 | } 66 | } 67 | -------------------------------------------------------------------------------- /Sources/LangChain/utilities/wikipedia/WikipediaAPIWrapper.swift: -------------------------------------------------------------------------------- 1 | // 2 | // File.swift 3 | // 4 | // 5 | // Created by 顾艳华 on 2023/11/2. 6 | // 7 | import AsyncHTTPClient 8 | import Foundation 9 | import SwiftyJSON 10 | import NIOPosix 11 | 12 | struct WikipediaAPIWrapper { 13 | func search(query: String) async throws -> [WikipediaPage] { 14 | let eventLoopGroup = ThreadManager.thread 15 | let httpClient = HTTPClient(eventLoopGroupProvider: .shared(eventLoopGroup)) 16 | defer { 17 | // it's important to shutdown the httpClient after all requests are done, even if one failed. See: https://github.com/swift-server/async-http-client 18 | try? httpClient.syncShutdown() 19 | } 20 | 21 | let baseURL = "http://en.wikipedia.org/w/api.php" 22 | var components = URLComponents(string: baseURL)! 23 | components.queryItems = [ 24 | URLQueryItem(name: "srlimit", value: "3"), 25 | URLQueryItem(name: "list", value: "search"), 26 | URLQueryItem(name: "srsearch", value: query), 27 | URLQueryItem(name: "action", value: "query"), 28 | URLQueryItem(name: "format", value: "json"), 29 | ] 30 | // print(components.url!.absoluteString) 31 | var request = HTTPClientRequest(url: components.url!.absoluteString) 32 | request.method = .GET 33 | 34 | let response = try await httpClient.execute(request, timeout: .seconds(30)) 35 | if response.status == .ok { 36 | let str = String(buffer: try await response.body.collect(upTo: 1024 * 1024)) 37 | // print(str) 38 | let json = try JSON(data: str.data(using: .utf8)!) 39 | var wikis: [WikipediaPage] = [] 40 | let searchResults = json["query"]["search"].arrayValue 41 | 42 | for wiki in searchResults { 43 | wikis.append(WikipediaPage(title: wiki["title"].stringValue, pageid: wiki["pageid"].intValue)) 44 | } 45 | return wikis 46 | } else { 47 | // handle remote error 48 | print("http code is not 200.") 49 | return [] 50 | } 51 | } 52 | 53 | func load(query: String) async throws -> [Document] { 54 | let pages = try await self.search(query: query) 55 | var docs: [Document] = [] 56 | for page in pages { 57 | let content = try await page.content() 58 | docs.append(Document(page_content: content, metadata: [:])) 59 | } 60 | return docs 61 | } 62 | } 63 | -------------------------------------------------------------------------------- /Sources/LangChain/utilities/wikipedia/WikipediaPage.swift: -------------------------------------------------------------------------------- 1 | // 2 | // File.swift 3 | // 4 | // 5 | // Created by 顾艳华 on 2023/11/3. 6 | // 7 | 8 | import AsyncHTTPClient 9 | import Foundation 10 | import SwiftyJSON 11 | import NIOPosix 12 | 13 | struct WikipediaPage { 14 | let title: String 15 | let pageid: Int 16 | 17 | func content() async throws -> String { 18 | let eventLoopGroup = ThreadManager.thread 19 | let httpClient = HTTPClient(eventLoopGroupProvider: .shared(eventLoopGroup)) 20 | defer { 21 | // it's important to shutdown the httpClient after all requests are done, even if one failed. See: https://github.com/swift-server/async-http-client 22 | try? httpClient.syncShutdown() 23 | } 24 | 25 | let baseURL = "http://en.wikipedia.org/w/api.php" 26 | var components = URLComponents(string: baseURL)! 
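The Wikipedia wrapper that ends above is self-contained, so inside the module it can be driven in a few lines. This is a hedged sketch: it assumes an async calling context, and Document with its page_content field is defined elsewhere in the package (Schema.swift).

// Hedged sketch: fetch the top Wikipedia matches for a query as LangChain Documents.
let wikipedia = WikipediaAPIWrapper()
let docs = try await wikipedia.load(query: "Swift programming language")
for doc in docs {
    print(doc.page_content.prefix(120))
}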
27 | components.queryItems = [ 28 | URLQueryItem(name: "prop", value: "extracts|revisions"), 29 | URLQueryItem(name: "rvprop", value: "ids"), 30 | URLQueryItem(name: "titles", value: self.title), 31 | URLQueryItem(name: "action", value: "query"), 32 | URLQueryItem(name: "format", value: "json"), 33 | ] 34 | // print(components.url!.absoluteString) 35 | var request = HTTPClientRequest(url: components.url!.absoluteString) 36 | request.method = .GET 37 | 38 | let response = try await httpClient.execute(request, timeout: .seconds(30)) 39 | if response.status == .ok { 40 | let str = String(buffer: try await response.body.collect(upTo: 1024 * 1024)) 41 | // print(str) 42 | let json = try JSON(data: str.data(using: .utf8)!) 43 | return json["query"]["pages"]["\(self.pageid)"]["extract"].stringValue 44 | 45 | } else { 46 | // handle remote error 47 | print("http code is not 200.") 48 | return "" 49 | } 50 | } 51 | } 52 | -------------------------------------------------------------------------------- /Sources/LangChain/utilities/youtube/Transcript.swift: -------------------------------------------------------------------------------- 1 | // 2 | // File.swift 3 | // 4 | // 5 | // Created by 顾艳华 on 2023/6/30. 6 | // 7 | 8 | import AsyncHTTPClient 9 | import Foundation 10 | import NIOPosix 11 | import SWXMLHash 12 | 13 | struct Transcript { 14 | let http_client: HTTPClient 15 | let video_id: String 16 | let url: String 17 | let language: String 18 | let language_code: String 19 | let is_generated: Bool 20 | let translation_languages: [[String: String]] 21 | var translation_languages_dict: [String: String] 22 | init(http_client: HTTPClient, video_id: String, url: String, language: String, language_code: String, is_generated: Bool, translation_languages: [[String : String]]) { 23 | self.http_client = http_client 24 | self.video_id = video_id 25 | self.url = url 26 | self.language = language 27 | self.language_code = language_code 28 | self.is_generated = is_generated 29 | self.translation_languages = translation_languages 30 | // self._translation_languages_dict = { 31 | // translation_language['language_code']: translation_language['language'] 32 | // for translation_language in translation_languages 33 | // } 34 | self.translation_languages_dict = [:] 35 | for t in self.translation_languages { 36 | self.translation_languages_dict[t["language_code"]!] = t["language"] 37 | } 38 | } 39 | func translate(language_code: String) -> Transcript { 40 | // for i in translation_languages_dict { 41 | // print(i.key) 42 | // print(i.value) 43 | // } 44 | return Transcript( 45 | http_client: self.http_client, 46 | video_id: self.video_id, 47 | url: String(format: "%@&tlang=%@", self.url, language_code), 48 | language: language_code, 49 | // language: self.translation_languages_dict[language_code]!,//self._translation_languages_dict[language_code], 50 | language_code: language_code, 51 | is_generated: true, 52 | translation_languages: [] 53 | ) 54 | } 55 | 56 | func fetch() async -> [[String: String]]? 
{ 57 | do { 58 | var request = HTTPClientRequest(url: self.url) 59 | request.method = .GET 60 | request.headers.add(name: "Accept-Language", value: "en-US") 61 | 62 | let response = try await http_client.execute(request, timeout: .seconds(30)) 63 | if response.status == .ok { 64 | let plain = String(buffer: try await response.body.collect(upTo: 1024 * 1024)) 65 | return _TranscriptParser().parse(plain_data: plain) 66 | } else { 67 | // handle remote error 68 | print("get transcript http code is not 200.") 69 | return nil 70 | } 71 | } catch { 72 | // handle error 73 | print(error) 74 | return nil 75 | } 76 | } 77 | } 78 | 79 | 80 | struct _TranscriptParser { 81 | func parse(plain_data: String) -> [[String: String]] { 82 | let xml = XMLHash.parse(plain_data) 83 | let textArray = xml["transcript"]["text"] 84 | var texts: [[String: String]] = [] 85 | for text in textArray.all { 86 | let start = text.element!.attribute(by: "start")!.text 87 | let dur = text.element!.attribute(by: "dur")!.text 88 | let t = text.element!.text 89 | texts.append([ 90 | "start": start, 91 | "dur": dur, 92 | "text": t, 93 | ]) 94 | 95 | } 96 | return texts 97 | } 98 | } 99 | -------------------------------------------------------------------------------- /Sources/LangChain/utilities/youtube/TranscriptListFetcher.swift: -------------------------------------------------------------------------------- 1 | // 2 | // File.swift 3 | // 4 | // 5 | // Created by 顾艳华 on 2023/6/30. 6 | // 7 | 8 | import AsyncHTTPClient 9 | import Foundation 10 | import NIOPosix 11 | import SwiftyJSON 12 | 13 | let WATCH_URL = "https://www.youtube.com/watch?v=%@" 14 | 15 | struct TranscriptListFetcher { 16 | let http_client: HTTPClient 17 | 18 | init(http_client: HTTPClient) { 19 | self.http_client = http_client 20 | } 21 | 22 | func fetch(video_id: String) async -> TranscriptList? { 23 | return await TranscriptList.build(http_client: 24 | self.http_client, video_id: video_id, captions_json: self._extract_captions_json(html: self._fetch_video_html(video_id: video_id), video_id: video_id) 25 | ) 26 | } 27 | 28 | func _extract_captions_json(html: String, video_id: String) async -> JSON? { 29 | let splitted_html = html.components(separatedBy: "\"captions\":") 30 | if splitted_html.count != 2 { 31 | return nil 32 | } 33 | let details = splitted_html[1].components(separatedBy: ",\"videoDetails") 34 | let _2 = details[0].replacingOccurrences(of: "\n", with: "") 35 | // print(_2) 36 | let json = try! JSON(data: 37 | _2.data(using: .utf8)! 
38 | ) 39 | let captions_json = json["playerCaptionsTracklistRenderer"] 40 | return captions_json 41 | } 42 | // def _extract_captions_json(self, html, video_id): 43 | // splitted_html = html.split('"captions":') 44 | // 45 | // if len(splitted_html) <= 1: 46 | // if video_id.startswith('http://') or video_id.startswith('https://'): 47 | // raise InvalidVideoId(video_id) 48 | // if 'class="g-recaptcha"' in html: 49 | // raise TooManyRequests(video_id) 50 | // if '"playabilityStatus":' not in html: 51 | // raise VideoUnavailable(video_id) 52 | // 53 | // raise TranscriptsDisabled(video_id) 54 | // 55 | // captions_json = json.loads( 56 | // splitted_html[1].split(',"videoDetails')[0].replace('\n', '') 57 | // ).get('playerCaptionsTracklistRenderer') 58 | // if captions_json is None: 59 | // raise TranscriptsDisabled(video_id) 60 | // 61 | // if 'captionTracks' not in captions_json: 62 | // raise NoTranscriptAvailable(video_id) 63 | // 64 | // return captions_json 65 | 66 | // def _fetch_video_html(self, video_id): 67 | // html = self._fetch_html(video_id) 68 | // if 'action="https://consent.youtube.com/s"' in html: 69 | // self._create_consent_cookie(html, video_id) 70 | // html = self._fetch_html(video_id) 71 | // if 'action="https://consent.youtube.com/s"' in html: 72 | // raise FailedToCreateConsentCookie(video_id) 73 | // return html 74 | // 75 | // def _fetch_html(self, video_id): 76 | // response = self._http_client.get(WATCH_URL.format(video_id=video_id), headers={'Accept-Language': 'en-US'}) 77 | // return unescape(_raise_http_errors(response, video_id).text) 78 | func _fetch_video_html(video_id: String) async -> String { 79 | let html = await self._fetch_html(video_id: video_id) 80 | return html 81 | } 82 | 83 | func _fetch_html(video_id: String) async -> String { 84 | do { 85 | var request = HTTPClientRequest(url: String(format: WATCH_URL, video_id)) 86 | request.method = .GET 87 | request.headers.add(name: "Accept-Language", value: "en-US") 88 | 89 | let response = try await http_client.execute(request, timeout: .seconds(30)) 90 | if response.status == .ok { 91 | return String(buffer: try await response.body.collect(upTo: 1024 * 1024)) 92 | } else { 93 | // handle remote error 94 | print("get list http code is not 200: \(response.status)") 95 | return "Bad request." 96 | } 97 | } catch { 98 | // handle error 99 | print(error) 100 | return "Bad request." 101 | } 102 | } 103 | } 104 | -------------------------------------------------------------------------------- /Sources/LangChain/utilities/youtube/YoutubeHackClient.swift: -------------------------------------------------------------------------------- 1 | // 2 | // File.swift 3 | // 4 | // 5 | // Created by 顾艳华 on 2023/6/29. 6 | // 7 | 8 | import AsyncHTTPClient 9 | import Foundation 10 | import NIOPosix 11 | 12 | public struct YoutubeHackClient { 13 | 14 | public static func list_transcripts(video_id: String, httpClient: HTTPClient) async -> TranscriptList? { 15 | return await TranscriptListFetcher(http_client: httpClient).fetch(video_id: video_id) 16 | } 17 | 18 | public static func info(video_id: String, httpClient: HTTPClient) async -> YoutubeInfo?
{ 19 | return await YoutubeInfoFetcher().fetch(http_client: httpClient, video_id: video_id) 20 | } 21 | } 22 | -------------------------------------------------------------------------------- /Sources/LangChain/utilities/youtube/YoutubeInfo.swift: -------------------------------------------------------------------------------- 1 | // 2 | // File.swift 3 | // 4 | // 5 | // Created by 顾艳华 on 2023/7/4. 6 | // 7 | 8 | import AsyncHTTPClient 9 | import Foundation 10 | import NIOPosix 11 | import SwiftyJSON 12 | 13 | struct YoutubeInfoFetcher { 14 | func fetch(http_client: HTTPClient, video_id: String) async -> YoutubeInfo? { 15 | let url = "https://www.youtube.com/youtubei/v1/player?key=AIzaSyAO_FJ2SlqU8Q4STEHLGCilw_Y9_11qcW8" 16 | 17 | let requestBody = YoutubeInfoRequest(videoId: video_id, context: YoutubeInfoRequestContext(client: YoutubeInfoRequestContextClient(clientName: "WEB", clientVersion: "2.20210721.00.00"))) 18 | do { 19 | var request = HTTPClientRequest(url: url) 20 | request.method = .POST 21 | request.headers.add(name: "Content-Type", value: "application/json") 22 | request.body = .bytes(try! JSONEncoder().encode(requestBody)) 23 | 24 | let response = try await http_client.execute(request, timeout: .seconds(30)) 25 | if response.status == .ok { 26 | let plain = String(buffer: try await response.body.collect(upTo: 1024 * 1024)) 27 | return YoutubeInfoParse().parse(plain_data: plain) 28 | } else { 29 | // handle remote error 30 | print("get video info http code is not 200.") 31 | return nil 32 | } 33 | } catch { 34 | // handle error 35 | print(error) 36 | return nil 37 | } 38 | } 39 | 40 | } 41 | struct YoutubeInfoRequestContextClient: Encodable { 42 | let clientName: String 43 | let clientVersion: String 44 | } 45 | struct YoutubeInfoRequestContext: Encodable { 46 | let client: YoutubeInfoRequestContextClient 47 | } 48 | struct YoutubeInfoRequest: Encodable { 49 | let videoId: String 50 | let context: YoutubeInfoRequestContext 51 | } 52 | 53 | public struct YoutubeInfo { 54 | public let title: String 55 | public let description: String 56 | public let thumbnail: String 57 | } 58 | 59 | struct YoutubeInfoParse { 60 | func parse(plain_data: String) -> YoutubeInfo { 61 | let tag = "videoDetails" 62 | let json = try! JSON(data: 63 | plain_data.data(using: .utf8)! 64 | ) 65 | let detail = json[tag] 66 | var url = "" 67 | if detail["thumbnail"]["thumbnails"].count >= 4 { 68 | url = detail["thumbnail"]["thumbnails"][3]["url"].stringValue 69 | } else { 70 | url = detail["thumbnail"]["thumbnails"][0]["url"].stringValue 71 | } 72 | return YoutubeInfo(title: detail["title"].stringValue, description: detail["shortDescription"].stringValue, thumbnail: url) 73 | } 74 | } 75 | -------------------------------------------------------------------------------- /Sources/LangChain/vectorstores/SimilaritySearchKit.swift: -------------------------------------------------------------------------------- 1 | // 2 | // File.swift 3 | // 4 | // 5 | // Created by 顾艳华 on 2023/11/18. 6 | // 7 | 8 | import Foundation 9 | 10 | #if os(macOS) || os(iOS) || os(visionOS) 11 | import SimilaritySearchKit 12 | import CryptoKit 13 | 14 | private struct LangChainEmbeddingBridge: EmbeddingsProtocol { 15 | 16 | var tokenizer: _T? 17 | 18 | var model: _M? 
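// The _M and _T types defined below are empty placeholders whose only purpose is to let this
// bridge conform to SimilaritySearchKit's EmbeddingsProtocol; tokenization is never exercised
// here, because encode(sentence:) delegates all embedding work to the wrapped LangChain
// Embeddings instance.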
19 | 20 | class _M { 21 | 22 | } 23 | class _T: TokenizerProtocol { 24 | func tokenize(text: String) -> [String] { 25 | [] 26 | } 27 | 28 | func detokenize(tokens: [String]) -> String { 29 | "" 30 | } 31 | 32 | 33 | } 34 | let embeddings: Embeddings 35 | func encode(sentence: String) async -> [Float]? { 36 | let e = await embeddings.embedQuery(text: sentence) 37 | if e.isEmpty { 38 | print("⚠️\(sentence.prefix(100))") 39 | } 40 | return e 41 | } 42 | 43 | 44 | } 45 | public class SimilaritySearchKit: VectorStore { 46 | let vs: SimilarityIndex 47 | 48 | public init(embeddings: Embeddings, autoLoad: Bool = false) { 49 | self.vs = SimilarityIndex( 50 | model: LangChainEmbeddingBridge(embeddings: embeddings), 51 | metric: DotProduct() 52 | ) 53 | if #available(macOS 13.0, *) { 54 | if #available(iOS 16.0, *) { 55 | if autoLoad { 56 | let _ = try? vs.loadIndex() 57 | } else { 58 | // Nothing to restore when autoLoad is false. 59 | } 60 | } 61 | } else { 62 | // Index persistence requires macOS 13 / iOS 16, so there is nothing to load here. 63 | } 64 | } 65 | 66 | override func similaritySearch(query: String, k: Int) async -> [MatchedModel] { 67 | await vs.search(query, top: k).map{MatchedModel(content: $0.text, similarity: $0.score, metadata: $0.metadata)} 68 | } 69 | 70 | override func addText(text: String, metadata: [String: String]) async { 71 | await vs.addItem(id: sha256(str: text), text: text, metadata: metadata) 72 | } 73 | 74 | @available(iOS 16.0, *) 75 | @available(macOS 13.0, *) 76 | public func writeToFile() { 77 | let _ = try? vs.saveIndex() 78 | } 79 | 80 | override func removeText(sha256: String) async { 81 | vs.removeItem(id: sha256) 82 | } 83 | 84 | func sha256(str: String) -> String { 85 | let data = Data(str.utf8) 86 | let hash = SHA256.hash(data: data) 87 | return hash.compactMap { String(format: "%02x", $0) }.joined() 88 | } 89 | } 90 | #endif 91 | -------------------------------------------------------------------------------- /Sources/LangChain/vectorstores/Supabase.swift: -------------------------------------------------------------------------------- 1 | // 2 | // File.swift 3 | // 4 | // 5 | // Created by 顾艳华 on 2023/6/12. 6 | // 7 | 8 | import Foundation 9 | import Supabase 10 | 11 | struct SearchVectorParams: Codable { 12 | let query_embedding: [Float] 13 | let match_count: Int 14 | } 15 | struct DocModel: Encodable, Decodable { 16 | let content: String? 17 | let embedding: [Float] 18 | let metadata: [String: String] 19 | } 20 | 21 | public class Supabase: VectorStore { 22 | let client: SupabaseClient 23 | let embeddings: Embeddings 24 | public init(embeddings: Embeddings) { 25 | self.embeddings = embeddings 26 | let env = LC.loadEnv() 27 | client = SupabaseClient(supabaseURL: URL(string: env["SUPABASE_URL"]!)!, supabaseKey: env["SUPABASE_KEY"]!)
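// LC.loadEnv() must surface SUPABASE_URL and SUPABASE_KEY; the force unwraps above mean that a
// missing or misspelled key crashes at construction time rather than failing gracefully.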
28 | } 29 | 30 | public override func similaritySearch(query: String, k: Int) async -> [MatchedModel] { 31 | let params = SearchVectorParams(query_embedding: await embeddings.embedQuery(text: query), match_count: k) 32 | let rpcQuery = client.database.rpc(fn: "match_documents", params: params) 33 | 34 | do { 35 | let response: [MatchedModel] = try await rpcQuery.execute().value // Where DataModel is the model of the data returned by the function 36 | // print("### RPC Returned: \(response.first!.content!)") 37 | return response 38 | } catch { 39 | print("### RPC Error: \(error)") 40 | return [] 41 | } 42 | 43 | } 44 | 45 | public override func addText(text: String, metadata: [String: String]) async { 46 | let embedding = await embeddings.embedQuery(text: text) 47 | let insertData = DocModel(content: text, embedding: embedding, metadata: metadata) 48 | let query = client.database 49 | .from("documents") 50 | .insert(values: insertData, 51 | returning: .representation) // you will need to add this to return the added data 52 | // .select(columns: "id") // specifiy which column names to be returned. Leave it empty for all columns 53 | .single() // specify you want to return a single value. 54 | 55 | do { 56 | let _: String = try await query.execute().value 57 | // print("### Save Returned: \(response)") 58 | } catch { 59 | print("### Insert Error: \(error)") 60 | } 61 | } 62 | } 63 | -------------------------------------------------------------------------------- /Sources/LangChain/vectorstores/VectorStore.swift: -------------------------------------------------------------------------------- 1 | // 2 | // File.swift 3 | // 4 | // 5 | // Created by 顾艳华 on 2023/6/14. 6 | // 7 | 8 | import Foundation 9 | 10 | public struct MatchedModel: Encodable, Decodable { 11 | let content: String? 12 | let similarity: Float 13 | let metadata: [String: String] 14 | } 15 | public class VectorStore { 16 | func addText(text: String, metadata: [String: String]) async { 17 | 18 | } 19 | func removeText(sha256: String) async { 20 | 21 | } 22 | func similaritySearch(query: String, k: Int) async -> [MatchedModel] { 23 | [] 24 | } 25 | 26 | func add_documents(documents: [Document]) async { 27 | for document in documents { 28 | await self.addText(text: document.page_content, metadata: document.metadata) 29 | } 30 | } 31 | 32 | 33 | func remove_documents(sha256s: [String]) async { 34 | for sha256 in sha256s { 35 | await self.removeText(sha256: sha256) 36 | } 37 | } 38 | // def add_documents(self, documents: List[Document], **kwargs: Any) -> List[str]: 39 | // """Run more documents through the embeddings and add to the vectorstore. 40 | // 41 | // Args: 42 | // documents (List[Document]: Documents to add to the vectorstore. 43 | // 44 | // Returns: 45 | // List[str]: List of IDs of the added texts. 
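Together with the base class below, the Supabase store above forms a minimal retrieval loop over module-internal API. A hedged sketch, assuming SUPABASE_URL and SUPABASE_KEY resolve, the supabase.sql schema further down has been applied, and OpenAIEmbeddings (defined elsewhere in the package) has a parameterless initializer:

// Hedged sketch: round-tripping one document through the Supabase vector store.
let store = Supabase(embeddings: OpenAIEmbeddings())
await store.addText(text: "LangChain now has a Swift port.", metadata: ["lang": "en"])
let matches = await store.similaritySearch(query: "Is there a Swift version of LangChain?", k: 3)
for match in matches {
    print(match.similarity, match.content ?? "")
}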
46 | // """ 47 | // # TODO: Handle the case where the user doesn't provide ids on the Collection 48 | // texts = [doc.page_content for doc in documents] 49 | // metadatas = [doc.metadata for doc in documents] 50 | // return self.add_texts(texts, metadatas, **kwargs) 51 | } 52 | -------------------------------------------------------------------------------- /Sources/LangChain/vectorstores/supabase/supabase.sql: -------------------------------------------------------------------------------- 1 | 2 | 3 | -- Create a table to store your documents 4 | create table documents ( 5 | id bigserial primary key, 6 | content text, -- corresponds to Document.pageContent 7 | embedding vector(1536), -- 1536 works for OpenAI embeddings, change if needed 8 | metadata jsonb 9 | ); 10 | 11 | -- Create a function to search for documents 12 | create function match_documents(query_embedding vector(1536), match_count int) 13 | returns table(id bigint, content text, metadata jsonb, similarity float) 14 | language plpgsql 15 | as $$ 16 | #variable_conflict use_column 17 | begin 18 | return query 19 | select 20 | id, 21 | content, 22 | metadata, 23 | 1 - (documents.embedding <=> query_embedding) as similarity 24 | from documents 25 | order by documents.embedding <=> query_embedding 26 | limit match_count; 27 | end; 28 | $$ 29 | ; 30 | -------------------------------------------------------------------------------- /techstack.md: -------------------------------------------------------------------------------- 1 | 26 |
27 | 
28 | # Tech Stack File
29 | ![](https://img.stackshare.io/repo.svg "repo") [buhe/langchain-swift](https://github.com/buhe/langchain-swift)![](https://img.stackshare.io/public_badge.svg "public")
30 | 
31 | |4 Tools used|12/14/23 Report generated|
32 | |------|------|
33 | 
34 | ## Languages (2)
35 | 
36 | - SQL
37 | - Swift
38 | 
39 | ## DevOps (2)
40 | 
41 | - Git
42 | - GitHub Actions
43 | 
44 | 
79 | 80 | Generated via [Stack File](https://github.com/marketplace/stack-file) 81 | -------------------------------------------------------------------------------- /techstack.yml: -------------------------------------------------------------------------------- 1 | repo_name: buhe/langchain-swift 2 | report_id: 64713165d26cff75e4a785647213e3da 3 | repo_type: Public 4 | timestamp: '2023-12-14T09:27:20+00:00' 5 | requested_by: buhe 6 | provider: github 7 | branch: main 8 | detected_tools_count: 4 9 | tools: 10 | - name: SQL 11 | description: It is a domain-specific language used in programming 12 | website_url: https://en.wikipedia.org/wiki/SQL 13 | open_source: true 14 | hosted_saas: false 15 | category: Languages & Frameworks 16 | sub_category: Languages 17 | image_url: https://img.stackshare.io/service/2271/default_068d33483bba6b81ee13fbd4dc7aab9780896a54.png 18 | detection_source: Sources/LangChain/vectorstores/supabase/supabase.sql 19 | last_updated_by: buhe 20 | last_updated_on: 2023-06-14 09:04:42.000000000 Z 21 | - name: Swift 22 | description: 'An innovative new programming language for Cocoa and Cocoa Touch. ' 23 | website_url: https://developer.apple.com/swift/ 24 | license: Apache-2.0 25 | open_source: true 26 | hosted_saas: false 27 | category: Languages & Frameworks 28 | sub_category: Languages 29 | image_url: https://img.stackshare.io/service/1009/tuHsaI2U.png 30 | detection_source: Repo Metadata 31 | - name: Git 32 | description: Fast, scalable, distributed revision control system 33 | website_url: http://git-scm.com/ 34 | open_source: true 35 | hosted_saas: false 36 | category: Build, Test, Deploy 37 | sub_category: Version Control System 38 | image_url: https://img.stackshare.io/service/1046/git.png 39 | detection_source: Repo Metadata 40 | - name: GitHub Actions 41 | description: Automate your workflow from idea to production 42 | website_url: https://github.com/features/actions 43 | open_source: false 44 | hosted_saas: true 45 | category: Build, Test, Deploy 46 | sub_category: Continuous Integration 47 | image_url: https://img.stackshare.io/service/11563/actions.png 48 | detection_source: ".github/workflows/swift.yml" 49 | last_updated_by: buhe 50 | last_updated_on: 2023-11-18 07:17:21.000000000 Z 51 | --------------------------------------------------------------------------------