├── .commitlintrc.json
├── .config
├── typedoc.css
└── typedoc.json
├── .editorconfig
├── .github
├── FUNDING.yml
├── ISSUE_TEMPLATE
│ ├── bug-report.yml
│ ├── config.yml
│ ├── documentation-issue.yml
│ └── feature-request.yml
├── PULL_REQUEST_TEMPLATE.md
└── workflows
│ ├── build.yml
│ ├── prLint.yml
│ └── test.yml
├── .gitignore
├── .husky
└── commit-msg
├── .prettierignore
├── .releaserc.ts
├── .vitepress
├── assets
│ ├── ogTemplate.svg
│ ├── ogTemplate.v1.svg
│ ├── ogTemplate.v2.svg
│ └── social.poster.svg
├── components.d.ts
├── components
│ ├── BlogEntry
│ │ └── BlogEntry.vue
│ ├── CommentsSection
│ │ └── CommentsSection.vue
│ ├── DataBadge
│ │ └── DataBadge.vue
│ ├── HomePage
│ │ ├── HomePage.vue
│ │ └── utils
│ │ │ └── getElectronExampleAppDownloadLink.ts
│ ├── LatestVersionHomeBadge
│ │ └── LatestVersionHomeBadge.vue
│ └── YouTubePlayer
│ │ └── YouTubePlayer.vue
├── config.ts
├── config
│ ├── BlogPageInfoPlugin.ts
│ ├── apiReferenceSidebar.ts
│ ├── getBlogPosts.ts
│ └── sidebar.ts
├── theme
│ ├── LayoutContainer.vue
│ ├── assets
│ │ ├── theme-pattern.dark.svg
│ │ └── theme-pattern.light.svg
│ ├── index.ts
│ ├── smoothLoad.css
│ └── style.css
├── tsconfig.json
└── utils
│ ├── buildHtmlHeading.ts
│ ├── buildHtmlTable.ts
│ ├── ensureLocalImage.ts
│ ├── getCommandHtmlDoc.ts
│ ├── getExcerptFromMarkdownFile.ts
│ ├── getInlineCodeBlockHtml.ts
│ ├── getMarkdownRenderer.ts
│ ├── htmlEscape.ts
│ ├── htmlEscapeWithCodeMarkdown.ts
│ ├── parseCmakeListsTxtOptions.ts
│ └── renderHtmlTag.ts
├── CONTRIBUTING.md
├── LICENSE
├── README.md
├── assets
├── electronTemplate.icon.png
├── icon.svg
├── icon.v3.svg
├── logo.png
├── logo.roundEdges.png
├── logo.v3.png
├── logo.v3.roundEdges.avif
├── logo.v3.roundEdges.png
├── social.poster.png
├── star.please.png
└── star.please.roundEdges.png
├── docs
├── blog
│ ├── blog.data.ts
│ ├── index.md
│ ├── v3.6-deepseek-r1.md
│ └── v3.md
├── cli
│ ├── chat.md
│ ├── cli.data.ts
│ ├── complete.md
│ ├── index.md
│ ├── infill.md
│ ├── init.md
│ ├── inspect.md
│ ├── inspect
│ │ ├── estimate.md
│ │ ├── gguf.md
│ │ ├── gpu.md
│ │ └── measure.md
│ ├── pull.md
│ ├── source.md
│ └── source
│ │ ├── build.md
│ │ ├── clear.md
│ │ └── download.md
├── guide
│ ├── CUDA.md
│ ├── Metal.md
│ ├── Vulkan.md
│ ├── awesome.md
│ ├── batching.md
│ ├── building-from-source.md
│ ├── chat-context-shift.md
│ ├── chat-session.md
│ ├── chat-wrapper.md
│ ├── choosing-a-model.md
│ ├── cmakeOptions.data.ts
│ ├── contributing.md
│ ├── development.md
│ ├── docker.md
│ ├── downloading-models.md
│ ├── electron.md
│ ├── embedding.md
│ ├── external-chat-state.md
│ ├── function-calling.md
│ ├── grammar.md
│ ├── index.md
│ ├── llama-text.md
│ ├── low-level-api.md
│ ├── objects-lifecycle.md
│ ├── text-completion.md
│ ├── tips-and-tricks.md
│ ├── token-bias.md
│ ├── token-prediction.md
│ ├── tokens.md
│ └── troubleshooting.md
├── index.md
└── public
│ ├── favicon.ico
│ ├── favicon.png
│ ├── favicon.svg
│ ├── giscus
│ ├── dark.css
│ ├── light.css
│ ├── original
│ │ ├── dark.css
│ │ └── light.css
│ └── style.css
│ ├── icon.svg
│ ├── logo.jpg
│ └── robots.txt
├── eslint.config.js
├── giscus.json
├── llama
├── .clang-format
├── CMakeLists.txt
├── addon
│ ├── AddonContext.cpp
│ ├── AddonContext.h
│ ├── AddonGrammar.cpp
│ ├── AddonGrammar.h
│ ├── AddonGrammarEvaluationState.cpp
│ ├── AddonGrammarEvaluationState.h
│ ├── AddonModel.cpp
│ ├── AddonModel.h
│ ├── AddonModelData.cpp
│ ├── AddonModelData.h
│ ├── AddonModelLora.cpp
│ ├── AddonModelLora.h
│ ├── AddonSampler.cpp
│ ├── AddonSampler.h
│ ├── RingBuffer.h
│ ├── addon.cpp
│ ├── addonGlobals.cpp
│ ├── addonGlobals.h
│ └── globals
│ │ ├── addonLog.cpp
│ │ ├── addonLog.h
│ │ ├── addonProgress.cpp
│ │ ├── addonProgress.h
│ │ ├── getGpuInfo.cpp
│ │ ├── getGpuInfo.h
│ │ ├── getMemoryInfo.cpp
│ │ ├── getMemoryInfo.h
│ │ ├── getSwapInfo.cpp
│ │ └── getSwapInfo.h
├── binariesGithubRelease.json
├── cmake
│ ├── win32.ensureNinjaPath.cmake
│ ├── win32.ensureNodeLib.cmake
│ ├── win32.llvmApplyGnuModeAdaptations.cmake
│ ├── win32.llvmEnsureCmakeAr.cmake
│ ├── win32.llvmUseGnuModeCompilers.cmake
│ └── win32.programFilesPaths.cmake
├── gpuInfo
│ ├── vulkan-gpu-info.cpp
│ └── vulkan-gpu-info.h
├── package.json
├── profiles
│ ├── llvm.win32.host-arm64.target-arm64.cmake
│ ├── llvm.win32.host-x64.target-arm64.cmake
│ └── llvm.win32.host-x64.target-x64.cmake
├── toolchains
│ ├── darwin.host-x64.target-arm64.cmake
│ ├── linux.host-arm64.target-x64.cmake
│ ├── linux.host-x64.target-arm64.cmake
│ ├── linux.host-x64.target-arm71.cmake
│ ├── llvm.win32.host-x64.target-x64.cmake
│ ├── win32.host-arm64.target-arm64.cmake
│ └── win32.host-x64.target-arm64.cmake
└── xpack
│ └── package.json
├── package-lock.json
├── package.json
├── packages
├── @node-llama-cpp
│ ├── linux-arm64
│ │ ├── .gitignore
│ │ ├── LICENSE
│ │ ├── README.md
│ │ ├── package-lock.json
│ │ ├── package.json
│ │ ├── src
│ │ │ └── index.ts
│ │ └── tsconfig.json
│ ├── linux-armv7l
│ │ ├── .gitignore
│ │ ├── LICENSE
│ │ ├── README.md
│ │ ├── package-lock.json
│ │ ├── package.json
│ │ ├── src
│ │ │ └── index.ts
│ │ └── tsconfig.json
│ ├── linux-x64-cuda
│ │ ├── .gitignore
│ │ ├── LICENSE
│ │ ├── README.md
│ │ ├── package-lock.json
│ │ ├── package.json
│ │ ├── src
│ │ │ └── index.ts
│ │ └── tsconfig.json
│ ├── linux-x64-vulkan
│ │ ├── .gitignore
│ │ ├── LICENSE
│ │ ├── README.md
│ │ ├── package-lock.json
│ │ ├── package.json
│ │ ├── src
│ │ │ └── index.ts
│ │ └── tsconfig.json
│ ├── linux-x64
│ │ ├── .gitignore
│ │ ├── LICENSE
│ │ ├── README.md
│ │ ├── package-lock.json
│ │ ├── package.json
│ │ ├── src
│ │ │ └── index.ts
│ │ └── tsconfig.json
│ ├── mac-arm64-metal
│ │ ├── .gitignore
│ │ ├── LICENSE
│ │ ├── README.md
│ │ ├── package-lock.json
│ │ ├── package.json
│ │ ├── src
│ │ │ └── index.ts
│ │ └── tsconfig.json
│ ├── mac-x64
│ │ ├── .gitignore
│ │ ├── LICENSE
│ │ ├── README.md
│ │ ├── package-lock.json
│ │ ├── package.json
│ │ ├── src
│ │ │ └── index.ts
│ │ └── tsconfig.json
│ ├── win-arm64
│ │ ├── .gitignore
│ │ ├── LICENSE
│ │ ├── README.md
│ │ ├── package-lock.json
│ │ ├── package.json
│ │ ├── src
│ │ │ └── index.ts
│ │ └── tsconfig.json
│ ├── win-x64-cuda
│ │ ├── .gitignore
│ │ ├── LICENSE
│ │ ├── README.md
│ │ ├── package-lock.json
│ │ ├── package.json
│ │ ├── src
│ │ │ └── index.ts
│ │ └── tsconfig.json
│ ├── win-x64-vulkan
│ │ ├── .gitignore
│ │ ├── LICENSE
│ │ ├── README.md
│ │ ├── package-lock.json
│ │ ├── package.json
│ │ ├── src
│ │ │ └── index.ts
│ │ └── tsconfig.json
│ └── win-x64
│ │ ├── .gitignore
│ │ ├── LICENSE
│ │ ├── README.md
│ │ ├── package-lock.json
│ │ ├── package.json
│ │ ├── src
│ │ └── index.ts
│ │ └── tsconfig.json
└── create-node-llama-cpp
│ ├── .gitignore
│ ├── LICENSE
│ ├── README.md
│ ├── package-lock.json
│ ├── package.json
│ ├── src
│ ├── cli.ts
│ └── index.ts
│ └── tsconfig.json
├── scripts
├── movePrebuiltBinariesToStandaloneModules.ts
├── packTemplates.ts
├── patches
│ ├── @semantic-release+github+11.0.0.patch
│ ├── @semantic-release+npm+12.0.1.patch
│ ├── semantic-release+24.1.1.patch
│ └── vitepress+1.3.4.patch
├── postVersion.ts
├── prepareCreateNodeLlamaCppModuleForPublish.ts
├── prepareStandalonePrebuiltBinaryModules.ts
├── publishStandalonePrebuiltBinaryModules.ts
├── rerunSemanticReleaseGithubSuccess.ts
├── resolveLatestReleaseVersion.ts
├── resolveNextReleaseVersion.ts
└── scaffoldElectronExampleForCiBuild.ts
├── src
├── ChatWrapper.ts
├── apiDocsIndex.ts
├── bindings
│ ├── AddonTypes.ts
│ ├── Llama.ts
│ ├── consts.ts
│ ├── getLlama.ts
│ ├── types.ts
│ └── utils
│ │ ├── MemoryOrchestrator.ts
│ │ ├── NoBinaryFoundError.ts
│ │ ├── asyncEvery.ts
│ │ ├── asyncSome.ts
│ │ ├── binariesGithubRelease.ts
│ │ ├── clearAllLocalBuilds.ts
│ │ ├── cloneLlamaCppRepo.ts
│ │ ├── compileLLamaCpp.ts
│ │ ├── detectAvailableComputeLayers.ts
│ │ ├── detectBuildTools.ts
│ │ ├── detectGlibc.ts
│ │ ├── getBestComputeLayersAvailable.ts
│ │ ├── getBuildFolderNameForBuildOptions.ts
│ │ ├── getCanUsePrebuiltBinaries.ts
│ │ ├── getExampleUsageCodeOfGetLlama.ts
│ │ ├── getGpuTypesToUseForOption.ts
│ │ ├── getLinuxDistroInfo.ts
│ │ ├── getLlamaGpuTypes.ts
│ │ ├── getLlamaWithoutBackend.ts
│ │ ├── getPlatform.ts
│ │ ├── getPlatformInfo.ts
│ │ ├── hasBuildingFromSourceDependenciesInstalled.ts
│ │ ├── hasFileInPath.ts
│ │ ├── lastBuildInfo.ts
│ │ ├── logBinaryUsageExampleToConsole.ts
│ │ ├── logDistroInstallInstruction.ts
│ │ ├── resolveActualBindingBinaryPath.ts
│ │ ├── resolveCustomCmakeOptions.ts
│ │ ├── testBindingBinary.ts
│ │ └── testCmakeBinary.ts
├── chatWrappers
│ ├── AlpacaChatWrapper.ts
│ ├── ChatMLChatWrapper.ts
│ ├── DeepSeekChatWrapper.ts
│ ├── EmptyChatWrapper.ts
│ ├── FalconChatWrapper.ts
│ ├── FunctionaryChatWrapper.ts
│ ├── GemmaChatWrapper.ts
│ ├── GeneralChatWrapper.ts
│ ├── Llama2ChatWrapper.ts
│ ├── Llama3ChatWrapper.ts
│ ├── Llama3_1ChatWrapper.ts
│ ├── Llama3_2LightweightChatWrapper.ts
│ ├── MistralChatWrapper.ts
│ ├── QwenChatWrapper.ts
│ ├── generic
│ │ ├── JinjaTemplateChatWrapper.ts
│ │ ├── TemplateChatWrapper.ts
│ │ └── utils
│ │ │ ├── UniqueIdGenerator.ts
│ │ │ ├── chatHistoryFunctionCallMessageTemplate.ts
│ │ │ ├── extractFunctionCallSettingsFromJinjaTemplate.ts
│ │ │ ├── extractSegmentSettingsFromTokenizerAndChatTemplate.ts
│ │ │ ├── getFirstValidResult.ts
│ │ │ ├── squashChatHistoryItems.ts
│ │ │ └── templateSegmentOptionsToChatWrapperSettings.ts
│ └── utils
│ │ ├── ChatModelFunctionsDocumentationGenerator.ts
│ │ ├── chunkChatItems.ts
│ │ ├── getModelLinageNames.ts
│ │ ├── isJinjaTemplateEquivalentToSpecializedChatWrapper.ts
│ │ ├── isLlama3_2LightweightModel.ts
│ │ ├── jsonDumps.ts
│ │ └── resolveChatWrapper.ts
├── cli
│ ├── cli.ts
│ ├── commands
│ │ ├── ChatCommand.ts
│ │ ├── CompleteCommand.ts
│ │ ├── DebugCommand.ts
│ │ ├── InfillCommand.ts
│ │ ├── InitCommand.ts
│ │ ├── OnPostInstallCommand.ts
│ │ ├── PullCommand.ts
│ │ ├── inspect
│ │ │ ├── InspectCommand.ts
│ │ │ └── commands
│ │ │ │ ├── InspectEstimateCommand.ts
│ │ │ │ ├── InspectGgufCommand.ts
│ │ │ │ ├── InspectGpuCommand.ts
│ │ │ │ └── InspectMeasureCommand.ts
│ │ └── source
│ │ │ ├── SourceCommand.ts
│ │ │ └── commands
│ │ │ ├── BuildCommand.ts
│ │ │ ├── ClearCommand.ts
│ │ │ └── DownloadCommand.ts
│ ├── projectTemplates.ts
│ ├── recommendedModels.ts
│ ├── startCreateCli.ts
│ └── utils
│ │ ├── ConsoleInteraction.ts
│ │ ├── ConsoleTable.ts
│ │ ├── basicChooseFromListConsoleInteraction.ts
│ │ ├── consolePromptQuestion.ts
│ │ ├── getReadablePath.ts
│ │ ├── interactivelyAskForModel.ts
│ │ ├── isRunningUnderRosetta.ts
│ │ ├── logUsedGpuTypeOption.ts
│ │ ├── printCommonInfoLines.ts
│ │ ├── printInfoLine.ts
│ │ ├── printModelDestination.ts
│ │ ├── projectTemplates.ts
│ │ ├── renderModelCompatibilityPercentageWithColors.ts
│ │ ├── resolveCommandGgufPath.ts
│ │ ├── resolveHeaderFlag.ts
│ │ ├── resolveModelRecommendationFileOptions.ts
│ │ ├── splitAnsiToLines.ts
│ │ ├── toBytes.ts
│ │ └── withCliCommandDescriptionDocsUrl.ts
├── commands.ts
├── config.ts
├── consts.ts
├── evaluator
│ ├── LlamaChat
│ │ ├── LlamaChat.ts
│ │ └── utils
│ │ │ ├── FunctionCallNameGrammar.ts
│ │ │ ├── FunctionCallParamsGrammar.ts
│ │ │ ├── LlamaFunctionCallValidationError.ts
│ │ │ └── contextShiftStrategies
│ │ │ └── eraseFirstResponseAndKeepFirstSystemChatContextShiftStrategy.ts
│ ├── LlamaChatSession
│ │ ├── LlamaChatSession.ts
│ │ └── utils
│ │ │ ├── LlamaChatSessionPromptCompletionEngine.ts
│ │ │ └── defineChatSessionFunction.ts
│ ├── LlamaCompletion.ts
│ ├── LlamaContext
│ │ ├── LlamaContext.ts
│ │ ├── LlamaSampler.ts
│ │ ├── TokenPredictor.ts
│ │ ├── tokenPredictors
│ │ │ ├── DraftSequenceTokenPredictor.ts
│ │ │ └── InputLookupTokenPredictor.ts
│ │ ├── types.ts
│ │ └── utils
│ │ │ ├── batchItemsPrioritizationStrategies
│ │ │ ├── firstInFirstOutStrategy.ts
│ │ │ └── maximumParallelismStrategy.ts
│ │ │ └── resolveBatchItemsPrioritizationStrategy.ts
│ ├── LlamaEmbedding.ts
│ ├── LlamaEmbeddingContext.ts
│ ├── LlamaGrammar.ts
│ ├── LlamaGrammarEvaluationState.ts
│ ├── LlamaJsonSchemaGrammar.ts
│ ├── LlamaModel
│ │ ├── LlamaModel.ts
│ │ └── utils
│ │ │ └── TokenAttributes.ts
│ ├── LlamaRankingContext.ts
│ ├── TokenBias.ts
│ ├── TokenMeter.ts
│ └── utils
│ │ └── chunkDocument.ts
├── gguf
│ ├── consts.ts
│ ├── errors
│ │ ├── InvalidGgufMagicError.ts
│ │ └── UnsupportedGgufValueTypeError.ts
│ ├── fileReaders
│ │ ├── GgufFileReader.ts
│ │ ├── GgufFsFileReader.ts
│ │ └── GgufNetworkFetchFileReader.ts
│ ├── insights
│ │ ├── GgufInsights.ts
│ │ ├── GgufInsightsConfigurationResolver.ts
│ │ └── utils
│ │ │ ├── getRamUsageFromUnifiedVram.ts
│ │ │ ├── resolveContextContextSizeOption.ts
│ │ │ ├── resolveModelGpuLayersOption.ts
│ │ │ └── scoreLevels.ts
│ ├── parser
│ │ ├── GgufV2Parser.ts
│ │ ├── GgufV3Parser.ts
│ │ └── parseGguf.ts
│ ├── readGgufFileInfo.ts
│ ├── types
│ │ ├── GgufFileInfoTypes.ts
│ │ ├── GgufMetadataTypes.ts
│ │ └── GgufTensorInfoTypes.ts
│ └── utils
│ │ ├── GgufReadOffset.ts
│ │ ├── convertMetadataKeyValueRecordToNestedObject.ts
│ │ ├── getGgufFileTypeName.ts
│ │ ├── getGgufMetadataArchitectureData.ts
│ │ ├── getGgufMetadataKeyValue.ts
│ │ ├── ggufQuantNames.ts
│ │ ├── normalizeGgufDownloadUrl.ts
│ │ ├── resolveBinarySplitGgufPartUrls.ts
│ │ └── resolveSplitGgufParts.ts
├── index.ts
├── state.ts
├── types.ts
└── utils
│ ├── DisposeGuard.ts
│ ├── InsufficientMemoryError.ts
│ ├── LlamaText.ts
│ ├── LruCache.ts
│ ├── OpenAIFormat.ts
│ ├── OverridesObject.ts
│ ├── ReplHistory.ts
│ ├── StopGenerationDetector.ts
│ ├── ThreadsSplitter.ts
│ ├── TokenStreamRegulator.ts
│ ├── UnsupportedError.ts
│ ├── appendUserMessageToChatHistory.ts
│ ├── clearTempFolder.ts
│ ├── cmake.ts
│ ├── compareTokens.ts
│ ├── createModelDownloader.ts
│ ├── findBestOption.ts
│ ├── findCharacterRemovalCountToFitChatHistoryInContext.ts
│ ├── gbnfJson
│ ├── GbnfGrammarGenerator.ts
│ ├── GbnfTerminal.ts
│ ├── getGbnfGrammarForGbnfJsonSchema.ts
│ ├── terminals
│ │ ├── GbnfAnyJson.ts
│ │ ├── GbnfArray.ts
│ │ ├── GbnfBoolean.ts
│ │ ├── GbnfBooleanValue.ts
│ │ ├── GbnfCommaWhitespace.ts
│ │ ├── GbnfFormatString.ts
│ │ ├── GbnfGrammar.ts
│ │ ├── GbnfInsideStringChar.ts
│ │ ├── GbnfNull.ts
│ │ ├── GbnfNumber.ts
│ │ ├── GbnfNumberValue.ts
│ │ ├── GbnfObjectMap.ts
│ │ ├── GbnfOr.ts
│ │ ├── GbnfRepetition.ts
│ │ ├── GbnfString.ts
│ │ ├── GbnfStringValue.ts
│ │ ├── GbnfVerbatimText.ts
│ │ ├── GbnfWhitespace.ts
│ │ └── gbnfConsts.ts
│ ├── types.ts
│ └── utils
│ │ ├── GbnfJsonScopeState.ts
│ │ ├── getGbnfJsonTerminalForGbnfJsonSchema.ts
│ │ ├── getGbnfJsonTerminalForLiteral.ts
│ │ └── validateObjectAgainstGbnfSchema.ts
│ ├── getBuildDefaults.ts
│ ├── getChatWrapperSegmentDefinition.ts
│ ├── getConsoleLogPrefix.ts
│ ├── getGrammarsFolder.ts
│ ├── getLlamaClasses.ts
│ ├── getModuleVersion.ts
│ ├── getQueuedTokensBeforeStopTrigger.ts
│ ├── getReadableContextSize.ts
│ ├── getTypeScriptTypeStringForGbnfJsonSchema.ts
│ ├── gitReleaseBundles.ts
│ ├── hashString.ts
│ ├── includesText.ts
│ ├── isLockfileActive.ts
│ ├── isToken.ts
│ ├── isUrl.ts
│ ├── mergeUnionTypes.ts
│ ├── modelDownloadEndpoints.ts
│ ├── modelFileAccessTokens.ts
│ ├── optionsMatrix.ts
│ ├── parseModelFileName.ts
│ ├── parseModelUri.ts
│ ├── parseTextTemplate.ts
│ ├── prettyPrintObject.ts
│ ├── pushAll.ts
│ ├── removeNullFields.ts
│ ├── resolveGithubRelease.ts
│ ├── resolveLastTokens.ts
│ ├── resolveModelDestination.ts
│ ├── resolveModelFile.ts
│ ├── runtime.ts
│ ├── safeEventCallback.ts
│ ├── spawnCommand.ts
│ ├── tokenizeInput.ts
│ ├── tokenizerUtils.ts
│ ├── transformPromisable.ts
│ ├── truncateTextAndRoundToWords.ts
│ ├── utilTypes.ts
│ ├── waitForLockfileRelease.ts
│ ├── withLockfile.ts
│ ├── withOra.ts
│ ├── withProgressLog.ts
│ ├── withStatusLogs.ts
│ └── wrapAbortSignal.ts
├── templates
├── .gitignore
├── README.md
├── electron-typescript-react
│ ├── .editorconfig
│ ├── .gitignore
│ ├── README.md
│ ├── electron-builder.ts
│ ├── electron
│ │ ├── electron-env.d.ts
│ │ ├── index.ts
│ │ ├── llm
│ │ │ └── modelFunctions.ts
│ │ ├── preload.ts
│ │ ├── rpc
│ │ │ └── llmRpc.ts
│ │ ├── state
│ │ │ └── llmState.ts
│ │ └── utils
│ │ │ └── createElectronSideBirpc.ts
│ ├── eslint.config.js
│ ├── package.json
│ ├── public
│ │ └── vite.svg
│ ├── src
│ │ ├── App
│ │ │ ├── App.css
│ │ │ ├── App.tsx
│ │ │ └── components
│ │ │ │ ├── ChatHistory
│ │ │ │ ├── ChatHistory.css
│ │ │ │ ├── ChatHistory.tsx
│ │ │ │ └── components
│ │ │ │ │ ├── ModelMessage
│ │ │ │ │ ├── ModelMessage.css
│ │ │ │ │ ├── ModelMessage.tsx
│ │ │ │ │ └── components
│ │ │ │ │ │ └── ModelMessageCopyButton
│ │ │ │ │ │ ├── ModelMessageCopyButton.css
│ │ │ │ │ │ └── ModelMessageCopyButton.tsx
│ │ │ │ │ ├── ModelResponseThought
│ │ │ │ │ ├── ModelResponseThought.css
│ │ │ │ │ └── ModelResponseThought.tsx
│ │ │ │ │ └── UserMessage
│ │ │ │ │ ├── UserMessage.css
│ │ │ │ │ └── UserMessage.tsx
│ │ │ │ ├── Header
│ │ │ │ ├── Header.css
│ │ │ │ ├── Header.tsx
│ │ │ │ └── components
│ │ │ │ │ ├── UpdateBadge.css
│ │ │ │ │ └── UpdateBadge.tsx
│ │ │ │ ├── InputRow
│ │ │ │ ├── InputRow.css
│ │ │ │ └── InputRow.tsx
│ │ │ │ ├── MarkdownContent
│ │ │ │ ├── MarkdownContent.css
│ │ │ │ └── MarkdownContent.tsx
│ │ │ │ └── MessageMarkdown
│ │ │ │ ├── MessageMarkdown.css
│ │ │ │ └── MessageMarkdown.tsx
│ │ ├── hooks
│ │ │ └── useExternalState.ts
│ │ ├── icons
│ │ │ ├── AbortIconSVG.tsx
│ │ │ ├── AddMessageIconSVG.tsx
│ │ │ ├── CheckIconSVG.tsx
│ │ │ ├── CopyIconSVG.tsx
│ │ │ ├── DeleteIconSVG.tsx
│ │ │ ├── DownloadIconSVG.tsx
│ │ │ ├── LoadFileIconSVG.tsx
│ │ │ ├── RightChevronIconSVG.tsx
│ │ │ ├── SearchIconSVG.tsx
│ │ │ └── StarIconSVG.tsx
│ │ ├── index.css
│ │ ├── index.html
│ │ ├── index.tsx
│ │ ├── rpc
│ │ │ └── llmRpc.ts
│ │ ├── state
│ │ │ └── llmState.ts
│ │ ├── utils
│ │ │ └── createRendererSideBirpc.ts
│ │ └── vite-env.d.ts
│ ├── tsconfig.json
│ ├── tsconfig.node.json
│ └── vite.config.ts
├── node-typescript
│ ├── .editorconfig
│ ├── .gitignore
│ ├── README.md
│ ├── eslint.config.js
│ ├── package.json
│ ├── src
│ │ └── index.ts
│ └── tsconfig.json
├── package-lock.json
└── package.json
├── test
├── modelDependent
│ ├── bge
│ │ └── embedding.test.ts
│ ├── bgeReranker
│ │ └── rank.test.ts
│ ├── codegemma
│ │ ├── completion.test.ts
│ │ └── parallel.test.ts
│ ├── functionary
│ │ ├── chatSession.test.ts
│ │ ├── embedding.test.ts
│ │ ├── functionaryModelGpuLayersOptions.test.ts
│ │ ├── functions.test.ts
│ │ ├── gguf
│ │ │ ├── __snapshots__
│ │ │ │ └── ggufParser.test.ts.snap
│ │ │ ├── ggufInsights.test.ts
│ │ │ └── ggufParser.test.ts
│ │ ├── grammar.test.ts
│ │ └── sanity.test.ts
│ ├── llama3.1
│ │ ├── chunkDocument.test.ts
│ │ ├── completion.test.ts
│ │ ├── controlledEvaluate.test.ts
│ │ ├── evaluateWithMetadata.test.ts
│ │ ├── tokenBias.test.ts
│ │ └── tokenPredictor.test.ts
│ ├── llama3.2
│ │ ├── completion.test.ts
│ │ ├── promptCompletion.test.ts
│ │ └── sequenceState.test.ts
│ ├── llama3
│ │ ├── chatSession.test.ts
│ │ ├── functions.test.ts
│ │ ├── grammar.test.ts
│ │ └── lora.test.ts
│ ├── nomicEmbedText
│ │ └── embedding.test.ts
│ ├── qwen3-0.6b
│ │ └── reasoningBudget.test.ts
│ └── stableCode
│ │ ├── asyncContextLoad.test.ts
│ │ ├── asyncModelLoad.test.ts
│ │ ├── completion.test.ts
│ │ ├── metadataOverrides.test.ts
│ │ └── stableCodeModelGpuLayersOptions.test.ts
├── standalone
│ ├── chatWrappers
│ │ ├── ChatMLChatWrapper.test.ts
│ │ ├── DeepSeekChatWrapper.test.ts
│ │ ├── FalconChatWrapper.test.ts
│ │ ├── FunctionaryChatWrapper.test.ts
│ │ ├── GemmaChatWrapper.test.ts
│ │ ├── GeneralChatWrapper.test.ts
│ │ ├── Llama2ChatWrapper.test.ts
│ │ ├── Llama3ChatWrapper.test.ts
│ │ ├── Llama3_1ChatWrapper.test.ts
│ │ ├── MistralChatWrapper.test.ts
│ │ ├── QwenChatWrapper.test.ts
│ │ ├── generic
│ │ │ ├── JinjaTemplateChatWrapper.test.ts
│ │ │ ├── TemplateChatWrapper.test.ts
│ │ │ └── utils
│ │ │ │ └── getChatWrapperSegmentsOptionsFromTemplateOption.test.ts
│ │ └── utils
│ │ │ └── resolveChatWrapper.test.ts
│ ├── cli
│ │ └── recommendedModels.test.ts
│ ├── gguf
│ │ ├── __snapshots__
│ │ │ └── ggufStandaloneParser.test.ts.snap
│ │ └── ggufStandaloneParser.test.ts
│ ├── llamaEvaluator
│ │ ├── LlamaGrammar.test.ts
│ │ └── functionCallGrammar.test.ts
│ ├── parseModelFileName.test.ts
│ └── utils
│ │ ├── LlamaText.test.ts
│ │ ├── ThreadsSplitter.test.ts
│ │ ├── createSplitPartFilename.test.ts
│ │ ├── modelDownloader.test.ts
│ │ ├── optionsMatrix.test.ts
│ │ ├── parseModelUri.test.ts
│ │ └── parseTextTemplate.test.ts
└── utils
│ ├── getTestLlama.ts
│ ├── helpers
│ ├── SpecialTokenSerializer.ts
│ ├── SpecialTokensTextSerializer.ts
│ ├── getTempTestDir.ts
│ ├── llamaTextSerializer.ts
│ ├── simplifyGgufInfoForTestSnapshot.ts
│ └── testSetup.ts
│ ├── jsonSchemas
│ └── message.json
│ ├── modelFiles.ts
│ ├── prompts
│ └── longSystemPrompt.txt
│ ├── scripts
│ └── downloadAllTestModels.ts
│ └── setupAndTestOnPaperspace.sh
├── tsconfig.json
└── vitest.config.ts
/.commitlintrc.json:
--------------------------------------------------------------------------------
1 | {
2 | "extends": ["@commitlint/config-conventional"],
3 | "rules": {
4 | "subject-case": [0, "never"]
5 | }
6 | }
7 |
--------------------------------------------------------------------------------
/.config/typedoc.json:
--------------------------------------------------------------------------------
1 | {
2 | "$schema": "https://typedoc.org/schema.json",
3 | "entryPoints": ["../src/apiDocsIndex.ts"],
4 | "out": "../docs/api",
5 | "tsconfig": "../tsconfig.json",
6 | "customCss": "./typedoc.css",
7 | "readme": "../README.md",
8 | "excludeExternals": true,
9 | "excludeInternal": true,
10 | "excludePrivate": true,
11 | "githubPages": true,
12 | "hideGenerator": true,
13 | "jsDocCompatibility": true,
14 | "lang": "en",
15 | "plugin": ["typedoc-plugin-markdown", "typedoc-vitepress-theme", "typedoc-plugin-mdn-links"],
16 | "hideBreadcrumbs": true,
17 | "hidePageHeader": true,
18 | "preserveAnchorCasing": true,
19 | "useCodeBlocks": true,
20 | "expandObjects": true,
21 | "expandParameters": true,
22 | "parametersFormat": "table",
23 | "propertiesFormat": "list",
24 | "enumMembersFormat": "table",
25 | "typeDeclarationFormat": "list",
26 | "classPropertiesFormat": "list",
27 | "interfacePropertiesFormat": "list",
28 | "sort": ["source-order"],
29 | "docsRoot": "../docs",
30 | "intentionallyNotExported": ["MergeOptionalUnionTypes", "PickOptions", "GbnfJsonSchemaToTSType", "_LlamaText"],
31 | "useHTMLEncodedBrackets": true
32 | }
33 |
--------------------------------------------------------------------------------
/.editorconfig:
--------------------------------------------------------------------------------
1 | root = true
2 |
3 | [*]
4 | indent_style = space
5 | indent_size = 4
6 |
7 | [{*.ts,*.tsx,*.js,*.jsx,*.css,*.scss}]
8 | insert_final_newline = true
9 |
10 | [{package.json,package-lock.json,manifest.json}]
11 | indent_size = 2
12 |
13 | [*.yml]
14 | indent_size = 2
15 |
--------------------------------------------------------------------------------
/.github/FUNDING.yml:
--------------------------------------------------------------------------------
1 | # These are supported funding model platforms
2 |
3 | github: giladgd
4 | patreon: # Replace with a single Patreon username
5 | open_collective: # Replace with a single Open Collective username
6 | ko_fi: # Replace with a single Ko-fi username
7 | tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel
8 | community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry
9 | liberapay: # Replace with a single Liberapay username
10 | issuehunt: # Replace with a single IssueHunt username
11 | otechie: # Replace with a single Otechie username
12 | lfx_crowdfunding: # Replace with a single LFX Crowdfunding project-name e.g., cloud-foundry
13 | custom: # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2']
14 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/config.yml:
--------------------------------------------------------------------------------
1 | blank_issues_enabled: false
2 | contact_links:
3 | - name: 🤔 Questions, General Support, and Help
4 | url: https://github.com/withcatai/node-llama-cpp/discussions
5 | about: This issue tracker is not for support questions. Please use GitHub discussions instead.
6 |
--------------------------------------------------------------------------------
/.github/workflows/prLint.yml:
--------------------------------------------------------------------------------
1 | name: PR Lint
2 | on:
3 | pull_request:
4 | pull_request_target:
5 | types: [opened, reopened, edited, synchronize]
6 |
7 | jobs:
8 | lint:
9 | name: Lint
10 | runs-on: ubuntu-latest
11 | permissions:
12 | pull-requests: read
13 | steps:
14 | - uses: amannn/action-semantic-pull-request@v5
15 | env:
16 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
17 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | /.idea
2 | /.vscode
3 | node_modules
4 | .DS_Store
5 | *.cpuprofile
6 |
7 | /dist
8 | /docs-site
9 | /docs/api
10 | /templates/packed
11 |
12 | /.env
13 | /.eslintcache
14 | /.vitepress/.cache
15 | /test/.models
16 | /test/temp
17 | /test/.temp
18 | /temp
19 | /coverage
20 | /test-runner-profile
21 |
22 | /llama/compile_commands.json
23 | /llama/llama.cpp
24 | /llama/llama.cpp.lock
25 | /llama/llama.cpp.info.json
26 | /llama/lastBuild.json
27 | /llama/gitRelease.bundle
28 | /llama/.temp
29 | /llama/.cache
30 | /llama/build
31 | /llama/.idea
32 | /llama/.vscode
33 | /llama/cmake-build-debug
34 | /llama/localBuilds
35 | /llama/Release
36 | /llama/Debug
37 | /llama/xpack/cache
38 | /llama/xpack/store
39 | /llama/xpack/xpacks
40 | /llama/xpack/cmakeInstall.lock
41 | /bins
42 |
--------------------------------------------------------------------------------
/.husky/commit-msg:
--------------------------------------------------------------------------------
1 | commitlint --edit "$1"
2 |
--------------------------------------------------------------------------------
/.prettierignore:
--------------------------------------------------------------------------------
1 | *
2 |
--------------------------------------------------------------------------------
/.vitepress/components.d.ts:
--------------------------------------------------------------------------------
1 | declare module "*.vue" {
2 | import type {DefineComponent} from "vue";
3 |
4 | const component: DefineComponent<{}, {}, any>;
5 | export default component;
6 | }
7 |
--------------------------------------------------------------------------------
/.vitepress/theme/smoothLoad.css:
--------------------------------------------------------------------------------
1 | #app {
2 | animation: app-show backwards 0.3s 0.3s ease-in-out;
3 | }
4 |
5 | @keyframes app-show {
6 | from {opacity: 0;}
7 | to {opacity: 1;}
8 | }
9 |
--------------------------------------------------------------------------------
/.vitepress/utils/buildHtmlHeading.ts:
--------------------------------------------------------------------------------
1 | export function buildHtmlHeading(headingType: "h1" | "h2" | "h3" | "h4" | "h5" | "h6", content: string, id?: string) {
2 |     return (
3 |         `<${headingType}${id != null ? ` id="${id}"` : ""}>` +
4 |         content +
5 |         `</${headingType}>\n`
6 |     );
7 | }
8 |
--------------------------------------------------------------------------------
/.vitepress/utils/buildHtmlTable.ts:
--------------------------------------------------------------------------------
1 | export function buildHtmlTable(header: string[], rows: string[][]) {
2 |     let res = "";
3 |
4 |     res += "<table>\n";
5 |
6 |     if (header.length > 0) {
7 |         res += "<thead>\n";
8 |         res += "<tr>\n";
9 |         for (const headerCell of header) {
10 |             res += "<th>" + headerCell + "</th>\n";
11 |         }
12 |         res += "</tr>\n";
13 |         res += "</thead>\n";
14 |     }
15 |
16 |     if (rows.length > 0) {
17 |         res += "<tbody>\n";
18 |
19 |         for (const row of rows) {
20 |             res += "<tr>\n";
21 |
22 |             for (const cell of row) {
23 |                 res += "<td>" + cell + "</td>\n";
24 |             }
25 |
26 |             res += "</tr>\n";
27 |         }
28 |
29 |         res += "</tbody>\n";
30 |     }
31 |
32 |     res += "</table>\n";
33 |
34 |     return res;
35 | }
36 |
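A small usage sketch (not part of the repository); the option names in the cells are made up:

```typescript
import {buildHtmlTable} from "./buildHtmlTable.js";

// One header row and one body row; each inner array is a row of cell HTML strings.
const html = buildHtmlTable(
    ["Option", "Description"],
    [["<code>--gpu</code>", "GPU backend to use"]]
);
// html contains a <table> with a <thead> holding the header cells
// and a <tbody> with one <tr> per row.
```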
--------------------------------------------------------------------------------
/.vitepress/utils/getInlineCodeBlockHtml.ts:
--------------------------------------------------------------------------------
1 | import {createMarkdownRenderer} from "vitepress";
2 | import {htmlEscape} from "./htmlEscape.js";
3 |
4 | export function getInlineCodeBlockHtml(
5 |     markdownRenderer: Awaited<ReturnType<typeof createMarkdownRenderer>>, code: string, lang: string, link?: string
6 | ) {
7 |     if (markdownRenderer.options.highlight != null) {
8 |         const codeBlock = markdownRenderer.options.highlight(code, lang, "");
9 |
10 |         if (link != null && link !== "")
11 |             return `<a href="${link}">${codeBlock}</a>`;
12 |
13 |         return `${codeBlock}`;
14 |     }
15 |
16 |     if (link != null && link !== "")
17 |         return `<a href="${link}"><code>${htmlEscape(code)}</code></a>`;
18 |
19 |     return `<code>${htmlEscape(code)}</code>`;
20 | }
21 |
--------------------------------------------------------------------------------
/.vitepress/utils/getMarkdownRenderer.ts:
--------------------------------------------------------------------------------
1 | import {createMarkdownRenderer} from "vitepress";
2 |
3 | const renderers = new Map<string, ReturnType<typeof createMarkdownRenderer>>();
4 | export function getMarkdownRenderer(path: string = process.cwd()): ReturnType<typeof createMarkdownRenderer> {
5 | if (!renderers.has(path))
6 | renderers.set(path, createMarkdownRenderer(path));
7 |
8 | return renderers.get(path)!;
9 | }
10 |
--------------------------------------------------------------------------------
/.vitepress/utils/htmlEscape.ts:
--------------------------------------------------------------------------------
1 | export function htmlEscape(string?: string | number | boolean) {
2 | if (typeof string === "number")
3 | return String(string);
4 | else if (typeof string === "boolean")
5 | return String(string);
6 |
7 | if (typeof string !== "string")
8 | return "";
9 |
10 |     return string
11 |         .replace(/&/g, "&amp;")
12 |         .replace(/"/g, "&quot;")
13 |         .replace(/'/g, "&#39;")
14 |         .replace(/</g, "&lt;")
15 |         .replace(/>/g, "&gt;");
16 | }
17 |
18 |
--------------------------------------------------------------------------------
/.vitepress/utils/htmlEscapeWithCodeMarkdown.ts:
--------------------------------------------------------------------------------
1 | import {htmlEscape} from "./htmlEscape.js";
2 |
3 | export function htmlEscapeWithCodeMarkdown(string?: string | number | boolean) {
4 | const escapedString = htmlEscape(string);
5 |
6 | let res = "";
7 | let backtickIndex = escapedString.indexOf("`");
8 | let textIndex = 0;
9 |
10 | while (backtickIndex >= 0 && backtickIndex < escapedString.length - 1 && textIndex < escapedString.length) {
11 | const nextBacktickIndex = escapedString.indexOf("`", backtickIndex + 1);
12 | if (nextBacktickIndex < 0)
13 | break;
14 |
15 |         res += escapedString.slice(textIndex, backtickIndex) + "<code>" + escapedString.slice(backtickIndex + 1, nextBacktickIndex) + "</code>";
16 | textIndex = nextBacktickIndex + 1;
17 |
18 | if (textIndex < escapedString.length)
19 | backtickIndex = escapedString.indexOf("`", textIndex);
20 | }
21 |
22 | res += escapedString.slice(textIndex);
23 |
24 | return res;
25 | }
26 |
27 |
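A small usage sketch (not part of the repository): the input is HTML-escaped first, then each pair of backticks is rendered as a `<code>` element:

```typescript
import {htmlEscapeWithCodeMarkdown} from "./htmlEscapeWithCodeMarkdown.js";

// Backtick spans become <code> elements; other characters go through htmlEscape.
htmlEscapeWithCodeMarkdown("Call `getLlama()` & pass options");
// → 'Call <code>getLlama()</code> &amp; pass options'
```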
--------------------------------------------------------------------------------
/.vitepress/utils/renderHtmlTag.ts:
--------------------------------------------------------------------------------
1 | export function renderHtmlTag(
2 | tagName: string,
3 |     attributes: Record<string, string | number | boolean | null | undefined>,
4 | htmlContent?: string
5 | ) {
6 | const renderedAttributes: string[] = [];
7 | for (const key of Object.keys(attributes)) {
8 | const value = attributes[key];
9 |         if (value === true || value == null)
10 |             renderedAttributes.push(key);
11 |         else if (value === false)
12 |             continue;
13 |         else
14 |             renderedAttributes.push(`${key}="${escapeAttributeValue(String(value))}"`);
15 | }
16 |
17 | const attributesString = renderedAttributes.length === 0
18 | ? ""
19 | : " " + renderedAttributes.join(" ");
20 |
21 | if (htmlContent == null)
22 | return `<${tagName}${attributesString} />`;
23 | else
24 |         return `<${tagName}${attributesString}>${htmlContent}</${tagName}>`;
25 | }
26 |
27 | function escapeAttributeValue(text: string) {
28 |     return text
29 |         .replace(/"/g, "&quot;")
30 |         .replace(/</g, "&lt;")
31 |         .replace(/>/g, "&gt;")
32 |         .replace(/&(?![\w#]+;)/g, "&amp;");
33 | }
34 |
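A small usage sketch (not part of the repository), showing how string, boolean, and absent values are expected to render:

```typescript
import {renderHtmlTag} from "./renderHtmlTag.js";

// String values render as key="value" (escaped); true/null render the bare
// attribute name; false drops the attribute entirely.
renderHtmlTag("a", {href: "https://example.com", target: "_blank"}, "docs");
// → '<a href="https://example.com" target="_blank">docs</a>'

renderHtmlTag("img", {src: "logo.png", hidden: true});
// → '<img src="logo.png" hidden />'
```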
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | # Moved [here](https://node-llama-cpp.withcat.ai/guide/contributing)
2 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2023 Gilad S.
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/assets/electronTemplate.icon.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/withcatai/node-llama-cpp/ea8d9046bb2b4c33b79c058b4a0f608f5452c0fb/assets/electronTemplate.icon.png
--------------------------------------------------------------------------------
/assets/icon.v3.svg:
--------------------------------------------------------------------------------
1 |
10 |
--------------------------------------------------------------------------------
/assets/logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/withcatai/node-llama-cpp/ea8d9046bb2b4c33b79c058b4a0f608f5452c0fb/assets/logo.png
--------------------------------------------------------------------------------
/assets/logo.roundEdges.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/withcatai/node-llama-cpp/ea8d9046bb2b4c33b79c058b4a0f608f5452c0fb/assets/logo.roundEdges.png
--------------------------------------------------------------------------------
/assets/logo.v3.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/withcatai/node-llama-cpp/ea8d9046bb2b4c33b79c058b4a0f608f5452c0fb/assets/logo.v3.png
--------------------------------------------------------------------------------
/assets/logo.v3.roundEdges.avif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/withcatai/node-llama-cpp/ea8d9046bb2b4c33b79c058b4a0f608f5452c0fb/assets/logo.v3.roundEdges.avif
--------------------------------------------------------------------------------
/assets/logo.v3.roundEdges.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/withcatai/node-llama-cpp/ea8d9046bb2b4c33b79c058b4a0f608f5452c0fb/assets/logo.v3.roundEdges.png
--------------------------------------------------------------------------------
/assets/social.poster.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/withcatai/node-llama-cpp/ea8d9046bb2b4c33b79c058b4a0f608f5452c0fb/assets/social.poster.png
--------------------------------------------------------------------------------
/assets/star.please.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/withcatai/node-llama-cpp/ea8d9046bb2b4c33b79c058b4a0f608f5452c0fb/assets/star.please.png
--------------------------------------------------------------------------------
/assets/star.please.roundEdges.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/withcatai/node-llama-cpp/ea8d9046bb2b4c33b79c058b4a0f608f5452c0fb/assets/star.please.roundEdges.png
--------------------------------------------------------------------------------
/docs/blog/index.md:
--------------------------------------------------------------------------------
1 | ---
2 | title: Blog
3 | description: node-llama-cpp blog
4 | editLink: false
5 | lastUpdated: false
6 | outline: false
7 | aside: false
8 | ---
9 |
16 |
17 |
22 |
23 |
24 |
32 |
33 |
--------------------------------------------------------------------------------
/docs/cli/chat.md:
--------------------------------------------------------------------------------
1 | ---
2 | outline: deep
3 | description: "'chat' command reference"
4 | ---
5 | # `chat` command
6 |
7 |
11 |
12 |
13 |
14 | ## Usage
15 |
16 |
17 |
--------------------------------------------------------------------------------
/docs/cli/complete.md:
--------------------------------------------------------------------------------
1 | ---
2 | outline: deep
3 | description: "'complete' command reference"
4 | ---
5 | # `complete` command
6 |
7 |
11 |
12 |
13 |
14 | ## Usage
15 |
16 |
17 |
--------------------------------------------------------------------------------
/docs/cli/index.md:
--------------------------------------------------------------------------------
1 | ---
2 | outline: deep
3 | description: CLI commands reference
4 | ---
5 | # CLI
6 |
7 |
11 |
12 |
13 |
14 | ## Usage
15 |
16 |
17 |
--------------------------------------------------------------------------------
/docs/cli/infill.md:
--------------------------------------------------------------------------------
1 | ---
2 | outline: deep
3 | description: "'infill' command reference"
4 | ---
5 | # `infill` command
6 |
7 |
11 |
12 |
13 |
14 | ## Usage
15 |
16 |
17 |
--------------------------------------------------------------------------------
/docs/cli/init.md:
--------------------------------------------------------------------------------
1 | ---
2 | outline: deep
3 | description: "'init' command reference"
4 | ---
5 | # `init` command
6 |
7 |
11 |
12 |
13 |
14 | ::: info
15 | This command is also available via:
16 | ```shell
17 | npm create node-llama-cpp@latest [name]
18 | ```
19 | :::
20 |
21 | ## Usage
22 |
23 |
24 |
--------------------------------------------------------------------------------
/docs/cli/inspect.md:
--------------------------------------------------------------------------------
1 | ---
2 | outline: deep
3 | description: "'inspect' command reference"
4 | ---
5 | # `inspect` command
6 |
7 |
11 |
12 |
13 |
14 | ## Usage
15 |
16 |
17 |
--------------------------------------------------------------------------------
/docs/cli/inspect/estimate.md:
--------------------------------------------------------------------------------
1 | ---
2 | outline: deep
3 | description: "'inspect estimate' command reference"
4 | ---
5 | # `inspect estimate` command
6 |
7 |
11 |
12 |
13 |
14 | ## Usage
15 |
16 |
17 |
--------------------------------------------------------------------------------
/docs/cli/inspect/gguf.md:
--------------------------------------------------------------------------------
1 | ---
2 | outline: deep
3 | description: "'inspect gguf' command reference"
4 | ---
5 | # `inspect gguf` command
6 |
7 |
11 |
12 |
13 |
14 | ## Usage
15 |
16 |
17 |
--------------------------------------------------------------------------------
/docs/cli/inspect/gpu.md:
--------------------------------------------------------------------------------
1 | ---
2 | outline: deep
3 | description: "'inspect gpu' command reference"
4 | ---
5 | # `inspect gpu` command
6 |
7 |
11 |
12 |
13 |
14 | ## Usage
15 |
16 |
17 |
--------------------------------------------------------------------------------
/docs/cli/inspect/measure.md:
--------------------------------------------------------------------------------
1 | ---
2 | outline: deep
3 | description: "'inspect measure' command reference"
4 | ---
5 | # `inspect measure` command
6 |
7 |
11 |
12 |
13 |
14 | ## Usage
15 |
16 |
17 |
--------------------------------------------------------------------------------
/docs/cli/source.md:
--------------------------------------------------------------------------------
1 | ---
2 | outline: deep
3 | description: "'source' command reference"
4 | ---
5 | # `source` command
6 |
7 |
11 |
12 |
13 |
14 | ## Usage
15 |
16 |
17 |
--------------------------------------------------------------------------------
/docs/cli/source/build.md:
--------------------------------------------------------------------------------
1 | ---
2 | outline: deep
3 | description: "'source build' command reference"
4 | ---
5 | # `source build` command
6 |
7 |
11 |
12 |
13 |
14 | ::: info
15 | If the build fails on macOS with the error `"/usr/bin/cc" is not able to compile a simple test program`, try running `xcode-select --install` to install the Xcode command line tools.
16 | :::
17 |
18 | ::: details Programmatically calling the `source build` command in your code
19 | To programmatically call this command in your code, call the `BuildLlamaCppCommand` function:
20 | ```typescript
21 | import {BuildLlamaCppCommand} from "node-llama-cpp/commands";
22 | await BuildLlamaCppCommand({});
23 | ```
24 | > **Note:** The `node-llama-cpp/commands` import is subject to change and is unsupported inside Electron
25 |
26 | :::
27 |
28 | ## Usage
29 |
30 |
31 |
32 |
33 | > To set custom cmake options that are supported by `llama.cpp`'s cmake build,
34 | > set an environment variable of the option prefixed with `NODE_LLAMA_CPP_CMAKE_OPTION_`.
35 |
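As a hedged illustration (not part of the repository docs), a build script could set such an option through the documented environment-variable prefix before invoking the programmatic build command shown above; `GGML_METAL` is used here only as an example of a `llama.cpp` cmake flag:

```typescript
// Sketch only: combines the NODE_LLAMA_CPP_CMAKE_OPTION_ prefix described above
// with the programmatic BuildLlamaCppCommand call from the earlier example.
import {BuildLlamaCppCommand} from "node-llama-cpp/commands";

// Equivalent in intent to passing -DGGML_METAL=OFF to llama.cpp's cmake build
// (GGML_METAL is just an example option name).
process.env.NODE_LLAMA_CPP_CMAKE_OPTION_GGML_METAL = "OFF";

await BuildLlamaCppCommand({});
```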
--------------------------------------------------------------------------------
/docs/cli/source/clear.md:
--------------------------------------------------------------------------------
1 | ---
2 | outline: deep
3 | description: "'source clear' command reference"
4 | ---
5 | # `source clear` command
6 |
7 |
11 |
12 |
13 |
14 | ::: details Programmatically calling the `source clear` command in your code
15 | To programmatically call this command in your code, call the `ClearLlamaCppBuildCommand` function:
16 | ```typescript
17 | import {ClearLlamaCppBuildCommand} from "node-llama-cpp/commands";
18 | await ClearLlamaCppBuildCommand({type: "all"});
19 | ```
20 | > **Note:** The `node-llama-cpp/commands` import is subject to change and is unsupported inside Electron
21 |
22 | :::
23 |
24 | ## Usage
25 |
26 |
27 |
--------------------------------------------------------------------------------
/docs/public/favicon.ico:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/withcatai/node-llama-cpp/ea8d9046bb2b4c33b79c058b4a0f608f5452c0fb/docs/public/favicon.ico
--------------------------------------------------------------------------------
/docs/public/favicon.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/withcatai/node-llama-cpp/ea8d9046bb2b4c33b79c058b4a0f608f5452c0fb/docs/public/favicon.png
--------------------------------------------------------------------------------
/docs/public/favicon.svg:
--------------------------------------------------------------------------------
1 |
10 |
--------------------------------------------------------------------------------
/docs/public/giscus/dark.css:
--------------------------------------------------------------------------------
1 | @import "./original/dark.css";
2 | @import "./style.css";
3 |
4 | main {
5 | --vp-c-bg: #1b1b1f;
6 | --vp-c-bg-alt: #161618;
7 | --vp-c-bg-elv: #202127;
8 | --vp-c-bg-soft: #202127;
9 |
10 | --vp-c-text-1: rgba(255, 255, 245, 0.86);
11 | --vp-c-text-2: rgba(235, 235, 245, 0.6);
12 | --vp-c-text-3: rgba(235, 235, 245, 0.38);
13 |
14 | --vp-c-border: #3c3f44;
15 | --vp-c-divider: #2e2e32;
16 | --vp-c-gutter: #000000;
17 |
18 | --vp-c-brand-1: #ffc7a8;
19 | --vp-c-brand-2: #e78e5c;
20 | --vp-c-brand-3: #dd773e;
21 | --vp-c-brand-soft: rgb(255 156 100 / 16%);
22 |
23 | --g-comment-bg: var(--vp-c-bg);
24 | --g-comment-bg-alt: var(--vp-c-bg-alt);
25 | --color-btn-primary-disabled-text: var(--vp-c-text-3);
26 | --color-btn-primary-disabled-bg: color-mix(in srgb, var(--vp-c-brand-3) 24%, transparent);
27 | }
28 |
--------------------------------------------------------------------------------
/docs/public/giscus/light.css:
--------------------------------------------------------------------------------
1 | @import "./original/light.css";
2 | @import "./style.css";
3 |
4 | main {
5 | --vp-c-bg: #ffffff;
6 | --vp-c-bg-alt: #f6f6f7;
7 | --vp-c-bg-elv: #ffffff;
8 | --vp-c-bg-soft: #f6f6f7;
9 |
10 | --vp-c-text-1: rgba(60, 60, 67);
11 | --vp-c-text-2: rgba(60, 60, 67, 0.78);
12 | --vp-c-text-3: rgba(60, 60, 67, 0.56);
13 |
14 | --vp-c-border: #c2c2c4;
15 | --vp-c-divider: #e2e2e3;
16 | --vp-c-gutter: #e2e2e3;
17 |
18 | --vp-c-brand-1: #b26134;
19 | --vp-c-brand-2: #cc6e3a;
20 | --vp-c-brand-3: #cd8156;
21 | --vp-c-brand-soft: rgb(255 156 100 / 14%);
22 |
23 | --g-comment-bg: var(--vp-c-bg-alt);
24 | --g-comment-bg-alt: var(--vp-c-bg);
25 | --color-btn-primary-disabled-text: var(--vp-c-bg);
26 | --color-btn-primary-disabled-bg: color-mix(in srgb, var(--vp-c-brand-3) 36%, transparent);
27 | }
28 |
--------------------------------------------------------------------------------
/docs/public/icon.svg:
--------------------------------------------------------------------------------
1 |
10 |
--------------------------------------------------------------------------------
/docs/public/logo.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/withcatai/node-llama-cpp/ea8d9046bb2b4c33b79c058b4a0f608f5452c0fb/docs/public/logo.jpg
--------------------------------------------------------------------------------
/docs/public/robots.txt:
--------------------------------------------------------------------------------
1 | User-agent: *
2 |
3 | Sitemap: https://node-llama-cpp.withcat.ai/sitemap.xml
4 |
--------------------------------------------------------------------------------
/giscus.json:
--------------------------------------------------------------------------------
1 | {
2 | "origins": [
3 | "https://node-llama-cpp.withcat.ai",
4 | "https://withcatai.github.io",
5 | "http://localhost:5173",
6 | "http://localhost:3000"
7 | ],
8 | "defaultCommentOrder": "oldest"
9 | }
10 |
--------------------------------------------------------------------------------
/llama/addon/AddonGrammar.h:
--------------------------------------------------------------------------------
1 | #pragma once
2 | #include "llama.h"
3 | #include "common/common.h"
4 | #include "llama-grammar.h"
5 | #include "unicode.h"
6 | #include "napi.h"
7 | #include "addonGlobals.h"
8 |
9 | class AddonGrammar : public Napi::ObjectWrap<AddonGrammar> {
10 | public:
11 | std::string grammarCode = "";
12 | std::string rootRuleName = "root";
13 |     Napi::Reference<Napi::Object> addonExportsRef;
14 | bool hasAddonExportsRef = false;
15 |
16 | AddonGrammar(const Napi::CallbackInfo& info);
17 | ~AddonGrammar();
18 |
19 | Napi::Value isTextCompatible(const Napi::CallbackInfo& info);
20 |
21 | static void init(Napi::Object exports);
22 | };
--------------------------------------------------------------------------------
/llama/addon/AddonGrammarEvaluationState.h:
--------------------------------------------------------------------------------
1 | #pragma once
2 | #include "llama.h"
3 | #include "napi.h"
4 | #include "addonGlobals.h"
5 | #include "AddonModel.h"
6 |
7 | class AddonGrammarEvaluationState : public Napi::ObjectWrap<AddonGrammarEvaluationState> {
8 | public:
9 | AddonModel* model;
10 | AddonGrammar* grammarDef;
11 | llama_sampler * sampler = nullptr;
12 |
13 | AddonGrammarEvaluationState(const Napi::CallbackInfo& info);
14 | ~AddonGrammarEvaluationState();
15 |
16 | static void init(Napi::Object exports);
17 | };
--------------------------------------------------------------------------------
/llama/addon/AddonModelData.cpp:
--------------------------------------------------------------------------------
1 | #include <set>
2 |
3 | #include "addonGlobals.h"
4 | #include "AddonModelData.h"
5 | #include "AddonModelLora.h"
6 |
7 | AddonModelData::AddonModelData() {
8 |
9 | }
10 | AddonModelData::~AddonModelData() {
11 |     std::set<AddonModelLora*> currentLoraAdapters;
12 | currentLoraAdapters.swap(loraAdapters);
13 |
14 | for (auto lora : currentLoraAdapters) {
15 | lora->dispose(true);
16 | }
17 | currentLoraAdapters.clear();
18 | }
19 |
20 | void AddonModelData::removeLora(AddonModelLora* lora) {
21 | auto pos = loraAdapters.find(lora);
22 | if (pos != loraAdapters.end()) {
23 | loraAdapters.erase(pos);
24 | }
25 | }
--------------------------------------------------------------------------------
/llama/addon/AddonModelData.h:
--------------------------------------------------------------------------------
1 | #pragma once
2 | #include <set>
3 | #include "llama.h"
4 | #include "napi.h"
5 | #include "addonGlobals.h"
6 |
7 | class AddonModelData {
8 | public:
9 |     std::set<AddonModelLora*> loraAdapters;
10 |
11 | AddonModelData();
12 | ~AddonModelData();
13 |
14 | void removeLora(AddonModelLora* lora);
15 | };
--------------------------------------------------------------------------------
/llama/addon/AddonModelLora.h:
--------------------------------------------------------------------------------
1 | #pragma once
2 | #include "llama.h"
3 | #include "napi.h"
4 | #include "addonGlobals.h"
5 |
6 | class AddonModelLora : public Napi::ObjectWrap<AddonModelLora> {
7 | public:
8 | AddonModel* model;
9 | llama_adapter_lora * lora_adapter;
10 | std::string loraFilePath;
11 | uint32_t usages = 0;
12 |
13 | AddonModelLora(const Napi::CallbackInfo& info);
14 | ~AddonModelLora();
15 |
16 | void dispose(bool skipErase = false);
17 |
18 | Napi::Value GetFilePath(const Napi::CallbackInfo& info);
19 |
20 | Napi::Value GetUsages(const Napi::CallbackInfo& info);
21 | void SetUsages(const Napi::CallbackInfo& info, const Napi::Value &value);
22 |
23 | Napi::Value GetDisposed(const Napi::CallbackInfo& info);
24 |
25 | Napi::Value Dispose(const Napi::CallbackInfo& info);
26 |
27 | static void init(Napi::Object exports);
28 | };
29 |
--------------------------------------------------------------------------------
/llama/addon/addonGlobals.cpp:
--------------------------------------------------------------------------------
1 | #include <algorithm>
2 | #include <limits>
3 | #include "addonGlobals.h"
4 | #include "napi.h"
5 |
6 | void adjustNapiExternalMemoryAdd(Napi::Env env, uint64_t size) {
7 |     const uint64_t chunkSize = std::numeric_limits<int64_t>::max();
8 | while (size > 0) {
9 | int64_t adjustSize = std::min(size, chunkSize);
10 | Napi::MemoryManagement::AdjustExternalMemory(env, adjustSize);
11 | size -= adjustSize;
12 | }
13 | }
14 |
15 | void adjustNapiExternalMemorySubtract(Napi::Env env, uint64_t size) {
16 |     const uint64_t chunkSize = std::numeric_limits<int64_t>::max();
17 | while (size > 0) {
18 | int64_t adjustSize = std::min(size, chunkSize);
19 | Napi::MemoryManagement::AdjustExternalMemory(env, -adjustSize);
20 | size -= adjustSize;
21 | }
22 | }
23 |
--------------------------------------------------------------------------------
/llama/addon/addonGlobals.h:
--------------------------------------------------------------------------------
1 | #pragma once
2 | #include "napi.h"
3 |
4 | class AddonModel;
5 | class AddonModelLora;
6 | class AddonModelData;
7 | class AddonContext;
8 | class AddonGrammar;
9 | class AddonGrammarEvaluationState;
10 |
11 | void adjustNapiExternalMemoryAdd(Napi::Env env, uint64_t size);
12 | void adjustNapiExternalMemorySubtract(Napi::Env env, uint64_t size);
13 |
--------------------------------------------------------------------------------
/llama/addon/globals/addonLog.h:
--------------------------------------------------------------------------------
1 | #pragma once
2 | #include "llama.h"
3 | #include "napi.h"
4 |
5 | struct addon_logger_log {
6 | public:
7 | const int logLevelNumber;
8 | const std::stringstream* stringStream;
9 | };
10 |
11 | void addonLlamaCppLogCallback(ggml_log_level level, const char* text, void* user_data);
12 |
13 | using AddonThreadSafeLogCallbackFunctionContext = Napi::Reference<Napi::Value>;
14 | void addonCallJsLogCallback(
15 | Napi::Env env, Napi::Function callback, AddonThreadSafeLogCallbackFunctionContext* context, addon_logger_log* data
16 | );
17 | using AddonThreadSafeLogCallbackFunction =
18 |     Napi::TypedThreadSafeFunction<AddonThreadSafeLogCallbackFunctionContext, addon_logger_log, addonCallJsLogCallback>;
19 |
20 | Napi::Value setLogger(const Napi::CallbackInfo& info);
21 | Napi::Value setLoggerLogLevel(const Napi::CallbackInfo& info);
22 |
--------------------------------------------------------------------------------
/llama/addon/globals/addonProgress.cpp:
--------------------------------------------------------------------------------
1 | #include "addonProgress.h"
2 |
3 | void addonCallJsProgressCallback(
4 | Napi::Env env, Napi::Function callback, AddonThreadSafeProgressCallbackFunctionContext* context, addon_progress_event* data
5 | ) {
6 | if (env != nullptr && callback != nullptr) {
7 | try {
8 | callback.Call({Napi::Number::New(env, data->progress)});
9 | } catch (const Napi::Error& e) {}
10 | }
11 |
12 | if (data != nullptr) {
13 | delete data;
14 | }
15 | }
16 |
--------------------------------------------------------------------------------
/llama/addon/globals/addonProgress.h:
--------------------------------------------------------------------------------
1 | #pragma once
2 | #include "napi.h"
3 |
4 | struct addon_progress_event {
5 | public:
6 | const float progress;
7 | };
8 |
9 | using AddonThreadSafeProgressCallbackFunctionContext = Napi::Reference<Napi::Value>;
10 | void addonCallJsProgressCallback(
11 | Napi::Env env, Napi::Function callback, AddonThreadSafeProgressCallbackFunctionContext* context, addon_progress_event* data
12 | );
13 | using AddonThreadSafeProgressEventCallbackFunction =
14 |     Napi::TypedThreadSafeFunction<AddonThreadSafeProgressCallbackFunctionContext, addon_progress_event, addonCallJsProgressCallback>;
15 |
16 |
--------------------------------------------------------------------------------
/llama/addon/globals/getGpuInfo.h:
--------------------------------------------------------------------------------
1 | #pragma once
2 | #include
3 | #include
4 | #include "napi.h"
5 | #include "llama.h"
6 |
7 | Napi::Value getGpuVramInfo(const Napi::CallbackInfo& info);
8 | Napi::Value getGpuDeviceInfo(const Napi::CallbackInfo& info);
9 | std::pair getGpuDevice();
10 | Napi::Value getGpuType(const Napi::CallbackInfo& info);
11 | Napi::Value ensureGpuDeviceIsSupported(const Napi::CallbackInfo& info);
12 |
--------------------------------------------------------------------------------
/llama/addon/globals/getMemoryInfo.h:
--------------------------------------------------------------------------------
1 | #pragma once
2 | #include "napi.h"
3 |
4 | Napi::Value getMemoryInfo(const Napi::CallbackInfo& info);
5 |
--------------------------------------------------------------------------------
/llama/addon/globals/getSwapInfo.h:
--------------------------------------------------------------------------------
1 | #pragma once
2 | #include "napi.h"
3 |
4 | Napi::Value getSwapInfo(const Napi::CallbackInfo& info);
5 |
--------------------------------------------------------------------------------
/llama/binariesGithubRelease.json:
--------------------------------------------------------------------------------
1 | {
2 | "release": "latest"
3 | }
4 |
--------------------------------------------------------------------------------
/llama/cmake/win32.llvmApplyGnuModeAdaptations.cmake:
--------------------------------------------------------------------------------
1 | function(llvmApplyGnuModeAdaptations)
2 | # adapt cmake-js to work with llvm in GNU mode
3 | if (NOT CMAKE_SHARED_LINKER_FLAGS MATCHES "-Xlinker /DELAYLOAD:NODE.EXE")
4 | string(REPLACE "/DELAYLOAD:NODE.EXE" "-Xlinker /DELAYLOAD:NODE.EXE -Xlinker /defaultlib:delayimp"
5 | UPDATED_CMAKE_SHARED_LINKER_FLAGS
6 | "${CMAKE_SHARED_LINKER_FLAGS}")
7 | set(CMAKE_SHARED_LINKER_FLAGS "${UPDATED_CMAKE_SHARED_LINKER_FLAGS}" PARENT_SCOPE)
8 | endif()
9 |
10 | set(CMAKE_C_FLAGS_RELEASE "${CMAKE_C_FLAGS_RELEASE} -Xclang --dependent-lib=msvcrt" PARENT_SCOPE)
11 | set(CMAKE_CXX_FLAGS_RELEASE "${CMAKE_CXX_FLAGS_RELEASE} -Xclang --dependent-lib=msvcrt" PARENT_SCOPE)
12 | endfunction()
13 |
--------------------------------------------------------------------------------
/llama/gpuInfo/vulkan-gpu-info.h:
--------------------------------------------------------------------------------
1 | #pragma once
2 |
 3 | #include <cstddef>
 4 | #include <vulkan/vulkan.hpp>
5 |
6 | typedef void (*gpuInfoVulkanWarningLogCallback_t)(const char* message);
7 |
8 | bool gpuInfoGetTotalVulkanDevicesInfo(size_t* total, size_t* used, size_t* unifiedMemorySize, gpuInfoVulkanWarningLogCallback_t warningLogCallback);
9 | bool checkIsVulkanEnvSupported(gpuInfoVulkanWarningLogCallback_t warningLogCallback);
10 |
--------------------------------------------------------------------------------
/llama/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "binary": {
3 | "napi_versions": [7]
4 | }
5 | }
6 |
--------------------------------------------------------------------------------
/llama/profiles/llvm.win32.host-arm64.target-arm64.cmake:
--------------------------------------------------------------------------------
1 | include("${CMAKE_CURRENT_LIST_DIR}/../cmake/win32.programFilesPaths.cmake")
2 | setProgramFilesPaths("arm64")
3 |
4 | include("${CMAKE_CURRENT_LIST_DIR}/../cmake/win32.ensureNodeLib.cmake")
5 | ensureNodeLib("arm64" "arm64")
6 |
7 | include("${CMAKE_CURRENT_LIST_DIR}/../cmake/win32.llvmApplyGnuModeAdaptations.cmake")
8 | llvmApplyGnuModeAdaptations()
9 |
10 | include("${CMAKE_CURRENT_LIST_DIR}/../cmake/win32.llvmEnsureCmakeAr.cmake")
11 | llvmEnsureCmakeAr("arm64")
12 |
13 | include("${CMAKE_CURRENT_LIST_DIR}/../cmake/win32.ensureNinjaPath.cmake")
14 | ensureNinjaPath()
15 |
--------------------------------------------------------------------------------
/llama/profiles/llvm.win32.host-x64.target-arm64.cmake:
--------------------------------------------------------------------------------
1 | include("${CMAKE_CURRENT_LIST_DIR}/../cmake/win32.programFilesPaths.cmake")
2 | setProgramFilesPaths("x64")
3 |
4 | include("${CMAKE_CURRENT_LIST_DIR}/../cmake/win32.ensureNodeLib.cmake")
5 | ensureNodeLib("x64" "arm64")
6 |
7 | include("${CMAKE_CURRENT_LIST_DIR}/../cmake/win32.llvmApplyGnuModeAdaptations.cmake")
8 | llvmApplyGnuModeAdaptations()
9 |
10 | include("${CMAKE_CURRENT_LIST_DIR}/../cmake/win32.llvmEnsureCmakeAr.cmake")
11 | llvmEnsureCmakeAr("x64")
12 |
13 | include("${CMAKE_CURRENT_LIST_DIR}/../cmake/win32.ensureNinjaPath.cmake")
14 | ensureNinjaPath()
15 |
--------------------------------------------------------------------------------
/llama/profiles/llvm.win32.host-x64.target-x64.cmake:
--------------------------------------------------------------------------------
1 | include("${CMAKE_CURRENT_LIST_DIR}/../cmake/win32.programFilesPaths.cmake")
2 | setProgramFilesPaths("x64")
3 |
4 | include("${CMAKE_CURRENT_LIST_DIR}/../cmake/win32.ensureNodeLib.cmake")
5 | ensureNodeLib("x64" "x64")
6 |
7 | include("${CMAKE_CURRENT_LIST_DIR}/../cmake/win32.llvmApplyGnuModeAdaptations.cmake")
8 | llvmApplyGnuModeAdaptations()
9 |
10 | include("${CMAKE_CURRENT_LIST_DIR}/../cmake/win32.llvmEnsureCmakeAr.cmake")
11 | llvmEnsureCmakeAr("x64")
12 |
13 | include("${CMAKE_CURRENT_LIST_DIR}/../cmake/win32.ensureNinjaPath.cmake")
14 | ensureNinjaPath()
15 |
--------------------------------------------------------------------------------
/llama/toolchains/darwin.host-x64.target-arm64.cmake:
--------------------------------------------------------------------------------
1 | set(CMAKE_SYSTEM_NAME Darwin) # macOS
2 | set(CMAKE_SYSTEM_PROCESSOR arm64)
3 |
4 | set(CMAKE_C_COMPILER clang)
5 | set(CMAKE_CXX_COMPILER clang++)
6 |
7 | set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -arch arm64")
8 | set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -arch arm64")
9 |
--------------------------------------------------------------------------------
/llama/toolchains/linux.host-arm64.target-x64.cmake:
--------------------------------------------------------------------------------
1 | set(CMAKE_SYSTEM_NAME Linux)
2 | set(CMAKE_SYSTEM_PROCESSOR x86_64)
3 |
4 | set(CMAKE_C_COMPILER x86_64-linux-gnu-gcc)
5 | set(CMAKE_CXX_COMPILER x86_64-linux-gnu-g++)
6 |
--------------------------------------------------------------------------------
/llama/toolchains/linux.host-x64.target-arm64.cmake:
--------------------------------------------------------------------------------
1 | set(CMAKE_SYSTEM_NAME Linux)
2 | set(CMAKE_SYSTEM_PROCESSOR aarch64)
3 |
4 | set(CMAKE_C_COMPILER aarch64-linux-gnu-gcc)
5 | set(CMAKE_CXX_COMPILER aarch64-linux-gnu-g++)
6 |
--------------------------------------------------------------------------------
/llama/toolchains/linux.host-x64.target-armv7l.cmake:
--------------------------------------------------------------------------------
1 | set(CMAKE_SYSTEM_NAME Linux)
2 | set(CMAKE_SYSTEM_PROCESSOR arm)
3 |
4 | set(CMAKE_C_COMPILER arm-linux-gnueabihf-gcc)
5 | set(CMAKE_CXX_COMPILER arm-linux-gnueabihf-g++)
6 |
--------------------------------------------------------------------------------
/llama/toolchains/llvm.win32.host-x64.target-x64.cmake:
--------------------------------------------------------------------------------
1 | set(CMAKE_SYSTEM_NAME Windows)
2 | set(CMAKE_SYSTEM_PROCESSOR x86_64)
3 |
4 | set(target x86_64-pc-windows-msvc)
5 | set(CMAKE_C_COMPILER_TARGET ${target})
6 | set(CMAKE_CXX_COMPILER_TARGET ${target})
7 |
8 | include("${CMAKE_CURRENT_LIST_DIR}/../cmake/win32.programFilesPaths.cmake")
9 | setProgramFilesPaths("x64")
10 |
11 | include("${CMAKE_CURRENT_LIST_DIR}/../cmake/win32.llvmUseGnuModeCompilers.cmake")
12 | llvmUseGnuModeCompilers("x64")
13 |
14 | include("${CMAKE_CURRENT_LIST_DIR}/../cmake/win32.ensureNinjaPath.cmake")
15 | ensureNinjaPath()
16 |
17 | set(arch_c_flags "-march=native")
18 |
19 | set(CMAKE_C_FLAGS_INIT "${arch_c_flags}")
20 | set(CMAKE_CXX_FLAGS_INIT "${arch_c_flags}")
21 |
--------------------------------------------------------------------------------
/llama/toolchains/win32.host-arm64.target-arm64.cmake:
--------------------------------------------------------------------------------
1 | set(CMAKE_SYSTEM_NAME Windows)
2 | set(CMAKE_SYSTEM_PROCESSOR arm64)
3 |
4 | set(target arm64-pc-windows-msvc)
5 | set(CMAKE_C_COMPILER_TARGET ${target})
6 | set(CMAKE_CXX_COMPILER_TARGET ${target})
7 |
8 | include("${CMAKE_CURRENT_LIST_DIR}/../cmake/win32.programFilesPaths.cmake")
9 | setProgramFilesPaths("arm64")
10 |
11 | include("${CMAKE_CURRENT_LIST_DIR}/../cmake/win32.llvmUseGnuModeCompilers.cmake")
12 | llvmUseGnuModeCompilers("arm64")
13 |
14 | include("${CMAKE_CURRENT_LIST_DIR}/../cmake/win32.ensureNinjaPath.cmake")
15 | ensureNinjaPath()
16 |
17 | set(arch_c_flags "-march=armv8.7-a -fvectorize -ffp-model=fast -fno-finite-math-only")
18 | set(warn_c_flags "-Wno-format -Wno-unused-variable -Wno-unused-function -Wno-gnu-zero-variadic-macro-arguments")
19 |
20 | set(CMAKE_C_FLAGS_INIT "${arch_c_flags} ${warn_c_flags}")
21 | set(CMAKE_CXX_FLAGS_INIT "${arch_c_flags} ${warn_c_flags}")
22 |
--------------------------------------------------------------------------------
/llama/toolchains/win32.host-x64.target-arm64.cmake:
--------------------------------------------------------------------------------
1 | set(CMAKE_SYSTEM_NAME Windows)
2 | set(CMAKE_SYSTEM_PROCESSOR arm64)
3 |
4 | set(target arm64-pc-windows-msvc)
5 | set(CMAKE_C_COMPILER_TARGET ${target})
6 | set(CMAKE_CXX_COMPILER_TARGET ${target})
7 |
8 | include("${CMAKE_CURRENT_LIST_DIR}/../cmake/win32.programFilesPaths.cmake")
9 | setProgramFilesPaths("x64")
10 |
11 | include("${CMAKE_CURRENT_LIST_DIR}/../cmake/win32.llvmUseGnuModeCompilers.cmake")
12 | llvmUseGnuModeCompilers("x64")
13 |
14 | include("${CMAKE_CURRENT_LIST_DIR}/../cmake/win32.ensureNinjaPath.cmake")
15 | ensureNinjaPath()
16 |
17 | set(arch_c_flags "-march=armv8.7-a -fvectorize -ffp-model=fast -fno-finite-math-only")
18 | set(warn_c_flags "-Wno-format -Wno-unused-variable -Wno-unused-function -Wno-gnu-zero-variadic-macro-arguments")
19 |
20 | set(CMAKE_C_FLAGS_INIT "${arch_c_flags} ${warn_c_flags}")
21 | set(CMAKE_CXX_FLAGS_INIT "${arch_c_flags} ${warn_c_flags}")
22 |
--------------------------------------------------------------------------------
/llama/xpack/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "xpack": {
3 | "minimumXpmRequired": "0.16.3",
4 | "dependencies": {},
5 | "devDependencies": {},
6 | "properties": {},
7 | "actions": {},
8 | "buildConfigurations": {}
9 | }
10 | }
11 |
--------------------------------------------------------------------------------
/packages/@node-llama-cpp/linux-arm64/.gitignore:
--------------------------------------------------------------------------------
1 | /dist
2 |
--------------------------------------------------------------------------------
/packages/@node-llama-cpp/linux-arm64/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2024 Gilad S.
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/packages/@node-llama-cpp/linux-arm64/README.md:
--------------------------------------------------------------------------------
1 | # [`node-llama-cpp`](https://github.com/withcatai/node-llama-cpp)
2 | This is a prebuilt binary package for [`node-llama-cpp`](https://github.com/withcatai/node-llama-cpp) for Linux arm64.
3 |
4 | Do not install this package directly.
5 |
--------------------------------------------------------------------------------
/packages/@node-llama-cpp/linux-arm64/package-lock.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "@node-llama-cpp/linux-arm64",
3 | "version": "0.1.0",
4 | "lockfileVersion": 3,
5 | "requires": true,
6 | "packages": {
7 | "": {
8 | "name": "@node-llama-cpp/linux-arm64",
9 | "version": "0.1.0",
10 | "cpu": [
11 | "arm64",
12 | "x64"
13 | ],
14 | "license": "MIT",
15 | "os": [
16 | "linux"
17 | ],
18 | "devDependencies": {
19 | "typescript": "^5.2.2"
20 | },
21 | "engines": {
22 | "node": ">=20.0.0"
23 | }
24 | },
25 | "node_modules/typescript": {
26 | "version": "5.5.2",
27 | "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.5.2.tgz",
28 | "integrity": "sha512-NcRtPEOsPFFWjobJEtfihkLCZCXZt/os3zf8nTxjVH3RvTSxjrCamJpbExGvYOF+tFHc3pA65qpdwPbzjohhew==",
29 | "dev": true,
30 | "bin": {
31 | "tsc": "bin/tsc",
32 | "tsserver": "bin/tsserver"
33 | },
34 | "engines": {
35 | "node": ">=14.17"
36 | }
37 | }
38 | }
39 | }
40 |
--------------------------------------------------------------------------------
/packages/@node-llama-cpp/linux-arm64/src/index.ts:
--------------------------------------------------------------------------------
1 | import path from "path";
2 | import {fileURLToPath} from "url";
3 | import fs from "node:fs/promises";
4 |
5 | const __dirname = path.dirname(fileURLToPath(import.meta.url));
6 | const binsDir = path.join(__dirname, "..", "bins");
7 | const packageVersion: string = (JSON.parse(await fs.readFile(path.join(__dirname, "..", "package.json"), "utf8"))).version;
8 |
9 | export function getBinsDir() {
10 | return {
11 | binsDir,
12 | packageVersion
13 | };
14 | }
15 |
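
Each platform package under packages/@node-llama-cpp/* ships this same tiny src/index.ts: it resolves the package-local `bins` directory and reports the package version. A hedged sketch of how a consumer could select and load the matching platform package at runtime is shown below; the mapping and fallback logic are illustrative assumptions, not the library's actual resolution code.

    // Illustrative platform-package resolution (not the library's real logic).
    const platformPackages: Record<string, string> = {
        "linux-arm64": "@node-llama-cpp/linux-arm64",
        "linux-arm": "@node-llama-cpp/linux-armv7l",
        "linux-x64": "@node-llama-cpp/linux-x64",
        "darwin-arm64": "@node-llama-cpp/mac-arm64-metal",
        "darwin-x64": "@node-llama-cpp/mac-x64",
        "win32-arm64": "@node-llama-cpp/win-arm64",
        "win32-x64": "@node-llama-cpp/win-x64"
    };

    export async function resolvePrebuiltBinsDir(): Promise<{binsDir: string, packageVersion: string} | null> {
        const packageName = platformPackages[`${process.platform}-${process.arch}`];
        if (packageName == null)
            return null; // no prebuilt package for this platform; build from source instead

        try {
            const binaryPackage = await import(packageName);
            return binaryPackage.getBinsDir(); // the export defined in src/index.ts above
        } catch {
            return null; // optional dependency not installed
        }
    }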
--------------------------------------------------------------------------------
/packages/@node-llama-cpp/linux-arm64/tsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 | "compilerOptions": {
3 | "lib": ["es2022"],
4 | "module": "es2022",
5 | "target": "es2022",
6 | "esModuleInterop": true,
7 | "noImplicitAny": true,
8 | "noImplicitReturns": true,
9 | "noImplicitThis": true,
10 | "noImplicitOverride": true,
11 | "removeComments": false,
12 | "allowSyntheticDefaultImports": true,
13 | "forceConsistentCasingInFileNames": true,
14 | "noFallthroughCasesInSwitch": true,
15 | "skipLibCheck": true,
16 | "moduleResolution": "node",
17 | "resolveJsonModule": false,
18 | "strictNullChecks": true,
19 | "isolatedModules": true,
20 | "noEmit": false,
21 | "outDir": "./dist",
22 | "strict": true,
23 | "sourceMap": false,
24 | "composite": false,
25 | "declaration": false,
26 | "stripInternal": true
27 | },
28 | "files": [
29 | "./src/index.ts"
30 | ],
31 | "include": [
32 | "./src"
33 | ]
34 | }
35 |
--------------------------------------------------------------------------------
/packages/@node-llama-cpp/linux-armv7l/.gitignore:
--------------------------------------------------------------------------------
1 | /dist
2 |
--------------------------------------------------------------------------------
/packages/@node-llama-cpp/linux-armv7l/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2024 Gilad S.
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/packages/@node-llama-cpp/linux-armv7l/README.md:
--------------------------------------------------------------------------------
1 | # [`node-llama-cpp`](https://github.com/withcatai/node-llama-cpp)
2 | This is a prebuilt binary package for [`node-llama-cpp`](https://github.com/withcatai/node-llama-cpp) for Linux armv7l.
3 |
4 | Do not install this package directly.
5 |
--------------------------------------------------------------------------------
/packages/@node-llama-cpp/linux-armv7l/package-lock.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "@node-llama-cpp/linux-armv7l",
3 | "version": "0.1.0",
4 | "lockfileVersion": 3,
5 | "requires": true,
6 | "packages": {
7 | "": {
8 | "name": "@node-llama-cpp/linux-armv7l",
9 | "version": "0.1.0",
10 | "cpu": [
11 | "arm",
12 | "x64"
13 | ],
14 | "license": "MIT",
15 | "os": [
16 | "linux"
17 | ],
18 | "devDependencies": {
19 | "typescript": "^5.2.2"
20 | },
21 | "engines": {
22 | "node": ">=20.0.0"
23 | }
24 | },
25 | "node_modules/typescript": {
26 | "version": "5.5.2",
27 | "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.5.2.tgz",
28 | "integrity": "sha512-NcRtPEOsPFFWjobJEtfihkLCZCXZt/os3zf8nTxjVH3RvTSxjrCamJpbExGvYOF+tFHc3pA65qpdwPbzjohhew==",
29 | "dev": true,
30 | "bin": {
31 | "tsc": "bin/tsc",
32 | "tsserver": "bin/tsserver"
33 | },
34 | "engines": {
35 | "node": ">=14.17"
36 | }
37 | }
38 | }
39 | }
40 |
--------------------------------------------------------------------------------
/packages/@node-llama-cpp/linux-armv7l/src/index.ts:
--------------------------------------------------------------------------------
1 | import path from "path";
2 | import {fileURLToPath} from "url";
3 | import fs from "node:fs/promises";
4 |
5 | const __dirname = path.dirname(fileURLToPath(import.meta.url));
6 | const binsDir = path.join(__dirname, "..", "bins");
7 | const packageVersion: string = (JSON.parse(await fs.readFile(path.join(__dirname, "..", "package.json"), "utf8"))).version;
8 |
9 | export function getBinsDir() {
10 | return {
11 | binsDir,
12 | packageVersion
13 | };
14 | }
15 |
--------------------------------------------------------------------------------
/packages/@node-llama-cpp/linux-armv7l/tsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 | "compilerOptions": {
3 | "lib": ["es2022"],
4 | "module": "es2022",
5 | "target": "es2022",
6 | "esModuleInterop": true,
7 | "noImplicitAny": true,
8 | "noImplicitReturns": true,
9 | "noImplicitThis": true,
10 | "noImplicitOverride": true,
11 | "removeComments": false,
12 | "allowSyntheticDefaultImports": true,
13 | "forceConsistentCasingInFileNames": true,
14 | "noFallthroughCasesInSwitch": true,
15 | "skipLibCheck": true,
16 | "moduleResolution": "node",
17 | "resolveJsonModule": false,
18 | "strictNullChecks": true,
19 | "isolatedModules": true,
20 | "noEmit": false,
21 | "outDir": "./dist",
22 | "strict": true,
23 | "sourceMap": false,
24 | "composite": false,
25 | "declaration": false,
26 | "stripInternal": true
27 | },
28 | "files": [
29 | "./src/index.ts"
30 | ],
31 | "include": [
32 | "./src"
33 | ]
34 | }
35 |
--------------------------------------------------------------------------------
/packages/@node-llama-cpp/linux-x64-cuda/.gitignore:
--------------------------------------------------------------------------------
1 | /dist
2 |
--------------------------------------------------------------------------------
/packages/@node-llama-cpp/linux-x64-cuda/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2024 Gilad S.
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/packages/@node-llama-cpp/linux-x64-cuda/README.md:
--------------------------------------------------------------------------------
1 | # [`node-llama-cpp`](https://github.com/withcatai/node-llama-cpp)
2 | This is a prebuilt binary package for [`node-llama-cpp`](https://github.com/withcatai/node-llama-cpp) for Linux x64 with CUDA support.
3 |
4 | Do not install this package directly.
5 |
--------------------------------------------------------------------------------
/packages/@node-llama-cpp/linux-x64-cuda/package-lock.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "@node-llama-cpp/linux-x64-cuda",
3 | "version": "0.1.0",
4 | "lockfileVersion": 3,
5 | "requires": true,
6 | "packages": {
7 | "": {
8 | "name": "@node-llama-cpp/linux-x64-cuda",
9 | "version": "0.1.0",
10 | "cpu": [
11 | "x64"
12 | ],
13 | "license": "MIT",
14 | "os": [
15 | "linux"
16 | ],
17 | "devDependencies": {
18 | "typescript": "^5.2.2"
19 | },
20 | "engines": {
21 | "node": ">=20.0.0"
22 | }
23 | },
24 | "node_modules/typescript": {
25 | "version": "5.5.2",
26 | "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.5.2.tgz",
27 | "integrity": "sha512-NcRtPEOsPFFWjobJEtfihkLCZCXZt/os3zf8nTxjVH3RvTSxjrCamJpbExGvYOF+tFHc3pA65qpdwPbzjohhew==",
28 | "dev": true,
29 | "bin": {
30 | "tsc": "bin/tsc",
31 | "tsserver": "bin/tsserver"
32 | },
33 | "engines": {
34 | "node": ">=14.17"
35 | }
36 | }
37 | }
38 | }
39 |
--------------------------------------------------------------------------------
/packages/@node-llama-cpp/linux-x64-cuda/src/index.ts:
--------------------------------------------------------------------------------
1 | import path from "path";
2 | import {fileURLToPath} from "url";
3 | import fs from "node:fs/promises";
4 |
5 | const __dirname = path.dirname(fileURLToPath(import.meta.url));
6 | const binsDir = path.join(__dirname, "..", "bins");
7 | const packageVersion: string = (JSON.parse(await fs.readFile(path.join(__dirname, "..", "package.json"), "utf8"))).version;
8 |
9 | export function getBinsDir() {
10 | return {
11 | binsDir,
12 | packageVersion
13 | };
14 | }
15 |
--------------------------------------------------------------------------------
/packages/@node-llama-cpp/linux-x64-cuda/tsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 | "compilerOptions": {
3 | "lib": ["es2022"],
4 | "module": "es2022",
5 | "target": "es2022",
6 | "esModuleInterop": true,
7 | "noImplicitAny": true,
8 | "noImplicitReturns": true,
9 | "noImplicitThis": true,
10 | "noImplicitOverride": true,
11 | "removeComments": false,
12 | "allowSyntheticDefaultImports": true,
13 | "forceConsistentCasingInFileNames": true,
14 | "noFallthroughCasesInSwitch": true,
15 | "skipLibCheck": true,
16 | "moduleResolution": "node",
17 | "resolveJsonModule": false,
18 | "strictNullChecks": true,
19 | "isolatedModules": true,
20 | "noEmit": false,
21 | "outDir": "./dist",
22 | "strict": true,
23 | "sourceMap": false,
24 | "composite": false,
25 | "declaration": false,
26 | "stripInternal": true
27 | },
28 | "files": [
29 | "./src/index.ts"
30 | ],
31 | "include": [
32 | "./src"
33 | ]
34 | }
35 |
--------------------------------------------------------------------------------
/packages/@node-llama-cpp/linux-x64-vulkan/.gitignore:
--------------------------------------------------------------------------------
1 | /dist
2 |
--------------------------------------------------------------------------------
/packages/@node-llama-cpp/linux-x64-vulkan/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2024 Gilad S.
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/packages/@node-llama-cpp/linux-x64-vulkan/README.md:
--------------------------------------------------------------------------------
1 | # [`node-llama-cpp`](https://github.com/withcatai/node-llama-cpp)
2 | This is a prebuilt binary package for [`node-llama-cpp`](https://github.com/withcatai/node-llama-cpp) for Linux x64 with Vulkan support.
3 |
4 | Do not install this package directly.
5 |
--------------------------------------------------------------------------------
/packages/@node-llama-cpp/linux-x64-vulkan/package-lock.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "@node-llama-cpp/linux-x64-vulkan",
3 | "version": "0.1.0",
4 | "lockfileVersion": 3,
5 | "requires": true,
6 | "packages": {
7 | "": {
8 | "name": "@node-llama-cpp/linux-x64-vulkan",
9 | "version": "0.1.0",
10 | "cpu": [
11 | "x64"
12 | ],
13 | "license": "MIT",
14 | "os": [
15 | "linux"
16 | ],
17 | "devDependencies": {
18 | "typescript": "^5.2.2"
19 | },
20 | "engines": {
21 | "node": ">=20.0.0"
22 | }
23 | },
24 | "node_modules/typescript": {
25 | "version": "5.5.2",
26 | "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.5.2.tgz",
27 | "integrity": "sha512-NcRtPEOsPFFWjobJEtfihkLCZCXZt/os3zf8nTxjVH3RvTSxjrCamJpbExGvYOF+tFHc3pA65qpdwPbzjohhew==",
28 | "dev": true,
29 | "bin": {
30 | "tsc": "bin/tsc",
31 | "tsserver": "bin/tsserver"
32 | },
33 | "engines": {
34 | "node": ">=14.17"
35 | }
36 | }
37 | }
38 | }
39 |
--------------------------------------------------------------------------------
/packages/@node-llama-cpp/linux-x64-vulkan/src/index.ts:
--------------------------------------------------------------------------------
1 | import path from "path";
2 | import {fileURLToPath} from "url";
3 | import fs from "node:fs/promises";
4 |
5 | const __dirname = path.dirname(fileURLToPath(import.meta.url));
6 | const binsDir = path.join(__dirname, "..", "bins");
7 | const packageVersion: string = (JSON.parse(await fs.readFile(path.join(__dirname, "..", "package.json"), "utf8"))).version;
8 |
9 | export function getBinsDir() {
10 | return {
11 | binsDir,
12 | packageVersion
13 | };
14 | }
15 |
--------------------------------------------------------------------------------
/packages/@node-llama-cpp/linux-x64-vulkan/tsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 | "compilerOptions": {
3 | "lib": ["es2022"],
4 | "module": "es2022",
5 | "target": "es2022",
6 | "esModuleInterop": true,
7 | "noImplicitAny": true,
8 | "noImplicitReturns": true,
9 | "noImplicitThis": true,
10 | "noImplicitOverride": true,
11 | "removeComments": false,
12 | "allowSyntheticDefaultImports": true,
13 | "forceConsistentCasingInFileNames": true,
14 | "noFallthroughCasesInSwitch": true,
15 | "skipLibCheck": true,
16 | "moduleResolution": "node",
17 | "resolveJsonModule": false,
18 | "strictNullChecks": true,
19 | "isolatedModules": true,
20 | "noEmit": false,
21 | "outDir": "./dist",
22 | "strict": true,
23 | "sourceMap": false,
24 | "composite": false,
25 | "declaration": false,
26 | "stripInternal": true
27 | },
28 | "files": [
29 | "./src/index.ts"
30 | ],
31 | "include": [
32 | "./src"
33 | ]
34 | }
35 |
--------------------------------------------------------------------------------
/packages/@node-llama-cpp/linux-x64/.gitignore:
--------------------------------------------------------------------------------
1 | /dist
2 |
--------------------------------------------------------------------------------
/packages/@node-llama-cpp/linux-x64/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2024 Gilad S.
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/packages/@node-llama-cpp/linux-x64/README.md:
--------------------------------------------------------------------------------
1 | # [`node-llama-cpp`](https://github.com/withcatai/node-llama-cpp)
2 | This is a prebuilt binary package for [`node-llama-cpp`](https://github.com/withcatai/node-llama-cpp) for Linux x64.
3 |
4 | Do not install this package directly.
5 |
--------------------------------------------------------------------------------
/packages/@node-llama-cpp/linux-x64/package-lock.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "@node-llama-cpp/linux-x64",
3 | "version": "0.1.0",
4 | "lockfileVersion": 3,
5 | "requires": true,
6 | "packages": {
7 | "": {
8 | "name": "@node-llama-cpp/linux-x64",
9 | "version": "0.1.0",
10 | "cpu": [
11 | "x64"
12 | ],
13 | "license": "MIT",
14 | "os": [
15 | "linux"
16 | ],
17 | "devDependencies": {
18 | "typescript": "^5.2.2"
19 | },
20 | "engines": {
21 | "node": ">=20.0.0"
22 | }
23 | },
24 | "node_modules/typescript": {
25 | "version": "5.5.2",
26 | "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.5.2.tgz",
27 | "integrity": "sha512-NcRtPEOsPFFWjobJEtfihkLCZCXZt/os3zf8nTxjVH3RvTSxjrCamJpbExGvYOF+tFHc3pA65qpdwPbzjohhew==",
28 | "dev": true,
29 | "bin": {
30 | "tsc": "bin/tsc",
31 | "tsserver": "bin/tsserver"
32 | },
33 | "engines": {
34 | "node": ">=14.17"
35 | }
36 | }
37 | }
38 | }
39 |
--------------------------------------------------------------------------------
/packages/@node-llama-cpp/linux-x64/src/index.ts:
--------------------------------------------------------------------------------
1 | import path from "path";
2 | import {fileURLToPath} from "url";
3 | import fs from "node:fs/promises";
4 |
5 | const __dirname = path.dirname(fileURLToPath(import.meta.url));
6 | const binsDir = path.join(__dirname, "..", "bins");
7 | const packageVersion: string = (JSON.parse(await fs.readFile(path.join(__dirname, "..", "package.json"), "utf8"))).version;
8 |
9 | export function getBinsDir() {
10 | return {
11 | binsDir,
12 | packageVersion
13 | };
14 | }
15 |
--------------------------------------------------------------------------------
/packages/@node-llama-cpp/linux-x64/tsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 | "compilerOptions": {
3 | "lib": ["es2022"],
4 | "module": "es2022",
5 | "target": "es2022",
6 | "esModuleInterop": true,
7 | "noImplicitAny": true,
8 | "noImplicitReturns": true,
9 | "noImplicitThis": true,
10 | "noImplicitOverride": true,
11 | "removeComments": false,
12 | "allowSyntheticDefaultImports": true,
13 | "forceConsistentCasingInFileNames": true,
14 | "noFallthroughCasesInSwitch": true,
15 | "skipLibCheck": true,
16 | "moduleResolution": "node",
17 | "resolveJsonModule": false,
18 | "strictNullChecks": true,
19 | "isolatedModules": true,
20 | "noEmit": false,
21 | "outDir": "./dist",
22 | "strict": true,
23 | "sourceMap": false,
24 | "composite": false,
25 | "declaration": false,
26 | "stripInternal": true
27 | },
28 | "files": [
29 | "./src/index.ts"
30 | ],
31 | "include": [
32 | "./src"
33 | ]
34 | }
35 |
--------------------------------------------------------------------------------
/packages/@node-llama-cpp/mac-arm64-metal/.gitignore:
--------------------------------------------------------------------------------
1 | /dist
2 |
--------------------------------------------------------------------------------
/packages/@node-llama-cpp/mac-arm64-metal/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2024 Gilad S.
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/packages/@node-llama-cpp/mac-arm64-metal/README.md:
--------------------------------------------------------------------------------
1 | # [`node-llama-cpp`](https://github.com/withcatai/node-llama-cpp)
2 | This is a prebuilt binary package for [`node-llama-cpp`](https://github.com/withcatai/node-llama-cpp) for macOS arm64 with Metal support.
3 |
4 | Do not install this package directly.
5 |
--------------------------------------------------------------------------------
/packages/@node-llama-cpp/mac-arm64-metal/package-lock.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "@node-llama-cpp/mac-arm64-metal",
3 | "version": "0.1.0",
4 | "lockfileVersion": 3,
5 | "requires": true,
6 | "packages": {
7 | "": {
8 | "name": "@node-llama-cpp/mac-arm64-metal",
9 | "version": "0.1.0",
10 | "cpu": [
11 | "arm64",
12 | "x64"
13 | ],
14 | "license": "MIT",
15 | "os": [
16 | "darwin"
17 | ],
18 | "devDependencies": {
19 | "typescript": "^5.2.2"
20 | },
21 | "engines": {
22 | "node": ">=20.0.0"
23 | }
24 | },
25 | "node_modules/typescript": {
26 | "version": "5.5.2",
27 | "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.5.2.tgz",
28 | "integrity": "sha512-NcRtPEOsPFFWjobJEtfihkLCZCXZt/os3zf8nTxjVH3RvTSxjrCamJpbExGvYOF+tFHc3pA65qpdwPbzjohhew==",
29 | "dev": true,
30 | "bin": {
31 | "tsc": "bin/tsc",
32 | "tsserver": "bin/tsserver"
33 | },
34 | "engines": {
35 | "node": ">=14.17"
36 | }
37 | }
38 | }
39 | }
40 |
--------------------------------------------------------------------------------
/packages/@node-llama-cpp/mac-arm64-metal/src/index.ts:
--------------------------------------------------------------------------------
1 | import path from "path";
2 | import {fileURLToPath} from "url";
3 | import fs from "node:fs/promises";
4 |
5 | const __dirname = path.dirname(fileURLToPath(import.meta.url));
6 | const binsDir = path.join(__dirname, "..", "bins");
7 | const packageVersion: string = (JSON.parse(await fs.readFile(path.join(__dirname, "..", "package.json"), "utf8"))).version;
8 |
9 | export function getBinsDir() {
10 | return {
11 | binsDir,
12 | packageVersion
13 | };
14 | }
15 |
--------------------------------------------------------------------------------
/packages/@node-llama-cpp/mac-arm64-metal/tsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 | "compilerOptions": {
3 | "lib": ["es2022"],
4 | "module": "es2022",
5 | "target": "es2022",
6 | "esModuleInterop": true,
7 | "noImplicitAny": true,
8 | "noImplicitReturns": true,
9 | "noImplicitThis": true,
10 | "noImplicitOverride": true,
11 | "removeComments": false,
12 | "allowSyntheticDefaultImports": true,
13 | "forceConsistentCasingInFileNames": true,
14 | "noFallthroughCasesInSwitch": true,
15 | "skipLibCheck": true,
16 | "moduleResolution": "node",
17 | "resolveJsonModule": false,
18 | "strictNullChecks": true,
19 | "isolatedModules": true,
20 | "noEmit": false,
21 | "outDir": "./dist",
22 | "strict": true,
23 | "sourceMap": false,
24 | "composite": false,
25 | "declaration": false,
26 | "stripInternal": true
27 | },
28 | "files": [
29 | "./src/index.ts"
30 | ],
31 | "include": [
32 | "./src"
33 | ]
34 | }
35 |
--------------------------------------------------------------------------------
/packages/@node-llama-cpp/mac-x64/.gitignore:
--------------------------------------------------------------------------------
1 | /dist
2 |
--------------------------------------------------------------------------------
/packages/@node-llama-cpp/mac-x64/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2024 Gilad S.
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/packages/@node-llama-cpp/mac-x64/README.md:
--------------------------------------------------------------------------------
1 | # [`node-llama-cpp`](https://github.com/withcatai/node-llama-cpp)
2 | This is a prebuilt binary package for [`node-llama-cpp`](https://github.com/withcatai/node-llama-cpp) for macOS x64.
3 |
4 | Do not install this package directly.
5 |
--------------------------------------------------------------------------------
/packages/@node-llama-cpp/mac-x64/package-lock.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "@node-llama-cpp/mac-x64",
3 | "version": "0.1.0",
4 | "lockfileVersion": 3,
5 | "requires": true,
6 | "packages": {
7 | "": {
8 | "name": "@node-llama-cpp/mac-x64",
9 | "version": "0.1.0",
10 | "cpu": [
11 | "x64"
12 | ],
13 | "license": "MIT",
14 | "os": [
15 | "darwin"
16 | ],
17 | "devDependencies": {
18 | "typescript": "^5.2.2"
19 | },
20 | "engines": {
21 | "node": ">=20.0.0"
22 | }
23 | },
24 | "node_modules/typescript": {
25 | "version": "5.5.2",
26 | "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.5.2.tgz",
27 | "integrity": "sha512-NcRtPEOsPFFWjobJEtfihkLCZCXZt/os3zf8nTxjVH3RvTSxjrCamJpbExGvYOF+tFHc3pA65qpdwPbzjohhew==",
28 | "dev": true,
29 | "bin": {
30 | "tsc": "bin/tsc",
31 | "tsserver": "bin/tsserver"
32 | },
33 | "engines": {
34 | "node": ">=14.17"
35 | }
36 | }
37 | }
38 | }
39 |
--------------------------------------------------------------------------------
/packages/@node-llama-cpp/mac-x64/src/index.ts:
--------------------------------------------------------------------------------
1 | import path from "path";
2 | import {fileURLToPath} from "url";
3 | import fs from "node:fs/promises";
4 |
5 | const __dirname = path.dirname(fileURLToPath(import.meta.url));
6 | const binsDir = path.join(__dirname, "..", "bins");
7 | const packageVersion: string = (JSON.parse(await fs.readFile(path.join(__dirname, "..", "package.json"), "utf8"))).version;
8 |
9 | export function getBinsDir() {
10 | return {
11 | binsDir,
12 | packageVersion
13 | };
14 | }
15 |
--------------------------------------------------------------------------------
/packages/@node-llama-cpp/mac-x64/tsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 | "compilerOptions": {
3 | "lib": ["es2022"],
4 | "module": "es2022",
5 | "target": "es2022",
6 | "esModuleInterop": true,
7 | "noImplicitAny": true,
8 | "noImplicitReturns": true,
9 | "noImplicitThis": true,
10 | "noImplicitOverride": true,
11 | "removeComments": false,
12 | "allowSyntheticDefaultImports": true,
13 | "forceConsistentCasingInFileNames": true,
14 | "noFallthroughCasesInSwitch": true,
15 | "skipLibCheck": true,
16 | "moduleResolution": "node",
17 | "resolveJsonModule": false,
18 | "strictNullChecks": true,
19 | "isolatedModules": true,
20 | "noEmit": false,
21 | "outDir": "./dist",
22 | "strict": true,
23 | "sourceMap": false,
24 | "composite": false,
25 | "declaration": false,
26 | "stripInternal": true
27 | },
28 | "files": [
29 | "./src/index.ts"
30 | ],
31 | "include": [
32 | "./src"
33 | ]
34 | }
35 |
--------------------------------------------------------------------------------
/packages/@node-llama-cpp/win-arm64/.gitignore:
--------------------------------------------------------------------------------
1 | /dist
2 |
--------------------------------------------------------------------------------
/packages/@node-llama-cpp/win-arm64/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2024 Gilad S.
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/packages/@node-llama-cpp/win-arm64/README.md:
--------------------------------------------------------------------------------
1 | # [`node-llama-cpp`](https://github.com/withcatai/node-llama-cpp)
2 | This is a prebuilt binary package for [`node-llama-cpp`](https://github.com/withcatai/node-llama-cpp) for Windows arm64.
3 |
4 | Do not install this package directly.
5 |
--------------------------------------------------------------------------------
/packages/@node-llama-cpp/win-arm64/package-lock.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "@node-llama-cpp/win-arm64",
3 | "version": "0.1.0",
4 | "lockfileVersion": 3,
5 | "requires": true,
6 | "packages": {
7 | "": {
8 | "name": "@node-llama-cpp/win-arm64",
9 | "version": "0.1.0",
10 | "cpu": [
11 | "arm64",
12 | "x64"
13 | ],
14 | "license": "MIT",
15 | "os": [
16 | "win32"
17 | ],
18 | "devDependencies": {
19 | "typescript": "^5.2.2"
20 | },
21 | "engines": {
22 | "node": ">=20.0.0"
23 | }
24 | },
25 | "node_modules/typescript": {
26 | "version": "5.5.2",
27 | "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.5.2.tgz",
28 | "integrity": "sha512-NcRtPEOsPFFWjobJEtfihkLCZCXZt/os3zf8nTxjVH3RvTSxjrCamJpbExGvYOF+tFHc3pA65qpdwPbzjohhew==",
29 | "dev": true,
30 | "bin": {
31 | "tsc": "bin/tsc",
32 | "tsserver": "bin/tsserver"
33 | },
34 | "engines": {
35 | "node": ">=14.17"
36 | }
37 | }
38 | }
39 | }
40 |
--------------------------------------------------------------------------------
/packages/@node-llama-cpp/win-arm64/src/index.ts:
--------------------------------------------------------------------------------
1 | import path from "path";
2 | import {fileURLToPath} from "url";
3 | import fs from "node:fs/promises";
4 |
5 | const __dirname = path.dirname(fileURLToPath(import.meta.url));
6 | const binsDir = path.join(__dirname, "..", "bins");
7 | const packageVersion: string = (JSON.parse(await fs.readFile(path.join(__dirname, "..", "package.json"), "utf8"))).version;
8 |
9 | export function getBinsDir() {
10 | return {
11 | binsDir,
12 | packageVersion
13 | };
14 | }
15 |
--------------------------------------------------------------------------------
/packages/@node-llama-cpp/win-arm64/tsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 | "compilerOptions": {
3 | "lib": ["es2022"],
4 | "module": "es2022",
5 | "target": "es2022",
6 | "esModuleInterop": true,
7 | "noImplicitAny": true,
8 | "noImplicitReturns": true,
9 | "noImplicitThis": true,
10 | "noImplicitOverride": true,
11 | "removeComments": false,
12 | "allowSyntheticDefaultImports": true,
13 | "forceConsistentCasingInFileNames": true,
14 | "noFallthroughCasesInSwitch": true,
15 | "skipLibCheck": true,
16 | "moduleResolution": "node",
17 | "resolveJsonModule": false,
18 | "strictNullChecks": true,
19 | "isolatedModules": true,
20 | "noEmit": false,
21 | "outDir": "./dist",
22 | "strict": true,
23 | "sourceMap": false,
24 | "composite": false,
25 | "declaration": false,
26 | "stripInternal": true
27 | },
28 | "files": [
29 | "./src/index.ts"
30 | ],
31 | "include": [
32 | "./src"
33 | ]
34 | }
35 |
--------------------------------------------------------------------------------
/packages/@node-llama-cpp/win-x64-cuda/.gitignore:
--------------------------------------------------------------------------------
1 | /dist
2 |
--------------------------------------------------------------------------------
/packages/@node-llama-cpp/win-x64-cuda/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2024 Gilad S.
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/packages/@node-llama-cpp/win-x64-cuda/README.md:
--------------------------------------------------------------------------------
1 | # [`node-llama-cpp`](https://github.com/withcatai/node-llama-cpp)
2 | This is a prebuilt binary package for [`node-llama-cpp`](https://github.com/withcatai/node-llama-cpp) for Windows x64 with CUDA support.
3 |
4 | Do not install this package directly.
5 |
--------------------------------------------------------------------------------
/packages/@node-llama-cpp/win-x64-cuda/package-lock.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "@node-llama-cpp/win-x64-cuda",
3 | "version": "0.1.0",
4 | "lockfileVersion": 3,
5 | "requires": true,
6 | "packages": {
7 | "": {
8 | "name": "@node-llama-cpp/win-x64-cuda",
9 | "version": "0.1.0",
10 | "cpu": [
11 | "x64"
12 | ],
13 | "license": "MIT",
14 | "os": [
15 | "win32"
16 | ],
17 | "devDependencies": {
18 | "typescript": "^5.2.2"
19 | },
20 | "engines": {
21 | "node": ">=20.0.0"
22 | }
23 | },
24 | "node_modules/typescript": {
25 | "version": "5.5.2",
26 | "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.5.2.tgz",
27 | "integrity": "sha512-NcRtPEOsPFFWjobJEtfihkLCZCXZt/os3zf8nTxjVH3RvTSxjrCamJpbExGvYOF+tFHc3pA65qpdwPbzjohhew==",
28 | "dev": true,
29 | "bin": {
30 | "tsc": "bin/tsc",
31 | "tsserver": "bin/tsserver"
32 | },
33 | "engines": {
34 | "node": ">=14.17"
35 | }
36 | }
37 | }
38 | }
39 |
--------------------------------------------------------------------------------
/packages/@node-llama-cpp/win-x64-cuda/src/index.ts:
--------------------------------------------------------------------------------
1 | import path from "path";
2 | import {fileURLToPath} from "url";
3 | import fs from "node:fs/promises";
4 |
5 | const __dirname = path.dirname(fileURLToPath(import.meta.url));
6 | const binsDir = path.join(__dirname, "..", "bins");
7 | const packageVersion: string = (JSON.parse(await fs.readFile(path.join(__dirname, "..", "package.json"), "utf8"))).version;
8 |
9 | export function getBinsDir() {
10 | return {
11 | binsDir,
12 | packageVersion
13 | };
14 | }
15 |
--------------------------------------------------------------------------------
/packages/@node-llama-cpp/win-x64-cuda/tsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 | "compilerOptions": {
3 | "lib": ["es2022"],
4 | "module": "es2022",
5 | "target": "es2022",
6 | "esModuleInterop": true,
7 | "noImplicitAny": true,
8 | "noImplicitReturns": true,
9 | "noImplicitThis": true,
10 | "noImplicitOverride": true,
11 | "removeComments": false,
12 | "allowSyntheticDefaultImports": true,
13 | "forceConsistentCasingInFileNames": true,
14 | "noFallthroughCasesInSwitch": true,
15 | "skipLibCheck": true,
16 | "moduleResolution": "node",
17 | "resolveJsonModule": false,
18 | "strictNullChecks": true,
19 | "isolatedModules": true,
20 | "noEmit": false,
21 | "outDir": "./dist",
22 | "strict": true,
23 | "sourceMap": false,
24 | "composite": false,
25 | "declaration": false,
26 | "stripInternal": true
27 | },
28 | "files": [
29 | "./src/index.ts"
30 | ],
31 | "include": [
32 | "./src"
33 | ]
34 | }
35 |
--------------------------------------------------------------------------------
/packages/@node-llama-cpp/win-x64-vulkan/.gitignore:
--------------------------------------------------------------------------------
1 | /dist
2 |
--------------------------------------------------------------------------------
/packages/@node-llama-cpp/win-x64-vulkan/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2024 Gilad S.
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/packages/@node-llama-cpp/win-x64-vulkan/README.md:
--------------------------------------------------------------------------------
1 | # [`node-llama-cpp`](https://github.com/withcatai/node-llama-cpp)
2 | This is a prebuilt binary package for [`node-llama-cpp`](https://github.com/withcatai/node-llama-cpp) for Windows x64 with Vulkan support.
3 |
4 | Do not install this package directly.
5 |
--------------------------------------------------------------------------------
/packages/@node-llama-cpp/win-x64-vulkan/package-lock.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "@node-llama-cpp/win-x64-vulkan",
3 | "version": "0.1.0",
4 | "lockfileVersion": 3,
5 | "requires": true,
6 | "packages": {
7 | "": {
8 | "name": "@node-llama-cpp/win-x64-vulkan",
9 | "version": "0.1.0",
10 | "cpu": [
11 | "x64"
12 | ],
13 | "license": "MIT",
14 | "os": [
15 | "win32"
16 | ],
17 | "devDependencies": {
18 | "typescript": "^5.2.2"
19 | },
20 | "engines": {
21 | "node": ">=20.0.0"
22 | }
23 | },
24 | "node_modules/typescript": {
25 | "version": "5.5.2",
26 | "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.5.2.tgz",
27 | "integrity": "sha512-NcRtPEOsPFFWjobJEtfihkLCZCXZt/os3zf8nTxjVH3RvTSxjrCamJpbExGvYOF+tFHc3pA65qpdwPbzjohhew==",
28 | "dev": true,
29 | "bin": {
30 | "tsc": "bin/tsc",
31 | "tsserver": "bin/tsserver"
32 | },
33 | "engines": {
34 | "node": ">=14.17"
35 | }
36 | }
37 | }
38 | }
39 |
--------------------------------------------------------------------------------
/packages/@node-llama-cpp/win-x64-vulkan/src/index.ts:
--------------------------------------------------------------------------------
1 | import path from "path";
2 | import {fileURLToPath} from "url";
3 | import fs from "node:fs/promises";
4 |
5 | const __dirname = path.dirname(fileURLToPath(import.meta.url));
6 | const binsDir = path.join(__dirname, "..", "bins");
7 | const packageVersion: string = (JSON.parse(await fs.readFile(path.join(__dirname, "..", "package.json"), "utf8"))).version;
8 |
9 | export function getBinsDir() {
10 | return {
11 | binsDir,
12 | packageVersion
13 | };
14 | }
15 |
--------------------------------------------------------------------------------
/packages/@node-llama-cpp/win-x64-vulkan/tsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 | "compilerOptions": {
3 | "lib": ["es2022"],
4 | "module": "es2022",
5 | "target": "es2022",
6 | "esModuleInterop": true,
7 | "noImplicitAny": true,
8 | "noImplicitReturns": true,
9 | "noImplicitThis": true,
10 | "noImplicitOverride": true,
11 | "removeComments": false,
12 | "allowSyntheticDefaultImports": true,
13 | "forceConsistentCasingInFileNames": true,
14 | "noFallthroughCasesInSwitch": true,
15 | "skipLibCheck": true,
16 | "moduleResolution": "node",
17 | "resolveJsonModule": false,
18 | "strictNullChecks": true,
19 | "isolatedModules": true,
20 | "noEmit": false,
21 | "outDir": "./dist",
22 | "strict": true,
23 | "sourceMap": false,
24 | "composite": false,
25 | "declaration": false,
26 | "stripInternal": true
27 | },
28 | "files": [
29 | "./src/index.ts"
30 | ],
31 | "include": [
32 | "./src"
33 | ]
34 | }
35 |
--------------------------------------------------------------------------------
/packages/@node-llama-cpp/win-x64/.gitignore:
--------------------------------------------------------------------------------
1 | /dist
2 |
--------------------------------------------------------------------------------
/packages/@node-llama-cpp/win-x64/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2024 Gilad S.
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/packages/@node-llama-cpp/win-x64/README.md:
--------------------------------------------------------------------------------
1 | # [`node-llama-cpp`](https://github.com/withcatai/node-llama-cpp)
2 | This is a prebuilt binary package for [`node-llama-cpp`](https://github.com/withcatai/node-llama-cpp) for Windows x64.
3 |
4 | Do not install this package directly.
5 |
--------------------------------------------------------------------------------
/packages/@node-llama-cpp/win-x64/package-lock.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "@node-llama-cpp/win-x64",
3 | "version": "0.1.0",
4 | "lockfileVersion": 3,
5 | "requires": true,
6 | "packages": {
7 | "": {
8 | "name": "@node-llama-cpp/win-x64",
9 | "version": "0.1.0",
10 | "cpu": [
11 | "x64"
12 | ],
13 | "license": "MIT",
14 | "os": [
15 | "win32"
16 | ],
17 | "devDependencies": {
18 | "typescript": "^5.2.2"
19 | },
20 | "engines": {
21 | "node": ">=20.0.0"
22 | }
23 | },
24 | "node_modules/typescript": {
25 | "version": "5.5.2",
26 | "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.5.2.tgz",
27 | "integrity": "sha512-NcRtPEOsPFFWjobJEtfihkLCZCXZt/os3zf8nTxjVH3RvTSxjrCamJpbExGvYOF+tFHc3pA65qpdwPbzjohhew==",
28 | "dev": true,
29 | "bin": {
30 | "tsc": "bin/tsc",
31 | "tsserver": "bin/tsserver"
32 | },
33 | "engines": {
34 | "node": ">=14.17"
35 | }
36 | }
37 | }
38 | }
39 |
--------------------------------------------------------------------------------
/packages/@node-llama-cpp/win-x64/src/index.ts:
--------------------------------------------------------------------------------
1 | import path from "path";
2 | import {fileURLToPath} from "url";
3 | import fs from "node:fs/promises";
4 |
5 | const __dirname = path.dirname(fileURLToPath(import.meta.url));
6 | const binsDir = path.join(__dirname, "..", "bins");
7 | const packageVersion: string = (JSON.parse(await fs.readFile(path.join(__dirname, "..", "package.json"), "utf8"))).version;
8 |
9 | export function getBinsDir() {
10 | return {
11 | binsDir,
12 | packageVersion
13 | };
14 | }
15 |
--------------------------------------------------------------------------------
/packages/@node-llama-cpp/win-x64/tsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 | "compilerOptions": {
3 | "lib": ["es2022"],
4 | "module": "es2022",
5 | "target": "es2022",
6 | "esModuleInterop": true,
7 | "noImplicitAny": true,
8 | "noImplicitReturns": true,
9 | "noImplicitThis": true,
10 | "noImplicitOverride": true,
11 | "removeComments": false,
12 | "allowSyntheticDefaultImports": true,
13 | "forceConsistentCasingInFileNames": true,
14 | "noFallthroughCasesInSwitch": true,
15 | "skipLibCheck": true,
16 | "moduleResolution": "node",
17 | "resolveJsonModule": false,
18 | "strictNullChecks": true,
19 | "isolatedModules": true,
20 | "noEmit": false,
21 | "outDir": "./dist",
22 | "strict": true,
23 | "sourceMap": false,
24 | "composite": false,
25 | "declaration": false,
26 | "stripInternal": true
27 | },
28 | "files": [
29 | "./src/index.ts"
30 | ],
31 | "include": [
32 | "./src"
33 | ]
34 | }
35 |
--------------------------------------------------------------------------------
/packages/create-node-llama-cpp/.gitignore:
--------------------------------------------------------------------------------
1 | /dist
2 |
--------------------------------------------------------------------------------
/packages/create-node-llama-cpp/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2024 Gilad S.
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/packages/create-node-llama-cpp/README.md:
--------------------------------------------------------------------------------
1 | # `create-node-llama-cpp`
2 | ## Scaffold a new [`node-llama-cpp`](https://www.npmjs.com/package/node-llama-cpp) project from a template
3 | ```bash
4 | npm create node-llama-cpp@latest
5 | ```
6 |
7 | And then follow the prompts.
8 |
9 | You can directly specify the project name you want to use via the command line:
10 | ```bash
11 | npm create node-llama-cpp@latest my-project
12 | ```
13 |
--------------------------------------------------------------------------------
/packages/create-node-llama-cpp/src/cli.ts:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env node
2 |
3 | import path from "path";
4 | import {fileURLToPath} from "url";
5 | import fs from "node:fs/promises";
6 | /* eslint import/no-unresolved: "off" */
7 | // @ts-ignore
8 | import {_startCreateCli} from "node-llama-cpp/commands";
9 |
10 | const __dirname = path.dirname(fileURLToPath(import.meta.url));
11 |
12 | const packageJson = JSON.parse(await fs.readFile(path.join(__dirname, "..", "package.json"), "utf8"));
13 |
14 | _startCreateCli({
15 | cliBinName: packageJson.name,
16 | packageVersion: packageJson.version,
17 | _enable: Symbol.for("internal")
18 | });
19 |
20 | export {};
21 |
--------------------------------------------------------------------------------
/packages/create-node-llama-cpp/src/index.ts:
--------------------------------------------------------------------------------
1 | export {};
2 |
--------------------------------------------------------------------------------
/packages/create-node-llama-cpp/tsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 | "compilerOptions": {
3 | "lib": ["es2022"],
4 | "module": "es2022",
5 | "target": "es2022",
6 | "esModuleInterop": true,
7 | "noImplicitAny": true,
8 | "noImplicitReturns": true,
9 | "noImplicitThis": true,
10 | "noImplicitOverride": true,
11 | "removeComments": false,
12 | "allowSyntheticDefaultImports": true,
13 | "forceConsistentCasingInFileNames": true,
14 | "noFallthroughCasesInSwitch": true,
15 | "skipLibCheck": true,
16 | "moduleResolution": "node",
17 | "resolveJsonModule": false,
18 | "strictNullChecks": true,
19 | "isolatedModules": true,
20 | "noEmit": false,
21 | "outDir": "./dist",
22 | "strict": true,
23 | "sourceMap": true,
24 | "composite": false,
25 | "declaration": true,
26 | "stripInternal": true
27 | },
28 | "files": [
29 | "./src/index.ts"
30 | ],
31 | "include": [
32 | "./src"
33 | ]
34 | }
35 |
--------------------------------------------------------------------------------
/scripts/patches/@semantic-release+github+11.0.0.patch:
--------------------------------------------------------------------------------
1 | diff --git a/node_modules/@semantic-release/github/lib/definitions/retry.js b/node_modules/@semantic-release/github/lib/definitions/retry.js
2 | index 9b5021c..b684e52 100644
3 | --- a/node_modules/@semantic-release/github/lib/definitions/retry.js
4 | +++ b/node_modules/@semantic-release/github/lib/definitions/retry.js
5 | @@ -5,5 +5,5 @@ export const RETRY_CONF = {
6 | // By default, Octokit does not retry on 404s.
7 | // But we want to retry on 404s to account for replication lag.
8 | doNotRetry: [400, 401, 403, 422],
9 | - retries: 3,
10 | + retries: 25,
11 | };
12 |
--------------------------------------------------------------------------------
/scripts/patches/@semantic-release+npm+12.0.1.patch:
--------------------------------------------------------------------------------
1 | diff --git a/node_modules/@semantic-release/npm/lib/verify-auth.js b/node_modules/@semantic-release/npm/lib/verify-auth.js
2 | index 99e138e..31dee5f 100644
3 | --- a/node_modules/@semantic-release/npm/lib/verify-auth.js
4 | +++ b/node_modules/@semantic-release/npm/lib/verify-auth.js
5 | @@ -12,6 +12,10 @@ export default async function (npmrc, pkg, context) {
6 | stdout,
7 | stderr,
8 | } = context;
9 | +
10 | + if (context.options?.dryRun)
11 | + return;
12 | +
13 | const registry = getRegistry(pkg, context);
14 |
15 | await setNpmrcAuth(npmrc, registry, context);
16 |
--------------------------------------------------------------------------------
/scripts/patches/semantic-release+24.1.1.patch:
--------------------------------------------------------------------------------
1 | diff --git a/node_modules/semantic-release/index.js b/node_modules/semantic-release/index.js
2 | index 19c9f70..559665c 100644
3 | --- a/node_modules/semantic-release/index.js
4 | +++ b/node_modules/semantic-release/index.js
5 | @@ -94,7 +94,8 @@ async function run(context, plugins) {
6 | return false;
7 | }
8 |
9 | - throw error;
10 | + if (!options.dryRun)
11 | + throw error;
12 | }
13 | } catch (error) {
14 | logger.error(`The command "${error.command}" failed with the error message ${error.stderr}.`);
15 |
--------------------------------------------------------------------------------
/scripts/patches/vitepress+1.3.4.patch:
--------------------------------------------------------------------------------
1 | diff --git a/node_modules/vitepress/dist/client/theme-default/components/VPLocalSearchBox.vue b/node_modules/vitepress/dist/client/theme-default/components/VPLocalSearchBox.vue
2 | index c8aded4..ccd5eff 100644
3 | --- a/node_modules/vitepress/dist/client/theme-default/components/VPLocalSearchBox.vue
4 | +++ b/node_modules/vitepress/dist/client/theme-default/components/VPLocalSearchBox.vue
5 | @@ -443,6 +443,7 @@ function formMarkRegex(terms: Set<string>) {
6 | :placeholder="buttonText"
7 | id="localsearch-input"
8 | aria-labelledby="localsearch-label"
9 | + autocomplete="off"
10 | class="search-input"
11 | />
12 |
13 |
--------------------------------------------------------------------------------
/scripts/postVersion.ts:
--------------------------------------------------------------------------------
1 | import path from "path";
2 | import {fileURLToPath} from "url";
3 | import fs from "fs-extra";
4 |
5 | const __dirname = path.dirname(fileURLToPath(import.meta.url));
6 | const packageJsonPath = path.join(__dirname, "..", "package.json");
7 |
8 | const packageJson = await fs.readJson(packageJsonPath);
9 | const currentVersion = packageJson.version;
10 |
11 | if (packageJson.optionalDependencies != null) {
12 | for (const packageName of Object.keys(packageJson.optionalDependencies)) {
13 | if (!packageName.startsWith("@node-llama-cpp/"))
14 | continue;
15 |
16 | console.info(`Updating optional dependency "${packageName}" to version "${currentVersion}"`);
17 | packageJson.optionalDependencies[packageName] = currentVersion;
18 | }
19 | }
20 |
21 | await fs.writeFile(packageJsonPath, JSON.stringify(packageJson, null, 2), "utf8");
22 |
--------------------------------------------------------------------------------
/scripts/prepareCreateNodeLlamaCppModuleForPublish.ts:
--------------------------------------------------------------------------------
1 | import path from "path";
2 | import {fileURLToPath} from "url";
3 | import yargs from "yargs";
4 | import {hideBin} from "yargs/helpers";
5 | import fs from "fs-extra";
6 |
7 | const __dirname = path.dirname(fileURLToPath(import.meta.url));
8 | const createPackageModulePackageJsonPath = path.join(__dirname, "..", "packages", "create-node-llama-cpp", "package.json");
9 |
10 | const argv = await yargs(hideBin(process.argv))
11 | .option("packageVersion", {
12 | type: "string",
13 | demandOption: true
14 | })
15 | .argv;
16 |
17 | const {packageVersion} = argv;
18 | if (packageVersion === "")
19 | throw new Error("packageVersion is empty");
20 |
21 | const packageJson = await fs.readJson(createPackageModulePackageJsonPath);
22 | packageJson.version = packageVersion;
23 | packageJson.dependencies["node-llama-cpp"] = packageVersion;
24 | delete packageJson.devDependencies;
25 |
26 | await fs.writeJson(createPackageModulePackageJsonPath, packageJson, {spaces: 2});
27 | console.info(`Updated "create-node-llama-cpp/package.json" to version "${packageVersion}"`);
28 |
--------------------------------------------------------------------------------
/scripts/prepareStandalonePrebuiltBinaryModules.ts:
--------------------------------------------------------------------------------
1 | import path from "path";
2 | import {fileURLToPath} from "url";
3 | import fs from "fs-extra";
4 | import {$, cd} from "zx";
5 |
6 | const __dirname = path.dirname(fileURLToPath(import.meta.url));
7 | const packageDirectory = path.join(__dirname, "..", "packages");
8 | const packageScope = "@node-llama-cpp";
9 | const subPackagesDirectory = path.join(packageDirectory, packageScope);
10 |
11 | for (const packageName of await fs.readdir(subPackagesDirectory)) {
12 | const packagePath = path.join(subPackagesDirectory, packageName);
13 | const packagePackageJsonPath = path.join(packagePath, "package.json");
14 |
15 | if ((await fs.stat(packagePath)).isFile())
16 | continue;
17 |
18 | $.verbose = true;
19 | cd(packagePath);
20 | await $`npm ci -f`;
21 | await $`npm run build`;
22 |
23 | const packageJson = await fs.readJson(packagePackageJsonPath);
24 | delete packageJson.devDependencies;
25 | const postinstall = packageJson.scripts?.postinstall;
26 | delete packageJson.scripts;
27 |
28 | if (postinstall != null)
29 | packageJson.scripts = {postinstall};
30 |
31 | await fs.writeJson(packagePackageJsonPath, packageJson, {spaces: 2});
32 | }
33 |
--------------------------------------------------------------------------------
/scripts/resolveLatestReleaseVersion.ts:
--------------------------------------------------------------------------------
1 | import path from "path";
2 | import yargs from "yargs";
3 | import {hideBin} from "yargs/helpers";
4 | import fs from "fs-extra";
5 |
6 | const argv = await yargs(hideBin(process.argv))
7 | .option("saveVersionToFile", {
8 | type: "string"
9 | })
10 | .argv;
11 |
12 | const {saveVersionToFile} = argv;
13 |
14 | const releaseRes = await fetch("https://api.github.com/repos/withcatai/node-llama-cpp/releases/latest");
15 | const release: Release = await releaseRes.json();
16 |
17 | let latestReleaseVersion = release.tag_name;
18 | if (latestReleaseVersion.toLowerCase().startsWith("v"))
19 | latestReleaseVersion = latestReleaseVersion.slice("v".length);
20 |
21 | if (latestReleaseVersion === "")
22 | throw new Error("Could not get latest release version");
23 |
24 | console.log("Latest release version:", latestReleaseVersion);
25 |
26 | if (saveVersionToFile != null) {
27 | const resolvedPath = path.resolve(process.cwd(), saveVersionToFile);
28 |
29 | console.info("Writing latest release version to file:", resolvedPath);
30 | await fs.writeFile(resolvedPath, latestReleaseVersion, "utf8");
31 | }
32 |
33 | type Release = {
34 | tag_name: string
35 | };
36 |
--------------------------------------------------------------------------------
/src/apiDocsIndex.ts:
--------------------------------------------------------------------------------
1 | /** @internal */
2 | import {
3 | _LlamaText
4 | } from "./utils/LlamaText.js";
5 |
6 | /** @internal */
7 | export * from "./index.js";
8 |
9 | /** @internal */
10 | export {
11 | _LlamaText as LlamaText
12 | };
13 |
--------------------------------------------------------------------------------
/src/bindings/consts.ts:
--------------------------------------------------------------------------------
1 | import {BuildGpu} from "./types.js";
2 |
3 | const prettyBuildGpuNames: Record<Exclude<BuildGpu, false>, string> = {
4 | metal: "Metal",
5 | cuda: "CUDA",
6 | vulkan: "Vulkan"
7 | };
8 |
9 | export function getPrettyBuildGpuName(gpu: BuildGpu | undefined) {
10 | if (gpu == null)
11 | return "unknown GPU";
12 |
13 | if (gpu == false)
14 | return "no GPU";
15 |
16 | return prettyBuildGpuNames[gpu] ?? ('"' + gpu + '"');
17 | }
18 |
--------------------------------------------------------------------------------
/src/bindings/utils/NoBinaryFoundError.ts:
--------------------------------------------------------------------------------
1 | export class NoBinaryFoundError extends Error {
2 | /** @internal */
3 | public constructor(message: string = "NoBinaryFoundError") {
4 | super(message);
5 | }
6 | }
7 |
--------------------------------------------------------------------------------
/src/bindings/utils/asyncEvery.ts:
--------------------------------------------------------------------------------
1 | import {getConsoleLogPrefix} from "../../utils/getConsoleLogPrefix.js";
2 |
3 | /**
4 | * Returns a promise that resolves to true if every promise in the array resolves to true, otherwise false.
5 | * Note that this function will not throw on error and instead will log the error to the console.
6 | */
7 | export async function asyncEvery(promises: Promise<boolean>[]): Promise<boolean> {
8 | try {
9 | return (await Promise.all(promises)).every(Boolean);
10 | } catch (err) {
11 | console.error(getConsoleLogPrefix(false, false), err);
12 |
13 | return false;
14 | }
15 | }
16 |
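
A minimal usage sketch for `asyncEvery`, assuming a relative import from a sibling module; the placeholder promises stand in for real checks such as the `hasGit()`/`hasNpm()` calls used elsewhere in the repo:
```ts
import {asyncEvery} from "./asyncEvery.js";

// Both checks resolve to true, so the combined result is true.
// If any check resolved to false or rejected, the result would be false
// (rejections are logged to the console rather than thrown).
const hasAllDependencies = await asyncEvery([
    Promise.resolve(true),
    Promise.resolve(true)
]);

console.log(hasAllDependencies); // true
```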
--------------------------------------------------------------------------------
/src/bindings/utils/asyncSome.ts:
--------------------------------------------------------------------------------
1 | import {getConsoleLogPrefix} from "../../utils/getConsoleLogPrefix.js";
2 |
3 | /**
4 | * Returns a promise that fulfills as soon as any of the promises return `true`.
5 | * Note that this function will not throw on error and instead will log the error to the console.
6 | */
7 | export async function asyncSome(promises: Promise<boolean>[]): Promise<boolean> {
8 | if (promises.length === 0)
9 | return Promise.resolve(false);
10 |
11 | return new Promise((resolve) => {
12 | let fulfilled = 0;
13 |
14 | for (const promise of promises) {
15 | promise
16 | .then((result) => {
17 | if (result)
18 | return void resolve(true);
19 |
20 | fulfilled++;
21 | if (fulfilled === promises.length)
22 | resolve(false);
23 | })
24 | .catch((err) => {
25 | console.error(getConsoleLogPrefix(false, false), err);
26 |
27 | fulfilled++;
28 | if (fulfilled === promises.length)
29 | resolve(false);
30 | });
31 | }
32 | });
33 | }
34 |
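
A minimal usage sketch for `asyncSome`, assuming a relative import from a sibling module; the timeout values are placeholders chosen only to illustrate the short-circuit behavior:
```ts
import {asyncSome} from "./asyncSome.js";

// Resolves to true as soon as the first promise fulfills with true,
// without waiting for the slower promise to settle.
// Rejections are logged to the console and counted as false.
const anyAvailable = await asyncSome([
    new Promise<boolean>((resolve) => setTimeout(() => resolve(true), 10)),
    new Promise<boolean>((resolve) => setTimeout(() => resolve(false), 1000))
]);

console.log(anyAvailable); // true, after ~10ms
```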
--------------------------------------------------------------------------------
/src/bindings/utils/binariesGithubRelease.ts:
--------------------------------------------------------------------------------
1 | import fs from "fs-extra";
2 | import {binariesGithubReleasePath} from "../../config.js";
3 |
4 | type BinariesGithubReleaseFile = {
5 | release: "latest" | string
6 | };
7 |
8 | export async function getBinariesGithubRelease() {
9 | const binariesGithubRelease: BinariesGithubReleaseFile = await fs.readJson(binariesGithubReleasePath);
10 |
11 | return binariesGithubRelease.release;
12 | }
13 |
14 | export async function setBinariesGithubRelease(release: BinariesGithubReleaseFile["release"]) {
15 | const binariesGithubReleaseJson: BinariesGithubReleaseFile = {
16 | release: release
17 | };
18 |
19 | await fs.writeJson(binariesGithubReleasePath, binariesGithubReleaseJson, {
20 | spaces: 4
21 | });
22 | }
23 |
--------------------------------------------------------------------------------
/src/bindings/utils/getCanUsePrebuiltBinaries.ts:
--------------------------------------------------------------------------------
1 | import {builtinLlamaCppGitHubRepo, builtinLlamaCppRelease} from "../../config.js";
2 | import {getClonedLlamaCppRepoReleaseInfo} from "./cloneLlamaCppRepo.js";
3 |
4 | export async function getCanUsePrebuiltBinaries() {
5 | const clonedLlamaCppRepoReleaseInfo = await getClonedLlamaCppRepoReleaseInfo();
6 |
7 | return clonedLlamaCppRepoReleaseInfo == null || (
8 | clonedLlamaCppRepoReleaseInfo.tag === builtinLlamaCppRelease &&
9 | clonedLlamaCppRepoReleaseInfo.llamaCppGithubRepo === builtinLlamaCppGitHubRepo
10 | );
11 | }
12 |
--------------------------------------------------------------------------------
/src/bindings/utils/getLlamaWithoutBackend.ts:
--------------------------------------------------------------------------------
1 | import {withLock} from "lifecycle-utils";
2 | import {getLlamaForOptions} from "../getLlama.js";
3 | import {LlamaLogLevel} from "../types.js";
4 | import {Llama} from "../Llama.js";
5 |
6 | let sharedLlamaWithoutBackend: Llama | null = null;
7 |
8 | /**
10 |  * This is used to access various methods on the addon side without actually using a backend
10 | */
11 | export async function getLlamaWithoutBackend() {
12 | if (sharedLlamaWithoutBackend != null)
13 | return sharedLlamaWithoutBackend;
14 |
15 | return await withLock(getLlamaWithoutBackend, "loadAddon", async () => {
16 | if (sharedLlamaWithoutBackend != null)
17 | return sharedLlamaWithoutBackend;
18 |
19 | sharedLlamaWithoutBackend = await getLlamaForOptions({
20 | gpu: false,
21 | progressLogs: false,
22 | logLevel: LlamaLogLevel.error,
23 | build: "never",
24 | usePrebuiltBinaries: true,
25 | vramPadding: 0
26 | }, {
27 | skipLlamaInit: true
28 | });
29 |
30 | return sharedLlamaWithoutBackend;
31 | });
32 | }
33 |
--------------------------------------------------------------------------------
/src/bindings/utils/getPlatform.ts:
--------------------------------------------------------------------------------
1 | import process from "process";
2 |
3 | export function getPlatform() {
4 | switch (process.platform) {
5 | case "win32":
6 | case "cygwin":
7 | return "win";
8 |
9 | case "linux":
10 | case "android":
11 | return "linux";
12 |
13 | case "darwin":
14 | return "mac";
15 | }
16 |
17 | return process.platform;
18 | }
19 |
20 | export type BinaryPlatform = ReturnType<typeof getPlatform>;
21 |
--------------------------------------------------------------------------------
/src/bindings/utils/getPlatformInfo.ts:
--------------------------------------------------------------------------------
1 | import os from "os";
2 | import {getPlatform} from "./getPlatform.js";
3 | import {getLinuxDistroInfo} from "./getLinuxDistroInfo.js";
4 |
5 | export async function getPlatformInfo(): Promise<{name: string, version: string}> {
6 | const currentPlatform = getPlatform();
7 |
8 | if (currentPlatform === "mac")
9 | return {
10 | name: "macOS",
11 | version: os.release()
12 | };
13 | else if (currentPlatform === "linux") {
14 | const linuxDistroInfo = await getLinuxDistroInfo();
15 |
16 | return {
17 | name: linuxDistroInfo.name,
18 | version: linuxDistroInfo.version
19 | };
20 | } else if (currentPlatform === "win")
21 | return {
22 | name: "Windows",
23 | version: os.release()
24 | };
25 |
26 | return {
27 | name: "Unknown",
28 | version: os.release()
29 | };
30 | }
31 |
32 | export type BinaryPlatformInfo = Awaited<ReturnType<typeof getPlatformInfo>>;
33 |
--------------------------------------------------------------------------------
/src/bindings/utils/hasBuildingFromSourceDependenciesInstalled.ts:
--------------------------------------------------------------------------------
1 | import which from "which";
2 | import {asyncEvery} from "./asyncEvery.js";
3 |
4 | export async function hasBuildingFromSourceDependenciesInstalled() {
5 | return await asyncEvery([
6 | hasGit(),
7 | hasNpm()
8 | ]);
9 | }
10 |
11 | export async function hasGit() {
12 | try {
13 | const resolvedPath = await which("git");
14 | return resolvedPath !== "";
15 | } catch (err) {
16 | return false;
17 | }
18 | }
19 |
20 | export async function hasNpm() {
21 | try {
22 | const resolvedPath = await which("npm");
23 | return resolvedPath !== "";
24 | } catch (err) {
25 | return false;
26 | }
27 | }
28 |
--------------------------------------------------------------------------------
/src/bindings/utils/lastBuildInfo.ts:
--------------------------------------------------------------------------------
1 | import fs from "fs-extra";
2 | import {lastBuildInfoJsonPath} from "../../config.js";
3 |
4 | type LastBuildInfo = {
5 | folderName: string
6 | };
7 |
8 | export async function getLastBuildInfo() {
9 | try {
10 | const buildInfo: LastBuildInfo = await fs.readJson(lastBuildInfoJsonPath);
11 |
12 | return buildInfo;
13 | } catch (err) {
14 | return null;
15 | }
16 | }
17 |
18 | export async function setLastBuildInfo(buildInfo: LastBuildInfo) {
19 | await fs.writeJson(lastBuildInfoJsonPath, buildInfo, {
20 | spaces: 4
21 | });
22 | }
23 |
--------------------------------------------------------------------------------
/src/bindings/utils/resolveActualBindingBinaryPath.ts:
--------------------------------------------------------------------------------
1 | import path from "path";
2 | import fs from "fs-extra";
3 | import {runningInElectron} from "../../utils/runtime.js";
4 |
5 | export async function resolveActualBindingBinaryPath(binaryPath: string) {
6 | const absolutePath = path.resolve(binaryPath);
7 | if (!runningInElectron)
8 | return absolutePath;
9 |
10 | const fixedAsarPath = absolutePath.replace(".asar" + path.sep, ".asar.unpacked" + path.sep);
11 | try {
12 | if (await fs.pathExists(fixedAsarPath))
13 | return fixedAsarPath;
14 |
15 | return absolutePath;
16 | } catch (err) {
17 | return absolutePath;
18 | }
19 | }
20 |
--------------------------------------------------------------------------------
/src/bindings/utils/testCmakeBinary.ts:
--------------------------------------------------------------------------------
1 | import process from "process";
2 | import {execFile} from "node:child_process";
3 | import path from "path";
4 | import {fileURLToPath} from "url";
5 | import fs from "fs-extra";
6 |
7 | const __dirname = path.dirname(fileURLToPath(import.meta.url));
8 |
9 | export async function testCmakeBinary(cmakeBinaryPath?: string, {
10 | cwd = __dirname, env = process.env
11 | }: {
12 | cwd?: string, env?: typeof process.env
13 | } = {}) {
14 | if (cmakeBinaryPath == null || !(await fs.pathExists(cmakeBinaryPath)))
15 | return false;
16 |
17 | return new Promise((resolve, reject) => {
18 | const child = execFile(cmakeBinaryPath, ["--version"], {
19 | cwd,
20 | env,
21 | windowsHide: true
22 | });
23 |
24 | child.on("exit", (code) => {
25 | if (code == 0)
26 | resolve(true);
27 | else
28 | reject(false);
29 | });
30 | child.on("error", reject);
31 | child.on("disconnect", () => resolve(false));
32 | child.on("close", (code) => {
33 | if (code == 0)
34 | resolve(true);
35 | else
36 | resolve(false);
37 | });
38 | });
39 | }
40 |
--------------------------------------------------------------------------------
/src/chatWrappers/EmptyChatWrapper.ts:
--------------------------------------------------------------------------------
1 | import {ChatWrapper} from "../ChatWrapper.js";
2 |
3 | export class EmptyChatWrapper extends ChatWrapper {
4 | public readonly wrapperName: string = "Empty";
5 | }
6 |
--------------------------------------------------------------------------------
/src/chatWrappers/generic/utils/UniqueIdGenerator.ts:
--------------------------------------------------------------------------------
1 | export class UniqueIdGenerator {
2 | public readonly antiText: string;
3 | private readonly _ids = new Set();
4 |
5 | public constructor(antiText: string) {
6 | this.antiText = antiText;
7 | }
8 |
9 | public generateId(numbersOnly: boolean = false): string {
10 | let id: string;
11 |
12 | do {
13 | if (numbersOnly) {
14 | do {
15 | id = (
16 | Math.random()
17 | .toString(10)
18 | .slice(2)
19 | .slice(0, String(Number.MAX_SAFE_INTEGER).length - 1)
20 | );
21 | } while (id.startsWith("0"));
22 | } else
23 | id = "W" + (
24 | Math.random()
25 | .toString(36)
26 | .slice(2)
27 | ) + "W";
28 | } while (this._ids.has(id) || this.antiText.includes(id));
29 |
30 | this._ids.add(id);
31 |
32 | return id;
33 | }
34 |
35 | public removeId(id: string) {
36 | this._ids.delete(id);
37 | }
38 | }
39 |
40 |
--------------------------------------------------------------------------------
/src/chatWrappers/generic/utils/getFirstValidResult.ts:
--------------------------------------------------------------------------------
1 | /**
2 | * Call the functions in the array one by one and return the result of the first one that doesn't throw an error.
3 | *
4 | * If all functions throw an error, throw the error of the last function.
5 | */
6 | export function getFirstValidResult<const T extends (() => any)[]>(options: T): ReturnType<T[number]> {
7 | for (let i = 0; i < options.length; i++) {
8 | if (i === options.length - 1)
9 | return options[i]!();
10 |
11 | try {
12 | return options[i]!();
13 | } catch (err) {
14 | // do nothing
15 | }
16 | }
17 |
18 | throw new Error("All options failed");
19 | }
20 |
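
A minimal usage sketch for `getFirstValidResult`, assuming a relative import from a sibling module; the JSON strings are purely illustrative:
```ts
import {getFirstValidResult} from "./getFirstValidResult.js";

// The first option throws (invalid JSON), so the second option's result is returned.
// If every option threw, the last error would propagate to the caller.
const parsed = getFirstValidResult([
    () => JSON.parse("not json"),
    () => JSON.parse('{"ok": true}')
]);

console.log(parsed); // { ok: true }
```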
--------------------------------------------------------------------------------
/src/chatWrappers/utils/getModelLinageNames.ts:
--------------------------------------------------------------------------------
1 | import {GgufMetadata} from "../../gguf/types/GgufMetadataTypes.js";
2 |
3 | export function getModelLinageNames(ggufMetadata?: GgufMetadata) {
4 | const res: string[][] = [];
5 |
6 | if (ggufMetadata == null)
7 | return res;
8 |
9 | const currentModelInfo = [ggufMetadata?.general?.name, ggufMetadata?.general?.basename]
10 | .filter((v): v is string => v != null);
11 | if (currentModelInfo.length > 0)
12 | res.push(currentModelInfo);
13 |
14 | if (typeof ggufMetadata?.general?.base_model?.count === "number") {
15 | for (let i = 0; i < ggufMetadata.general.base_model.count; i++) {
16 | const baseModel = ggufMetadata.general.base_model[String(i) as `${bigint}`];
17 | if (baseModel?.name != null)
18 | res.push([baseModel.name]);
19 | }
20 | }
21 |
22 | return res;
23 | }
24 |
--------------------------------------------------------------------------------
/src/chatWrappers/utils/isLlama3_2LightweightModel.ts:
--------------------------------------------------------------------------------
1 | import {ChatWrapperCheckModelCompatibilityParams} from "../../types.js";
2 | import {includesText} from "../../utils/includesText.js";
3 | import {getModelLinageNames} from "./getModelLinageNames.js";
4 |
5 | export function isLlama3_2LightweightModel(options: ChatWrapperCheckModelCompatibilityParams) {
6 | const isLlama3_2 = getModelLinageNames(options.fileInfo?.metadata)
7 | .some((modelNames) => includesText(modelNames, ["llama 3.2", "llama-3.2", "llama3.2"]));
8 | const isSmallModel = (["1B", "3B"] as string[]).includes(options.fileInfo?.metadata?.general?.size_label ?? "");
9 |
10 | return isLlama3_2 && isSmallModel;
11 | }
12 |
--------------------------------------------------------------------------------
/src/chatWrappers/utils/jsonDumps.ts:
--------------------------------------------------------------------------------
1 | /**
2 |  * Like `JSON.stringify`, but formats the result the way Python's `json.dumps(value)` does.
3 | *
4 | * We need to format results this way since this is what many models use in their training data,
5 | * so this is what many models expect to have in their context state.
6 | */
7 | export function jsonDumps(value: any) {
8 | return JSON.stringify(value, null, 1)
9 | .split("\n")
10 | .map((line) => {
11 | line = line.trim();
12 |
13 | if (line.endsWith(","))
14 | line += " ";
15 |
16 | return line;
17 | })
18 | .join("");
19 | }
20 |
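
A short comparison illustrating the difference from plain `JSON.stringify`; the import path and sample object are assumptions for the sake of the example:
```ts
import {jsonDumps} from "./jsonDumps.js";

// Matches Python's `json.dumps` default spacing (a space after item and key separators).
console.log(jsonDumps({a: 1, b: [1, 2]}));      // {"a": 1, "b": [1, 2]}

// Plain JSON.stringify emits no spaces, which differs from the formatting
// many models saw in their training data.
console.log(JSON.stringify({a: 1, b: [1, 2]})); // {"a":1,"b":[1,2]}
```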
--------------------------------------------------------------------------------
/src/cli/commands/inspect/InspectCommand.ts:
--------------------------------------------------------------------------------
1 | import {CommandModule} from "yargs";
2 | import {withCliCommandDescriptionDocsUrl} from "../../utils/withCliCommandDescriptionDocsUrl.js";
3 | import {documentationPageUrls} from "../../../config.js";
4 | import {InspectGgufCommand} from "./commands/InspectGgufCommand.js";
5 | import {InspectGpuCommand} from "./commands/InspectGpuCommand.js";
6 | import {InspectMeasureCommand} from "./commands/InspectMeasureCommand.js";
7 | import {InspectEstimateCommand} from "./commands/InspectEstimateCommand.js";
8 |
9 | type InspectCommand = {
10 | // no options for now
11 | };
12 |
13 | export const InspectCommand: CommandModule