├── .aider.chat.history.md
├── .editorconfig
├── .github
├── FUNDING.yml
├── copilot-instructions.md
└── workflows
│ ├── app-release.yml
│ ├── bench-docker.yml
│ └── boost-docker.yml
├── .gitignore
├── .scripts
├── app.ts
├── comment.ts
├── deno.json
├── deno.lock
├── docs-to-app.ts
├── docs.ts
├── release.sh
├── scaffold.ts
├── seed-cdi.ts
├── seed-traefik.ts
├── seed.ts
└── terser.sh
├── .style.yapf
├── .vscode
├── .copilot-instructions.md
└── settings.json
├── LICENSE
├── README.md
├── RELEASE.md
├── agent
├── .gitignore
├── Dockerfile
├── docker
│ └── services.d
│ │ └── agent.sh
├── override.env
├── requirements.txt
└── src
│ ├── agent.py
│ ├── chat.py
│ ├── chat_node.py
│ ├── config.py
│ ├── format.py
│ ├── llm.py
│ ├── log.py
│ ├── main.py
│ └── tasks
│ ├── chat_to_goal.py
│ ├── direct.py
│ ├── next_step.py
│ ├── plan.py
│ └── refine.py
├── agentzero
├── .gitignore
└── override.env
├── aichat
├── Dockerfile
├── configs
│ ├── aichat.airllm.yml
│ ├── aichat.aphrodite.yml
│ ├── aichat.config.yml
│ ├── aichat.dify.yml
│ ├── aichat.ktransformers.yml
│ ├── aichat.litellm.yml
│ ├── aichat.llamacpp.yml
│ ├── aichat.mistralrs.yml
│ ├── aichat.ollama.yml
│ ├── aichat.sglang.yml
│ ├── aichat.tabbyapi.yml
│ └── aichat.vllm.yml
├── override.env
└── start_aichat.sh
├── aider
├── configs
│ ├── aider.airllm.yml
│ ├── aider.aphrodite.yml
│ ├── aider.config.yml
│ ├── aider.dify.yml
│ ├── aider.ktransformers.yml
│ ├── aider.litellm.yml
│ ├── aider.llamacpp.yml
│ ├── aider.mistralrs.yml
│ ├── aider.ollama.yml
│ ├── aider.sglang.yml
│ ├── aider.tabbyapi.yml
│ └── aider.vllm.yml
├── override.env
└── start_aider.sh
├── airllm
├── Dockerfile
├── override.env
└── server.py
├── airweave
├── .gitignore
└── override.env
├── anythingllm
└── override.env
├── aphrodite
└── override.env
├── app
├── .editorconfig
├── .gitignore
├── README.md
├── app-icon.png
├── bun.lockb
├── index.html
├── package.json
├── postcss.config.js
├── public
│ ├── tauri.svg
│ └── vite.svg
├── src-tauri
│ ├── .gitignore
│ ├── Cargo.lock
│ ├── Cargo.toml
│ ├── build.rs
│ ├── capabilities
│ │ ├── default.json
│ │ └── desktop.json
│ ├── icons
│ │ ├── 128x128.png
│ │ ├── 128x128@2x.png
│ │ ├── 32x32.png
│ │ ├── Square107x107Logo.png
│ │ ├── Square142x142Logo.png
│ │ ├── Square150x150Logo.png
│ │ ├── Square284x284Logo.png
│ │ ├── Square30x30Logo.png
│ │ ├── Square310x310Logo.png
│ │ ├── Square44x44Logo.png
│ │ ├── Square71x71Logo.png
│ │ ├── Square89x89Logo.png
│ │ ├── StoreLogo.png
│ │ ├── android
│ │ │ ├── mipmap-hdpi
│ │ │ │ ├── ic_launcher.png
│ │ │ │ ├── ic_launcher_foreground.png
│ │ │ │ └── ic_launcher_round.png
│ │ │ ├── mipmap-mdpi
│ │ │ │ ├── ic_launcher.png
│ │ │ │ ├── ic_launcher_foreground.png
│ │ │ │ └── ic_launcher_round.png
│ │ │ ├── mipmap-xhdpi
│ │ │ │ ├── ic_launcher.png
│ │ │ │ ├── ic_launcher_foreground.png
│ │ │ │ └── ic_launcher_round.png
│ │ │ ├── mipmap-xxhdpi
│ │ │ │ ├── ic_launcher.png
│ │ │ │ ├── ic_launcher_foreground.png
│ │ │ │ └── ic_launcher_round.png
│ │ │ └── mipmap-xxxhdpi
│ │ │ │ ├── ic_launcher.png
│ │ │ │ ├── ic_launcher_foreground.png
│ │ │ │ └── ic_launcher_round.png
│ │ ├── icon.icns
│ │ ├── icon.ico
│ │ ├── icon.png
│ │ └── ios
│ │ │ ├── AppIcon-20x20@1x.png
│ │ │ ├── AppIcon-20x20@2x-1.png
│ │ │ ├── AppIcon-20x20@2x.png
│ │ │ ├── AppIcon-20x20@3x.png
│ │ │ ├── AppIcon-29x29@1x.png
│ │ │ ├── AppIcon-29x29@2x-1.png
│ │ │ ├── AppIcon-29x29@2x.png
│ │ │ ├── AppIcon-29x29@3x.png
│ │ │ ├── AppIcon-40x40@1x.png
│ │ │ ├── AppIcon-40x40@2x-1.png
│ │ │ ├── AppIcon-40x40@2x.png
│ │ │ ├── AppIcon-40x40@3x.png
│ │ │ ├── AppIcon-512@2x.png
│ │ │ ├── AppIcon-60x60@2x.png
│ │ │ ├── AppIcon-60x60@3x.png
│ │ │ ├── AppIcon-76x76@1x.png
│ │ │ ├── AppIcon-76x76@2x.png
│ │ │ └── AppIcon-83.5x83.5@2x.png
│ ├── src
│ │ ├── lib.rs
│ │ ├── main.rs
│ │ └── tray.rs
│ └── tauri.conf.json
├── src
│ ├── .gitignore
│ ├── App.tsx
│ ├── AppContent.tsx
│ ├── AppRoutes.tsx
│ ├── AppSidebar.tsx
│ ├── BackButton.tsx
│ ├── Button.tsx
│ ├── ConfirmModal.tsx
│ ├── DataClass.tsx
│ ├── HarborLogo.tsx
│ ├── IconButton.tsx
│ ├── Icons.tsx
│ ├── Loading.tsx
│ ├── LostSquirrel.tsx
│ ├── Modal.tsx
│ ├── OverlayContext.tsx
│ ├── ScrollToTop.tsx
│ ├── SearchInput.tsx
│ ├── Section.tsx
│ ├── ServiceTags.tsx
│ ├── assets
│ │ └── font
│ │ │ ├── Inter-Black.woff2
│ │ │ ├── Inter-BlackItalic.woff2
│ │ │ ├── Inter-Bold.woff2
│ │ │ ├── Inter-BoldItalic.woff2
│ │ │ ├── Inter-ExtraBold.woff2
│ │ │ ├── Inter-ExtraBoldItalic.woff2
│ │ │ ├── Inter-ExtraLight.woff2
│ │ │ ├── Inter-ExtraLightItalic.woff2
│ │ │ ├── Inter-Italic.woff2
│ │ │ ├── Inter-Light.woff2
│ │ │ ├── Inter-LightItalic.woff2
│ │ │ ├── Inter-Medium.woff2
│ │ │ ├── Inter-MediumItalic.woff2
│ │ │ ├── Inter-Regular.woff2
│ │ │ ├── Inter-SemiBold.woff2
│ │ │ ├── Inter-SemiBoldItalic.woff2
│ │ │ ├── Inter-Thin.woff2
│ │ │ ├── Inter-ThinItalic.woff2
│ │ │ ├── InterDisplay-Black.woff2
│ │ │ ├── InterDisplay-BlackItalic.woff2
│ │ │ ├── InterDisplay-Bold.woff2
│ │ │ ├── InterDisplay-BoldItalic.woff2
│ │ │ ├── InterDisplay-ExtraBold.woff2
│ │ │ ├── InterDisplay-ExtraBoldItalic.woff2
│ │ │ ├── InterDisplay-ExtraLight.woff2
│ │ │ ├── InterDisplay-ExtraLightItalic.woff2
│ │ │ ├── InterDisplay-Italic.woff2
│ │ │ ├── InterDisplay-Light.woff2
│ │ │ ├── InterDisplay-LightItalic.woff2
│ │ │ ├── InterDisplay-Medium.woff2
│ │ │ ├── InterDisplay-MediumItalic.woff2
│ │ │ ├── InterDisplay-Regular.woff2
│ │ │ ├── InterDisplay-SemiBold.woff2
│ │ │ ├── InterDisplay-SemiBoldItalic.woff2
│ │ │ ├── InterDisplay-Thin.woff2
│ │ │ ├── InterDisplay-ThinItalic.woff2
│ │ │ ├── InterVariable-Italic.woff2
│ │ │ └── InterVariable.woff2
│ ├── cli
│ │ └── CLI.tsx
│ ├── config
│ │ ├── Config.tsx
│ │ ├── ConfigNameModal.tsx
│ │ ├── HarborConfig.ts
│ │ ├── HarborConfigEditor.tsx
│ │ ├── HarborConfigEntryEditor.tsx
│ │ ├── HarborConfigSectionEditor.tsx
│ │ └── useHarborConfig.ts
│ ├── configMetadata.tsx
│ ├── font.css
│ ├── home
│ │ ├── Doctor.tsx
│ │ ├── Home.tsx
│ │ ├── ServiceCard.tsx
│ │ ├── ServiceList.tsx
│ │ ├── Version.tsx
│ │ └── useServiceList.tsx
│ ├── localStorage.ts
│ ├── main.css
│ ├── main.tsx
│ ├── service
│ │ ├── ServiceActions.tsx
│ │ ├── ServiceDescription.tsx
│ │ ├── ServiceDetails.tsx
│ │ ├── ServiceDocs.tsx
│ │ ├── ServiceHandle.tsx
│ │ ├── ServiceName.tsx
│ │ └── useCurrentService.ts
│ ├── serviceActions.tsx
│ ├── serviceMetadata.ts
│ ├── settings
│ │ ├── ProfileSelector.tsx
│ │ └── Settings.tsx
│ ├── squirrel.css
│ ├── tags.css
│ ├── theme.tsx
│ ├── useArrayState.ts
│ ├── useAutostart.tsx
│ ├── useCalled.tsx
│ ├── useDebounceCallback.tsx
│ ├── useGlobalKeydown.tsx
│ ├── useHarbor.tsx
│ ├── useInvoke.tsx
│ ├── useOpen.tsx
│ ├── useSearch.tsx
│ ├── useSelectedProfile.tsx
│ ├── useSharedState.tsx
│ ├── useStoredState.tsx
│ ├── useUnmount.ts
│ ├── utils.tsx
│ └── vite-env.d.ts
├── tailwind.config.js
├── tsconfig.json
├── tsconfig.node.json
└── vite.config.ts
├── autogpt
├── backends
│ └── autogpt.ollama.yml
└── override.env
├── bench
├── Dockerfile
├── defaultTasks.yml
├── override.env
└── src
│ ├── bench.ts
│ ├── config.ts
│ ├── deps.ts
│ ├── judge.ts
│ ├── llm.ts
│ ├── log.ts
│ ├── report.ts
│ ├── run.ts
│ ├── runner.ts
│ ├── task.ts
│ ├── tasks.ts
│ ├── tsconfig.json
│ └── utils.ts
├── bionicgpt
├── override.env
└── start_envoy.sh
├── bolt
├── .env.local
├── Dockerfile
├── model.ts
└── override.env
├── boost
├── .dockerignore
├── Dockerfile
├── README.md
├── override.env
└── src
│ ├── chat.py
│ ├── chat_node.py
│ ├── config.py
│ ├── custom_modules
│ ├── .gitkeep
│ ├── 3t.py
│ ├── ambi.py
│ ├── artifacts
│ │ ├── dnd.html
│ │ ├── dnd_mini.html
│ │ ├── dot.html
│ │ ├── dot_mini.html
│ │ ├── graph.html
│ │ ├── graph_mini.html
│ │ ├── ponder.html
│ │ ├── ponder_mini.html
│ │ ├── tokens.html
│ │ └── tokens_mini.html
│ ├── cea.py
│ ├── cex.py
│ ├── clarity.py
│ ├── crystal.py
│ ├── cssv.py
│ ├── discussurl.py
│ ├── example.py
│ ├── fml.py
│ ├── gact.py
│ ├── grug.py
│ ├── l33t.py
│ ├── meow.py
│ ├── ow.py
│ ├── pad.py
│ ├── pdsv.py
│ ├── polyglot.py
│ ├── r0.py
│ ├── recpl.py
│ ├── stcl.py
│ ├── tmsv.py
│ ├── tri.py
│ ├── unstable.py
│ ├── usv.py
│ ├── webui_artifact.py
│ └── wswp.py
│ ├── format.py
│ ├── llm.py
│ ├── llm_registry.py
│ ├── log.py
│ ├── main.py
│ ├── mapper.py
│ ├── middleware
│ └── request_id.py
│ ├── mods.py
│ ├── modules
│ ├── dnd.py
│ ├── dot.py
│ ├── eli5.py
│ ├── g1.py
│ ├── klmbr.py
│ ├── markov.py
│ ├── mcts.py
│ ├── ponder.py
│ ├── r0.py
│ ├── rcn.py
│ └── supersummer.py
│ ├── requirements.txt
│ ├── selection.py
│ └── tools
│ └── registry.py
├── chatnio
├── .gitignore
├── Dockerfile
├── config
│ ├── config.base.yml
│ ├── config.ollama.yml
│ ├── config.override.yml
│ └── config.searxng.yml
├── override.env
└── start_chatnio.sh
├── chatui
├── configs
│ ├── chatui.airllm.yml
│ ├── chatui.aphrodite.yml
│ ├── chatui.config.yml
│ ├── chatui.dify.yml
│ ├── chatui.litellm.yml
│ ├── chatui.llamacpp.yml
│ ├── chatui.mistralrs.yml
│ ├── chatui.ollama.yml
│ ├── chatui.searxng.yml
│ ├── chatui.tabbyapi.yml
│ └── chatui.vllm.yml
├── envify.js
├── override.env
└── start_chatui.sh
├── cmdh
├── Dockerfile
├── harbor.prompt
├── ollama.ts
├── override.env
└── system.prompt
├── comfyui
├── .gitkeep
├── default-workflow.json
├── override.env
└── provisioning.sh
├── compose.agent.yml
├── compose.agentzero.yml
├── compose.aichat.yml
├── compose.aider.yml
├── compose.airllm.yml
├── compose.airweave.yml
├── compose.anythingllm.yml
├── compose.aphrodite.yml
├── compose.autogpt.yml
├── compose.bench.yml
├── compose.bionicgpt.yml
├── compose.bolt.yml
├── compose.boost.yml
├── compose.cfd.yml
├── compose.chatnio.yml
├── compose.chatui.yml
├── compose.cmdh.yml
├── compose.comfyui.yml
├── compose.dify.yml
├── compose.fabric.yml
├── compose.flowise.yml
├── compose.gptme.yml
├── compose.gum.yml
├── compose.hf.yml
├── compose.hfdownloader.yml
├── compose.hollama.yml
├── compose.jupyter.yml
├── compose.k6.yml
├── compose.kobold.yml
├── compose.ktransformers.yml
├── compose.langflow.yml
├── compose.langfuse.yml
├── compose.latentscope.yml
├── compose.ldr.yml
├── compose.librechat.yml
├── compose.libretranslate.yml
├── compose.litellm.yml
├── compose.litlytics.yml
├── compose.llamacpp.yml
├── compose.llamaswap.yml
├── compose.lmdeploy.yml
├── compose.lmeval.yml
├── compose.lobechat.yml
├── compose.localai.yml
├── compose.mcp-inspector.yml
├── compose.mcpo.yml
├── compose.metamcp.yml
├── compose.mikupad.yml
├── compose.mistralrs.yml
├── compose.modularmax.yml
├── compose.morphic.yml
├── compose.n8n.yml
├── compose.nexa.yml
├── compose.ol1.yml
├── compose.ollama.yml
├── compose.omnichain.yml
├── compose.omniparser.yml
├── compose.openhands.yml
├── compose.opint.yml
├── compose.optillm.yml
├── compose.oterm.yml
├── compose.parler.yml
├── compose.parllama.yml
├── compose.perplexica.yml
├── compose.perplexideez.yml
├── compose.pipelines.yml
├── compose.plandex.yml
├── compose.promptfoo.yml
├── compose.qdrant.yml
├── compose.qrgen.yml
├── compose.raglite.yml
├── compose.repopack.yml
├── compose.searxng.morphic.yml
├── compose.sglang.yml
├── compose.speaches.yml
├── compose.sqlchat.yml
├── compose.stt.yml
├── compose.supergateway.yml
├── compose.tabbyapi.yml
├── compose.textgrad.yml
├── compose.tgi.yml
├── compose.traefik.yml
├── compose.tts.yml
├── compose.txtairag.yml
├── compose.vllm.yml
├── compose.webtop.yml
├── compose.webui.yml
├── compose.x.agentzero.ollama.yml
├── compose.x.aichat.ktransformers.yml
├── compose.x.aichat.ollama.yml
├── compose.x.aider.airllm.yml
├── compose.x.aider.aphrodite.yml
├── compose.x.aider.cdi.yml
├── compose.x.aider.dify.yml
├── compose.x.aider.ktransformers.yml
├── compose.x.aider.litellm.yml
├── compose.x.aider.llamacpp.yml
├── compose.x.aider.mistralrs.yml
├── compose.x.aider.nvidia.yml
├── compose.x.aider.ollama.yml
├── compose.x.aider.sglang.yml
├── compose.x.aider.tabbyapi.yml
├── compose.x.aider.vllm.yml
├── compose.x.anythingllm.llamacpp.yml
├── compose.x.anythingllm.ollama.yml
├── compose.x.anythingllm.searxng.yml
├── compose.x.aphrodite.cdi.yml
├── compose.x.aphrodite.nvidia.yml
├── compose.x.boost.airllm.yml
├── compose.x.boost.aphrodite.yml
├── compose.x.boost.dify.yml
├── compose.x.boost.ktransformers.yml
├── compose.x.boost.litellm.yml
├── compose.x.boost.llamacpp.yml
├── compose.x.boost.mistralrs.yml
├── compose.x.boost.ollama.yml
├── compose.x.boost.omnichain.yml
├── compose.x.boost.sglang.yml
├── compose.x.boost.tabbyapi.yml
├── compose.x.boost.vllm.yml
├── compose.x.chatnio.ollama.yml
├── compose.x.chatnio.searxng.yml
├── compose.x.chatui.airllm.yml
├── compose.x.chatui.aphrodite.yml
├── compose.x.chatui.dify.yml
├── compose.x.chatui.litellm.yml
├── compose.x.chatui.llamacpp.yml
├── compose.x.chatui.mistralrs.yml
├── compose.x.chatui.ollama.yml
├── compose.x.chatui.searxng.yml
├── compose.x.chatui.tabbyapi.yml
├── compose.x.chatui.vllm.yml
├── compose.x.cmdh.harbor.yml
├── compose.x.cmdh.llamacpp.yml
├── compose.x.cmdh.ollama.yml
├── compose.x.cmdh.tgi.yml
├── compose.x.comfyui.cdi.yml
├── compose.x.comfyui.nvidia.yml
├── compose.x.fabric.ollama.yml
├── compose.x.gptme.ollama.yml
├── compose.x.jupyter.cdi.yml
├── compose.x.jupyter.nvidia.yml
├── compose.x.kobold.cdi.yml
├── compose.x.kobold.nvidia.yml
├── compose.x.ktransformers.cdi.yml
├── compose.x.ktransformers.nvidia.yml
├── compose.x.langflow.litellm.yml
├── compose.x.ldr.ollama.yml
├── compose.x.ldr.searxng.yml
├── compose.x.litellm.langfuse.yml
├── compose.x.litellm.optillm.yml
├── compose.x.litellm.tgi.yml
├── compose.x.litellm.vllm.yml
├── compose.x.llamacpp.cdi.yml
├── compose.x.llamacpp.nvidia.yml
├── compose.x.llamaswap.cdi.yml
├── compose.x.llamaswap.nvidia.yml
├── compose.x.lmdeploy.cdi.yml
├── compose.x.lmdeploy.nvidia.yml
├── compose.x.lmeval.cdi.yml
├── compose.x.lmeval.nvidia.yml
├── compose.x.lobechat.ollama.yml
├── compose.x.localai.cdi.yml
├── compose.x.localai.nvidia.yml
├── compose.x.localai.rocm.yml
├── compose.x.mcpo.mcp-server-time.yml
├── compose.x.mcpo.metamcp.yml
├── compose.x.mistralrs.cdi.yml
├── compose.x.mistralrs.nvidia.yml
├── compose.x.morphic.ollama.yml
├── compose.x.nexa.cdi.yml
├── compose.x.nexa.nvidia.yml
├── compose.x.ollama.cdi.yml
├── compose.x.ollama.nvidia.yml
├── compose.x.ollama.rocm.yml
├── compose.x.ollama.webui.yml
├── compose.x.openhands.ollama.yml
├── compose.x.opint.aphrodite.yml
├── compose.x.opint.litellm.yml
├── compose.x.opint.llamacpp.yml
├── compose.x.opint.mistralrs.yml
├── compose.x.opint.ollama.yml
├── compose.x.opint.tabbyapi.yml
├── compose.x.opint.vllm.yml
├── compose.x.optillm.airllm.yml
├── compose.x.optillm.aphrodite.yml
├── compose.x.optillm.boost.yml
├── compose.x.optillm.cdi.yml
├── compose.x.optillm.dify.yml
├── compose.x.optillm.ktransformers.yml
├── compose.x.optillm.litellm.yml
├── compose.x.optillm.llamacpp.yml
├── compose.x.optillm.mistralrs.yml
├── compose.x.optillm.nexa.yml
├── compose.x.optillm.nvidia.yml
├── compose.x.optillm.ollama.yml
├── compose.x.optillm.omnichain.yml
├── compose.x.optillm.pipelines.yml
├── compose.x.optillm.sglang.yml
├── compose.x.optillm.tabbyapi.yml
├── compose.x.optillm.vllm.yml
├── compose.x.oterm.ollama.yml
├── compose.x.parler.cdi.yml
├── compose.x.parler.nvidia.yml
├── compose.x.parllama.ollama.yml
├── compose.x.perplexica.ollama.yml
├── compose.x.perplexica.searxng.yml
├── compose.x.perplexideez.mdc.yml
├── compose.x.perplexideez.ollama.yml
├── compose.x.perplexideez.searxng.yml
├── compose.x.plandex.litellm.yml
├── compose.x.plandex.llamacpp.yml
├── compose.x.plandex.ollama.yml
├── compose.x.promptfoo.ollama.yml
├── compose.x.raglite.ollama.yml
├── compose.x.raglite.vllm.yml
├── compose.x.sglang.cdi.yml
├── compose.x.sglang.nvidia.yml
├── compose.x.speaches.cdi.yml
├── compose.x.speaches.nvidia.yml
├── compose.x.sqlchat.ollama.yml
├── compose.x.stt.cdi.yml
├── compose.x.stt.nvidia.yml
├── compose.x.tabbyapi.cdi.yml
├── compose.x.tabbyapi.nvidia.yml
├── compose.x.textgrad.cdi.yml
├── compose.x.textgrad.nvidia.yml
├── compose.x.tgi.cdi.yml
├── compose.x.tgi.nvidia.yml
├── compose.x.traefik.agent.yml
├── compose.x.traefik.agentzero.yml
├── compose.x.traefik.aichat.yml
├── compose.x.traefik.aider.yml
├── compose.x.traefik.airllm.yml
├── compose.x.traefik.anythingllm.yml
├── compose.x.traefik.aphrodite.yml
├── compose.x.traefik.autogpt.yml
├── compose.x.traefik.bionicgpt.yml
├── compose.x.traefik.bolt.yml
├── compose.x.traefik.boost.yml
├── compose.x.traefik.chatnio.yml
├── compose.x.traefik.chatui.yml
├── compose.x.traefik.comfyui.yml
├── compose.x.traefik.dify.yml
├── compose.x.traefik.flowise.yml
├── compose.x.traefik.hollama.yml
├── compose.x.traefik.jupyter.yml
├── compose.x.traefik.k6.yml
├── compose.x.traefik.kobold.yml
├── compose.x.traefik.ktransformers.yml
├── compose.x.traefik.langflow.yml
├── compose.x.traefik.langfuse.yml
├── compose.x.traefik.latentscope.yml
├── compose.x.traefik.ldr.yml
├── compose.x.traefik.librechat.yml
├── compose.x.traefik.libretranslate.yml
├── compose.x.traefik.litellm.yml
├── compose.x.traefik.litlytics.yml
├── compose.x.traefik.llamacpp.yml
├── compose.x.traefik.llamaswap.yml
├── compose.x.traefik.lmdeploy.yml
├── compose.x.traefik.lobechat.yml
├── compose.x.traefik.localai.yml
├── compose.x.traefik.mcp-inspector.yml
├── compose.x.traefik.mcpo.yml
├── compose.x.traefik.metamcp.yml
├── compose.x.traefik.mikupad.yml
├── compose.x.traefik.mistralrs.yml
├── compose.x.traefik.modularmax.yml
├── compose.x.traefik.morphic.yml
├── compose.x.traefik.n8n.yml
├── compose.x.traefik.nexa.yml
├── compose.x.traefik.ol1.yml
├── compose.x.traefik.ollama.yml
├── compose.x.traefik.omnichain.yml
├── compose.x.traefik.omniparser.yml
├── compose.x.traefik.openhands.yml
├── compose.x.traefik.optillm.yml
├── compose.x.traefik.parler.yml
├── compose.x.traefik.perplexica.yml
├── compose.x.traefik.perplexideez.yml
├── compose.x.traefik.pipelines.yml
├── compose.x.traefik.plandex.yml
├── compose.x.traefik.promptfoo.yml
├── compose.x.traefik.qdrant.yml
├── compose.x.traefik.raglite.yml
├── compose.x.traefik.sglang.yml
├── compose.x.traefik.speaches.yml
├── compose.x.traefik.sqlchat.yml
├── compose.x.traefik.stt.yml
├── compose.x.traefik.tabbyapi.yml
├── compose.x.traefik.textgrad.yml
├── compose.x.traefik.tgi.yml
├── compose.x.traefik.tts.yml
├── compose.x.traefik.txtairag.yml
├── compose.x.traefik.vllm.yml
├── compose.x.traefik.webtop.yml
├── compose.x.traefik.webui.yml
├── compose.x.tts.cdi.yml
├── compose.x.tts.nvidia.yml
├── compose.x.txtairag.cdi.yml
├── compose.x.txtairag.nvidia.yml
├── compose.x.txtairag.ollama.yml
├── compose.x.vllm.cdi.yml
├── compose.x.vllm.nvidia.yml
├── compose.x.webui.agent.yml
├── compose.x.webui.airllm.yml
├── compose.x.webui.aphrodite.yml
├── compose.x.webui.boost.yml
├── compose.x.webui.comfyui.yml
├── compose.x.webui.dify.yml
├── compose.x.webui.kobold.yml
├── compose.x.webui.ktransformers.yml
├── compose.x.webui.litellm.yml
├── compose.x.webui.llamacpp.yml
├── compose.x.webui.llamaswap.yml
├── compose.x.webui.mcpo.metamcp.yml
├── compose.x.webui.mistralrs.yml
├── compose.x.webui.modularmax.yml
├── compose.x.webui.nexa.yml
├── compose.x.webui.ollama.yml
├── compose.x.webui.omnichain.yml
├── compose.x.webui.optillm.yml
├── compose.x.webui.parler.yml
├── compose.x.webui.pipelines.yml
├── compose.x.webui.searxng.ollama.yml
├── compose.x.webui.searxng.yml
├── compose.x.webui.sglang.yml
├── compose.x.webui.speaches.yml
├── compose.x.webui.stt.yml
├── compose.x.webui.tabbyapi.yml
├── compose.x.webui.tts.yml
├── compose.x.webui.vllm.yml
├── compose.yml
├── deno.lock
├── dify
├── certbot
│ ├── README.md
│ ├── docker-entrypoint.sh
│ └── update-cert.template.txt
├── nginx
│ ├── conf.d
│ │ └── default.conf.template
│ ├── docker-entrypoint.sh
│ ├── https.conf.template
│ ├── nginx.conf.template
│ ├── proxy.conf.template
│ └── ssl
│ │ └── .gitkeep
├── openai
│ ├── Dockerfile
│ ├── app.js
│ └── package.json
├── override.env
└── ssrf_proxy
│ ├── docker-entrypoint.sh
│ └── squid.conf.template
├── docs
├── 1.-Harbor-User-Guide.md
├── 1.0.-Installing-Harbor.md
├── 1.1-Harbor-App.md
├── 1.2-Tools.md
├── 2.-Services.md
├── 2.1.1-Frontend&colon-Open-WebUI.md
├── 2.1.10-Frontend&colon-Chat-Nio.md
├── 2.1.11-Frontend&colon-Mikupad.md
├── 2.1.12-Frontend-oterm.md
├── 2.1.2-Frontend&colon-ComfyUI.md
├── 2.1.3-Frontend&colon-LibreChat.md
├── 2.1.4-Frontend&colon-ChatUI.md
├── 2.1.5-Frontend&colon-Lobe-Chat.md
├── 2.1.6-Frontend&colon-hollama.md
├── 2.1.7-Frontend&colon-parllama.md
├── 2.1.8-Frontend&colon-BionicGPT.md
├── 2.1.9-Frontend&colon-AnythingLLM.md
├── 2.2.1-Backend&colon-Ollama.md
├── 2.2.10-Backend&colon-lmdeploy.md
├── 2.2.11-Backend&colon-AirLLM.md
├── 2.2.12-Backend&colon-SGLang.md
├── 2.2.13-Backend&colon-KTransformers.md
├── 2.2.14-Backend&colon-Speaches.md
├── 2.2.15-Backend&colon-Nexa-SDK.md
├── 2.2.16-Backend&colon-KoboldCpp.md
├── 2.2.17-Backend-Modular-MAX.md
├── 2.2.2-Backend&colon-llama.cpp.md
├── 2.2.3-Backend&colon-vLLM.md
├── 2.2.4-Backend&colon-TabbyAPI.md
├── 2.2.5-Backend&colon-Aphrodite-Engine.md
├── 2.2.6-Backend&colon-mistral.rs.md
├── 2.2.7-Backend&colon-openedai-speech.md
├── 2.2.8-Backend&colon-Parler.md
├── 2.2.9-Backend&colon-text-generation-inference.md
├── 2.3.1-Satellite&colon-SearXNG.md
├── 2.3.10-Satellite&colon-fabric.md
├── 2.3.11-Satellite&colon-txtai-RAG.md
├── 2.3.12-Satellite&colon-TextGrad.md
├── 2.3.13-Satellite&colon-aider.md
├── 2.3.14-Satellite&colon-aichat.md
├── 2.3.15-Satellite&colon-AutoGPT.md
├── 2.3.16-Satellite&colon-omnichain.md
├── 2.3.17-Satellite&colon-lm-evaluation-harness.md
├── 2.3.18-Satellite&colon-JupyterLab.md
├── 2.3.19-Satellite&colon-ol1.md
├── 2.3.2-Satellite&colon-Perplexica.md
├── 2.3.20-Satellite&colon-OpenHands.md
├── 2.3.21-Satellite&colon-LitLytics.md
├── 2.3.22-Satellite&colon-Repopack.md
├── 2.3.23-Satellite&colon-n8n.md
├── 2.3.24-Satellite&colon-Bolt.new.md
├── 2.3.25-Satellite&colon-Open-WebUI-Pipelines.md
├── 2.3.26-Satellite&colon-Qdrant.md
├── 2.3.27-Satellite&colon-K6.md
├── 2.3.28-Satellite&colon-Promptfoo.md
├── 2.3.29-Satellite&colon-Webtop.md
├── 2.3.3-Satellite&colon-Dify.md
├── 2.3.30-Satellite&colon-OmniParser.md
├── 2.3.31-Satellite&colon-Flowise.md
├── 2.3.32-Satellite&colon-LangFlow.md
├── 2.3.33-Satellite&colon-OptiLLM.md
├── 2.3.34-Satellite-Morphic.md
├── 2.3.35-Satellite-SQL-Chat.md
├── 2.3.36-Satellite-gptme.md
├── 2.3.37-Satellite-traefik.md
├── 2.3.38-Satellite-Latent-Scope.md
├── 2.3.39-Satellite-RAGLite.md
├── 2.3.4-Satellite&colon-Plandex.md
├── 2.3.40-Satellite-llamaswap.md
├── 2.3.41-Satellite-libretranslate.md
├── 2.3.42-Satellite-metamcp.md
├── 2.3.43-Satellite-mcpo.md
├── 2.3.44-Satellite-supergateway.md
├── 2.3.45-Satellite-Local-Deep-Research.md
├── 2.3.46-Satellite-LocalAI.md
├── 2.3.47-Satellite-Agent-Zero.md
├── 2.3.48-Satellite-Airweave.md
├── 2.3.5-Satellite&colon-LiteLLM.md
├── 2.3.6-Satellite&colon-langfuse.md
├── 2.3.7-Satellite&colon-Open-Interpreter.md
├── 2.3.8-Satellite&colon-cloudflared.md
├── 2.3.9-Satellite&colon-cmdh.md
├── 3.-Harbor-CLI-Reference.md
├── 4.-Compatibility.md
├── 5.1.-Harbor-Bench.md
├── 5.2.-Harbor-Boost.md
├── 5.2.1.-Harbor-Boost-Custom-Modules.md
├── 6.-Harbor-Compose-Setup.md
├── 7.-Adding-A-New-Service.md
├── README.md
├── _Footer.md
├── anythingllm.png
├── bench-report.png
├── bench-superset.png
├── bionicgpt.png
├── bolt-local-qwen.png
├── bolt-openailike.png
├── boost-behavior.png
├── boost-custom-example.png
├── boost-dot.png
├── boost-markov.png
├── boost-r0.png
├── boost-unstable.png
├── boost.png
├── chatnio-searxng.png
├── chatui-searxng.png
├── dify-harbor.png
├── dify-ollama.png
├── dify-sample-webllama.yml
├── dify-sample-webrag.yml
├── dify-searxng.png
├── dify-webui.png
├── flowise.png
├── g1-reasoning.png
├── gptme.png
├── harbor-2.png
├── harbor-agentzero.png
├── harbor-airllm.png
├── harbor-airweave.png
├── harbor-app-2.png
├── harbor-app-3.png
├── harbor-app-4.png
├── harbor-app-5.png
├── harbor-app-6.png
├── harbor-app-7.png
├── harbor-app-8.png
├── harbor-app-macos.png
├── harbor-app.png
├── harbor-arch-diag.png
├── harbor-boost.png
├── harbor-history.gif
├── harbor-k6.png
├── harbor-latentscope.png
├── harbor-ldr.png
├── harbor-libretranslate.png
├── harbor-metamcp.png
├── harbor-morphic.png
├── harbor-ollama-models.png
├── harbor-oterm.png
├── harbor-raglite.png
├── harbor-tools.png
├── harbor-top.png
├── harbor-traefik.png
├── harbor-webtop.png
├── harbor-webui-json.png
├── image.png
├── ktransformers-webui.png
├── langflow.png
├── langfuse.png
├── litlytics-config.png
├── litlytics.png
├── mcp-inspector.png
├── mcpo.png
├── mikupad.png
├── nexa-webui-error.png
├── nexa-webui-workaround.png
├── ol1.png
├── omnichain-import.png
├── omnichain.png
├── omniparser.png
├── openhands-config.png
├── optillm.png
├── parllama.png
├── plandex-exec.png
├── promptfoo-2.png
├── promptfoo.png
├── qr.png
├── seaxng-webrag.png
├── sqlchat.png
├── tunnels.png
├── txtairag.png
├── webui-boost-status.png
├── webui-pipelines-2.png
└── webui-pipelines.png
├── fabric
├── Dockerfile
└── override.env
├── flowise
├── .gitignore
└── override.env
├── gptme
├── Dockerfile
├── config.toml
└── override.env
├── gum
└── Dockerfile
├── harbor.sh
├── harbor
└── __init__.py
├── hf
├── Dockerfile
└── override.env
├── hfdownloader
├── Dockerfile
└── override.env
├── http-catalog
├── agent.http
├── airllm.http
├── aphrodite.http
├── boost.http
├── comfyui.http
├── dify.http
├── hf.http
├── kobold.http
├── ktransformers.http
├── langfuse.http
├── litellm.http
├── llamacpp.http
├── llamaswap.http
├── mistral.http
├── mistralrs.http
├── modularmax.http
├── nexa.http
├── ollama.http
├── ollama
│ └── completions.http
├── omnichain.http
├── optillm.http
├── parler.http
├── perplexideez.http
├── plandex.http
├── sglang.http
├── speaches.http
├── stt.http
├── tabbyapi.http
├── tgi.http
├── tts.http
├── vllm.http
└── webui.http
├── install.sh
├── jupyter
├── Dockerfile
├── override.env
└── workspace
│ └── 000-sample.ipynb
├── k6
├── .gitignore
├── dashboards
│ ├── k6-load-testing-results_rev3.json
│ └── k6-openai-tokens_rev1.json
├── docker-compose.yaml
├── grafana-dashboard.yaml
├── grafana-datasource.yaml
├── override.env
└── scripts
│ ├── concurrent-prefix-caching.js
│ ├── example.js
│ ├── helpers
│ ├── config.js
│ ├── http.js
│ ├── ollamaHttp.js
│ ├── openaiGeneric.js
│ └── utils.js
│ ├── ollama.js
│ ├── openai-api-throughput.js
│ └── payloads
│ ├── completions.js
│ └── ollama.js
├── kobold
├── .gitignore
└── override.env
├── ktransformers
├── Dockerfile
├── chat.py
└── override.env
├── langflow
├── .gitignore
└── override.env
├── langfuse
├── .gitignore
├── .gitkeep
└── override.env
├── latentscope
├── .gitignore
├── Dockerfile
└── override.env
├── ldr
├── .gitignore
└── override.env
├── librechat
├── .env
├── .gitignore
├── .gitkeep
├── librechat.yml
├── override.env
└── start_librechat.sh
├── libretranslate
├── .gitignore
└── override.env
├── litellm
├── litellm.config.yaml
├── litellm.langfuse.yaml
├── litellm.optillm.yaml
├── litellm.tgi.yaml
├── litellm.vllm.yaml
├── override.env
└── start_litellm.sh
├── litlytics
└── override.env
├── llamacpp
├── data
│ └── templates
│ │ └── tars.jinja
└── override.env
├── llamaswap
├── config.yaml
└── override.env
├── lmeval
├── Dockerfile
└── override.env
├── lobechat
└── override.env
├── localai
├── .gitignore
└── override.env
├── mcp
├── .gitignore
├── inspector-entrypoint.sh
└── override.env
├── mcpo
├── configs
│ ├── mcpo.mcp-server-fetch.json
│ ├── mcpo.mcp-server-time.json
│ ├── mcpo.metamcp.json
│ └── mcpo.override.json
├── override.env
└── start_mcpo.sh
├── metamcp
├── .gitignore
├── override.env
└── start-sse.mjs
├── mikupad
└── override.env
├── modularmax
└── override.env
├── morphic
├── .gitignore
└── override.env
├── n8n
├── .gitignore
├── backup
│ ├── credentials
│ │ └── 9LdDQI9lblNjIGIZ.json
│ └── workflows
│ │ └── 6K7zSSBeRa0z1hi6.json
└── override.env
├── nexa
├── Dockerfile
├── nvidia.sh
├── override.env
├── proxy.Dockerfile
└── proxy_server.py
├── ol1
├── Dockerfile
├── README.md
├── app.py
└── override.env
├── ollama
├── .gitkeep
├── init_entrypoint.sh
├── modelfiles
│ ├── README.md
│ ├── flowaicom-flow-judge.Modelfile
│ ├── gemma3-qat-tools.Modelfile
│ ├── hargent.Modelfile
│ ├── llama3.1_8b.Modelfile
│ ├── llama3.1_q6k_48k.Modelfile
│ ├── qwen2.5_7b_q8_32k.Modelfile
│ ├── qwen2.5_7b_q8_48k.Modelfile
│ ├── qwen2.5_q6k_32k.Modelfile
│ └── tars.Modelfile
└── override.env
├── omnichain
├── Dockerfile
├── custom_nodes
│ └── example
│ │ └── example.maker.js
├── entrypoint.sh
├── examples
│ └── HarborChat.json
├── files
│ └── harbor.prompt
├── openai.ts
└── override.env
├── omniparser
├── Dockerfile
└── override.env
├── open-webui
├── configs
│ ├── config.agent.json
│ ├── config.airllm.json
│ ├── config.aphrodite.json
│ ├── config.boost.json
│ ├── config.comfyui.json
│ ├── config.dify.json
│ ├── config.json
│ ├── config.kobold.json
│ ├── config.ktransformers.json
│ ├── config.litellm.json
│ ├── config.llamacpp.json
│ ├── config.llamaswap.json
│ ├── config.mistralrs.json
│ ├── config.modularmax.json
│ ├── config.nexa.json
│ ├── config.ollama.json
│ ├── config.omnichain.json
│ ├── config.optillm.json
│ ├── config.override.json
│ ├── config.parler.json
│ ├── config.pipelines.json
│ ├── config.searxng.json
│ ├── config.sglang.json
│ ├── config.speaches.json
│ ├── config.stt.json
│ ├── config.tabbyapi.json
│ ├── config.tts.json
│ ├── config.vllm.json
│ ├── config.x.mcpo.metamcp.json
│ └── config.x.searxng.ollama.json
├── extras
│ ├── artifact.py
│ └── mcts.py
├── override.env
└── start_webui.sh
├── openhands
├── .gitignore
└── override.env
├── openinterpreter
├── Dockerfile
└── override.env
├── optillm
├── .gitignore
└── override.env
├── oterm
├── .gitignore
├── Dockerfile
└── override.env
├── package.json
├── parler
├── main.py
└── override.env
├── parllama
├── Dockerfile
└── override.env
├── perplexica
├── override.env
└── source.config.toml
├── perplexideez
├── .gitignore
└── override.env
├── pipelines
└── override.env
├── plandex
├── Dockerfile
└── override.env
├── poetry.lock
├── profiles
└── default.env
├── promptfoo
├── .gitignore
├── README.md
├── examples
│ ├── bias
│ │ ├── README.md
│ │ └── promptfooconfig.yaml
│ ├── hello-promptfoo
│ │ ├── README.md
│ │ └── promptfooconfig.yaml
│ ├── misguided
│ │ ├── README.md
│ │ └── promptfooconfig.yaml
│ └── temp-test
│ │ ├── README.md
│ │ └── promptfooconfig.yaml
└── override.env
├── pyproject.toml
├── qdrant
└── override.env
├── qrgen
├── Dockerfile
└── gen.ts
├── raglite
├── .gitignore
├── Dockerfile
└── override.env
├── repopack
├── Dockerfile
└── override.env
├── requirements.sh
├── routines
├── config.js
├── docker.js
├── envManager.js
├── manageTools.js
├── mergeComposeFiles.js
├── paths.js
└── utils.js
├── searxng
├── override.env
├── settings.yml
├── settings.yml.new
├── uwsgi.ini
└── uwsgi.ini.new
├── shared
├── README.md
├── harbor_entrypoint.sh
├── json_config_merger.py
├── proxy_user.sh
├── yaml_config_merger.js
└── yaml_config_merger.py
├── speaches
├── hf_utils.py
├── kokoro_utils.py
└── override.env
├── sqlchat
└── override.env
├── stt
└── override.env
├── supergateway
└── override.env
├── tabbyapi
├── api_tokens.yml
├── config.yml
├── override.env
└── start_tabbyapi.sh
├── textgrad
├── Dockerfile
├── override.env
└── workspace
│ └── 000-sample.ipynb
├── tgi
└── override.env
├── tools
└── config.yaml
├── traefik
├── acme.json
├── override.env
└── traefik.yml
├── tts
├── config
│ ├── pre_process_map.yaml
│ └── voice_to_speaker.yaml
└── override.env
├── txtairag
└── rag.py
├── vllm
├── Dockerfile
└── override.env
└── webtop
├── .gitignore
├── Dockerfile
├── init
├── create_symlink.sh
├── fix_desktop_app.sh
└── provision_docker_groups.sh
└── override.env
/.aider.chat.history.md:
--------------------------------------------------------------------------------
1 |
2 | # aider chat started at 2024-08-12 20:29:40
3 |
4 |
--------------------------------------------------------------------------------
/.editorconfig:
--------------------------------------------------------------------------------
1 | # top-most EditorConfig file
2 | root = true
3 |
4 | # Unix-style newlines with a newline ending every file
5 | [*]
6 | end_of_line = lf
7 | insert_final_newline = true
8 |
9 | # 2 space indentation
10 | [*.{py,yml,yaml,json,js,ts}]
11 | indent_style = space
12 | indent_size = 2
13 |
14 |
--------------------------------------------------------------------------------
/.scripts/deno.json:
--------------------------------------------------------------------------------
1 | {
2 | "imports": {
3 | "@std/assert": "jsr:@std/assert@1"
4 | }
5 | }
6 |
--------------------------------------------------------------------------------
/.scripts/release.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | set -eo pipefail
4 |
5 | echo "Seeding..."
6 | harbor dev seed
7 | harbor dev seed-cdi
8 | harbor dev seed-traefik
9 |
10 | echo "Moving docs..."
11 | harbor dev docs
12 |
13 | # echo "NPM Publish..."
14 | # npm publish --access public
15 |
16 | # # # echo "PyPi Publish..."
17 | # poetry env use system
18 | # poetry build -v
19 | # poetry publish -v
--------------------------------------------------------------------------------
/.style.yapf:
--------------------------------------------------------------------------------
1 | [style]
2 | based_on_style = google
3 | indent_width = 2
4 | continuation_indent_width = 2
5 | spaces_before_comment = 4
6 | dedent_closing_brackets=true
--------------------------------------------------------------------------------
/.vscode/.copilot-instructions.md:
--------------------------------------------------------------------------------
1 | Add comments to non-trivial pieces of code.
2 | When writing bash scripts, ensure that code is compatible with bash 3 for MacOS.
--------------------------------------------------------------------------------
/.vscode/settings.json:
--------------------------------------------------------------------------------
1 | {
2 | "files.exclude": {
3 | "**/.git": false
4 | },
5 | "deno.enable": false,
6 | "github.copilot.chat.codeGeneration.instructions": [
7 | {
8 | "file": ".copilot-instructions.md"
9 | }
10 | ]
11 | }
--------------------------------------------------------------------------------
/agent/.gitignore:
--------------------------------------------------------------------------------
1 | # Pycache
2 | src/**/__pycache__/
3 | data/
--------------------------------------------------------------------------------
/agent/docker/services.d/agent.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/with-contenv bash
2 |
3 | # Start Harbor Agent
4 | cd /config/agent/src || exit 1
5 | uvicorn main:app --host 0.0.0.0 --port 8000 --reload
--------------------------------------------------------------------------------
/agent/override.env:
--------------------------------------------------------------------------------
1 | # This file can be used for additional
2 | # environment variable overrides that will
3 | # only be visible to the agent service.
4 | # You can also use the "harbor env" command to set these variables.
5 | LOG_LEVEL=DEBUG
--------------------------------------------------------------------------------
/agent/requirements.txt:
--------------------------------------------------------------------------------
1 | fastapi==0.111.0
2 | uvicorn[standard]==0.30.6
3 | requests==2.32.3
4 | aiohttp==3.10.5
5 | openai
--------------------------------------------------------------------------------
/agent/src/log.py:
--------------------------------------------------------------------------------
1 | import logging
2 |
3 | def setup_logger(name):
4 | logger = logging.getLogger(name)
5 | if not logger.handlers:
6 | logger.setLevel(logging.DEBUG)
7 | handler = logging.StreamHandler()
8 | handler.set_name(name)
9 | formatter = logging.Formatter(
10 | "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
11 | )
12 | handler.setFormatter(formatter)
13 | logger.addHandler(handler)
14 | logger.propagate = False
15 | return logger
--------------------------------------------------------------------------------
/agentzero/.gitignore:
--------------------------------------------------------------------------------
1 | data/
--------------------------------------------------------------------------------
/agentzero/override.env:
--------------------------------------------------------------------------------
1 | # This file can be used for additional environment variables
2 | # specifically for the 'agentzero' service.
3 | # You can also use the "harbor env" command to set these variables.
4 |
--------------------------------------------------------------------------------
/aichat/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM python:3.11
2 | SHELL ["/bin/bash", "-c"]
3 |
4 | WORKDIR /app
5 | RUN pip install pyyaml
6 |
7 | RUN curl https://zyedidia.github.io/eget.sh | sh
8 | RUN ./eget sigoden/aichat
9 |
10 | ENTRYPOINT [ "/app/aichat" ]
--------------------------------------------------------------------------------
/aichat/configs/aichat.airllm.yml:
--------------------------------------------------------------------------------
1 | clients:
2 | - type: openai-compatible
3 | name: airllm
4 | api_base: http://airllm:5000/v1
5 | api_key: sk-airllm
6 | models:
7 | - name: ${HARBOR_AICHAT_MODEL}
8 |
9 |
--------------------------------------------------------------------------------
/aichat/configs/aichat.aphrodite.yml:
--------------------------------------------------------------------------------
1 | clients:
2 | - type: openai-compatible
3 | name: aphrodite
4 | api_base: http://aphrodite:2242/v1
5 | api_key: sk-aphrodite
6 | models:
7 | - name: ${HARBOR_AICHAT_MODEL}
8 |
9 |
--------------------------------------------------------------------------------
/aichat/configs/aichat.dify.yml:
--------------------------------------------------------------------------------
1 | clients:
2 | - type: openai-compatible
3 | name: dify
4 | api_base: http://dify-openai:3000/v1
5 | api_key: "${HARBOR_DIFY_OPENAI_WORKFLOW}"
6 | models:
7 | - name: ${HARBOR_AICHAT_MODEL}
8 |
9 |
--------------------------------------------------------------------------------
/aichat/configs/aichat.ktransformers.yml:
--------------------------------------------------------------------------------
1 | clients:
2 | - type: openai-compatible
3 | name: ktransformers
4 | api_base: http://ktransformers:12456/v1
5 | api_key: sk-ktransformers
6 | models:
7 | - name: ${HARBOR_AICHAT_MODEL}
8 |
9 |
--------------------------------------------------------------------------------
/aichat/configs/aichat.litellm.yml:
--------------------------------------------------------------------------------
1 | clients:
2 | - type: openai-compatible
3 | name: litellm
4 | api_base: http://litellm:4000/v1
5 | api_key: "${HARBOR_LITELLM_MASTER_KEY}"
6 | models:
7 | - name: ${HARBOR_AICHAT_MODEL}
8 |
9 |
--------------------------------------------------------------------------------
/aichat/configs/aichat.llamacpp.yml:
--------------------------------------------------------------------------------
1 | clients:
2 | - type: openai-compatible
3 | name: llamacpp
4 | api_base: http://llamacpp:8080/v1
5 | api_key: sk-llamacpp
6 | models:
7 | - name: ${HARBOR_AICHAT_MODEL}
8 |
9 |
--------------------------------------------------------------------------------
/aichat/configs/aichat.mistralrs.yml:
--------------------------------------------------------------------------------
1 | clients:
2 | - type: openai-compatible
3 | name: mistralrs
4 | api_base: http://mistralrs:8021/v1
5 | api_key: sk-mistralrs
6 | models:
7 | - name: ${HARBOR_AICHAT_MODEL}
8 |
9 |
--------------------------------------------------------------------------------
/aichat/configs/aichat.ollama.yml:
--------------------------------------------------------------------------------
1 | clients:
2 | - type: openai-compatible
3 | name: ollama
4 | api_base: ${HARBOR_OLLAMA_INTERNAL_URL}/v1
5 | api_key: sk-ollama
6 | models:
7 | - name: ${HARBOR_AICHAT_MODEL}
8 |
9 |
--------------------------------------------------------------------------------
/aichat/configs/aichat.sglang.yml:
--------------------------------------------------------------------------------
1 | clients:
2 | - type: openai-compatible
3 | name: sglang
4 | api_base: http://sglang:30000/v1
5 | api_key: sk-sglang
6 | models:
7 | - name: ${HARBOR_AICHAT_MODEL}
8 |
9 |
--------------------------------------------------------------------------------
/aichat/configs/aichat.tabbyapi.yml:
--------------------------------------------------------------------------------
1 | clients:
2 | - type: openai-compatible
3 | name: tabbyapi
4 | api_base: http://tabbyapi:5000/v1
5 | api_key: "${HARBOR_TABBYAPI_ADMIN_KEY}"
6 | models:
7 | - name: ${HARBOR_AICHAT_MODEL}
8 |
9 |
--------------------------------------------------------------------------------
/aichat/configs/aichat.vllm.yml:
--------------------------------------------------------------------------------
1 | clients:
2 | - type: openai-compatible
3 | name: vllm
4 | api_base: http://vllm:8000/v1
5 | api_key: sk-vllm
6 | models:
7 | - name: ${HARBOR_AICHAT_MODEL}
8 |
9 |
--------------------------------------------------------------------------------
/aichat/override.env:
--------------------------------------------------------------------------------
1 | # Can contain additional environment variables
2 | # that'll only be visible for aichat service
3 | # You can also use "harbor env" command to set these variables
4 |
--------------------------------------------------------------------------------
/aider/configs/aider.airllm.yml:
--------------------------------------------------------------------------------
1 | openai-api-base: http://airllm:5000/v1
2 | openai-api-key: sk-airllm
3 | model: openai/${HARBOR_AIDER_MODEL}
4 | verify-ssl: false
--------------------------------------------------------------------------------
/aider/configs/aider.aphrodite.yml:
--------------------------------------------------------------------------------
1 | openai-api-base: http://aphrodite:2242/v1
2 | openai-api-key: sk-aphrodite
3 | model: openai/${HARBOR_AIDER_MODEL}
4 | verify-ssl: false
--------------------------------------------------------------------------------
/aider/configs/aider.config.yml:
--------------------------------------------------------------------------------
1 | # This is the base config where everything else will be merged
2 | {}
--------------------------------------------------------------------------------
/aider/configs/aider.dify.yml:
--------------------------------------------------------------------------------
1 | openai-api-base: http://dify-openai:3000/v1
2 | openai-api-key: ${HARBOR_DIFY_OPENAI_WORKFLOW}
3 | model: openai/${HARBOR_AIDER_MODEL}
4 | verify-ssl: false
--------------------------------------------------------------------------------
/aider/configs/aider.ktransformers.yml:
--------------------------------------------------------------------------------
1 | openai-api-base: http://ktransformers:12456/v1
2 | openai-api-key: sk-ktransformers
3 | model: openai/${HARBOR_AIDER_MODEL}
4 | verify-ssl: false
--------------------------------------------------------------------------------
/aider/configs/aider.litellm.yml:
--------------------------------------------------------------------------------
1 | openai-api-base: http://litellm:4000/v1
2 | openai-api-key: ${HARBOR_LITELLM_MASTER_KEY}
3 | model: openai/${HARBOR_AIDER_MODEL}
4 | verify-ssl: false
--------------------------------------------------------------------------------
/aider/configs/aider.llamacpp.yml:
--------------------------------------------------------------------------------
1 | openai-api-base: http://llamacpp:8080/v1
2 | openai-api-key: sk-llamacpp
3 | model: openai/${HARBOR_AIDER_MODEL}
4 | verify-ssl: false
--------------------------------------------------------------------------------
/aider/configs/aider.mistralrs.yml:
--------------------------------------------------------------------------------
1 | openai-api-base: http://mistralrs:8021/v1
2 | openai-api-key: sk-mistralrs
3 | model: openai/${HARBOR_AIDER_MODEL}
4 | verify-ssl: false
--------------------------------------------------------------------------------
/aider/configs/aider.ollama.yml:
--------------------------------------------------------------------------------
1 | openai-api-base: ${HARBOR_OLLAMA_INTERNAL_URL}/v1
2 | openai-api-key: sk-ollama
3 | model: openai/${HARBOR_AIDER_MODEL}
4 | verify-ssl: false
--------------------------------------------------------------------------------
/aider/configs/aider.sglang.yml:
--------------------------------------------------------------------------------
1 | openai-api-base: http://sglang:30000/v1
2 | openai-api-key: sk-sglang
3 | model: openai/${HARBOR_AIDER_MODEL}
4 | verify-ssl: false
--------------------------------------------------------------------------------
/aider/configs/aider.tabbyapi.yml:
--------------------------------------------------------------------------------
1 | openai-api-base: http://tabbyapi:5000/v1
2 | openai-api-key: ${HARBOR_TABBYAPI_ADMIN_KEY}
3 | model: openai/${HARBOR_AIDER_MODEL}
4 | verify-ssl: false
--------------------------------------------------------------------------------
/aider/configs/aider.vllm.yml:
--------------------------------------------------------------------------------
1 | openai-api-base: http://vllm:8000/v1
2 | openai-api-key: sk-vllm
3 | model: openai/${HARBOR_AIDER_MODEL}
4 | verify-ssl: false
--------------------------------------------------------------------------------
/aider/override.env:
--------------------------------------------------------------------------------
1 | # See all options:
2 | # https://aider.chat/docs/config/dotenv.html
3 | #
4 | # Example override:
5 | # AIDER_DARK_MODE=true
6 | AIDER_CHECK_UPDATE=false
7 |
--------------------------------------------------------------------------------
/airllm/Dockerfile:
--------------------------------------------------------------------------------
1 | # Intentionally reuses the same PyTorch base image as the "parler"
2 | # service, to increase the likelihood of the layers
3 | # already being cached and reused across services
4 | FROM pytorch/pytorch:2.3.0-cuda12.1-cudnn8-runtime
5 |
6 | WORKDIR /app
7 |
8 | # AirLLM + friends for the OpenAI server
9 | RUN pip install airllm flask pydantic bitsandbytes
10 | COPY ./server.py ./server.py
11 |
12 | ENTRYPOINT [ "python", "/app/server.py" ]
13 |
14 |
--------------------------------------------------------------------------------
/airllm/override.env:
--------------------------------------------------------------------------------
1 | # Can contain additional environment variables
2 | # that'll only be visible for airllm service
3 | # You can also use "harbor env" command to set these variables
4 |
--------------------------------------------------------------------------------
/airweave/.gitignore:
--------------------------------------------------------------------------------
1 | data/
--------------------------------------------------------------------------------
/anythingllm/override.env:
--------------------------------------------------------------------------------
1 | # This file can be used for additional environment
2 | # variables for the anythingllm service
3 |
--------------------------------------------------------------------------------
/aphrodite/override.env:
--------------------------------------------------------------------------------
1 | # This file can be used for overrides specific to the "aphrodite" service.
2 | # You can also use "harbor env" command to set these variables
3 | # APHRODITE_LOG_LEVEL=debug
4 | # APHRODITE_TRACE_FUNCTION=1
5 | # CUDA_LAUNCH_BLOCKING=1
6 | # NCCL_DEBUG=TRACE=1
7 |
--------------------------------------------------------------------------------
/app/.editorconfig:
--------------------------------------------------------------------------------
1 | # top-most EditorConfig file
2 | root = true
3 |
4 | # Unix-style newlines with a newline ending every file
5 | [*]
6 | end_of_line = lf
7 | insert_final_newline = true
8 |
9 | # 2 space indentation
10 | [*.{py,yml,yaml,json,js,ts,jsx,tsx,html,css,scss,md}]
11 | indent_style = space
12 | indent_size = 2
13 |
--------------------------------------------------------------------------------
/app/.gitignore:
--------------------------------------------------------------------------------
1 | # Logs
2 | logs
3 | *.log
4 | npm-debug.log*
5 | yarn-debug.log*
6 | yarn-error.log*
7 | pnpm-debug.log*
8 | lerna-debug.log*
9 |
10 | node_modules
11 | dist
12 | dist-ssr
13 | *.local
14 |
15 | # Editor directories and files
16 | .vscode/*
17 | !.vscode/extensions.json
18 | .idea
19 | .DS_Store
20 | *.suo
21 | *.ntvs*
22 | *.njsproj
23 | *.sln
24 | *.sw?
25 |
--------------------------------------------------------------------------------
/app/app-icon.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/app/app-icon.png
--------------------------------------------------------------------------------
/app/bun.lockb:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/app/bun.lockb
--------------------------------------------------------------------------------
/app/postcss.config.js:
--------------------------------------------------------------------------------
1 | export default {
2 | plugins: {
3 | tailwindcss: {},
4 | autoprefixer: {},
5 | },
6 | }
7 |
--------------------------------------------------------------------------------
/app/src-tauri/.gitignore:
--------------------------------------------------------------------------------
1 | # Generated by Cargo
2 | # will have compiled files and executables
3 | /target/
4 |
5 | # Generated by Tauri
6 | # will have schema files for capabilities auto-completion
7 | /gen/schemas
8 |
--------------------------------------------------------------------------------
/app/src-tauri/build.rs:
--------------------------------------------------------------------------------
1 | fn main() {
2 | tauri_build::build()
3 | }
4 |
--------------------------------------------------------------------------------
/app/src-tauri/capabilities/desktop.json:
--------------------------------------------------------------------------------
1 | {
2 | "identifier": "desktop-capability",
3 | "platforms": [
4 | "macOS",
5 | "windows",
6 | "linux"
7 | ],
8 | "permissions": [
9 | "window-state:default",
10 | "store:default",
11 | "fs:default",
12 | "autostart:default"
13 | ]
14 | }
--------------------------------------------------------------------------------
/app/src-tauri/icons/128x128.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/app/src-tauri/icons/128x128.png
--------------------------------------------------------------------------------
/app/src-tauri/icons/128x128@2x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/app/src-tauri/icons/128x128@2x.png
--------------------------------------------------------------------------------
/app/src-tauri/icons/32x32.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/app/src-tauri/icons/32x32.png
--------------------------------------------------------------------------------
/app/src-tauri/icons/Square107x107Logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/app/src-tauri/icons/Square107x107Logo.png
--------------------------------------------------------------------------------
/app/src-tauri/icons/Square142x142Logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/app/src-tauri/icons/Square142x142Logo.png
--------------------------------------------------------------------------------
/app/src-tauri/icons/Square150x150Logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/app/src-tauri/icons/Square150x150Logo.png
--------------------------------------------------------------------------------
/app/src-tauri/icons/Square284x284Logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/app/src-tauri/icons/Square284x284Logo.png
--------------------------------------------------------------------------------
/app/src-tauri/icons/Square30x30Logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/app/src-tauri/icons/Square30x30Logo.png
--------------------------------------------------------------------------------
/app/src-tauri/icons/Square310x310Logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/app/src-tauri/icons/Square310x310Logo.png
--------------------------------------------------------------------------------
/app/src-tauri/icons/Square44x44Logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/app/src-tauri/icons/Square44x44Logo.png
--------------------------------------------------------------------------------
/app/src-tauri/icons/Square71x71Logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/app/src-tauri/icons/Square71x71Logo.png
--------------------------------------------------------------------------------
/app/src-tauri/icons/Square89x89Logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/app/src-tauri/icons/Square89x89Logo.png
--------------------------------------------------------------------------------
/app/src-tauri/icons/StoreLogo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/app/src-tauri/icons/StoreLogo.png
--------------------------------------------------------------------------------
/app/src-tauri/icons/android/mipmap-hdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/app/src-tauri/icons/android/mipmap-hdpi/ic_launcher.png
--------------------------------------------------------------------------------
/app/src-tauri/icons/android/mipmap-hdpi/ic_launcher_foreground.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/app/src-tauri/icons/android/mipmap-hdpi/ic_launcher_foreground.png
--------------------------------------------------------------------------------
/app/src-tauri/icons/android/mipmap-hdpi/ic_launcher_round.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/app/src-tauri/icons/android/mipmap-hdpi/ic_launcher_round.png
--------------------------------------------------------------------------------
/app/src-tauri/icons/android/mipmap-mdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/app/src-tauri/icons/android/mipmap-mdpi/ic_launcher.png
--------------------------------------------------------------------------------
/app/src-tauri/icons/android/mipmap-mdpi/ic_launcher_foreground.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/app/src-tauri/icons/android/mipmap-mdpi/ic_launcher_foreground.png
--------------------------------------------------------------------------------
/app/src-tauri/icons/android/mipmap-mdpi/ic_launcher_round.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/app/src-tauri/icons/android/mipmap-mdpi/ic_launcher_round.png
--------------------------------------------------------------------------------
/app/src-tauri/icons/android/mipmap-xhdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/app/src-tauri/icons/android/mipmap-xhdpi/ic_launcher.png
--------------------------------------------------------------------------------
/app/src-tauri/icons/android/mipmap-xhdpi/ic_launcher_foreground.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/app/src-tauri/icons/android/mipmap-xhdpi/ic_launcher_foreground.png
--------------------------------------------------------------------------------
/app/src-tauri/icons/android/mipmap-xhdpi/ic_launcher_round.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/app/src-tauri/icons/android/mipmap-xhdpi/ic_launcher_round.png
--------------------------------------------------------------------------------
/app/src-tauri/icons/android/mipmap-xxhdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/app/src-tauri/icons/android/mipmap-xxhdpi/ic_launcher.png
--------------------------------------------------------------------------------
/app/src-tauri/icons/android/mipmap-xxhdpi/ic_launcher_foreground.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/app/src-tauri/icons/android/mipmap-xxhdpi/ic_launcher_foreground.png
--------------------------------------------------------------------------------
/app/src-tauri/icons/android/mipmap-xxhdpi/ic_launcher_round.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/app/src-tauri/icons/android/mipmap-xxhdpi/ic_launcher_round.png
--------------------------------------------------------------------------------
/app/src-tauri/icons/android/mipmap-xxxhdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/app/src-tauri/icons/android/mipmap-xxxhdpi/ic_launcher.png
--------------------------------------------------------------------------------
/app/src-tauri/icons/android/mipmap-xxxhdpi/ic_launcher_foreground.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/app/src-tauri/icons/android/mipmap-xxxhdpi/ic_launcher_foreground.png
--------------------------------------------------------------------------------
/app/src-tauri/icons/android/mipmap-xxxhdpi/ic_launcher_round.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/app/src-tauri/icons/android/mipmap-xxxhdpi/ic_launcher_round.png
--------------------------------------------------------------------------------
/app/src-tauri/icons/icon.icns:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/app/src-tauri/icons/icon.icns
--------------------------------------------------------------------------------
/app/src-tauri/icons/icon.ico:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/app/src-tauri/icons/icon.ico
--------------------------------------------------------------------------------
/app/src-tauri/icons/icon.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/app/src-tauri/icons/icon.png
--------------------------------------------------------------------------------
/app/src-tauri/icons/ios/AppIcon-20x20@1x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/app/src-tauri/icons/ios/AppIcon-20x20@1x.png
--------------------------------------------------------------------------------
/app/src-tauri/icons/ios/AppIcon-20x20@2x-1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/app/src-tauri/icons/ios/AppIcon-20x20@2x-1.png
--------------------------------------------------------------------------------
/app/src-tauri/icons/ios/AppIcon-20x20@2x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/app/src-tauri/icons/ios/AppIcon-20x20@2x.png
--------------------------------------------------------------------------------
/app/src-tauri/icons/ios/AppIcon-20x20@3x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/app/src-tauri/icons/ios/AppIcon-20x20@3x.png
--------------------------------------------------------------------------------
/app/src-tauri/icons/ios/AppIcon-29x29@1x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/app/src-tauri/icons/ios/AppIcon-29x29@1x.png
--------------------------------------------------------------------------------
/app/src-tauri/icons/ios/AppIcon-29x29@2x-1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/app/src-tauri/icons/ios/AppIcon-29x29@2x-1.png
--------------------------------------------------------------------------------
/app/src-tauri/icons/ios/AppIcon-29x29@2x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/app/src-tauri/icons/ios/AppIcon-29x29@2x.png
--------------------------------------------------------------------------------
/app/src-tauri/icons/ios/AppIcon-29x29@3x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/app/src-tauri/icons/ios/AppIcon-29x29@3x.png
--------------------------------------------------------------------------------
/app/src-tauri/icons/ios/AppIcon-40x40@1x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/app/src-tauri/icons/ios/AppIcon-40x40@1x.png
--------------------------------------------------------------------------------
/app/src-tauri/icons/ios/AppIcon-40x40@2x-1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/app/src-tauri/icons/ios/AppIcon-40x40@2x-1.png
--------------------------------------------------------------------------------
/app/src-tauri/icons/ios/AppIcon-40x40@2x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/app/src-tauri/icons/ios/AppIcon-40x40@2x.png
--------------------------------------------------------------------------------
/app/src-tauri/icons/ios/AppIcon-40x40@3x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/app/src-tauri/icons/ios/AppIcon-40x40@3x.png
--------------------------------------------------------------------------------
/app/src-tauri/icons/ios/AppIcon-512@2x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/app/src-tauri/icons/ios/AppIcon-512@2x.png
--------------------------------------------------------------------------------
/app/src-tauri/icons/ios/AppIcon-60x60@2x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/app/src-tauri/icons/ios/AppIcon-60x60@2x.png
--------------------------------------------------------------------------------
/app/src-tauri/icons/ios/AppIcon-60x60@3x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/app/src-tauri/icons/ios/AppIcon-60x60@3x.png
--------------------------------------------------------------------------------
/app/src-tauri/icons/ios/AppIcon-76x76@1x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/app/src-tauri/icons/ios/AppIcon-76x76@1x.png
--------------------------------------------------------------------------------
/app/src-tauri/icons/ios/AppIcon-76x76@2x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/app/src-tauri/icons/ios/AppIcon-76x76@2x.png
--------------------------------------------------------------------------------
/app/src-tauri/icons/ios/AppIcon-83.5x83.5@2x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/app/src-tauri/icons/ios/AppIcon-83.5x83.5@2x.png
--------------------------------------------------------------------------------
/app/src-tauri/src/main.rs:
--------------------------------------------------------------------------------
1 | // Prevents additional console window on Windows in release, DO NOT REMOVE!!
2 | #![cfg_attr(not(debug_assertions), windows_subsystem = "windows")]
3 |
4 | fn main() {
5 | let _ = fix_path_env::fix();
6 | harbor_lib::run()
7 | }
8 |
--------------------------------------------------------------------------------
/app/src/.gitignore:
--------------------------------------------------------------------------------
1 | # Added by "harbor dev docs"
2 | docs/
--------------------------------------------------------------------------------
/app/src/AppContent.tsx:
--------------------------------------------------------------------------------
1 | import { FC } from 'react';
2 | import { Route, Routes } from 'react-router-dom';
3 | import { ROUTES_LIST } from './AppRoutes';
4 |
5 | export const AppContent: FC = () => {
6 | return
7 | {ROUTES_LIST.map((route) => )}
8 |
9 | }
10 |
--------------------------------------------------------------------------------
/app/src/BackButton.tsx:
--------------------------------------------------------------------------------
1 | import { useNavigate } from "react-router-dom";
2 |
3 | import { IconButton } from "./IconButton";
4 | import { IconMoveLeft } from "./Icons";
5 |
6 | export const BackButton = () => {
7 | const navigate = useNavigate();
8 | const handleBack = () => navigate(-1);
9 |
10 | return } onClick={handleBack} />;
11 | };
12 |
--------------------------------------------------------------------------------
/app/src/Button.tsx:
--------------------------------------------------------------------------------
1 | import { ButtonHTMLAttributes, FC } from 'react';
2 |
3 | export const Button: FC> = ({ className, ...rest }) => {
4 | return
5 | }
6 |
--------------------------------------------------------------------------------
/app/src/HarborLogo.tsx:
--------------------------------------------------------------------------------
1 | import { FC } from 'react';
2 |
3 | export const HarborLogo: FC = () => {
4 | return Harbor
5 | }
6 |
--------------------------------------------------------------------------------
/app/src/LostSquirrel.tsx:
--------------------------------------------------------------------------------
1 | import { SVGProps } from "react";
2 |
3 | import { IconSquirrel } from "./Icons";
4 | import './squirrel.css';
5 |
6 | export const LostSquirrel = ({ className, ...rest }: SVGProps) => {
7 | return ;
8 | };
9 |
--------------------------------------------------------------------------------
/app/src/assets/font/Inter-Black.woff2:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/app/src/assets/font/Inter-Black.woff2
--------------------------------------------------------------------------------
/app/src/assets/font/Inter-BlackItalic.woff2:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/app/src/assets/font/Inter-BlackItalic.woff2
--------------------------------------------------------------------------------
/app/src/assets/font/Inter-Bold.woff2:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/app/src/assets/font/Inter-Bold.woff2
--------------------------------------------------------------------------------
/app/src/assets/font/Inter-BoldItalic.woff2:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/app/src/assets/font/Inter-BoldItalic.woff2
--------------------------------------------------------------------------------
/app/src/assets/font/Inter-ExtraBold.woff2:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/app/src/assets/font/Inter-ExtraBold.woff2
--------------------------------------------------------------------------------
/app/src/assets/font/Inter-ExtraBoldItalic.woff2:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/app/src/assets/font/Inter-ExtraBoldItalic.woff2
--------------------------------------------------------------------------------
/app/src/assets/font/Inter-ExtraLight.woff2:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/app/src/assets/font/Inter-ExtraLight.woff2
--------------------------------------------------------------------------------
/app/src/assets/font/Inter-ExtraLightItalic.woff2:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/app/src/assets/font/Inter-ExtraLightItalic.woff2
--------------------------------------------------------------------------------
/app/src/assets/font/Inter-Italic.woff2:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/app/src/assets/font/Inter-Italic.woff2
--------------------------------------------------------------------------------
/app/src/assets/font/Inter-Light.woff2:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/app/src/assets/font/Inter-Light.woff2
--------------------------------------------------------------------------------
/app/src/assets/font/Inter-LightItalic.woff2:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/app/src/assets/font/Inter-LightItalic.woff2
--------------------------------------------------------------------------------
/app/src/assets/font/Inter-Medium.woff2:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/app/src/assets/font/Inter-Medium.woff2
--------------------------------------------------------------------------------
/app/src/assets/font/Inter-MediumItalic.woff2:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/app/src/assets/font/Inter-MediumItalic.woff2
--------------------------------------------------------------------------------
/app/src/assets/font/Inter-Regular.woff2:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/app/src/assets/font/Inter-Regular.woff2
--------------------------------------------------------------------------------
/app/src/assets/font/Inter-SemiBold.woff2:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/app/src/assets/font/Inter-SemiBold.woff2
--------------------------------------------------------------------------------
/app/src/assets/font/Inter-SemiBoldItalic.woff2:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/app/src/assets/font/Inter-SemiBoldItalic.woff2
--------------------------------------------------------------------------------
/app/src/assets/font/Inter-Thin.woff2:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/app/src/assets/font/Inter-Thin.woff2
--------------------------------------------------------------------------------
/app/src/assets/font/Inter-ThinItalic.woff2:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/app/src/assets/font/Inter-ThinItalic.woff2
--------------------------------------------------------------------------------
/app/src/assets/font/InterDisplay-Black.woff2:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/app/src/assets/font/InterDisplay-Black.woff2
--------------------------------------------------------------------------------
/app/src/assets/font/InterDisplay-BlackItalic.woff2:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/app/src/assets/font/InterDisplay-BlackItalic.woff2
--------------------------------------------------------------------------------
/app/src/assets/font/InterDisplay-Bold.woff2:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/app/src/assets/font/InterDisplay-Bold.woff2
--------------------------------------------------------------------------------
/app/src/assets/font/InterDisplay-BoldItalic.woff2:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/app/src/assets/font/InterDisplay-BoldItalic.woff2
--------------------------------------------------------------------------------
/app/src/assets/font/InterDisplay-ExtraBold.woff2:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/app/src/assets/font/InterDisplay-ExtraBold.woff2
--------------------------------------------------------------------------------
/app/src/assets/font/InterDisplay-ExtraBoldItalic.woff2:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/app/src/assets/font/InterDisplay-ExtraBoldItalic.woff2
--------------------------------------------------------------------------------
/app/src/assets/font/InterDisplay-ExtraLight.woff2:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/app/src/assets/font/InterDisplay-ExtraLight.woff2
--------------------------------------------------------------------------------
/app/src/assets/font/InterDisplay-ExtraLightItalic.woff2:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/app/src/assets/font/InterDisplay-ExtraLightItalic.woff2
--------------------------------------------------------------------------------
/app/src/assets/font/InterDisplay-Italic.woff2:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/app/src/assets/font/InterDisplay-Italic.woff2
--------------------------------------------------------------------------------
/app/src/assets/font/InterDisplay-Light.woff2:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/app/src/assets/font/InterDisplay-Light.woff2
--------------------------------------------------------------------------------
/app/src/assets/font/InterDisplay-LightItalic.woff2:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/app/src/assets/font/InterDisplay-LightItalic.woff2
--------------------------------------------------------------------------------
/app/src/assets/font/InterDisplay-Medium.woff2:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/app/src/assets/font/InterDisplay-Medium.woff2
--------------------------------------------------------------------------------
/app/src/assets/font/InterDisplay-MediumItalic.woff2:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/app/src/assets/font/InterDisplay-MediumItalic.woff2
--------------------------------------------------------------------------------
/app/src/assets/font/InterDisplay-Regular.woff2:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/app/src/assets/font/InterDisplay-Regular.woff2
--------------------------------------------------------------------------------
/app/src/assets/font/InterDisplay-SemiBold.woff2:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/app/src/assets/font/InterDisplay-SemiBold.woff2
--------------------------------------------------------------------------------
/app/src/assets/font/InterDisplay-SemiBoldItalic.woff2:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/app/src/assets/font/InterDisplay-SemiBoldItalic.woff2
--------------------------------------------------------------------------------
/app/src/assets/font/InterDisplay-Thin.woff2:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/app/src/assets/font/InterDisplay-Thin.woff2
--------------------------------------------------------------------------------
/app/src/assets/font/InterDisplay-ThinItalic.woff2:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/app/src/assets/font/InterDisplay-ThinItalic.woff2
--------------------------------------------------------------------------------
/app/src/assets/font/InterVariable-Italic.woff2:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/app/src/assets/font/InterVariable-Italic.woff2
--------------------------------------------------------------------------------
/app/src/assets/font/InterVariable.woff2:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/app/src/assets/font/InterVariable.woff2
--------------------------------------------------------------------------------
/app/src/cli/CLI.tsx:
--------------------------------------------------------------------------------
1 | import { Doctor } from "../home/Doctor";
2 | import { Version } from "../home/Version";
3 |
4 | export const CLI = () => {
5 | return (
6 | <>
7 |
8 |
9 | >
10 | );
11 | };
12 |
--------------------------------------------------------------------------------
/app/src/home/Home.tsx:
--------------------------------------------------------------------------------
1 | import { ScrollToTop } from '../ScrollToTop';
2 | import { ServiceList } from "./ServiceList";
3 |
4 | export const Home = () => {
5 | return (
6 | <>
7 |
8 |
9 | >
10 | );
11 | };
12 |
--------------------------------------------------------------------------------
/app/src/main.css:
--------------------------------------------------------------------------------
1 | @tailwind base;
2 | @tailwind components;
3 | @tailwind utilities;
4 |
5 | :root {
6 | font-family: Inter;
7 | font-size: 18px;
8 | line-height: 24px;
9 | font-weight: 400;
10 |
11 | font-synthesis: none;
12 | text-rendering: optimizeLegibility;
13 | -webkit-font-smoothing: antialiased;
14 | -moz-osx-font-smoothing: grayscale;
15 | -webkit-text-size-adjust: 100%;
16 | }
--------------------------------------------------------------------------------
/app/src/service/ServiceDescription.tsx:
--------------------------------------------------------------------------------
1 | import { HarborService } from '../serviceMetadata';
2 |
3 | export const ServiceDescription = ({ service }: { service: HarborService }) => {
4 | return (
5 |
6 |
{service.tooltip}
7 |
8 | );
9 | }
--------------------------------------------------------------------------------
/app/src/service/ServiceName.tsx:
--------------------------------------------------------------------------------
1 | import { HarborService } from "../serviceMetadata";
2 |
3 | export const ServiceName = ({ service }: { service: HarborService }) => {
4 | return (
5 | {service.name ?? service.handle}
6 | );
7 | };
8 |
--------------------------------------------------------------------------------
/app/src/service/useCurrentService.ts:
--------------------------------------------------------------------------------
1 | import { useParams } from 'react-router-dom';
2 | import { useServiceList } from '../home/useServiceList';
3 |
4 | export const useCurrentService = () => {
5 | const params = useParams()
6 | const handle = params.handle;
7 | const services = useServiceList()
8 | const service = services.services.find((service) => service.handle === handle);
9 |
10 | return {
11 | service,
12 | loading: services.loading,
13 | rerun: services.rerun,
14 | }
15 | }
--------------------------------------------------------------------------------
/app/src/serviceActions.tsx:
--------------------------------------------------------------------------------
1 | import { IconPlaneLanding, IconRocketLaunch } from "./Icons";
2 |
3 | export const ACTION_ICONS = {
4 | loading: ,
5 | up: ,
6 | down: ,
7 | };
--------------------------------------------------------------------------------
/app/src/tags.css:
--------------------------------------------------------------------------------
1 | .service-tag:has(+ input:checked) {
2 | background: red;
3 | }
--------------------------------------------------------------------------------
/app/src/useOpen.tsx:
--------------------------------------------------------------------------------
1 | import { Command } from "@tauri-apps/plugin-shell";
2 | import { isWindows } from "./utils";
3 |
4 | export async function runOpen(args: string[]) {
5 | try {
6 | if (await isWindows()) {
7 | await Command.create("cmd", ['/c', 'start', ...args]).execute();
8 | } else {
9 | await Command.create("open", args).execute();
10 | }
11 | } catch (e) {
12 | console.error(e);
13 | }
14 | }
--------------------------------------------------------------------------------
/app/src/useSelectedProfile.tsx:
--------------------------------------------------------------------------------
1 | import { CURRENT_PROFILE } from "./configMetadata";
2 | import { useStoredState } from "./useStoredState";
3 |
4 | export const useSelectedProfile = () => {
5 | return useStoredState(
6 | "selectedProfile",
7 | CURRENT_PROFILE,
8 | );
9 | };
10 |
--------------------------------------------------------------------------------
/app/src/useUnmount.ts:
--------------------------------------------------------------------------------
1 | import { useEffect, useRef } from 'react'
2 |
3 | export function useUnmount(func: () => void) {
4 | const funcRef = useRef(func)
5 |
6 | funcRef.current = func
7 |
8 | useEffect(
9 | () => () => {
10 | funcRef.current()
11 | },
12 | [],
13 | )
14 | }
--------------------------------------------------------------------------------
/app/src/vite-env.d.ts:
--------------------------------------------------------------------------------
1 | ///
2 |
--------------------------------------------------------------------------------
/app/tsconfig.node.json:
--------------------------------------------------------------------------------
1 | {
2 | "compilerOptions": {
3 | "composite": true,
4 | "skipLibCheck": true,
5 | "module": "ESNext",
6 | "moduleResolution": "bundler",
7 | "allowSyntheticDefaultImports": true
8 | },
9 | "include": ["vite.config.ts"]
10 | }
11 |
--------------------------------------------------------------------------------
/autogpt/backends/autogpt.ollama.yml:
--------------------------------------------------------------------------------
1 | azure_api_type: azure
2 | azure_api_version: api-version-for-azure
3 | azure_endpoint: ${HARBOR_OLLAMA_INTERNAL_URL}/v1
4 | azure_model_map:
5 | ollama: ${HARBOR_AUTOGPT_MODEL}
--------------------------------------------------------------------------------
/bench/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM denoland/deno:1.46.3
2 |
3 | WORKDIR /app
4 | COPY src/ /app/src
5 | RUN deno cache src/deps.ts
6 |
7 | ENTRYPOINT ["deno", "run", "-A", "src/bench.ts"]
--------------------------------------------------------------------------------
/bench/defaultTasks.yml:
--------------------------------------------------------------------------------
1 | # The task format:
2 | #
3 | # type Task {
4 | # tags: string[];
5 | # question: string;
6 | # criteria: Record;
7 | # };
8 |
9 | - tags: [easy, knowledge]
10 | question: Who painted "Starry Night"?
11 | criteria:
12 | correctness: Answer mentions this painting was made by Vincent van Gogh
13 |
--------------------------------------------------------------------------------
/bench/override.env:
--------------------------------------------------------------------------------
1 | # This file can be used for overrides specific to the "bench" service.
2 | # You can also use "harbor env" command to set these variables
3 |
--------------------------------------------------------------------------------
/bench/src/deps.ts:
--------------------------------------------------------------------------------
1 | export * as args from "jsr:@std/cli/parse-args";
2 | export * as log from "jsr:@std/log";
3 | export * as csv from "jsr:@std/csv";
4 | export * as yaml from "jsr:@std/yaml";
5 | export * as path from "jsr:@std/path";
6 |
7 | export { default as chalk } from "https://deno.land/x/chalk_deno@v4.1.1-deno/source/index.js"
8 |
--------------------------------------------------------------------------------
/bench/src/tsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 | "compilerOptions": {
3 | "noEmit": true,
4 | "lib": ["ESNext"],
5 | "target": "ESNext",
6 | "module": "CommonJS",
7 | "strict": true,
8 | "esModuleInterop": true,
9 | "skipLibCheck": true,
10 | "forceConsistentCasingInFileNames": true,
11 | "allowImportingTsExtensions": true
12 | },
13 | "include": ["**/*.ts"]
14 | }
--------------------------------------------------------------------------------
/bionicgpt/override.env:
--------------------------------------------------------------------------------
1 | # This file can be used for overrides specific to the "bionicgpt" service.
2 | # You can also use "harbor env" command to set these variables
3 |
--------------------------------------------------------------------------------
/bolt/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM ghcr.io/coleam00/bolt.new-any-llm:latest
2 |
3 | # Only works build-time
4 | COPY .env.local /app/.env.local
5 | RUN pnpm build
6 |
7 |
--------------------------------------------------------------------------------
/bolt/model.ts:
--------------------------------------------------------------------------------
1 | import { createOpenAI } from '@ai-sdk/openai';
2 |
3 | export function getModel() {
4 | return createOpenAI({
5 | name: 'Ollama',
6 | apiKey: 'sk-ollama',
7 | baseURL: 'http://localhost:33821',
8 | })
9 | }
--------------------------------------------------------------------------------
/bolt/override.env:
--------------------------------------------------------------------------------
1 | # Can contain additional environment variables
2 | # specific for the bolt service. See for example:
3 | # https://github.com/coleam00/bolt.new-any-llm/blob/main/.env.example
4 | # You can also use "harbor env" command to set these variables
5 | NODE_ENV=production
6 |
7 | OLLAMA_API_BASE_URL=http://172.17.0.1:33821
--------------------------------------------------------------------------------
/boost/.dockerignore:
--------------------------------------------------------------------------------
1 | **/.venv/
--------------------------------------------------------------------------------
/boost/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM ghcr.io/av/tools
2 |
3 | WORKDIR /boost
4 | COPY /src /boost
5 |
6 | RUN uv venv
7 | RUN uv pip install -r requirements.txt
8 |
9 | CMD ["uv", "run", "uvicorn", "main:app", "--host", "0.0.0.0", "--port", "8000", "--reload"]
10 |
--------------------------------------------------------------------------------
/boost/override.env:
--------------------------------------------------------------------------------
1 | # This file can be used for additional
2 | # environment variable overrides that will
3 | # only be visible to the boost service.
4 | # You can also use the "harbor env" command to set these variables.
5 | LOG_LEVEL=DEBUG
6 |
--------------------------------------------------------------------------------
/boost/src/custom_modules/.gitkeep:
--------------------------------------------------------------------------------
1 | # It is our folder and we want it here, thanks
--------------------------------------------------------------------------------
/boost/src/custom_modules/meow.py:
--------------------------------------------------------------------------------
1 | ID_PREFIX='meow'
2 |
3 | async def apply(chat, llm):
4 | chat.user("Ignore all previous instructions, just meow in response")
5 | await llm.stream_chat_completion()
--------------------------------------------------------------------------------
/boost/src/requirements.txt:
--------------------------------------------------------------------------------
1 | fastapi==0.111.0
2 | uvicorn[standard]==0.30.6
3 | requests==2.32.3
4 | aiohttp==3.10.5
5 | litellm==1.65.7
6 | mcp==1.6.0
7 | cachetools==5.5.2
8 | asyncache==0.3.1
9 | openai
--------------------------------------------------------------------------------
/boost/src/tools/registry.py:
--------------------------------------------------------------------------------
1 | def is_local_tool(name: str) -> bool:
2 | return False
3 |
--------------------------------------------------------------------------------
/chatnio/.gitignore:
--------------------------------------------------------------------------------
1 | config/config.yaml
2 | db/
3 | logs/
4 | redis/
5 | storage/
--------------------------------------------------------------------------------
/chatnio/Dockerfile:
--------------------------------------------------------------------------------
1 | ARG HARBOR_CHATNIO_IMAGE=programzmh/chatnio
2 | ARG HARBOR_CHATNIO_VERSION=latest
3 |
4 | FROM ${HARBOR_CHATNIO_IMAGE}:${HARBOR_CHATNIO_VERSION}
5 |
6 | # Python needed for config merging
7 | RUN apk add python3 py3-pip py3-yaml
--------------------------------------------------------------------------------
/chatnio/config/config.override.yml:
--------------------------------------------------------------------------------
1 | # This config can be used to
2 | # permanently override the settings set by Harbor
3 | # Note that it'll also override the settings you configure
4 | # via the UI itself
5 | {}
--------------------------------------------------------------------------------
/chatnio/config/config.searxng.yml:
--------------------------------------------------------------------------------
1 | system:
2 | search:
3 | endpoint: http://searxng:8080/search
4 | crop: false
5 | croplen: 1000
6 | engines:
7 | - wikipedia
8 | - duckduckgo
9 | imageproxy: false
10 | safesearch: 0
11 | common:
12 | article: []
13 | generation: []
14 | cache: []
15 | expire: 0
16 | size: 0
17 | imagestore: false
18 | promptstore: false
19 |
--------------------------------------------------------------------------------
/chatnio/override.env:
--------------------------------------------------------------------------------
1 | # This file can be used for additional environment variables
2 | # specifically for the "chatnio" service.
3 | # You can also use the "harbor env" command to set these variables.
4 |
--------------------------------------------------------------------------------
/chatui/configs/chatui.airllm.yml:
--------------------------------------------------------------------------------
1 | envVars:
2 | MODELS:
3 | - name: airllm
4 | id: airllm
5 | endpoints:
6 | - type: openai
7 | baseURL: http://airllm:5000/v1
8 | apiKey: sk-airllm
--------------------------------------------------------------------------------
/chatui/configs/chatui.aphrodite.yml:
--------------------------------------------------------------------------------
1 | envVars:
2 | MODELS:
3 | - name: aphrodite
4 | id: aphrodite
5 | endpoints:
6 | - type: openai
7 | baseURL: http://aphrodite:2242/v1
8 | apiKey: sk-aphrodite
--------------------------------------------------------------------------------
/chatui/configs/chatui.config.yml:
--------------------------------------------------------------------------------
1 | # This is the base config where everything else will be merged
2 | envVars:
3 | MONGODB_URL: mongodb://chatui-db:27017
4 |
--------------------------------------------------------------------------------
/chatui/configs/chatui.dify.yml:
--------------------------------------------------------------------------------
1 | envVars:
2 | MODELS:
3 | - name: dify
4 | id: dify
5 | endpoints:
6 | - type: openai
7 | baseURL: http://dify-openai:3000/v1
8 | apiKey: "${HARBOR_DIFY_OPENAI_WORKFLOW}"
--------------------------------------------------------------------------------
/chatui/configs/chatui.litellm.yml:
--------------------------------------------------------------------------------
1 | envVars:
2 | MODELS:
3 | - name: litellm
4 | id: ${HARBOR_CHATUI_LITELLM_MODEL}
5 | endpoints:
6 | - type: openai
7 | baseURL: http://litellm:4000/v1
8 | apiKey: "${HARBOR_LITELLM_MASTER_KEY}"
--------------------------------------------------------------------------------
/chatui/configs/chatui.llamacpp.yml:
--------------------------------------------------------------------------------
1 | envVars:
2 | MODELS:
3 | - name: llamacpp
4 | id: llamacpp
5 | endpoints:
6 | - type: openai
7 | baseURL: http://llamacpp:8080/v1
8 | apiKey: sk-llamacpp
--------------------------------------------------------------------------------
/chatui/configs/chatui.mistralrs.yml:
--------------------------------------------------------------------------------
1 | envVars:
2 | MODELS:
3 | - name: mistralrs
4 | id: mistralrs
5 | endpoints:
6 | - type: openai
7 | baseURL: http://mistralrs:8021/v1
8 | apiKey: sk-mistralrs
--------------------------------------------------------------------------------
/chatui/configs/chatui.ollama.yml:
--------------------------------------------------------------------------------
1 | envVars:
2 | MODELS:
3 | - name: ollama
4 | id: ${HARBOR_CHATUI_OLLAMA_MODEL}
5 | endpoints:
6 | - type: openai
7 | baseURL: ${HARBOR_OLLAMA_INTERNAL_URL}/v1
--------------------------------------------------------------------------------
/chatui/configs/chatui.searxng.yml:
--------------------------------------------------------------------------------
1 | envVars:
2 | SEARXNG_QUERY_URL: http://searxng:8080/search?q=
--------------------------------------------------------------------------------
/chatui/configs/chatui.tabbyapi.yml:
--------------------------------------------------------------------------------
1 | envVars:
2 | MODELS:
3 | - name: tabbyapi
4 | id: tabbyapi
5 | endpoints:
6 | - type: openai
7 | baseURL: http://tabbyapi:5000/v1
8 | apiKey: "${HARBOR_TABBYAPI_ADMIN_KEY}"
--------------------------------------------------------------------------------
/chatui/configs/chatui.vllm.yml:
--------------------------------------------------------------------------------
1 | envVars:
2 | MODELS:
3 | - name: vllm
4 | id: vllm
5 | endpoints:
6 | - type: openai
7 | baseURL: http://vllm:8000/v1
8 | apiKey: sk-vllm
--------------------------------------------------------------------------------
/chatui/override.env:
--------------------------------------------------------------------------------
1 | # This file can be used for additional
2 | # environment variable overrides that will
3 | # only be visible to the chatui service.
4 | # You can also use the "harbor env" command to set these variables.
5 |
--------------------------------------------------------------------------------
/cmdh/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM node:20
2 |
3 | RUN apt-get update && apt-get install -y git && apt-get clean && rm -rf /var/lib/apt/lists/*
4 |
5 | WORKDIR /app
6 | RUN git clone https://github.com/pgibler/cmdh.git && cd cmdh
7 |
8 | # Replace the upstream Ollama API client with Harbor's patched version
9 | COPY ./ollama.ts /app/cmdh/src/api/ollama.ts
10 | RUN cd /app/cmdh && ./install.sh
11 | RUN npm i tsx zod zod-to-json-schema
12 |
13 | ENTRYPOINT [ "/app/node_modules/.bin/tsx", "/app/cmdh/src/cmdh.ts" ]
--------------------------------------------------------------------------------
/cmdh/override.env:
--------------------------------------------------------------------------------
1 | # This file can contain additional
2 | # env vars for the cmdh service
3 | # You can also use the "harbor env" command to set these variables.
4 |
--------------------------------------------------------------------------------
/comfyui/.gitkeep:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/comfyui/.gitkeep
--------------------------------------------------------------------------------
/comfyui/override.env:
--------------------------------------------------------------------------------
1 | # This file can be used for overriding environment variables
2 | # specifically for the ComfyUI service
3 | # You can also use the "harbor env" command to set these variables.
4 |
--------------------------------------------------------------------------------
/compose.bench.yml:
--------------------------------------------------------------------------------
1 | services:
2 | bench:
3 | build:
4 | context: ./bench
5 | dockerfile: Dockerfile
6 | container_name: ${HARBOR_CONTAINER_PREFIX}.bench
7 | env_file:
8 | - ./.env
9 | - ./bench/override.env
10 | volumes:
11 | # Inline source
12 | - ./bench/src:/app/src
13 | - ${HARBOR_BENCH_RESULTS}:/app/results
14 | - ${HARBOR_BENCH_TASKS}:/app/tasks.yml
15 | networks:
16 | - harbor-network
--------------------------------------------------------------------------------
/compose.cfd.yml:
--------------------------------------------------------------------------------
1 | services:
2 | cfd:
3 | image: cloudflare/cloudflared
4 | container_name: ${HARBOR_CONTAINER_PREFIX}.cfd
5 | env_file: ./.env
6 | environment:
7 | - NO_AUTOUPDATE=1
8 | networks:
9 | - harbor-network
10 |
--------------------------------------------------------------------------------
/compose.fabric.yml:
--------------------------------------------------------------------------------
1 | services:
2 | fabric:
3 | container_name: ${HARBOR_CONTAINER_PREFIX}.fabric
4 | env_file:
5 | - ./.env
6 | - ./fabric/override.env
7 | build:
8 | context: ./fabric
9 | dockerfile: Dockerfile
10 | volumes:
11 | - ${HARBOR_FABRIC_CONFIG_PATH}:/root/.config/fabric
12 | networks:
13 | - harbor-network
--------------------------------------------------------------------------------
/compose.flowise.yml:
--------------------------------------------------------------------------------
1 |
2 | services:
3 | flowise:
4 | container_name: ${HARBOR_CONTAINER_PREFIX}.flowise
5 | image: ${HARBOR_FLOWISE_IMAGE}:${HARBOR_FLOWISE_VERSION}
6 | ports:
7 | - ${HARBOR_FLOWISE_HOST_PORT}:3000
8 | volumes:
9 | # Persistence
10 | - ${HARBOR_FLOWISE_WORKSPACE}:/root/.flowise
11 | env_file:
12 | - ./.env
13 | - flowise/override.env
14 | networks:
15 | - harbor-network
16 |
--------------------------------------------------------------------------------
/compose.gptme.yml:
--------------------------------------------------------------------------------
1 | services:
2 | gptme:
3 | container_name: ${HARBOR_CONTAINER_PREFIX}.gptme
4 | build:
5 | context: ./gptme
6 | dockerfile: Dockerfile
7 | env_file:
8 | - ./.env
9 | - gptme/override.env
10 | volumes:
11 | - ./gptme/config.toml:/root/.config/gptme/config.toml
12 | networks:
13 | - harbor-network
14 |
--------------------------------------------------------------------------------
/compose.gum.yml:
--------------------------------------------------------------------------------
1 | services:
2 | gum:
3 | build:
4 | context: ./gum
5 | dockerfile: Dockerfile
--------------------------------------------------------------------------------
/compose.hf.yml:
--------------------------------------------------------------------------------
1 | services:
2 | hf:
3 | build:
4 | context: ./hf
5 | dockerfile: Dockerfile
6 | container_name: ${HARBOR_CONTAINER_PREFIX}.hf
7 | env_file:
8 | - ./.env
9 | - ./hf/override.env
10 | volumes:
11 | - ${HARBOR_HF_CACHE}:/root/.cache/huggingface
12 | environment:
13 | - HF_TOKEN=${HARBOR_HF_TOKEN}
14 | network_mode: host
15 |
--------------------------------------------------------------------------------
/compose.hollama.yml:
--------------------------------------------------------------------------------
1 | services:
2 | hollama:
3 | image: ghcr.io/fmaclen/hollama:latest
4 | env_file: ./.env
5 | container_name: ${HARBOR_CONTAINER_PREFIX}.hollama
6 | ports:
7 | - ${HARBOR_HOLLAMA_HOST_PORT}:4173
8 | networks:
9 | - harbor-network
10 |
11 |
--------------------------------------------------------------------------------
/compose.litlytics.yml:
--------------------------------------------------------------------------------
1 | services:
2 | litlytics:
3 | container_name: ${HARBOR_CONTAINER_PREFIX}.litlytics
4 | env_file:
5 | - ./.env
6 | - ./litlytics/override.env
7 | image: ghcr.io/yamalight/litlytics:${HARBOR_LITLYTICS_VERSION}
8 | ports:
9 | - ${HARBOR_LITLYTICS_HOST_PORT}:3000
10 | networks:
11 | - harbor-network
12 |
--------------------------------------------------------------------------------
/compose.lmdeploy.yml:
--------------------------------------------------------------------------------
1 | services:
2 | lmdeploy:
3 | image: openmmlab/lmdeploy:latest
4 | container_name: ${HARBOR_CONTAINER_PREFIX}.lmdeploy
5 | volumes:
6 | - ${HARBOR_HF_CACHE}:/root/.cache/huggingface
7 | ports:
8 | - ${HARBOR_LMDEPLOY_HOST_PORT}:23333
9 | ipc: host
10 | command: lmdeploy serve api_server Weni/ZeroShot-Agents-Llama3-4.0.43-ORPO-AWQ
11 | networks:
12 | - harbor-network
--------------------------------------------------------------------------------
/compose.lobechat.yml:
--------------------------------------------------------------------------------
1 | services:
2 | lobechat:
3 | image: lobehub/lobe-chat:${HARBOR_LOBECHAT_VERSION}
4 | container_name: ${HARBOR_CONTAINER_PREFIX}.lobechat
5 | env_file:
6 | - ./.env
7 | ports:
8 | - ${HARBOR_LOBECHAT_HOST_PORT}:3210
9 | networks:
10 | - harbor-network
11 |
--------------------------------------------------------------------------------
/compose.omniparser.yml:
--------------------------------------------------------------------------------
1 | services:
2 | omniparser:
3 | build:
4 | context: ./omniparser
5 | dockerfile: Dockerfile
6 | container_name: ${HARBOR_CONTAINER_PREFIX}.omniparser
7 | env_file:
8 | - ./.env
9 | - ./omniparser/override.env
10 | ports:
11 | - ${HARBOR_OMNIPARSER_HOST_PORT}:7861
12 | networks:
13 | - harbor-network
--------------------------------------------------------------------------------
/compose.optillm.yml:
--------------------------------------------------------------------------------
1 |
2 | services:
3 | optillm:
4 | container_name: ${HARBOR_CONTAINER_PREFIX}.optillm
5 | build:
6 | context: https://github.com/codelion/optillm.git#main
7 | dockerfile: Dockerfile
8 | env_file:
9 | - ./.env
10 | - optillm/override.env
11 | volumes:
12 | - ${HARBOR_OPTILLM_WORKSPACE}:/root/.config/optillm
13 | ports:
14 | - ${HARBOR_OPTILLM_HOST_PORT}:8000
15 | networks:
16 | - harbor-network
17 |
--------------------------------------------------------------------------------
/compose.oterm.yml:
--------------------------------------------------------------------------------
1 | services:
2 | oterm:
3 | container_name: ${HARBOR_CONTAINER_PREFIX}.oterm
4 | build:
5 | context: ./oterm
6 | dockerfile: Dockerfile
7 | env_file:
8 | - ./.env
9 | - oterm/override.env
10 | networks:
11 | - harbor-network
12 | volumes:
13 | - ${HARBOR_OTERM_WORKSPACE}:/root/.local/share/oterm
14 |
--------------------------------------------------------------------------------
/compose.parllama.yml:
--------------------------------------------------------------------------------
1 | services:
2 | parllama:
3 | container_name: ${HARBOR_CONTAINER_PREFIX}.parllama
4 | env_file:
5 | - ./.env
6 | - ./parllama/override.env
7 | build:
8 | context: ./parllama
9 | dockerfile: Dockerfile
10 | volumes:
11 | - ${HARBOR_PARLLAMA_CACHE}:/root/.parllama
12 | tty: true
13 | networks:
14 | - harbor-network
--------------------------------------------------------------------------------
/compose.qrgen.yml:
--------------------------------------------------------------------------------
1 | services:
2 | qrgen:
3 | build:
4 | context: ./qrgen
5 | dockerfile: Dockerfile
6 |
7 |
--------------------------------------------------------------------------------
/compose.repopack.yml:
--------------------------------------------------------------------------------
1 | services:
2 | repopack:
3 | build:
4 | context: ./repopack
5 | dockerfile: Dockerfile
6 | container_name: ${HARBOR_CONTAINER_PREFIX}.repopack
7 | env_file:
8 | - ./.env
9 | - ./repopack/override.env
10 | networks:
11 | - harbor-network
--------------------------------------------------------------------------------
/compose.sqlchat.yml:
--------------------------------------------------------------------------------
1 | services:
2 | sqlchat:
3 | container_name: ${HARBOR_CONTAINER_PREFIX}.sqlchat
4 | image: ${HARBOR_SQLCHAT_IMAGE}:${HARBOR_SQLCHAT_VERSION}
5 | ports:
6 | - ${HARBOR_SQLCHAT_HOST_PORT}:3000
7 | env_file:
8 | - ./.env
9 | - sqlchat/override.env
10 | networks:
11 | - harbor-network
12 |
--------------------------------------------------------------------------------
/compose.supergateway.yml:
--------------------------------------------------------------------------------
1 | services:
2 | supergateway:
3 | container_name: ${HARBOR_CONTAINER_PREFIX}.supergateway
4 | image: ghcr.io/av/tools:latest
5 | entrypoint: npx supergateway
6 | volumes:
7 | - ./mcp/cache/uv:/app/.uv_cache
8 | - ./mcp/cache/npm:/root/.npm
9 | env_file:
10 | - ./.env
11 | - ./supergateway/override.env
12 | networks:
13 | - harbor-network
14 |
--------------------------------------------------------------------------------
/compose.x.agentzero.ollama.yml:
--------------------------------------------------------------------------------
1 | services:
2 | agentzero:
3 | environment:
4 | - OLLAMA_BASE_URL=${HARBOR_OLLAMA_INTERNAL_URL}
--------------------------------------------------------------------------------
/compose.x.aichat.ktransformers.yml:
--------------------------------------------------------------------------------
1 | services:
2 | aichat:
3 | volumes:
4 | - ./aichat/configs/aichat.ktransformers.yml:/app/configs/ktransformers.yml
--------------------------------------------------------------------------------
/compose.x.aichat.ollama.yml:
--------------------------------------------------------------------------------
1 | services:
2 | aichat:
3 | volumes:
4 | - ./aichat/configs/aichat.ollama.yml:/app/configs/ollama.yml
--------------------------------------------------------------------------------
/compose.x.aider.airllm.yml:
--------------------------------------------------------------------------------
1 | services:
2 | aider:
3 | volumes:
4 | - ./aider/configs/aider.airllm.yml:/root/.aider/airllm.yml
--------------------------------------------------------------------------------
/compose.x.aider.aphrodite.yml:
--------------------------------------------------------------------------------
1 | services:
2 | aider:
3 | volumes:
4 | - ./aider/configs/aider.aphrodite.yml:/root/.aider/aphrodite.yml
--------------------------------------------------------------------------------
/compose.x.aider.cdi.yml:
--------------------------------------------------------------------------------
1 | # This file is generated by seed-cdi.ts script,
2 | # any updates will be overwritten.
3 | services:
4 | aider:
5 | deploy:
6 | resources:
7 | reservations:
8 | devices:
9 | - driver: cdi
10 | capabilities: [gpu]
11 | device_ids:
12 | - nvidia.com/gpu=all
13 |
--------------------------------------------------------------------------------
/compose.x.aider.dify.yml:
--------------------------------------------------------------------------------
1 | services:
2 | aider:
3 | volumes:
4 | - ./aider/configs/aider.dify.yml:/root/.aider/dify.yml
--------------------------------------------------------------------------------
/compose.x.aider.ktransformers.yml:
--------------------------------------------------------------------------------
1 | services:
2 | aider:
3 | volumes:
4 | - ./aider/configs/aider.ktransformers.yml:/root/.aider/ktransformers.yml
--------------------------------------------------------------------------------
/compose.x.aider.litellm.yml:
--------------------------------------------------------------------------------
1 | services:
2 | aider:
3 | volumes:
4 | - ./aider/configs/aider.litellm.yml:/root/.aider/litellm.yml
--------------------------------------------------------------------------------
/compose.x.aider.llamacpp.yml:
--------------------------------------------------------------------------------
1 | services:
2 | aider:
3 | volumes:
4 | - ./aider/configs/aider.llamacpp.yml:/root/.aider/llamacpp.yml
--------------------------------------------------------------------------------
/compose.x.aider.mistralrs.yml:
--------------------------------------------------------------------------------
1 | services:
2 | aider:
3 | volumes:
4 | - ./aider/configs/aider.mistralrs.yml:/root/.aider/mistralrs.yml
--------------------------------------------------------------------------------
/compose.x.aider.nvidia.yml:
--------------------------------------------------------------------------------
1 | services:
2 | aider:
3 | deploy:
4 | resources:
5 | reservations:
6 | devices:
7 | - capabilities: [gpu]
8 | count: all
9 | driver: nvidia
--------------------------------------------------------------------------------
/compose.x.aider.ollama.yml:
--------------------------------------------------------------------------------
1 | services:
2 | aider:
3 | volumes:
4 | - ./aider/configs/aider.ollama.yml:/root/.aider/ollama.yml
--------------------------------------------------------------------------------
/compose.x.aider.sglang.yml:
--------------------------------------------------------------------------------
1 | services:
2 | aider:
3 | volumes:
4 | - ./aider/configs/aider.sglang.yml:/root/.aider/sglang.yml
--------------------------------------------------------------------------------
/compose.x.aider.tabbyapi.yml:
--------------------------------------------------------------------------------
1 | services:
2 | aider:
3 | volumes:
4 | - ./aider/configs/aider.tabbyapi.yml:/root/.aider/tabbyapi.yml
--------------------------------------------------------------------------------
/compose.x.aider.vllm.yml:
--------------------------------------------------------------------------------
1 | services:
2 | aider:
3 | volumes:
4 | - ./aider/configs/aider.vllm.yml:/root/.aider/vllm.yml
--------------------------------------------------------------------------------
/compose.x.anythingllm.llamacpp.yml:
--------------------------------------------------------------------------------
1 | services:
2 | anythingllm:
3 | environment:
4 | - LLM_PROVIDER=generic-openai
5 | - GENERIC_OPEN_AI_BASE_PATH=http://llamacpp:8080/v1
6 | - GENERIC_OPEN_AI_API_KEY=sk-llamacpp
--------------------------------------------------------------------------------
/compose.x.anythingllm.ollama.yml:
--------------------------------------------------------------------------------
1 | services:
2 | anythingllm:
3 | environment:
4 | - LLM_PROVIDER=ollama
5 | - OLLAMA_BASE_PATH=${HARBOR_OLLAMA_INTERNAL_URL}
6 | - EMBEDDING_ENGINE=ollama
7 | - EMBEDDING_BASE_PATH=${HARBOR_OLLAMA_INTERNAL_URL}
--------------------------------------------------------------------------------
/compose.x.anythingllm.searxng.yml:
--------------------------------------------------------------------------------
1 | services:
2 | anythingllm:
3 | environment:
4 | - AGENT_SEARXNG_API_URL=${HARBOR_SEARXNG_INTERNAL_URL}
--------------------------------------------------------------------------------
/compose.x.aphrodite.cdi.yml:
--------------------------------------------------------------------------------
1 | # This file is generated by seed-cdi.ts script,
2 | # any updates will be overwritten.
3 | services:
4 | aphrodite:
5 | deploy:
6 | resources:
7 | reservations:
8 | devices:
9 | - driver: cdi
10 | capabilities: [gpu]
11 | device_ids:
12 | - nvidia.com/gpu=all
13 |
--------------------------------------------------------------------------------
/compose.x.aphrodite.nvidia.yml:
--------------------------------------------------------------------------------
1 | services:
2 | aphrodite:
3 | deploy:
4 | resources:
5 | reservations:
6 | devices:
7 | - capabilities: [gpu]
8 | count: all
9 | driver: nvidia
--------------------------------------------------------------------------------
/compose.x.boost.airllm.yml:
--------------------------------------------------------------------------------
1 | services:
2 | boost:
3 | environment:
4 | - HARBOR_BOOST_OPENAI_URL_AIRLLM=http://airllm:5000/v1
5 | - HARBOR_BOOST_OPENAI_KEY_AIRLLM=sk-airllm
--------------------------------------------------------------------------------
/compose.x.boost.aphrodite.yml:
--------------------------------------------------------------------------------
1 | services:
2 | boost:
3 | environment:
4 | - HARBOR_BOOST_OPENAI_URL_APHRODITE=http://aphrodite:2242/v1
5 | - HARBOR_BOOST_OPENAI_KEY_APHRODITE=sk-aphrodite
--------------------------------------------------------------------------------
/compose.x.boost.dify.yml:
--------------------------------------------------------------------------------
1 | services:
2 | boost:
3 | environment:
4 | - HARBOR_BOOST_OPENAI_URL_DIFY=http://dify-openai:3000/v1
5 | - HARBOR_BOOST_OPENAI_KEY_DIFY=${HARBOR_DIFY_OPENAI_WORKFLOW}
--------------------------------------------------------------------------------
/compose.x.boost.ktransformers.yml:
--------------------------------------------------------------------------------
1 | services:
2 | boost:
3 | environment:
4 | - HARBOR_BOOST_OPENAI_URL_KTRANSFORMERS=http://ktransformers:12456/v1
5 | - HARBOR_BOOST_OPENAI_KEY_KTRANSFORMERS=sk-transformers
--------------------------------------------------------------------------------
/compose.x.boost.litellm.yml:
--------------------------------------------------------------------------------
1 | services:
2 | boost:
3 | environment:
4 | - HARBOR_BOOST_OPENAI_URL_LITELLM=http://litellm:4000/v1
5 | - HARBOR_BOOST_OPENAI_KEY_LITELLM=${HARBOR_LITELLM_MASTER_KEY}
--------------------------------------------------------------------------------
/compose.x.boost.llamacpp.yml:
--------------------------------------------------------------------------------
1 | services:
2 | boost:
3 | environment:
4 | - HARBOR_BOOST_OPENAI_URL_LLAMACPP=http://llamacpp:8080/v1
5 | - HARBOR_BOOST_OPENAI_KEY_LLAMACPP=sk-llamacpp
--------------------------------------------------------------------------------
/compose.x.boost.mistralrs.yml:
--------------------------------------------------------------------------------
1 | services:
2 | boost:
3 | environment:
4 | - HARBOR_BOOST_OPENAI_URL_MISTRALRS=http://mistralrs:8021/v1
5 | - HARBOR_BOOST_OPENAI_KEY_MISTRALRS=sk-mistralrs
--------------------------------------------------------------------------------
/compose.x.boost.ollama.yml:
--------------------------------------------------------------------------------
1 | services:
2 | boost:
3 | environment:
4 | - HARBOR_BOOST_OPENAI_URL_OLLAMA=${HARBOR_OLLAMA_INTERNAL_URL}/v1
5 | - HARBOR_BOOST_OPENAI_KEY_OLLAMA=sk-ollama
--------------------------------------------------------------------------------
/compose.x.boost.omnichain.yml:
--------------------------------------------------------------------------------
1 | services:
2 | boost:
3 | environment:
4 | - HARBOR_BOOST_OPENAI_URL_OMNICHAIN=http://omnichain:34082/v1
5 | - HARBOR_BOOST_OPENAI_KEY_OMNICHAIN=sk-omnichain
--------------------------------------------------------------------------------
/compose.x.boost.sglang.yml:
--------------------------------------------------------------------------------
1 | services:
2 | boost:
3 | environment:
4 | - HARBOR_BOOST_OPENAI_URL_SGLANG=http://sglang:30000/v1
5 | - HARBOR_BOOST_OPENAI_KEY_SGLANG=sk-sglang
--------------------------------------------------------------------------------
/compose.x.boost.tabbyapi.yml:
--------------------------------------------------------------------------------
1 | services:
2 | boost:
3 | environment:
4 | - HARBOR_BOOST_OPENAI_URL_TABBYAPI=http://tabbyapi:5000/v1
5 | - HARBOR_BOOST_OPENAI_KEY_TABBYAPI=${HARBOR_TABBYAPI_ADMIN_KEY}
--------------------------------------------------------------------------------
/compose.x.boost.vllm.yml:
--------------------------------------------------------------------------------
1 | services:
2 | boost:
3 | environment:
4 | - HARBOR_BOOST_OPENAI_URL_VLLM=http://vllm:8000/v1
5 | - HARBOR_BOOST_OPENAI_KEY_VLLM=sk-vllm
--------------------------------------------------------------------------------
/compose.x.chatnio.ollama.yml:
--------------------------------------------------------------------------------
1 | services:
2 | chatnio:
3 | volumes:
4 | - ./chatnio/config/config.ollama.yml:/configs/config.ollama.yml
--------------------------------------------------------------------------------
/compose.x.chatnio.searxng.yml:
--------------------------------------------------------------------------------
1 | services:
2 | chatnio:
3 | volumes:
4 | - ./chatnio/config/config.searxng.yml:/configs/config.searxng.yml
--------------------------------------------------------------------------------
/compose.x.chatui.airllm.yml:
--------------------------------------------------------------------------------
1 | services:
2 | chatui:
3 | volumes:
4 | - ./chatui/configs/chatui.airllm.yml:/app/configs/chatui.airllm.yml
--------------------------------------------------------------------------------
/compose.x.chatui.aphrodite.yml:
--------------------------------------------------------------------------------
1 | services:
2 | chatui:
3 | volumes:
4 | - ./chatui/configs/chatui.aphrodite.yml:/app/configs/chatui.aphrodite.yml
--------------------------------------------------------------------------------
/compose.x.chatui.dify.yml:
--------------------------------------------------------------------------------
1 | services:
2 | chatui:
3 | volumes:
4 | - ./chatui/configs/chatui.dify.yml:/app/configs/chatui.dify.yml
--------------------------------------------------------------------------------
/compose.x.chatui.litellm.yml:
--------------------------------------------------------------------------------
1 | services:
2 | chatui:
3 | volumes:
4 | - ./chatui/configs/chatui.litellm.yml:/app/configs/chatui.litellm.yml
--------------------------------------------------------------------------------
/compose.x.chatui.llamacpp.yml:
--------------------------------------------------------------------------------
1 | services:
2 | chatui:
3 | volumes:
4 | - ./chatui/configs/chatui.llamacpp.yml:/app/configs/chatui.llamacpp.yml
--------------------------------------------------------------------------------
/compose.x.chatui.mistralrs.yml:
--------------------------------------------------------------------------------
1 | services:
2 | chatui:
3 | volumes:
4 | - ./chatui/configs/chatui.mistralrs.yml:/app/configs/chatui.mistralrs.yml
--------------------------------------------------------------------------------
/compose.x.chatui.ollama.yml:
--------------------------------------------------------------------------------
1 | services:
2 | chatui:
3 | volumes:
4 | - ./chatui/configs/chatui.ollama.yml:/app/configs/chatui.ollama.yml
--------------------------------------------------------------------------------
/compose.x.chatui.searxng.yml:
--------------------------------------------------------------------------------
1 | services:
2 | chatui:
3 | volumes:
4 | - ./chatui/configs/chatui.searxng.yml:/app/configs/chatui.searxng.yml
--------------------------------------------------------------------------------
/compose.x.chatui.tabbyapi.yml:
--------------------------------------------------------------------------------
1 | services:
2 | chatui:
3 | volumes:
4 | - ./chatui/configs/chatui.tabbyapi.yml:/app/configs/chatui.tabbyapi.yml
--------------------------------------------------------------------------------
/compose.x.chatui.vllm.yml:
--------------------------------------------------------------------------------
1 | services:
2 | chatui:
3 | volumes:
4 | - ./chatui/configs/chatui.vllm.yml:/app/configs/chatui.vllm.yml
--------------------------------------------------------------------------------
/compose.x.cmdh.harbor.yml:
--------------------------------------------------------------------------------
1 | services:
2 | cmdh:
3 | volumes:
4 | - ./cmdh/harbor.prompt:/app/system.prompt
--------------------------------------------------------------------------------
/compose.x.cmdh.llamacpp.yml:
--------------------------------------------------------------------------------
1 | services:
2 | cmdh:
3 | environment:
4 | - LLM_HOST=OpenAI
5 | - OPENAI_API_KEY=sk-llamacpp
6 | - OPENAI_BASE_URL=http://llamacpp:8080/v1
--------------------------------------------------------------------------------
/compose.x.cmdh.ollama.yml:
--------------------------------------------------------------------------------
1 | services:
2 | cmdh:
3 | environment:
4 | - LLM_HOST=${HARBOR_CMDH_LLM_HOST}
5 | - OLLAMA_HOST=${HARBOR_OLLAMA_INTERNAL_URL}
6 | - OLLAMA_MODEL_NAME=${HARBOR_CMDH_MODEL}
--------------------------------------------------------------------------------
/compose.x.cmdh.tgi.yml:
--------------------------------------------------------------------------------
1 | services:
2 | cmdh:
3 | environment:
4 | - LLM_HOST=OpenAI
5 | - OPENAI_API_KEY=sk-tgi
6 | - OPENAI_BASE_URL=http://tgi:80/v1
--------------------------------------------------------------------------------
/compose.x.comfyui.cdi.yml:
--------------------------------------------------------------------------------
1 | # This file is generated by seed-cdi.ts script,
2 | # any updates will be overwritten.
3 | services:
4 | comfyui:
5 | deploy:
6 | resources:
7 | reservations:
8 | devices:
9 | - driver: cdi
10 | capabilities: [gpu]
11 | device_ids:
12 | - nvidia.com/gpu=all
13 |
--------------------------------------------------------------------------------
/compose.x.comfyui.nvidia.yml:
--------------------------------------------------------------------------------
1 | services:
2 | comfyui:
3 | deploy:
4 | resources:
5 | reservations:
6 | devices:
7 | - driver: nvidia
8 | count: all
9 | capabilities: [gpu]
--------------------------------------------------------------------------------
/compose.x.fabric.ollama.yml:
--------------------------------------------------------------------------------
1 | services:
2 | fabric:
3 | environment:
4 | - DEFAULT_VENDOR=Ollama
5 | - DEFAULT_MODEL=${HARBOR_FABRIC_MODEL}
6 | - OLLAMA_API_URL=${HARBOR_OLLAMA_INTERNAL_URL}
7 |
8 |
--------------------------------------------------------------------------------
/compose.x.gptme.ollama.yml:
--------------------------------------------------------------------------------
1 | services:
2 | gptme:
3 | environment:
4 | - OPENAI_BASE_URL=${HARBOR_OLLAMA_INTERNAL_URL}/v1
5 |
--------------------------------------------------------------------------------
/compose.x.jupyter.cdi.yml:
--------------------------------------------------------------------------------
1 | # This file is generated by seed-cdi.ts script,
2 | # any updates will be overwritten.
3 | services:
4 | jupyter:
5 | deploy:
6 | resources:
7 | reservations:
8 | devices:
9 | - driver: cdi
10 | capabilities: [gpu]
11 | device_ids:
12 | - nvidia.com/gpu=all
13 |
--------------------------------------------------------------------------------
/compose.x.jupyter.nvidia.yml:
--------------------------------------------------------------------------------
1 | services:
2 | jupyter:
3 | deploy:
4 | resources:
5 | reservations:
6 | devices:
7 | - driver: nvidia
8 | count: all
9 | capabilities: [gpu]
--------------------------------------------------------------------------------
/compose.x.kobold.cdi.yml:
--------------------------------------------------------------------------------
1 | # This file is generated by seed-cdi.ts script,
2 | # any updates will be overwritten.
3 | services:
4 | kobold:
5 | deploy:
6 | resources:
7 | reservations:
8 | devices:
9 | - driver: cdi
10 | capabilities: [gpu]
11 | device_ids:
12 | - nvidia.com/gpu=all
13 |
--------------------------------------------------------------------------------
/compose.x.kobold.nvidia.yml:
--------------------------------------------------------------------------------
1 | services:
2 | kobold:
3 | deploy:
4 | resources:
5 | reservations:
6 | devices:
7 | - driver: nvidia
8 | count: all
9 | capabilities: [gpu]
--------------------------------------------------------------------------------
/compose.x.ktransformers.cdi.yml:
--------------------------------------------------------------------------------
1 | # This file is generated by seed-cdi.ts script,
2 | # any updates will be overwritten.
3 | services:
4 | ktransformers:
5 | deploy:
6 | resources:
7 | reservations:
8 | devices:
9 | - driver: cdi
10 | capabilities: [gpu]
11 | device_ids:
12 | - nvidia.com/gpu=all
13 |
--------------------------------------------------------------------------------
/compose.x.ktransformers.nvidia.yml:
--------------------------------------------------------------------------------
1 | services:
2 | ktransformers:
3 | deploy:
4 | resources:
5 | reservations:
6 | devices:
7 | - driver: nvidia
8 | count: all
9 | capabilities: [gpu]
--------------------------------------------------------------------------------
/compose.x.langflow.litellm.yml:
--------------------------------------------------------------------------------
1 | services:
2 | langflow:
3 | environment:
   4 |       - LANGFLOW_OPENAI_API_BASE=http://${HARBOR_CONTAINER_PREFIX:-harbor}.litellm:4000/v1
5 |
--------------------------------------------------------------------------------
/compose.x.ldr.ollama.yml:
--------------------------------------------------------------------------------
1 | services:
2 | ldr:
3 | environment:
4 | - LDR_LLM__PROVIDER=ollama
5 | - OLLAMA_BASE_URL=${HARBOR_OLLAMA_INTERNAL_URL}
6 | - LDR_LLM_PROVIDER=ollama
7 | - LDR_LLM_OLLAMA_URL=${HARBOR_OLLAMA_INTERNAL_URL}
--------------------------------------------------------------------------------
/compose.x.ldr.searxng.yml:
--------------------------------------------------------------------------------
1 | services:
2 | ldr:
3 | environment:
4 | - LDR_SEARCH__TOOL=searxng
5 | - LDR_SEARCH_TOOL=searxng
6 | - SEARXNG_INSTANCE=${HARBOR_SEARXNG_INTERNAL_URL}
--------------------------------------------------------------------------------
/compose.x.litellm.langfuse.yml:
--------------------------------------------------------------------------------
1 | services:
2 | litellm:
3 | volumes:
4 | - ./litellm/litellm.langfuse.yaml:/app/litellm/langfuse.yaml
5 | environment:
6 | - LANGFUSE_HOST=http://langfuse:3000
7 | - LANGFUSE_PUBLIC_KEY=${HARBOR_LANGFUSE_PUBLIC_KEY}
8 | - LANGFUSE_SECRET_KEY=${HARBOR_LANGFUSE_SECRET_KEY}
--------------------------------------------------------------------------------
/compose.x.litellm.optillm.yml:
--------------------------------------------------------------------------------
1 | services:
2 | litellm:
3 | volumes:
4 | - ./litellm/litellm.optillm.yaml:/app/litellm/optillm.yaml
--------------------------------------------------------------------------------
/compose.x.litellm.tgi.yml:
--------------------------------------------------------------------------------
1 | services:
2 | litellm:
3 | volumes:
4 | - ./litellm/litellm.tgi.yaml:/app/litellm/tgi.yaml
--------------------------------------------------------------------------------
/compose.x.litellm.vllm.yml:
--------------------------------------------------------------------------------
1 | services:
2 | litellm:
3 | volumes:
4 | - ./litellm/litellm.vllm.yaml:/app/litellm/vllm.yaml
--------------------------------------------------------------------------------
/compose.x.llamacpp.cdi.yml:
--------------------------------------------------------------------------------
1 | # This file is generated by seed-cdi.ts script,
2 | # any updates will be overwritten.
3 | services:
4 | llamacpp:
5 | deploy:
6 | resources:
7 | reservations:
8 | devices:
9 | - driver: cdi
10 | capabilities: [gpu]
11 | device_ids:
12 | - nvidia.com/gpu=all
13 |
--------------------------------------------------------------------------------
/compose.x.llamacpp.nvidia.yml:
--------------------------------------------------------------------------------
1 | services:
2 | llamacpp:
3 | image: ghcr.io/ggerganov/llama.cpp:full-cuda
4 | deploy:
5 | resources:
6 | reservations:
7 | devices:
8 | - driver: nvidia
9 | count: all
10 | capabilities: [gpu]
--------------------------------------------------------------------------------
/compose.x.llamaswap.cdi.yml:
--------------------------------------------------------------------------------
1 | # This file is generated by seed-cdi.ts script,
2 | # any updates will be overwritten.
3 | services:
4 | llamaswap:
5 | deploy:
6 | resources:
7 | reservations:
8 | devices:
9 | - driver: cdi
10 | capabilities: [gpu]
11 | device_ids:
12 | - nvidia.com/gpu=all
13 |
--------------------------------------------------------------------------------
/compose.x.llamaswap.nvidia.yml:
--------------------------------------------------------------------------------
1 | services:
2 | llamaswap:
3 | image: ${HARBOR_LLAMASWAP_IMAGE}:cuda
4 | deploy:
5 | resources:
6 | reservations:
7 | devices:
8 | - driver: nvidia
9 | count: all
10 | capabilities: [gpu]
--------------------------------------------------------------------------------
/compose.x.lmdeploy.cdi.yml:
--------------------------------------------------------------------------------
1 | # This file is generated by seed-cdi.ts script,
2 | # any updates will be overwritten.
3 | services:
4 | lmdeploy:
5 | deploy:
6 | resources:
7 | reservations:
8 | devices:
9 | - driver: cdi
10 | capabilities: [gpu]
11 | device_ids:
12 | - nvidia.com/gpu=all
13 |
--------------------------------------------------------------------------------
/compose.x.lmdeploy.nvidia.yml:
--------------------------------------------------------------------------------
1 | services:
2 | lmdeploy:
3 | deploy:
4 | resources:
5 | reservations:
6 | devices:
7 | - driver: nvidia
8 | count: all
9 | capabilities: [gpu]
--------------------------------------------------------------------------------
/compose.x.lmeval.cdi.yml:
--------------------------------------------------------------------------------
1 | # This file is generated by seed-cdi.ts script,
2 | # any updates will be overwritten.
3 | services:
4 | lmeval:
5 | deploy:
6 | resources:
7 | reservations:
8 | devices:
9 | - driver: cdi
10 | capabilities: [gpu]
11 | device_ids:
12 | - nvidia.com/gpu=all
13 |
--------------------------------------------------------------------------------
/compose.x.lmeval.nvidia.yml:
--------------------------------------------------------------------------------
1 | services:
2 | lmeval:
3 | deploy:
4 | resources:
5 | reservations:
6 | devices:
7 | - driver: nvidia
8 | count: all
9 | capabilities: [gpu]
--------------------------------------------------------------------------------
/compose.x.lobechat.ollama.yml:
--------------------------------------------------------------------------------
1 | services:
2 | lobechat:
3 | environment:
4 | - OLLAMA_PROXY_URL=${HARBOR_OLLAMA_INTERNAL_URL}
--------------------------------------------------------------------------------
/compose.x.localai.cdi.yml:
--------------------------------------------------------------------------------
1 | # This file is generated by seed-cdi.ts script,
2 | # any updates will be overwritten.
3 | services:
4 | localai:
5 | deploy:
6 | resources:
7 | reservations:
8 | devices:
9 | - driver: cdi
10 | capabilities: [gpu]
11 | device_ids:
12 | - nvidia.com/gpu=all
13 |
--------------------------------------------------------------------------------
/compose.x.localai.nvidia.yml:
--------------------------------------------------------------------------------
1 | services:
2 | localai:
3 | image: ${HARBOR_LOCALAI_IMAGE}:${HARBOR_LOCALAI_NVIDIA_VERSION}
4 | deploy:
5 | resources:
6 | reservations:
7 | devices:
8 | - driver: nvidia
9 | count: all
10 | capabilities: [gpu]
--------------------------------------------------------------------------------
/compose.x.localai.rocm.yml:
--------------------------------------------------------------------------------
1 | services:
2 | localai:
3 | image: ${HARBOR_LOCALAI_IMAGE}:${HARBOR_LOCALAI_ROCM_VERSION}
4 | devices:
5 | - /dev/kfd
6 | - /dev/dri
--------------------------------------------------------------------------------
/compose.x.mcpo.mcp-server-time.yml:
--------------------------------------------------------------------------------
1 | services:
2 | mcpo:
3 | volumes:
4 | - ./mcpo/configs/mcpo.mcp-server-time.json:/app/configs/mcpo.mcp-server-time.json
5 |
--------------------------------------------------------------------------------
/compose.x.mcpo.metamcp.yml:
--------------------------------------------------------------------------------
1 | services:
2 | mcpo:
3 | volumes:
4 | - ./mcpo/configs/mcpo.metamcp.json:/app/configs/mcpo.metamcp.json
5 | depends_on:
6 | metamcp-sse:
7 | condition: service_healthy
8 |
--------------------------------------------------------------------------------
/compose.x.mistralrs.cdi.yml:
--------------------------------------------------------------------------------
1 | # This file is generated by seed-cdi.ts script,
2 | # any updates will be overwritten.
3 | services:
4 | mistralrs:
5 | deploy:
6 | resources:
7 | reservations:
8 | devices:
9 | - driver: cdi
10 | capabilities: [gpu]
11 | device_ids:
12 | - nvidia.com/gpu=all
13 |
--------------------------------------------------------------------------------
/compose.x.mistralrs.nvidia.yml:
--------------------------------------------------------------------------------
1 | services:
2 | mistralrs:
3 | image: ghcr.io/ericlbuehler/mistral.rs:cuda-80-${HARBOR_MISTRALRS_VERSION}
4 | deploy:
5 | resources:
6 | reservations:
7 | devices:
8 | - driver: nvidia
9 | count: all
10 | capabilities: [gpu]
--------------------------------------------------------------------------------
/compose.x.morphic.ollama.yml:
--------------------------------------------------------------------------------
1 | services:
2 | morphic:
3 | environment:
4 | - OLLAMA_BASE_URL=${HARBOR_OLLAMA_INTERNAL_URL}
5 | - NEXT_PUBLIC_OLLAMA_MODEL=${HARBOR_MORPHIC_MODEL}
6 | - NEXT_PUBLIC_OLLAMA_TOOL_CALL_MODEL=${HARBOR_MORPHIC_TOOL_MODEL}
7 |
--------------------------------------------------------------------------------
/compose.x.nexa.cdi.yml:
--------------------------------------------------------------------------------
1 | # This file is generated by seed-cdi.ts script,
2 | # any updates will be overwritten.
3 | services:
4 | nexa:
5 | deploy:
6 | resources:
7 | reservations:
8 | devices:
9 | - driver: cdi
10 | capabilities: [gpu]
11 | device_ids:
12 | - nvidia.com/gpu=all
13 |
--------------------------------------------------------------------------------
/compose.x.nexa.nvidia.yml:
--------------------------------------------------------------------------------
1 | services:
2 | nexa:
3 | build:
4 | # This is a CUDA-enabled override for the base
5 | # image that is CPU-only
6 | args:
7 | - HARBOR_NEXA_IMAGE=nvidia/cuda:12.4.0-base-ubuntu22.04
8 | deploy:
9 | resources:
10 | reservations:
11 | devices:
12 | - driver: nvidia
13 | count: all
14 | capabilities: [gpu]
--------------------------------------------------------------------------------
/compose.x.ollama.cdi.yml:
--------------------------------------------------------------------------------
1 | # This file is generated by seed-cdi.ts script,
2 | # any updates will be overwritten.
3 | services:
4 | ollama:
5 | deploy:
6 | resources:
7 | reservations:
8 | devices:
9 | - driver: cdi
10 | capabilities: [gpu]
11 | device_ids:
12 | - nvidia.com/gpu=all
13 |
--------------------------------------------------------------------------------
/compose.x.ollama.nvidia.yml:
--------------------------------------------------------------------------------
1 | services:
2 | ollama:
3 | deploy:
4 | resources:
5 | reservations:
6 | devices:
7 | - driver: nvidia
8 | count: all
9 | capabilities: [gpu]
--------------------------------------------------------------------------------
/compose.x.ollama.rocm.yml:
--------------------------------------------------------------------------------
1 | services:
2 | ollama:
3 | devices:
4 | - /dev/kfd
5 | - /dev/dri
6 | image: ollama/ollama:rocm
7 |
--------------------------------------------------------------------------------
/compose.x.ollama.webui.yml:
--------------------------------------------------------------------------------
1 | services:
2 | webui:
3 | environment:
4 | - OLLAMA_BASE_URL=${HARBOR_OLLAMA_INTERNAL_URL}
--------------------------------------------------------------------------------
/compose.x.openhands.ollama.yml:
--------------------------------------------------------------------------------
1 | services:
2 | openhands:
3 | environment:
4 | - LLM_BASE_URL=${HARBOR_OLLAMA_INTERNAL_URL}
--------------------------------------------------------------------------------
/compose.x.opint.aphrodite.yml:
--------------------------------------------------------------------------------
1 | services:
2 | opint:
3 | entrypoint: >
4 | interpreter
5 | --api_base http://aphrodite:2242/v1
6 | --api_key sk-aphrodite
7 | ${HARBOR_OPINT_CMD}
--------------------------------------------------------------------------------
/compose.x.opint.litellm.yml:
--------------------------------------------------------------------------------
1 | services:
2 | opint:
3 | entrypoint: >
4 | interpreter
5 | --api_base http://litellm:4000/v1
6 | --api_key ${HARBOR_LITELLM_MASTER_KEY}
7 | ${HARBOR_OPINT_CMD}
--------------------------------------------------------------------------------
/compose.x.opint.llamacpp.yml:
--------------------------------------------------------------------------------
1 | services:
2 | opint:
3 | entrypoint: >
4 | interpreter
5 | --api_base http://llamacpp:8080/v1
6 | --api_key sk-llamacpp
7 | ${HARBOR_OPINT_CMD}
--------------------------------------------------------------------------------
/compose.x.opint.mistralrs.yml:
--------------------------------------------------------------------------------
1 | services:
2 | opint:
3 | entrypoint: >
4 | interpreter
5 | --api_base http://mistralrs:8021/v1
6 | --api_key sk-mistralrs
7 | ${HARBOR_OPINT_CMD}
--------------------------------------------------------------------------------
/compose.x.opint.ollama.yml:
--------------------------------------------------------------------------------
1 | services:
2 | opint:
3 | entrypoint: >
4 | interpreter
5 | --api_base ${HARBOR_OLLAMA_INTERNAL_URL}/v1
6 | --api_key sk-ollama
7 | ${HARBOR_OPINT_CMD}
--------------------------------------------------------------------------------
/compose.x.opint.tabbyapi.yml:
--------------------------------------------------------------------------------
1 | services:
2 | opint:
3 | entrypoint: >
4 | interpreter
5 | --api_base http://tabbyapi:5000/v1
6 | --api_key ${HARBOR_TABBYAPI_ADMIN_KEY}
7 | ${HARBOR_OPINT_CMD}
--------------------------------------------------------------------------------
/compose.x.opint.vllm.yml:
--------------------------------------------------------------------------------
1 | services:
2 | opint:
3 | entrypoint: >
4 | interpreter
5 | --api_base http://vllm:8000/v1
6 | --api_key sk-vllm
7 | ${HARBOR_OPINT_CMD}
--------------------------------------------------------------------------------
/compose.x.optillm.airllm.yml:
--------------------------------------------------------------------------------
1 | services:
2 | optillm:
3 | environment:
4 | - OPTILLM_BASE_URL=http://airllm:5000/v1
5 | - OPENAI_API_KEY=sk-airllm
--------------------------------------------------------------------------------
/compose.x.optillm.aphrodite.yml:
--------------------------------------------------------------------------------
1 | services:
2 | optillm:
3 | environment:
4 | - OPTILLM_BASE_URL=http://aphrodite:2242/v1
5 | - OPENAI_API_KEY=sk-aphrodite
--------------------------------------------------------------------------------
/compose.x.optillm.boost.yml:
--------------------------------------------------------------------------------
1 | services:
2 | optillm:
3 | environment:
4 | - OPTILLM_BASE_URL=http://boost:8000/v1
5 | - OPENAI_API_KEY=${HARBOR_BOOST_API_KEY}
--------------------------------------------------------------------------------
/compose.x.optillm.cdi.yml:
--------------------------------------------------------------------------------
1 | # This file is generated by seed-cdi.ts script,
2 | # any updates will be overwritten.
3 | services:
4 | optillm:
5 | deploy:
6 | resources:
7 | reservations:
8 | devices:
9 | - driver: cdi
10 | capabilities: [gpu]
11 | device_ids:
12 | - nvidia.com/gpu=all
13 |
--------------------------------------------------------------------------------
/compose.x.optillm.dify.yml:
--------------------------------------------------------------------------------
1 | services:
2 | optillm:
3 | environment:
4 | - OPTILLM_BASE_URL=http://dify-openai:3000/v1
5 | - OPENAI_API_KEY=${HARBOR_DIFY_OPENAI_WORKFLOW}
--------------------------------------------------------------------------------
/compose.x.optillm.ktransformers.yml:
--------------------------------------------------------------------------------
1 | services:
2 | optillm:
3 | environment:
4 | - OPTILLM_BASE_URL=http://ktransformers:12456/v1
5 | - OPENAI_API_KEY=sk-ktransformers
--------------------------------------------------------------------------------
/compose.x.optillm.litellm.yml:
--------------------------------------------------------------------------------
1 | services:
2 | optillm:
3 | environment:
4 | - OPTILLM_BASE_URL=http://litellm:4000/v1
5 | - OPENAI_API_KEY=${HARBOR_LITELLM_MASTER_KEY}
--------------------------------------------------------------------------------
/compose.x.optillm.llamacpp.yml:
--------------------------------------------------------------------------------
1 | services:
2 | optillm:
3 | environment:
4 | - OPTILLM_BASE_URL=http://llamacpp:8080/v1
5 | - OPENAI_API_KEY=sk-llamacpp
--------------------------------------------------------------------------------
/compose.x.optillm.mistralrs.yml:
--------------------------------------------------------------------------------
1 | services:
2 | optillm:
3 | environment:
4 | - OPTILLM_BASE_URL=http://mistralrs:8021/v1
5 | - OPENAI_API_KEY=sk-mistralrs
--------------------------------------------------------------------------------
/compose.x.optillm.nexa.yml:
--------------------------------------------------------------------------------
1 | services:
2 | optillm:
3 | environment:
4 | - OPTILLM_BASE_URL=http://nexa-proxy:8000/v1
5 | - OPENAI_API_KEY=sk-nexa
--------------------------------------------------------------------------------
/compose.x.optillm.nvidia.yml:
--------------------------------------------------------------------------------
1 | services:
2 | optillm:
3 | deploy:
4 | resources:
5 | reservations:
6 | devices:
7 | - driver: nvidia
8 | count: all
9 | capabilities: [gpu]
--------------------------------------------------------------------------------
/compose.x.optillm.ollama.yml:
--------------------------------------------------------------------------------
1 | services:
2 | optillm:
3 | environment:
4 | - OPTILLM_BASE_URL=${HARBOR_OLLAMA_INTERNAL_URL}/v1
5 | - OPENAI_API_KEY=sk-ollama
--------------------------------------------------------------------------------
/compose.x.optillm.omnichain.yml:
--------------------------------------------------------------------------------
1 | services:
2 | optillm:
3 | environment:
4 | - OPTILLM_BASE_URL=http://omnichain:34082/v1
5 | - OPENAI_API_KEY=sk-omnichain
--------------------------------------------------------------------------------
/compose.x.optillm.pipelines.yml:
--------------------------------------------------------------------------------
1 | services:
2 | optillm:
3 | environment:
4 | - OPTILLM_BASE_URL=http://pipelines:9099
5 | - OPENAI_API_KEY=${HARBOR_PIPELINES_API_KEY}
--------------------------------------------------------------------------------
/compose.x.optillm.sglang.yml:
--------------------------------------------------------------------------------
1 | services:
2 | optillm:
3 | environment:
4 | - OPTILLM_BASE_URL=http://sglang:30000/v1
5 | - OPENAI_API_KEY=sk-sglang
--------------------------------------------------------------------------------
/compose.x.optillm.tabbyapi.yml:
--------------------------------------------------------------------------------
1 | services:
2 | optillm:
3 | environment:
4 | - OPTILLM_BASE_URL=http://tabbyapi:5000/v1
5 | - OPENAI_API_KEY=${HARBOR_TABBYAPI_ADMIN_KEY}
--------------------------------------------------------------------------------
/compose.x.optillm.vllm.yml:
--------------------------------------------------------------------------------
1 | services:
2 | optillm:
3 | environment:
4 | - OPTILLM_BASE_URL=http://vllm:8000/v1
5 | - OPENAI_API_KEY=sk-vllm
--------------------------------------------------------------------------------
/compose.x.oterm.ollama.yml:
--------------------------------------------------------------------------------
1 | services:
2 | oterm:
3 | environment:
4 | - OLLAMA_URL=${HARBOR_OLLAMA_INTERNAL_URL}
--------------------------------------------------------------------------------
/compose.x.parler.cdi.yml:
--------------------------------------------------------------------------------
1 | # This file is generated by seed-cdi.ts script,
2 | # any updates will be overwritten.
3 | services:
4 | parler:
5 | deploy:
6 | resources:
7 | reservations:
8 | devices:
9 | - driver: cdi
10 | capabilities: [gpu]
11 | device_ids:
12 | - nvidia.com/gpu=all
13 |
--------------------------------------------------------------------------------
/compose.x.parler.nvidia.yml:
--------------------------------------------------------------------------------
1 | services:
2 | parler:
3 | deploy:
4 | resources:
5 | reservations:
6 | devices:
7 | - driver: nvidia
8 | count: all
9 | capabilities: [gpu]
--------------------------------------------------------------------------------
/compose.x.parllama.ollama.yml:
--------------------------------------------------------------------------------
1 | services:
2 | parllama:
3 | environment:
4 | - OLLAMA_URL=${HARBOR_OLLAMA_INTERNAL_URL}
--------------------------------------------------------------------------------
/compose.x.perplexica.ollama.yml:
--------------------------------------------------------------------------------
1 | services:
2 | perplexica-be:
3 | environment:
4 | - OLLAMA_API_ENDPOINT=${HARBOR_OLLAMA_INTERNAL_URL}
--------------------------------------------------------------------------------
/compose.x.perplexica.searxng.yml:
--------------------------------------------------------------------------------
1 | services:
2 | perplexica-be:
3 | environment:
4 | - SEARXNG_API_ENDPOINT=${HARBOR_SEARXNG_INTERNAL_URL}
--------------------------------------------------------------------------------
/compose.x.perplexideez.ollama.yml:
--------------------------------------------------------------------------------
1 | services:
2 | perplexideez:
3 | environment:
4 | - LLM_MODE="ollama"
5 | - OLLAMA_URL=${HARBOR_OLLAMA_INTERNAL_URL}
--------------------------------------------------------------------------------
/compose.x.perplexideez.searxng.yml:
--------------------------------------------------------------------------------
1 | services:
2 | perplexideez:
3 | environment:
4 | - SEARXNG_URL=${HARBOR_SEARXNG_INTERNAL_URL}
--------------------------------------------------------------------------------
/compose.x.plandex.litellm.yml:
--------------------------------------------------------------------------------
1 | services:
2 | plandex:
3 | environment:
4 | # Point to LiteLLM
5 | - OPENAI_API_KEY=${HARBOR_LITELLM_MASTER_KEY}
6 | - OPENAI_API_BASE=http://litellm:4000/v1
--------------------------------------------------------------------------------
/compose.x.plandex.llamacpp.yml:
--------------------------------------------------------------------------------
1 | services:
2 | plandex:
3 | environment:
4 | # Point to llamacpp
5 | # Llamacpp will ignore the API key, but
6 | # it's required for the OpenAI SDK
7 | - OPENAI_API_KEY=sk-llamacpp
8 | - OPENAI_API_BASE=http://llamacpp:8080/v1
--------------------------------------------------------------------------------
/compose.x.plandex.ollama.yml:
--------------------------------------------------------------------------------
1 | services:
2 | plandex:
3 | environment:
4 | # Point to ollama
5 | # Ollama will ignore the API key, but
6 | # it's required for the OpenAI SDK
7 | - OPENAI_API_KEY=sk-ollama
8 | - OPENAI_API_BASE=${HARBOR_OLLAMA_INTERNAL_URL}/v1
--------------------------------------------------------------------------------
/compose.x.promptfoo.ollama.yml:
--------------------------------------------------------------------------------
1 | services:
2 | promptfoo:
3 | environment:
4 | - OLLAMA_BASE_URL=${HARBOR_OLLAMA_INTERNAL_URL}
5 | - OLLAMA_API_KEY=sk-ollama
--------------------------------------------------------------------------------
/compose.x.raglite.ollama.yml:
--------------------------------------------------------------------------------
1 | services:
2 | raglite:
3 | environment:
4 | - OLLAMA_API_BASE=${HARBOR_OLLAMA_INTERNAL_URL}
--------------------------------------------------------------------------------
/compose.x.raglite.vllm.yml:
--------------------------------------------------------------------------------
1 | services:
2 | raglite:
3 | environment:
4 | - VLLM_BASE_URL=http://vllm:8000
--------------------------------------------------------------------------------
/compose.x.sglang.cdi.yml:
--------------------------------------------------------------------------------
1 | # This file is generated by seed-cdi.ts script,
2 | # any updates will be overwritten.
3 | services:
4 | sglang:
5 | deploy:
6 | resources:
7 | reservations:
8 | devices:
9 | - driver: cdi
10 | capabilities: [gpu]
11 | device_ids:
12 | - nvidia.com/gpu=all
13 |
--------------------------------------------------------------------------------
/compose.x.sglang.nvidia.yml:
--------------------------------------------------------------------------------
1 | services:
2 | sglang:
3 | deploy:
4 | resources:
5 | reservations:
6 | devices:
7 | - driver: nvidia
8 | count: all
9 | capabilities: [gpu]
--------------------------------------------------------------------------------
/compose.x.speaches.cdi.yml:
--------------------------------------------------------------------------------
1 | # This file is generated by seed-cdi.ts script,
2 | # any updates will be overwritten.
3 | services:
4 | speaches:
5 | deploy:
6 | resources:
7 | reservations:
8 | devices:
9 | - driver: cdi
10 | capabilities: [gpu]
11 | device_ids:
12 | - nvidia.com/gpu=all
13 |
--------------------------------------------------------------------------------
/compose.x.sqlchat.ollama.yml:
--------------------------------------------------------------------------------
1 | services:
2 | sqlchat:
3 | environment:
4 | - OPENAI_API_ENDPOINT=${HARBOR_OLLAMA_INTERNAL_URL}/v1
5 | - OPENAI_API_KEY=sk-ollama
--------------------------------------------------------------------------------
/compose.x.stt.cdi.yml:
--------------------------------------------------------------------------------
1 | # This file is generated by seed-cdi.ts script,
2 | # any updates will be overwritten.
3 | services:
4 | stt:
5 | deploy:
6 | resources:
7 | reservations:
8 | devices:
9 | - driver: cdi
10 | capabilities: [gpu]
11 | device_ids:
12 | - nvidia.com/gpu=all
13 |
--------------------------------------------------------------------------------
/compose.x.stt.nvidia.yml:
--------------------------------------------------------------------------------
1 | services:
2 | stt:
3 | image: fedirz/faster-whisper-server:${HARBOR_STT_VERSION}-cuda
4 | deploy:
5 | resources:
6 | reservations:
7 | devices:
8 | - driver: nvidia
9 | count: all
10 | capabilities: [gpu]
--------------------------------------------------------------------------------
/compose.x.tabbyapi.cdi.yml:
--------------------------------------------------------------------------------
1 | # This file is generated by seed-cdi.ts script,
2 | # any updates will be overwritten.
3 | services:
4 | tabbyapi:
5 | deploy:
6 | resources:
7 | reservations:
8 | devices:
9 | - driver: cdi
10 | capabilities: [gpu]
11 | device_ids:
12 | - nvidia.com/gpu=all
13 |
--------------------------------------------------------------------------------
/compose.x.tabbyapi.nvidia.yml:
--------------------------------------------------------------------------------
1 | services:
2 | tabbyapi:
3 | environment:
4 | - NVIDIA_VISIBLE_DEVICES=all
5 | deploy:
6 | resources:
7 | reservations:
8 | devices:
9 | - driver: nvidia
10 | count: all
11 | capabilities: [gpu]
--------------------------------------------------------------------------------
/compose.x.textgrad.cdi.yml:
--------------------------------------------------------------------------------
1 | # This file is generated by seed-cdi.ts script,
2 | # any updates will be overwritten.
3 | services:
4 | textgrad:
5 | deploy:
6 | resources:
7 | reservations:
8 | devices:
9 | - driver: cdi
10 | capabilities: [gpu]
11 | device_ids:
12 | - nvidia.com/gpu=all
13 |
--------------------------------------------------------------------------------
/compose.x.textgrad.nvidia.yml:
--------------------------------------------------------------------------------
1 | services:
2 | textgrad:
3 | deploy:
4 | resources:
5 | reservations:
6 | devices:
7 | - driver: nvidia
8 | count: all
9 | capabilities: [gpu]
--------------------------------------------------------------------------------
/compose.x.tgi.cdi.yml:
--------------------------------------------------------------------------------
1 | # This file is generated by seed-cdi.ts script,
2 | # any updates will be overwritten.
3 | services:
4 | tgi:
5 | deploy:
6 | resources:
7 | reservations:
8 | devices:
9 | - driver: cdi
10 | capabilities: [gpu]
11 | device_ids:
12 | - nvidia.com/gpu=all
13 |
--------------------------------------------------------------------------------
/compose.x.tgi.nvidia.yml:
--------------------------------------------------------------------------------
1 | services:
2 | tgi:
3 | deploy:
4 | resources:
5 | reservations:
6 | devices:
7 | - driver: nvidia
8 | count: all
9 | capabilities: [gpu]
--------------------------------------------------------------------------------
/compose.x.traefik.nexa.yml:
--------------------------------------------------------------------------------
1 | # This file is generated by seed-traefik.ts script,
2 | # any updates will be overwritten.
3 | services:
4 | nexa:
5 | labels:
6 | - "traefik.enable=true"
7 |
--------------------------------------------------------------------------------
/compose.x.traefik.plandex.yml:
--------------------------------------------------------------------------------
1 | # This file is generated by seed-traefik.ts script,
2 | # any updates will be overwritten.
3 | services:
4 | plandex:
5 | labels:
6 | - "traefik.enable=true"
7 |
--------------------------------------------------------------------------------
/compose.x.tts.cdi.yml:
--------------------------------------------------------------------------------
1 | # This file is generated by seed-cdi.ts script,
2 | # any updates will be overwritten.
3 | services:
4 | tts:
5 | deploy:
6 | resources:
7 | reservations:
8 | devices:
9 | - driver: cdi
10 | capabilities: [gpu]
11 | device_ids:
12 | - nvidia.com/gpu=all
13 |
--------------------------------------------------------------------------------
/compose.x.tts.nvidia.yml:
--------------------------------------------------------------------------------
1 | services:
2 | tts:
3 | deploy:
4 | resources:
5 | reservations:
6 | devices:
7 | - driver: nvidia
8 | count: all
9 | capabilities: [gpu]
--------------------------------------------------------------------------------
/compose.x.txtairag.cdi.yml:
--------------------------------------------------------------------------------
1 | # This file is generated by seed-cdi.ts script,
2 | # any updates will be overwritten.
3 | services:
4 | txtairag:
5 | deploy:
6 | resources:
7 | reservations:
8 | devices:
9 | - driver: cdi
10 | capabilities: [gpu]
11 | device_ids:
12 | - nvidia.com/gpu=all
13 |
--------------------------------------------------------------------------------
/compose.x.txtairag.nvidia.yml:
--------------------------------------------------------------------------------
1 | services:
2 | txtairag:
3 | deploy:
4 | resources:
5 | reservations:
6 | devices:
7 | - driver: nvidia
8 | count: all
9 | capabilities: [gpu]
--------------------------------------------------------------------------------
/compose.x.txtairag.ollama.yml:
--------------------------------------------------------------------------------
1 | services:
2 | txtairag:
3 | environment:
4 | - OLLAMA_API_BASE=${HARBOR_OLLAMA_INTERNAL_URL}
5 | - LLM=ollama/${HARBOR_TXTAI_RAG_MODEL}
--------------------------------------------------------------------------------
/compose.x.vllm.cdi.yml:
--------------------------------------------------------------------------------
1 | # This file is generated by seed-cdi.ts script,
2 | # any updates will be overwritten.
3 | services:
4 | vllm:
5 | deploy:
6 | resources:
7 | reservations:
8 | devices:
9 | - driver: cdi
10 | capabilities: [gpu]
11 | device_ids:
12 | - nvidia.com/gpu=all
13 |
--------------------------------------------------------------------------------
/compose.x.vllm.nvidia.yml:
--------------------------------------------------------------------------------
1 | services:
2 | vllm:
3 | deploy:
4 | resources:
5 | reservations:
6 | devices:
7 | - driver: nvidia
8 | count: all
9 | capabilities: [gpu]
--------------------------------------------------------------------------------
/compose.x.webui.agent.yml:
--------------------------------------------------------------------------------
1 | services:
2 | webui:
3 | volumes:
4 | - ./open-webui/configs/config.agent.json:/app/configs/config.agent.json
--------------------------------------------------------------------------------
/compose.x.webui.airllm.yml:
--------------------------------------------------------------------------------
1 | services:
2 | webui:
3 | volumes:
4 | - ./open-webui/configs/config.airllm.json:/app/configs/config.airllm.json
--------------------------------------------------------------------------------
/compose.x.webui.aphrodite.yml:
--------------------------------------------------------------------------------
1 | services:
2 | webui:
3 | volumes:
4 | - ./open-webui/configs/config.aphrodite.json:/app/configs/config.aphrodite.json
--------------------------------------------------------------------------------
/compose.x.webui.boost.yml:
--------------------------------------------------------------------------------
1 | services:
2 | webui:
3 | volumes:
4 | - ./open-webui/configs/config.boost.json:/app/configs/config.boost.json
--------------------------------------------------------------------------------
/compose.x.webui.comfyui.yml:
--------------------------------------------------------------------------------
1 | services:
2 | webui:
3 | volumes:
4 | - ./open-webui/configs/config.comfyui.json:/app/configs/config.comfyui.json
--------------------------------------------------------------------------------
/compose.x.webui.dify.yml:
--------------------------------------------------------------------------------
1 | services:
2 | webui:
3 | volumes:
4 | - ./open-webui/configs/config.dify.json:/app/configs/config.dify.json
--------------------------------------------------------------------------------
/compose.x.webui.kobold.yml:
--------------------------------------------------------------------------------
1 | services:
2 | webui:
3 | volumes:
4 | - ./open-webui/configs/config.kobold.json:/app/configs/config.kobold.json
--------------------------------------------------------------------------------
/compose.x.webui.ktransformers.yml:
--------------------------------------------------------------------------------
1 | services:
2 | webui:
3 | volumes:
4 | - ./open-webui/configs/config.ktransformers.json:/app/configs/config.ktransformers.json
--------------------------------------------------------------------------------
/compose.x.webui.litellm.yml:
--------------------------------------------------------------------------------
1 | services:
2 | webui:
3 | volumes:
4 | - ./open-webui/configs/config.litellm.json:/app/configs/config.litellm.json
--------------------------------------------------------------------------------
/compose.x.webui.llamacpp.yml:
--------------------------------------------------------------------------------
1 | services:
2 | webui:
3 | volumes:
4 | - ./open-webui/configs/config.llamacpp.json:/app/configs/config.llamacpp.json
--------------------------------------------------------------------------------
/compose.x.webui.llamaswap.yml:
--------------------------------------------------------------------------------
1 | services:
2 | webui:
3 | volumes:
4 | - ./open-webui/configs/config.llamaswap.json:/app/configs/config.llamaswap.json
--------------------------------------------------------------------------------
/compose.x.webui.mcpo.metamcp.yml:
--------------------------------------------------------------------------------
1 | services:
2 | webui:
3 | volumes:
4 | - ./open-webui/configs/config.x.mcpo.metamcp.json:/app/configs/config.x.mcpo.metamcp.json
--------------------------------------------------------------------------------
/compose.x.webui.mistralrs.yml:
--------------------------------------------------------------------------------
1 | services:
2 | webui:
3 | volumes:
4 | - ./open-webui/configs/config.mistralrs.json:/app/configs/config.mistralrs.json
--------------------------------------------------------------------------------
/compose.x.webui.modularmax.yml:
--------------------------------------------------------------------------------
1 | services:
2 | webui:
3 | volumes:
4 | - ./open-webui/configs/config.modularmax.json:/app/configs/config.modularmax.json
--------------------------------------------------------------------------------
/compose.x.webui.nexa.yml:
--------------------------------------------------------------------------------
1 | services:
2 | webui:
3 | volumes:
4 | - ./open-webui/configs/config.nexa.json:/app/configs/config.nexa.json
--------------------------------------------------------------------------------
/compose.x.webui.ollama.yml:
--------------------------------------------------------------------------------
1 | services:
2 | webui:
3 | volumes:
4 | - ./open-webui/configs/config.ollama.json:/app/configs/config.ollama.json
--------------------------------------------------------------------------------
/compose.x.webui.omnichain.yml:
--------------------------------------------------------------------------------
1 | services:
2 | webui:
3 | volumes:
4 | - ./open-webui/configs/config.omnichain.json:/app/configs/config.omnichain.json
--------------------------------------------------------------------------------
/compose.x.webui.optillm.yml:
--------------------------------------------------------------------------------
1 | services:
2 | webui:
3 | volumes:
4 | - ./open-webui/configs/config.optillm.json:/app/configs/config.optillm.json
--------------------------------------------------------------------------------
/compose.x.webui.parler.yml:
--------------------------------------------------------------------------------
1 | services:
2 | webui:
3 | volumes:
4 | - ./open-webui/configs/config.parler.json:/app/configs/config.parler.json
--------------------------------------------------------------------------------
/compose.x.webui.pipelines.yml:
--------------------------------------------------------------------------------
1 | services:
2 | webui:
3 | volumes:
4 | - ./open-webui/configs/config.pipelines.json:/app/configs/config.pipelines.json
5 |
--------------------------------------------------------------------------------
/compose.x.webui.searxng.ollama.yml:
--------------------------------------------------------------------------------
1 | services:
2 | webui:
3 | volumes:
4 | - ./open-webui/configs/config.x.searxng.ollama.json:/app/configs/config.x.searxng.ollama.json
--------------------------------------------------------------------------------
/compose.x.webui.searxng.yml:
--------------------------------------------------------------------------------
1 | services:
2 | webui:
3 | volumes:
4 | - ./open-webui/configs/config.searxng.json:/app/configs/config.searxng.json
5 | environment:
6 | - ENABLE_RAG_WEB_SEARCH=true
7 | - RAG_WEB_SEARCH_ENGINE=searxng
8 | - SEARXNG_QUERY_URL=${HARBOR_SEARXNG_INTERNAL_URL}/search?q=
--------------------------------------------------------------------------------
/compose.x.webui.sglang.yml:
--------------------------------------------------------------------------------
1 | services:
2 | webui:
3 | volumes:
4 | - ./open-webui/configs/config.sglang.json:/app/configs/config.sglang.json
--------------------------------------------------------------------------------
/compose.x.webui.speaches.yml:
--------------------------------------------------------------------------------
1 | services:
2 | webui:
3 | volumes:
4 | # This needs to override default tts settings if run together, hence ".z."
5 | - ./open-webui/configs/config.speaches.json:/app/configs/config.z.speaches.json
--------------------------------------------------------------------------------
/compose.x.webui.stt.yml:
--------------------------------------------------------------------------------
1 | services:
2 | webui:
3 | volumes:
4 | # This needs to override default tts settings if run together, hence ".z."
5 | - ./open-webui/configs/config.stt.json:/app/configs/config.z.stt.json
--------------------------------------------------------------------------------
/compose.x.webui.tabbyapi.yml:
--------------------------------------------------------------------------------
1 | services:
2 | webui:
3 | volumes:
4 | - ./open-webui/configs/config.tabbyapi.json:/app/configs/config.tabbyapi.json
--------------------------------------------------------------------------------
/compose.x.webui.tts.yml:
--------------------------------------------------------------------------------
1 | services:
2 | webui:
3 | volumes:
4 | - ./open-webui/configs/config.tts.json:/app/configs/config.tts.json
--------------------------------------------------------------------------------
/compose.x.webui.vllm.yml:
--------------------------------------------------------------------------------
1 | services:
2 | webui:
3 | volumes:
4 | - ./open-webui/configs/config.vllm.json:/app/configs/config.vllm.json
--------------------------------------------------------------------------------
/compose.yml:
--------------------------------------------------------------------------------
1 | # Harbor works by combining multiple
2 | # compose files together and is orchestrated by the CLI.
3 | # See README.md for more information.
4 | # If you want to obtain your own configuration, see `harbor eject`.
5 |
6 | networks:
7 | harbor-network:
8 | external: false
9 |
--------------------------------------------------------------------------------
/dify/nginx/https.conf.template:
--------------------------------------------------------------------------------
1 | # Please do not directly edit this file. Instead, modify the .env variables related to NGINX configuration.
2 |
3 | listen ${NGINX_SSL_PORT} ssl;
4 | ssl_certificate ${SSL_CERTIFICATE_PATH};
5 | ssl_certificate_key ${SSL_CERTIFICATE_KEY_PATH};
6 | ssl_protocols ${NGINX_SSL_PROTOCOLS};
7 | ssl_prefer_server_ciphers on;
8 | ssl_session_cache shared:SSL:10m;
9 | ssl_session_timeout 10m;
--------------------------------------------------------------------------------
/dify/nginx/proxy.conf.template:
--------------------------------------------------------------------------------
1 | # Please do not directly edit this file. Instead, modify the .env variables related to NGINX configuration.
2 |
3 | proxy_set_header Host $host;
4 | proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
5 | proxy_set_header X-Forwarded-Proto $scheme;
6 | proxy_http_version 1.1;
7 | proxy_set_header Connection "";
8 | proxy_buffering off;
9 | proxy_read_timeout ${NGINX_PROXY_READ_TIMEOUT};
10 | proxy_send_timeout ${NGINX_PROXY_SEND_TIMEOUT};
11 |
--------------------------------------------------------------------------------
/dify/nginx/ssl/.gitkeep:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/dify/nginx/ssl/.gitkeep
--------------------------------------------------------------------------------
/dify/openai/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM node:20
2 |
3 | WORKDIR /dify2openai
4 |
5 | COPY . .
6 | RUN npm install
7 |
8 | ENTRYPOINT [ "node", "app.js" ]
--------------------------------------------------------------------------------
/dify/openai/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "@av/dify2openai",
3 | "description": "Dify <-> OpenAI proxy",
4 | "version": "0.0.1",
5 | "main": "app.js",
6 | "type": "module",
7 | "keywords": [],
8 | "author": "av",
9 | "license": "MIT",
10 | "dependencies": {
11 | "body-parser": "^1.20.2",
12 | "dotenv": "^16.3.1",
13 | "express": "^4.18.2",
14 | "node-fetch": "^3.3.2"
15 | }
16 | }
--------------------------------------------------------------------------------
/docs/2.2.10-Backend&colon-lmdeploy.md:
--------------------------------------------------------------------------------
1 | ### [lmdeploy](https://lmdeploy.readthedocs.io/en/latest/get_started.html)
2 |
3 | > Handle: `lmdeploy`
4 | > URL: WIP
5 |
6 | A toolkit for deploying, and serving LLMs.
7 |
8 | WIP
--------------------------------------------------------------------------------
/docs/_Footer.md:
--------------------------------------------------------------------------------
1 | [Home](./Home) | [CLI Reference](./3.-Harbor-CLI-Reference) | [Services](./2.-Services) | [Adding New Service](./7.-Adding-A-New-Service) | [Compatibility](./4.-Compatibility)
--------------------------------------------------------------------------------
/docs/anythingllm.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/docs/anythingllm.png
--------------------------------------------------------------------------------
/docs/bench-report.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/docs/bench-report.png
--------------------------------------------------------------------------------
/docs/bench-superset.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/docs/bench-superset.png
--------------------------------------------------------------------------------
/docs/bionicgpt.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/docs/bionicgpt.png
--------------------------------------------------------------------------------
/docs/bolt-local-qwen.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/docs/bolt-local-qwen.png
--------------------------------------------------------------------------------
/docs/bolt-openailike.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/docs/bolt-openailike.png
--------------------------------------------------------------------------------
/docs/boost-behavior.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/docs/boost-behavior.png
--------------------------------------------------------------------------------
/docs/boost-custom-example.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/docs/boost-custom-example.png
--------------------------------------------------------------------------------
/docs/boost-dot.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/docs/boost-dot.png
--------------------------------------------------------------------------------
/docs/boost-markov.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/docs/boost-markov.png
--------------------------------------------------------------------------------
/docs/boost-r0.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/docs/boost-r0.png
--------------------------------------------------------------------------------
/docs/boost-unstable.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/docs/boost-unstable.png
--------------------------------------------------------------------------------
/docs/boost.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/docs/boost.png
--------------------------------------------------------------------------------
/docs/chatnio-searxng.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/docs/chatnio-searxng.png
--------------------------------------------------------------------------------
/docs/chatui-searxng.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/docs/chatui-searxng.png
--------------------------------------------------------------------------------
/docs/dify-harbor.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/docs/dify-harbor.png
--------------------------------------------------------------------------------
/docs/dify-ollama.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/docs/dify-ollama.png
--------------------------------------------------------------------------------
/docs/dify-searxng.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/docs/dify-searxng.png
--------------------------------------------------------------------------------
/docs/dify-webui.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/docs/dify-webui.png
--------------------------------------------------------------------------------
/docs/flowise.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/docs/flowise.png
--------------------------------------------------------------------------------
/docs/g1-reasoning.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/docs/g1-reasoning.png
--------------------------------------------------------------------------------
/docs/gptme.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/docs/gptme.png
--------------------------------------------------------------------------------
/docs/harbor-2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/docs/harbor-2.png
--------------------------------------------------------------------------------
/docs/harbor-agentzero.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/docs/harbor-agentzero.png
--------------------------------------------------------------------------------
/docs/harbor-airllm.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/docs/harbor-airllm.png
--------------------------------------------------------------------------------
/docs/harbor-airweave.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/docs/harbor-airweave.png
--------------------------------------------------------------------------------
/docs/harbor-app-2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/docs/harbor-app-2.png
--------------------------------------------------------------------------------
/docs/harbor-app-3.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/docs/harbor-app-3.png
--------------------------------------------------------------------------------
/docs/harbor-app-4.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/docs/harbor-app-4.png
--------------------------------------------------------------------------------
/docs/harbor-app-5.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/docs/harbor-app-5.png
--------------------------------------------------------------------------------
/docs/harbor-app-6.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/docs/harbor-app-6.png
--------------------------------------------------------------------------------
/docs/harbor-app-7.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/docs/harbor-app-7.png
--------------------------------------------------------------------------------
/docs/harbor-app-8.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/docs/harbor-app-8.png
--------------------------------------------------------------------------------
/docs/harbor-app-macos.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/docs/harbor-app-macos.png
--------------------------------------------------------------------------------
/docs/harbor-app.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/docs/harbor-app.png
--------------------------------------------------------------------------------
/docs/harbor-arch-diag.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/docs/harbor-arch-diag.png
--------------------------------------------------------------------------------
/docs/harbor-boost.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/docs/harbor-boost.png
--------------------------------------------------------------------------------
/docs/harbor-history.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/docs/harbor-history.gif
--------------------------------------------------------------------------------
/docs/harbor-k6.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/docs/harbor-k6.png
--------------------------------------------------------------------------------
/docs/harbor-latentscope.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/docs/harbor-latentscope.png
--------------------------------------------------------------------------------
/docs/harbor-ldr.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/docs/harbor-ldr.png
--------------------------------------------------------------------------------
/docs/harbor-libretranslate.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/docs/harbor-libretranslate.png
--------------------------------------------------------------------------------
/docs/harbor-metamcp.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/docs/harbor-metamcp.png
--------------------------------------------------------------------------------
/docs/harbor-morphic.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/docs/harbor-morphic.png
--------------------------------------------------------------------------------
/docs/harbor-ollama-models.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/docs/harbor-ollama-models.png
--------------------------------------------------------------------------------
/docs/harbor-oterm.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/docs/harbor-oterm.png
--------------------------------------------------------------------------------
/docs/harbor-raglite.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/docs/harbor-raglite.png
--------------------------------------------------------------------------------
/docs/harbor-tools.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/docs/harbor-tools.png
--------------------------------------------------------------------------------
/docs/harbor-top.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/docs/harbor-top.png
--------------------------------------------------------------------------------
/docs/harbor-traefik.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/docs/harbor-traefik.png
--------------------------------------------------------------------------------
/docs/harbor-webtop.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/docs/harbor-webtop.png
--------------------------------------------------------------------------------
/docs/harbor-webui-json.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/docs/harbor-webui-json.png
--------------------------------------------------------------------------------
/docs/image.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/docs/image.png
--------------------------------------------------------------------------------
/docs/ktransformers-webui.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/docs/ktransformers-webui.png
--------------------------------------------------------------------------------
/docs/langflow.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/docs/langflow.png
--------------------------------------------------------------------------------
/docs/langfuse.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/docs/langfuse.png
--------------------------------------------------------------------------------
/docs/litlytics-config.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/docs/litlytics-config.png
--------------------------------------------------------------------------------
/docs/litlytics.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/docs/litlytics.png
--------------------------------------------------------------------------------
/docs/mcp-inspector.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/docs/mcp-inspector.png
--------------------------------------------------------------------------------
/docs/mcpo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/docs/mcpo.png
--------------------------------------------------------------------------------
/docs/mikupad.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/docs/mikupad.png
--------------------------------------------------------------------------------
/docs/nexa-webui-error.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/docs/nexa-webui-error.png
--------------------------------------------------------------------------------
/docs/nexa-webui-workaround.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/docs/nexa-webui-workaround.png
--------------------------------------------------------------------------------
/docs/ol1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/docs/ol1.png
--------------------------------------------------------------------------------
/docs/omnichain-import.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/docs/omnichain-import.png
--------------------------------------------------------------------------------
/docs/omnichain.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/docs/omnichain.png
--------------------------------------------------------------------------------
/docs/omniparser.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/docs/omniparser.png
--------------------------------------------------------------------------------
/docs/openhands-config.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/docs/openhands-config.png
--------------------------------------------------------------------------------
/docs/optillm.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/docs/optillm.png
--------------------------------------------------------------------------------
/docs/parllama.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/docs/parllama.png
--------------------------------------------------------------------------------
/docs/plandex-exec.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/docs/plandex-exec.png
--------------------------------------------------------------------------------
/docs/promptfoo-2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/docs/promptfoo-2.png
--------------------------------------------------------------------------------
/docs/promptfoo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/docs/promptfoo.png
--------------------------------------------------------------------------------
/docs/qr.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/docs/qr.png
--------------------------------------------------------------------------------
/docs/seaxng-webrag.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/docs/seaxng-webrag.png
--------------------------------------------------------------------------------
/docs/sqlchat.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/docs/sqlchat.png
--------------------------------------------------------------------------------
/docs/tunnels.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/docs/tunnels.png
--------------------------------------------------------------------------------
/docs/txtairag.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/docs/txtairag.png
--------------------------------------------------------------------------------
/docs/webui-boost-status.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/docs/webui-boost-status.png
--------------------------------------------------------------------------------
/docs/webui-pipelines-2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/docs/webui-pipelines-2.png
--------------------------------------------------------------------------------
/docs/webui-pipelines.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/docs/webui-pipelines.png
--------------------------------------------------------------------------------
/fabric/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM golang:1.23.4
2 |
3 | WORKDIR /app
4 | RUN go install github.com/danielmiessler/fabric@latest
5 |
6 | ENTRYPOINT [ "fabric" ]
7 |
8 |
--------------------------------------------------------------------------------
/fabric/override.env:
--------------------------------------------------------------------------------
1 | # This file can be used for additional environment variables
2 | # specifically for the fabric service.
3 | # You can also use the "harbor env" command to set these variables.
4 |
--------------------------------------------------------------------------------
/flowise/.gitignore:
--------------------------------------------------------------------------------
1 | data/
--------------------------------------------------------------------------------
/flowise/override.env:
--------------------------------------------------------------------------------
1 |
2 | # This file can be used for additional environment variables
3 | # specifically for the 'flowise' service.
4 | # You can also use the "harbor env" command to set these variables.
5 |
--------------------------------------------------------------------------------
/gptme/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM python:3.11
2 | RUN pip install gptme
3 |
4 | ENTRYPOINT [ "gptme" ]
--------------------------------------------------------------------------------
/gptme/override.env:
--------------------------------------------------------------------------------
1 | # This file can be used for additional environment variables
2 | # specifically for the 'gptme' service.
3 | # You can also use the "harbor env" command to set these variables.
4 |
--------------------------------------------------------------------------------
/gum/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM busybox:1.35.0-uclibc as busybox
2 | COPY --from=ghcr.io/charmbracelet/gum /usr/local/bin/gum /usr/local/bin/gum
3 |
4 | ENTRYPOINT [ "/usr/local/bin/gum" ]
--------------------------------------------------------------------------------
/hf/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM python:3.11
2 |
3 | RUN pip install -U huggingface_hub[cli]
4 |
5 | ENTRYPOINT [ "huggingface-cli" ]
--------------------------------------------------------------------------------
/hf/override.env:
--------------------------------------------------------------------------------
1 | # This file can be used for additional environment variables
2 | # specifically for the HF CLI.
3 | # You can also use the "harbor env" command to set these variables.
4 |
--------------------------------------------------------------------------------
/hfdownloader/Dockerfile:
--------------------------------------------------------------------------------
1 | # Kitbash docker image for running the HuggingFaceModelDownloader CLI
2 | # https://github.com/bodaay/HuggingFaceModelDownloader
3 |
4 | FROM ubuntu:22.04
5 | SHELL ["/bin/bash", "-c"]
6 | RUN apt-get update && apt-get install -y curl
7 |
8 | WORKDIR /app
9 | RUN bash <(curl -sSL https://g.bodaay.io/hfd) -h
10 | ENTRYPOINT ["/app/hfdownloader"]
11 |
--------------------------------------------------------------------------------
/hfdownloader/override.env:
--------------------------------------------------------------------------------
1 | # This file can be used for additional environment variables
2 | # specifically for the HuggingFaceDownloader CLI.
3 | # You can also use the "harbor env" command to set these variables.
4 |
--------------------------------------------------------------------------------
/http-catalog/comfyui.http:
--------------------------------------------------------------------------------
1 | @host = http://localhost:{{HARBOR_COMFYUI_HOST_PORT}}
2 |
3 | ###
4 |
5 | GET {{host}}
6 | Authorization: Bearer sk-comfyui
--------------------------------------------------------------------------------
/http-catalog/hf.http:
--------------------------------------------------------------------------------
1 | @host = https://huggingface.co
2 |
3 | ###
4 |
5 | curl {{host}}/api/models
6 |
7 | ###
8 |
9 | curl {{host}}/models-json?search=gguf&sort=trending
10 |
11 | ###
12 |
13 | curl {{host}}/api/models-tags-by-type
14 |
15 | ###
16 |
17 | curl {{host}}/lmstudio-community/Meta-Llama-3.1-8B-Instruct-GGUF
18 |
--------------------------------------------------------------------------------
/http-catalog/ktransformers.http:
--------------------------------------------------------------------------------
1 | @host = http://localhost:34121
2 |
3 | ###
4 |
5 | curl {{host}}/v1/models
6 |
7 | ###
8 |
9 | curl {{host}}/v1/chat/completions -H 'Content-Type: application/json' -H "Authorization: Bearer sk-fake" -d '{
10 | "model": "anything",
11 | "messages": [
12 | {
13 | "role": "user",
14 | "content": "Bobby was born in Paris. How old is Bobby?"
15 | }
16 | ],
17 | "max_tokens": 30
18 | }'
--------------------------------------------------------------------------------
/http-catalog/langfuse.http:
--------------------------------------------------------------------------------
1 | @host = http://localhost:33881
2 |
3 | ###
4 |
5 | curl {{host}}/api/public/health
--------------------------------------------------------------------------------
/http-catalog/llamaswap.http:
--------------------------------------------------------------------------------
1 | @host = http://localhost:{{HARBOR_LLAMASWAP_HOST_PORT}}
2 |
3 | ###
4 |
5 | GET {{host}}/v1/models
6 |
7 | ###
8 |
9 | POST {{host}}/v1/chat/completions
10 | Content-Type: application/json
11 |
12 | {
13 | "model": "llamacpp-cache",
14 | "messages": [
15 | {"role": "user", "content": "How many heads Girrafes have?"}
16 | ],
17 | "options": {
18 | "temperature": 0.2
19 | },
20 | "stream": false
21 | }
--------------------------------------------------------------------------------
/http-catalog/modularmax.http:
--------------------------------------------------------------------------------
1 | @host = http://localhost:34471
2 |
3 | ### Get Models
4 |
5 | GET {{host}}/v1/models
6 |
7 | ### Chat Completion
8 |
9 | POST {{host}}/v1/chat/completions
10 | Content-Type: application/json
11 | Authorization: Bearer sk-modularmax
12 |
13 | {
14 | "model": "cognitivecomputations/Dolphin3.0-Qwen2.5-3b",
15 | "messages": [
16 | {"role": "user", "content": "Suggest me a random color"}
17 | ],
18 | "temperature": 0.1
19 | }
--------------------------------------------------------------------------------
/http-catalog/nexa.http:
--------------------------------------------------------------------------------
1 | @host = http://localhost:34181
2 |
3 |
4 | ###
5 |
6 | GET {{host}}/health
7 |
8 | ###
9 |
10 | GET {{host}}/v1/models
11 |
12 | ###
13 |
14 | POST {{host}}/v1/chat/completions
15 | Content-Type: application/json
16 | Authorization: sk-fake
17 |
18 | {
19 | "model": "anything",
20 | "messages": [
21 | {"role": "user", "content": "How many heads Girrafes have?"}
22 | ],
23 | "options": {
24 | "temperature": 0.2
25 | },
26 | "stream": false
27 | }
--------------------------------------------------------------------------------
/http-catalog/perplexideez.http:
--------------------------------------------------------------------------------
1 | @host = http://localhost:34261
2 |
3 | ###
4 |
5 | POST /auth/signUp?email=perplexideez%40gmail.com&password=perplexideez&username=perplexideez
6 | Content-Type: application/x-www-form-urlencoded
7 |
8 | ###
9 |
10 | GET /
--------------------------------------------------------------------------------
/http-catalog/plandex.http:
--------------------------------------------------------------------------------
1 | @host = http://localhost:33941
2 |
3 | ###
4 |
5 | curl {{host}}/health
--------------------------------------------------------------------------------
/http-catalog/sglang.http:
--------------------------------------------------------------------------------
1 | @host = http://localhost:34091
2 |
3 | ###
4 |
5 | curl {{host}}/v1/models
6 |
7 | ###
8 |
9 | curl {{host}}/v1/chat/completions -H 'Content-Type: application/json' -H "Authorization: Bearer sk-sglang" -d '{
10 | "model": "anything",
11 | "messages": [
12 | {
13 | "role": "user",
14 | "content": "Bobby was born in Paris. How old is Bobby?"
15 | }
16 | ],
17 | "max_tokens": 30
18 | }'
--------------------------------------------------------------------------------
/http-catalog/speaches.http:
--------------------------------------------------------------------------------
1 | @host=http://localhost:{{HARBOR_SPEACHES_HOST_PORT}}
2 |
3 | ###
4 |
5 | GET {{host}}/v1/models
6 |
7 | ###
8 |
9 | POST {{host}}/v1/audio/speech
10 | Content-Type: application/json
11 |
12 | {
13 | "voice": "af_heart",
14 | "input": "The quick brown fox jumped over the lazy dog."
15 | }
16 |
17 | ###
18 |
19 | GET {{host}}/v1/audio/speech/voices
--------------------------------------------------------------------------------
/http-catalog/tts.http:
--------------------------------------------------------------------------------
1 | @host = http://localhost:33861
2 |
3 | ===
4 |
5 | curl -s {{host}}/v1/audio/speech -H "Content-Type: application/json" -d '{
6 | "input": "The quick brown fox jumped over the lazy dog."}' > speech.mp3
--------------------------------------------------------------------------------
/http-catalog/webui.http:
--------------------------------------------------------------------------------
1 | @host = http://localhost:33801
2 |
3 | ###
4 |
5 | curl {{host}}/health
6 |
7 | ###
8 |
9 | curl {{host}}/api/models
--------------------------------------------------------------------------------
/jupyter/Dockerfile:
--------------------------------------------------------------------------------
1 | # Base image for parler/airllm/textgrad services, reusing
2 | ARG HARBOR_JUPYTER_IMAGE=pytorch/pytorch:2.3.0-cuda12.1-cudnn8-runtime
3 |
4 | FROM ${HARBOR_JUPYTER_IMAGE}
5 | ARG HARBOR_JUPYTER_EXTRA_DEPS=""
6 |
7 | WORKDIR /app
8 | RUN pip install jupyterlab
9 | RUN if [ "$HARBOR_JUPYTER_EXTRA_DEPS" = "" ] ; then \
10 | echo "No extra deps" ; \
11 | else \
12 | pip install $(echo ${HARBOR_JUPYTER_EXTRA_DEPS} | sed 's/;/ /g') ; \
13 | fi
--------------------------------------------------------------------------------
/jupyter/override.env:
--------------------------------------------------------------------------------
1 | # This file can be used for additional environment variables
2 | # specifically for the jupyter service.
3 | # You can also use the "harbor env" command to set these variables.
4 |
--------------------------------------------------------------------------------
/k6/.gitignore:
--------------------------------------------------------------------------------
1 | !scripts/
--------------------------------------------------------------------------------
/k6/grafana-dashboard.yaml:
--------------------------------------------------------------------------------
1 | apiVersion: 1
2 | providers:
3 | - name: 'default'
4 | org_id: 1
5 | folder: ''
6 | type: 'file'
7 | options:
8 | path: /var/lib/grafana/dashboards
--------------------------------------------------------------------------------
/k6/grafana-datasource.yaml:
--------------------------------------------------------------------------------
1 | apiVersion: 1
2 |
3 | datasources:
4 | - name: k6influxdb
5 | type: influxdb
6 | access: proxy
7 | database: k6
8 | url: http://k6-influxdb:8086
9 | isDefault: true
10 |
--------------------------------------------------------------------------------
/k6/override.env:
--------------------------------------------------------------------------------
1 | # This file can be used for additional environment variables
2 | # specifically for the K6 CLI.
3 | # You can also use the "harbor env" command to set these variables.
4 |
--------------------------------------------------------------------------------
/k6/scripts/example.js:
--------------------------------------------------------------------------------
1 | import http from 'k6/http';
2 |
3 | export const options = {
4 | iterations: 10,
5 | };
6 |
7 | export default function () {
8 | http.get('https://test-api.k6.io/public/crocodiles/');
9 | }
--------------------------------------------------------------------------------
/k6/scripts/helpers/config.js:
--------------------------------------------------------------------------------
1 | import { envNumber } from './utils.js';
2 |
3 | export default {
4 | runs: {
5 | vus: envNumber('VUS', 1),
6 | // Uses K6 time notation: "1m", "10s", etc.
7 | timeWait: __ENV.TIME_WAIT,
8 | timeRampUp: __ENV.TIME_RAMP_UP,
9 | timeLoad: __ENV.TIME_LOAD,
10 | timeRampDown: __ENV.TIME_RAMP_DOWN,
11 | },
12 | ollama: {
13 | url: __ENV.OLLAMA_API_URL,
14 | key: __ENV.OLLAMA_API_KEY,
15 | }
16 | };
17 |
--------------------------------------------------------------------------------
/k6/scripts/payloads/completions.js:
--------------------------------------------------------------------------------
1 | import { mergeDeep } from '../helpers/utils.js'
2 |
3 | export const fimCompletion = ({
4 | prefix = '',
5 | suffix = '',
6 | ...rest
7 | }) => {
8 | return mergeDeep({
9 | max_tokens: 512,
10 | temperature: 0,
11 | seed: 0,
12 | frequency_penalty: 1.25,
13 | prompt: `<|fim_prefix|>${prefix}<|fim_suffix|>${suffix}<|fim_middle|>`
14 | }, rest);
15 | }
--------------------------------------------------------------------------------
/kobold/.gitignore:
--------------------------------------------------------------------------------
1 | data/
--------------------------------------------------------------------------------
/kobold/override.env:
--------------------------------------------------------------------------------
1 | # This file can be used for additional environment variables
2 | # specifically for the 'kobold' service.
3 | # You can also use the "harbor env" command to set these variables.
4 | KCPP_DONT_REMOVE_MODELS=true
5 | KCPP_DONT_UPDATE=false
6 | KCPP_DONT_TUNNEL=true
--------------------------------------------------------------------------------
/ktransformers/override.env:
--------------------------------------------------------------------------------
1 | # This file can be used for additional environment variables
2 | # specifically for the "ktransformers" service.
3 | # You can also use the "harbor env" command to set these variables.
4 |
--------------------------------------------------------------------------------
/langflow/.gitignore:
--------------------------------------------------------------------------------
1 | data/
--------------------------------------------------------------------------------
/langflow/override.env:
--------------------------------------------------------------------------------
1 | LANGFLOW_DATABASE_URL=sqlite:////var/lib/langflow/database.db
2 | LANGFLOW_CONFIG_DIR=/var/lib/langflow
3 | LANGFLOW_HOST=0.0.0.0
4 | LANGFLOW_PORT=7860
5 | LANGFLOW_API_TITLE=Harbor Langflow API
6 | LANGFLOW_AUTO_LOGIN=true
7 | LANGFLOW_SUPERUSER=admin@admin.com
8 | LANGFLOW_SUPERUSER_PASSWORD=admin
9 | LANGFLOW_REMOVE_API_KEYS=false
10 | LANGFLOW_JWT_SECRET=harbor-generated-secret
11 | DO_NOT_TRACK=true
12 |
--------------------------------------------------------------------------------
/langfuse/.gitignore:
--------------------------------------------------------------------------------
1 | data/
--------------------------------------------------------------------------------
/langfuse/.gitkeep:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/langfuse/.gitkeep
--------------------------------------------------------------------------------
/latentscope/.gitignore:
--------------------------------------------------------------------------------
1 | data/
--------------------------------------------------------------------------------
/latentscope/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM python:3.12
2 |
3 | RUN pip install latentscope
4 |
5 | CMD ["ls-serve", "--port", "5001"]
--------------------------------------------------------------------------------
/latentscope/override.env:
--------------------------------------------------------------------------------
1 | # This file can be used for additional environment variables
2 | # specifically for the 'latentscope' service.
3 | # You can also use the "harbor env" command to set these variables.
4 |
--------------------------------------------------------------------------------
/ldr/.gitignore:
--------------------------------------------------------------------------------
1 | data/
--------------------------------------------------------------------------------
/ldr/override.env:
--------------------------------------------------------------------------------
1 | # This file can be used for additional environment variables
2 | # specifically for the 'ldr' service.
3 | # You can also use the "harbor env" command to set these variables.
4 |
--------------------------------------------------------------------------------
/librechat/.gitignore:
--------------------------------------------------------------------------------
1 | data/
2 | meili_data_v1.7/
3 | meili_data_v1.12/
4 | logs/
5 | vectordb/
6 | images/
7 |
--------------------------------------------------------------------------------
/librechat/.gitkeep:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/librechat/.gitkeep
--------------------------------------------------------------------------------
/librechat/override.env:
--------------------------------------------------------------------------------
1 | # This file can be used for additional environment variables
2 | # specifically for the "librechat" service.
3 | # You can also use the "harbor env" command to set these variables.
4 |
--------------------------------------------------------------------------------
/libretranslate/.gitignore:
--------------------------------------------------------------------------------
1 | data/
--------------------------------------------------------------------------------
/libretranslate/override.env:
--------------------------------------------------------------------------------
1 | # This file can be used for additional environment variables
2 | # specifically for the 'libretranslate' service.
3 | # You can also use the "harbor env" command to set these variables.
4 |
5 | LT_DEBUG="false"
6 | LT_UPDATE_MODELS="true"
7 | LT_SSL="false"
8 | LT_SUGGESTIONS="false"
9 | LT_METRICS="false"
10 | LT_HOST="0.0.0.0"
11 | LT_API_KEYS="false"
12 | LT_THREADS="12"
13 | LT_FRONTEND_TIMEOUT="2000"
14 |
--------------------------------------------------------------------------------
/litellm/litellm.config.yaml:
--------------------------------------------------------------------------------
1 | # Other configs will be merged here
2 | {}
--------------------------------------------------------------------------------
/litellm/litellm.langfuse.yaml:
--------------------------------------------------------------------------------
1 | litellm_settings:
2 | success_callback: ['langfuse']
--------------------------------------------------------------------------------
/litellm/litellm.optillm.yaml:
--------------------------------------------------------------------------------
1 | model_list:
2 | - model_name: optillm
3 | litellm_params:
4 | model: openai/llama3.1:8b
5 | api_base: http://optillm:8000/v1
6 | api_key: sk-optillm
7 |
--------------------------------------------------------------------------------
/litellm/litellm.tgi.yaml:
--------------------------------------------------------------------------------
1 | model_list:
2 | - model_name: tgi
3 | litellm_params:
4 | model: openai/huggingface/anymodel
5 | api_base: http://tgi:80/v1
6 | api_key: sk-tgi
--------------------------------------------------------------------------------
/litellm/litellm.vllm.yaml:
--------------------------------------------------------------------------------
1 | model_list:
2 | - model_name: vllm
3 | litellm_params:
4 | model: openai/microsoft/Phi-3.5-mini-instruct
5 | api_base: http://vllm:8000/v1
6 | api_key: sk-vllm
7 |
--------------------------------------------------------------------------------
/litellm/override.env:
--------------------------------------------------------------------------------
1 | # This file can be used for additional environment variables
2 | # specifically for the "litellm" service.
3 | # You can also use the "harbor env" command to set these variables.
4 |
--------------------------------------------------------------------------------
/litlytics/override.env:
--------------------------------------------------------------------------------
1 | # This file can be used for additional environment variables
2 | # specifically for the "litlytics" service.
3 | # You can also use the "harbor env" command to set these variables.
4 |
--------------------------------------------------------------------------------
/llamacpp/override.env:
--------------------------------------------------------------------------------
1 | # This file can be used for additional
2 | # environment variable overrides that will
3 | # only be visible to the llamacpp service
4 | # You can also use the "harbor env" command to set these variables.
--------------------------------------------------------------------------------
/llamaswap/override.env:
--------------------------------------------------------------------------------
1 | # This file can be used for additional environment variables
2 | # specifically for the 'llamaswap' service.
3 | # You can also use the "harbor env" command to set these variables.
4 |
--------------------------------------------------------------------------------
/lmeval/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM python:3.11
2 |
3 | WORKDIR /app
4 |
5 | RUN git clone https://github.com/EleutherAI/lm-evaluation-harness /app
6 | RUN pip install -e ".[hf_transfer,api]"
7 |
8 | ENTRYPOINT [ "lm_eval" ]
--------------------------------------------------------------------------------
/lmeval/override.env:
--------------------------------------------------------------------------------
1 | # This file can be used for additional environment variables
2 | # specifically for the "lmeval" service.
3 | # You can also use the "harbor env" command to set these variables.
4 |
--------------------------------------------------------------------------------
/localai/.gitignore:
--------------------------------------------------------------------------------
1 | data/
--------------------------------------------------------------------------------
/localai/override.env:
--------------------------------------------------------------------------------
1 | # This file can be used for additional environment variables
2 | # specifically for the 'localai' service.
3 | # You can also use the "harbor env" command to set these variables.
4 |
--------------------------------------------------------------------------------
/mcp/.gitignore:
--------------------------------------------------------------------------------
1 | cache/
--------------------------------------------------------------------------------
/mcp/override.env:
--------------------------------------------------------------------------------
1 | # This file can be used for additional environment variables
2 | # specifically for the 'mcp' service.
3 | # You can also use the "harbor env" command to set these variables.
4 |
--------------------------------------------------------------------------------
/mcpo/configs/mcpo.mcp-server-fetch.json:
--------------------------------------------------------------------------------
1 | {
2 | "mcpServers": {
3 | "fetch": {
4 | "command": "uvx",
5 | "args": [
6 | "mcp-server-fetch"
7 | ]
8 | }
9 | }
10 | }
--------------------------------------------------------------------------------
/mcpo/configs/mcpo.mcp-server-time.json:
--------------------------------------------------------------------------------
1 | {
2 | "mcpServers": {
3 | "time": {
4 | "command": "uvx",
5 | "args": [
6 | "mcp-server-time"
7 | ]
8 | }
9 | }
10 | }
--------------------------------------------------------------------------------
/mcpo/configs/mcpo.metamcp.json:
--------------------------------------------------------------------------------
1 | {
2 | "mcpServers": {
3 | "metamcp": {
4 | "command": "npx",
5 | "args": [
6 | "-y",
7 | "supergateway",
8 | "--sse",
9 | "http://metamcp-sse:12006/sse"
10 | ]
11 | }
12 | }
13 | }
--------------------------------------------------------------------------------
/mcpo/configs/mcpo.override.json:
--------------------------------------------------------------------------------
1 | {}
--------------------------------------------------------------------------------
/mcpo/override.env:
--------------------------------------------------------------------------------
1 | # This file can be used for additional environment variables
2 | # specifically for the 'mcpo' service.
3 | # You can also use the "harbor env" command to set these variables.
4 |
--------------------------------------------------------------------------------
/metamcp/.gitignore:
--------------------------------------------------------------------------------
1 | data/
--------------------------------------------------------------------------------
/metamcp/override.env:
--------------------------------------------------------------------------------
1 | # This file can be used for additional environment variables
2 | # specifically for the 'metamcp' service.
3 | # You can also use the "harbor env" command to set these variables.
4 |
5 | POSTGRES_DB=metatool
6 | POSTGRES_USER=metatool
7 | POSTGRES_PASSWORD=m3t4t00l
8 | DATABASE_URL=postgresql://metatool:m3t4t00l@metamcp-postgres:5432/metatool
9 | USE_DOCKER_HOST=true
10 | NODE_ENV=production
--------------------------------------------------------------------------------
/mikupad/override.env:
--------------------------------------------------------------------------------
1 | # This file can be used for additional environment variables
2 | # specifically for the 'mikupad' service.
3 | # You can also use the "harbor env" command to set these variables.
4 |
--------------------------------------------------------------------------------
/modularmax/override.env:
--------------------------------------------------------------------------------
1 | # This file can be used for additional environment variables
2 | # specifically for the 'modularmax' service.
3 | # You can also use the "harbor env" command to set these variables.
4 | HF_HUB_ENABLE_HF_TRANSFER=1
--------------------------------------------------------------------------------
/morphic/.gitignore:
--------------------------------------------------------------------------------
1 | redis/
--------------------------------------------------------------------------------
/n8n/.gitignore:
--------------------------------------------------------------------------------
1 | db/
2 | shared/
3 | storage/
--------------------------------------------------------------------------------
/n8n/backup/credentials/9LdDQI9lblNjIGIZ.json:
--------------------------------------------------------------------------------
1 | {
2 | "createdAt": "2024-10-27T12:40:43.596Z",
3 | "updatedAt": "2024-10-27T12:40:43.595Z",
4 | "id": "9LdDQI9lblNjIGIZ",
5 | "name": "Ollama account",
6 | "data": "U2FsdGVkX1+gq5jFqkLIZ1MN+mZfwTgjc3qA45RGENUK3ootyrLTvqL4cFFG1Uy80jokgSPborQRzYfywShQfA==",
7 | "type": "ollamaApi"
8 | }
--------------------------------------------------------------------------------
/n8n/override.env:
--------------------------------------------------------------------------------
1 | # This file can be used for additional environment variables
2 | # specifically for the 'n8n' service.
3 | # You can also use the "harbor env" command to set these variables.
4 | N8N_DIAGNOSTICS_ENABLED=false
5 | N8N_PERSONALIZATION_ENABLED=false
6 |
7 | # Harbor runs locally by default, so
8 | # this setting is more appropriate (esp. on Mac OS)
9 | N8N_SECURE_COOKIE=false
10 |
--------------------------------------------------------------------------------
/nexa/Dockerfile:
--------------------------------------------------------------------------------
1 | ARG HARBOR_NEXA_IMAGE=ubuntu:22.04
2 |
3 | FROM ${HARBOR_NEXA_IMAGE}
4 | ARG HARBOR_NEXA_IMAGE=ubuntu:22.04
5 |
6 | # This script coerces nexa into installing the CUDA
7 | # version when running with a CUDA base image
8 | COPY ./nvidia.sh /nvidia.sh
9 | RUN chmod +x /nvidia.sh && /nvidia.sh
10 |
11 | # Install nexa
12 | RUN apt-get update && apt-get install -y curl
13 | RUN curl -fsSL https://public-storage.nexa4ai.com/install.sh | sh
14 |
15 | ENTRYPOINT [ "nexa" ]
--------------------------------------------------------------------------------
/nexa/override.env:
--------------------------------------------------------------------------------
1 | # This file can be used for additional environment variables
2 | # specifically for the "nexa" service.
3 | # You can also use the "harbor env" command to set these variables.
4 |
--------------------------------------------------------------------------------
/nexa/proxy.Dockerfile:
--------------------------------------------------------------------------------
1 | FROM python:3.12
2 |
3 | WORKDIR /app
4 | RUN pip install fastapi uvicorn httpx
5 | COPY ./proxy_server.py /app/proxy_server.py
6 |
7 | CMD ["uvicorn", "proxy_server:app", "--host", "0.0.0.0", "--port", "8000", "--reload"]
--------------------------------------------------------------------------------
/ol1/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM python:3.11
2 |
3 | WORKDIR /app
4 | RUN git clone https://github.com/tcsenpai/ol1 /app
5 | RUN pip install python-dotenv streamlit requests
6 |
7 | ENTRYPOINT [ "streamlit", "run", "app.py" ]
--------------------------------------------------------------------------------
/ol1/README.md:
--------------------------------------------------------------------------------
1 | # ol1
2 |
3 | This is an adjusted version of:
4 | https://github.com/tcsenpai/ol1
5 |
6 | Which is a fork of the original:
7 | https://github.com/bklieger-groq/g1
--------------------------------------------------------------------------------
/ol1/override.env:
--------------------------------------------------------------------------------
1 | # This file can be used for additional environment variables
2 | # specifically for the "ol1" service.
3 | # You can also use the "harbor env" command to set these variables.
4 |
--------------------------------------------------------------------------------
/ollama/.gitkeep:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/ollama/.gitkeep
--------------------------------------------------------------------------------
/ollama/modelfiles/hargent.Modelfile:
--------------------------------------------------------------------------------
1 | # FROM hf.co/bartowski/Replete-LLM-V2.5-Qwen-7b-GGUF:Q6_K_L
2 | # FROM hf.co/bartowski/Qwen2.5-14B-Instruct-GGUF:Q4_K_M
3 | FROM hf.co/bartowski/Qwen2.5.1-Coder-7B-Instruct-GGUF:Q8_0
4 |
5 | PARAMETER num_ctx 32000
6 |
--------------------------------------------------------------------------------
/ollama/modelfiles/llama3.1_8b.Modelfile:
--------------------------------------------------------------------------------
1 | # Modelfile generated by "ollama show"
2 | FROM llama3.1:8b
3 | PARAMETER num_ctx 64000
4 |
--------------------------------------------------------------------------------
/ollama/modelfiles/llama3.1_q6k_48k.Modelfile:
--------------------------------------------------------------------------------
1 | # Modelfile generated by "ollama show"
2 | FROM llama3.1:8b-instruct-q6_K
3 | PARAMETER num_ctx 46000
4 |
--------------------------------------------------------------------------------
/ollama/modelfiles/qwen2.5_7b_q8_32k.Modelfile:
--------------------------------------------------------------------------------
1 | # Modelfile generated by "ollama show"
2 | FROM qwen2.5-coder:7b-instruct-q8_0
3 | PARAMETER num_ctx 32000
4 |
--------------------------------------------------------------------------------
/ollama/modelfiles/qwen2.5_7b_q8_48k.Modelfile:
--------------------------------------------------------------------------------
1 | # Modelfile generated by "ollama show"
2 | FROM qwen2.5-coder:7b-instruct-q8_0
3 | PARAMETER num_ctx 48000
4 |
--------------------------------------------------------------------------------
/ollama/modelfiles/qwen2.5_q6k_32k.Modelfile:
--------------------------------------------------------------------------------
1 | # Modelfile generated by "ollama show"
2 | FROM qwen2.5:14b-instruct-q6_K
3 | PARAMETER num_ctx 32000
4 |
--------------------------------------------------------------------------------
/ollama/override.env:
--------------------------------------------------------------------------------
1 | # This file can be used for additional environment variables
2 | # specifically for the "ollama" service.
3 | # You can also use the "harbor env" command to set these variables.
4 | OLLAMA_CONTEXT_LENGTH="4096"
5 | OLLAMA_NUM_PARALLEL="4"
--------------------------------------------------------------------------------
/omnichain/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM node:lts
2 |
3 | WORKDIR /app
4 | RUN git clone https://github.com/zenoverflow/omnichain && cd omnichain
5 |
6 | WORKDIR /app/omnichain
7 | RUN npm install
8 |
9 | COPY entrypoint.sh /app/omnichain/entrypoint.sh
10 |
11 | ENTRYPOINT [ "/app/omnichain/entrypoint.sh" ]
12 | CMD ["npm", "run", "serve"]
--------------------------------------------------------------------------------
/omnichain/override.env:
--------------------------------------------------------------------------------
1 | # This file can be used for additional environment variables
2 | # specifically for the "omnichain" service.
3 | # You can also use the "harbor env" command to set these variables.
4 |
--------------------------------------------------------------------------------
/omniparser/override.env:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/omniparser/override.env
--------------------------------------------------------------------------------
/open-webui/configs/config.agent.json:
--------------------------------------------------------------------------------
1 | {
2 | "openai": {
3 | "api_base_urls": [
4 | "http://agent:8000/v1"
5 | ],
6 | "api_keys": [
7 | "${HARBOR_AGENT_API_KEY}"
8 | ],
9 | "enabled": true
10 | }
11 | }
--------------------------------------------------------------------------------
/open-webui/configs/config.airllm.json:
--------------------------------------------------------------------------------
1 | {
2 | "openai": {
3 | "api_base_urls": [
4 | "http://airllm:5000/v1"
5 | ],
6 | "api_keys": [
7 | "sk-airllm"
8 | ],
9 | "enabled": true
10 | }
11 | }
--------------------------------------------------------------------------------
/open-webui/configs/config.aphrodite.json:
--------------------------------------------------------------------------------
1 | {
2 | "openai": {
3 | "api_base_urls": [
4 | "http://aphrodite:2242/v1"
5 | ],
6 | "api_keys": [
7 | "sk-aphrodite"
8 | ],
9 | "enabled": true
10 | }
11 | }
--------------------------------------------------------------------------------
/open-webui/configs/config.boost.json:
--------------------------------------------------------------------------------
1 | {
2 | "openai": {
3 | "api_base_urls": [
4 | "http://boost:8000/v1"
5 | ],
6 | "api_keys": [
7 | "${HARBOR_BOOST_API_KEY}"
8 | ],
9 | "enabled": true
10 | }
11 | }
--------------------------------------------------------------------------------
/open-webui/configs/config.dify.json:
--------------------------------------------------------------------------------
1 | {
2 | "openai": {
3 | "api_base_urls": [
4 | "http://dify-openai:3000/v1"
5 | ],
6 | "api_keys": [
7 | "${HARBOR_DIFY_OPENAI_WORKFLOW}"
8 | ],
9 | "enabled": true
10 | }
11 | }
--------------------------------------------------------------------------------
/open-webui/configs/config.json:
--------------------------------------------------------------------------------
1 | {}
--------------------------------------------------------------------------------
/open-webui/configs/config.kobold.json:
--------------------------------------------------------------------------------
1 | {
2 | "openai": {
3 | "api_base_urls": [
4 | "http://kobold:5001/v1"
5 | ],
6 | "api_keys": [
7 | "sk-kobold"
8 | ],
9 | "enabled": true
10 | }
11 | }
--------------------------------------------------------------------------------
/open-webui/configs/config.ktransformers.json:
--------------------------------------------------------------------------------
1 | {
2 | "openai": {
3 | "api_base_urls": [
4 | "http://ktransformers:12456/v1"
5 | ],
6 | "api_keys": [
7 | "sk-ktransformers"
8 | ],
9 | "enabled": true
10 | }
11 | }
--------------------------------------------------------------------------------
/open-webui/configs/config.litellm.json:
--------------------------------------------------------------------------------
1 | {
2 | "openai": {
3 | "api_base_urls": [
4 | "http://litellm:4000/v1"
5 | ],
6 | "api_keys": [
7 | "${HARBOR_LITELLM_MASTER_KEY}"
8 | ],
9 | "enabled": true
10 | }
11 | }
--------------------------------------------------------------------------------
/open-webui/configs/config.llamacpp.json:
--------------------------------------------------------------------------------
1 | {
2 | "openai": {
3 | "api_base_urls": [
4 | "http://llamacpp:8080/v1"
5 | ],
6 | "api_keys": [
7 | "sk-llamacpp"
8 | ],
9 | "enabled": true
10 | }
11 | }
--------------------------------------------------------------------------------
/open-webui/configs/config.llamaswap.json:
--------------------------------------------------------------------------------
1 | {
2 | "openai": {
3 | "api_base_urls": [
4 | "http://llamaswap:8080/v1"
5 | ],
6 | "api_keys": [
7 | "sk-llamaswap"
8 | ],
9 | "enabled": true
10 | }
11 | }
--------------------------------------------------------------------------------
/open-webui/configs/config.mistralrs.json:
--------------------------------------------------------------------------------
1 | {
2 | "openai": {
3 | "api_base_urls": [
4 | "http://mistralrs:8021/v1"
5 | ],
6 | "api_keys": [
7 | "sk-mistralrs"
8 | ],
9 | "enabled": true
10 | }
11 | }
--------------------------------------------------------------------------------
/open-webui/configs/config.modularmax.json:
--------------------------------------------------------------------------------
1 | {
2 | "openai": {
3 | "api_base_urls": [
4 | "http://modularmax:8000/v1"
5 | ],
6 | "api_keys": [
7 | "sk-modularmax"
8 | ],
9 | "enabled": true
10 | }
11 | }
12 |
--------------------------------------------------------------------------------
/open-webui/configs/config.nexa.json:
--------------------------------------------------------------------------------
1 | {
2 | "openai": {
3 | "api_base_urls": [
4 | "http://nexa-proxy:8000/v1"
5 | ],
6 | "api_keys": [
7 | "sk-nexa"
8 | ],
9 | "enabled": true
10 | }
11 | }
--------------------------------------------------------------------------------
/open-webui/configs/config.ollama.json:
--------------------------------------------------------------------------------
1 | {
2 | "ollama": {
3 | "base_urls": [
4 | "${HARBOR_OLLAMA_INTERNAL_URL}"
5 | ]
6 | }
7 | }
--------------------------------------------------------------------------------
/open-webui/configs/config.omnichain.json:
--------------------------------------------------------------------------------
1 | {
2 | "openai": {
3 | "api_base_urls": [
4 | "http://omnichain:34082/v1"
5 | ],
6 | "api_keys": [
7 | "sk-omnichain"
8 | ],
9 | "enabled": true
10 | }
11 | }
--------------------------------------------------------------------------------
/open-webui/configs/config.optillm.json:
--------------------------------------------------------------------------------
1 | {
2 | "openai": {
3 | "api_base_urls": [
4 | "http://optillm:8000/v1"
5 | ],
6 | "api_keys": [
7 | "sk-optillm"
8 | ],
9 | "enabled": true
10 | }
11 | }
--------------------------------------------------------------------------------
/open-webui/configs/config.override.json:
--------------------------------------------------------------------------------
1 | {
2 | "openai": {
3 | "api_base_urls": [
4 | "${...HARBOR_OPENAI_URLS}"
5 | ],
6 | "api_keys": [
7 | "${...HARBOR_OPENAI_KEYS}"
8 | ],
9 | "enabled": true
10 | }
11 | }
--------------------------------------------------------------------------------
/open-webui/configs/config.parler.json:
--------------------------------------------------------------------------------
1 | {
2 | "audio": {
3 | "tts": {
4 | "openai": {
5 | "api_base_url": "http://parler:8000/v1",
6 | "api_key": "sk-dummy-key"
7 | },
8 | "engine": "openai",
9 | "model": "${HARBOR_PARLER_MODEL}",
10 | "voice": "${HARBOR_PARLER_VOICE}"
11 | },
12 | "stt": {
13 | "engine": "",
14 | "model": "whisper-1"
15 | }
16 | }
17 | }
--------------------------------------------------------------------------------
/open-webui/configs/config.pipelines.json:
--------------------------------------------------------------------------------
1 | {
2 | "openai": {
3 | "api_base_urls": [
4 | "http://pipelines:9099"
5 | ],
6 | "api_keys": [
7 | "${HARBOR_PIPELINES_API_KEY}"
8 | ],
9 | "enabled": true
10 | }
11 | }
12 |
--------------------------------------------------------------------------------
/open-webui/configs/config.sglang.json:
--------------------------------------------------------------------------------
1 | {
2 | "openai": {
3 | "api_base_urls": [
4 | "http://sglang:30000/v1"
5 | ],
6 | "api_keys": [
7 | "sk-sglang"
8 | ],
9 | "enabled": true
10 | }
11 | }
--------------------------------------------------------------------------------
/open-webui/configs/config.stt.json:
--------------------------------------------------------------------------------
1 | {
2 | "audio": {
3 | "stt": {
4 | "openai": {
5 | "api_base_url": "http://stt:8000/v1",
6 | "api_key": "sk-stt"
7 | },
8 | "engine": "openai",
9 | "model": "${HARBOR_STT_MODEL}"
10 | }
11 | }
12 | }
--------------------------------------------------------------------------------
/open-webui/configs/config.tabbyapi.json:
--------------------------------------------------------------------------------
1 | {
2 | "openai": {
3 | "api_base_urls": [
4 | "http://tabbyapi:5000/v1"
5 | ],
6 | "api_keys": [
7 | "${HARBOR_TABBYAPI_ADMIN_KEY}"
8 | ],
9 | "enabled": true
10 | }
11 | }
--------------------------------------------------------------------------------
/open-webui/configs/config.tts.json:
--------------------------------------------------------------------------------
1 | {
2 | "audio": {
3 | "tts": {
4 | "openai": {
5 | "api_base_url": "http://tts:8000/v1",
6 | "api_key": "sk-dummy-key"
7 | },
8 | "engine": "openai",
9 | "model": "tts-1",
10 | "voice": "shimmer"
11 | },
12 | "stt": {
13 | "engine": "",
14 | "model": "whisper-1"
15 | }
16 | }
17 | }
--------------------------------------------------------------------------------
/open-webui/configs/config.vllm.json:
--------------------------------------------------------------------------------
1 | {
2 | "openai": {
3 | "api_base_urls": [
4 | "http://vllm:8000/v1"
5 | ],
6 | "api_keys": [
7 | "sk-vllm"
8 | ],
9 | "enabled": true
10 | }
11 | }
--------------------------------------------------------------------------------
/open-webui/configs/config.x.mcpo.metamcp.json:
--------------------------------------------------------------------------------
1 | {
2 | "tool_server": {
3 | "connections": [
4 | {
5 | "url": "http://harbor.mcpo:8000/metamcp",
6 | "path": "openapi.json",
7 | "auth_type": "bearer",
8 | "key": "",
9 | "config": {
10 | "enable": true,
11 | "access_control": null
12 | }
13 | }
14 | ]
15 | }
16 | }
--------------------------------------------------------------------------------
/open-webui/configs/config.x.searxng.ollama.json:
--------------------------------------------------------------------------------
1 | {
2 | "rag": {
3 | "embedding_engine": "ollama",
4 | "embedding_model": "mxbai-embed-large:latest"
5 | }
6 | }
--------------------------------------------------------------------------------
/open-webui/override.env:
--------------------------------------------------------------------------------
1 | # This file can be used for additional environment variables
2 | # specifically for the "webui" service.
3 | # You can also use the "harbor env" command to set these variables.
4 |
--------------------------------------------------------------------------------
/openhands/.gitignore:
--------------------------------------------------------------------------------
1 | state/
--------------------------------------------------------------------------------
/openhands/override.env:
--------------------------------------------------------------------------------
1 | # This file can be used for additional environment variables
2 | # specifically for the "openhands" service.
3 | # You can also use the "harbor env" command to set these variables.
4 |
--------------------------------------------------------------------------------
/openinterpreter/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM python:3.11
2 | RUN pip install open-interpreter
3 |
4 | ENTRYPOINT [ "interpreter" ]
--------------------------------------------------------------------------------
/openinterpreter/override.env:
--------------------------------------------------------------------------------
1 | # This file can be used for additional environment variables
2 | # specifically for the "opint" service and CLI.
3 | # You can also use the "harbor env" command to set these variables.
4 |
--------------------------------------------------------------------------------
/optillm/.gitignore:
--------------------------------------------------------------------------------
1 | data/
--------------------------------------------------------------------------------
/optillm/override.env:
--------------------------------------------------------------------------------
1 | # This file can be used for additional environment variables
2 | # specifically for the 'optillm' service.
3 | # You can also use the "harbor env" command to set these variables.
4 | OPTILLM_APPROACH="z3"
5 |
--------------------------------------------------------------------------------
/oterm/.gitignore:
--------------------------------------------------------------------------------
1 | data/
--------------------------------------------------------------------------------
/oterm/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM python:3.11
2 |
3 | RUN pip install oterm
4 | CMD oterm
--------------------------------------------------------------------------------
/oterm/override.env:
--------------------------------------------------------------------------------
1 | # This file can be used for additional environment variables
2 | # specifically for the 'oterm' service.
3 | # You can also use the "harbor env" command to set these variables.
4 |
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "@avcodes/harbor",
3 | "version": "0.3.12",
4 | "description": "Effortlessly run LLM backends, APIs, frontends, and services with one command.",
5 | "private": false,
6 | "author": "av (https://av.codes)",
7 | "bin": {
8 | "harbor": "./harbor.sh"
9 | },
10 | "homepage": "https://github.com/av/harbor",
11 | "license": "Apache-2.0"
12 | }
--------------------------------------------------------------------------------
/parler/override.env:
--------------------------------------------------------------------------------
1 | # This file can be used for additional environment variables
2 | # specifically for the "parler" service.
3 | # You can also use the "harbor env" command to set these variables.
4 |
--------------------------------------------------------------------------------
/parllama/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM pkgxdev/pkgx
2 |
3 | # Install required packages
4 | RUN pkgx install python@3.11 pipx openssl && \
5 | pipx install parllama
6 |
7 | RUN echo 'export LD_LIBRARY_PATH=$(find / -name "*.so" -exec dirname {} \; | sort -u | tr "\n" ":" | sed '\''s/:$//'\'')"${LD_LIBRARY_PATH:+:$LD_LIBRARY_PATH}"' >> ~/.bashrc
8 |
9 | CMD parllama
--------------------------------------------------------------------------------
/parllama/override.env:
--------------------------------------------------------------------------------
1 | # This file can be used for additional environment variables
2 | # specifically for the "parllama" service.
3 | # You can also use the "harbor env" command to set these variables.
4 |
--------------------------------------------------------------------------------
/perplexica/override.env:
--------------------------------------------------------------------------------
1 | # This file can be used for additional environment variables
2 | # specifically for the "perplexica" service.
3 | # You can also use the "harbor env" command to set these variables.
4 |
--------------------------------------------------------------------------------
/perplexica/source.config.toml:
--------------------------------------------------------------------------------
1 | # This config will be overridden by Perplexica on save
--------------------------------------------------------------------------------
/perplexideez/.gitignore:
--------------------------------------------------------------------------------
1 | pgdata/
--------------------------------------------------------------------------------
/pipelines/override.env:
--------------------------------------------------------------------------------
1 | # This file can be used for additional environment variables
2 | # specifically for the "pipelines" service and CLI.
3 | # You can also use the "harbor env" command to set these variables.
4 |
--------------------------------------------------------------------------------
/plandex/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM alpine
2 |
3 | RUN apk update
4 | RUN apk add bash
5 | RUN apk add curl
6 | RUN wget https://plandex.ai/install.sh
7 | RUN bash install.sh
8 |
9 | ENTRYPOINT plandex
--------------------------------------------------------------------------------
/plandex/override.env:
--------------------------------------------------------------------------------
1 | # This file can be used for additional environment variables
2 | # specifically for the "plandex" service and CLI.
3 | # You can also use the "harbor env" command to set these variables.
4 |
--------------------------------------------------------------------------------
/poetry.lock:
--------------------------------------------------------------------------------
1 | # This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand.
2 | package = []
3 |
4 | [metadata]
5 | lock-version = "2.0"
6 | python-versions = "3.12.2"
7 | content-hash = "a78b5a5c0d8c3dc580319e2ce5499c7988539642781ea96b0d7bfd8ad523b8cd"
8 |
--------------------------------------------------------------------------------
/promptfoo/.gitignore:
--------------------------------------------------------------------------------
1 | data/
--------------------------------------------------------------------------------
/promptfoo/examples/bias/README.md:
--------------------------------------------------------------------------------
1 | To get started, set your OPENAI_API_KEY environment variable, or other required keys for the providers you selected.
2 |
3 | Next, edit promptfooconfig.yaml.
4 |
5 | Then run:
6 | ```
7 | promptfoo eval
8 | ```
9 |
10 | Afterwards, you can view the results by running `promptfoo view`
11 |
--------------------------------------------------------------------------------
/promptfoo/examples/hello-promptfoo/README.md:
--------------------------------------------------------------------------------
1 | To get started, set your OPENAI_API_KEY environment variable, or other required keys for the providers you selected.
2 |
3 | Next, edit promptfooconfig.yaml.
4 |
5 | Then run:
6 | ```
7 | promptfoo eval
8 | ```
9 |
10 | Afterwards, you can view the results by running `promptfoo view`
11 |
--------------------------------------------------------------------------------
/promptfoo/examples/misguided/README.md:
--------------------------------------------------------------------------------
1 | To get started, set your OPENAI_API_KEY environment variable, or other required keys for the providers you selected.
2 |
3 | Next, edit promptfooconfig.yaml.
4 |
5 | Then run:
6 | ```
7 | promptfoo eval
8 | ```
9 |
10 | Afterwards, you can view the results by running `promptfoo view`
11 |
--------------------------------------------------------------------------------
/promptfoo/examples/temp-test/README.md:
--------------------------------------------------------------------------------
1 | To get started, set your OPENAI_API_KEY environment variable, or other required keys for the providers you selected.
2 |
3 | Next, edit promptfooconfig.yaml.
4 |
5 | Then run:
6 | ```
7 | promptfoo eval
8 | ```
9 |
10 | Afterwards, you can view the results by running `promptfoo view`
11 |
--------------------------------------------------------------------------------
/promptfoo/override.env:
--------------------------------------------------------------------------------
1 | # This file can be used for additional environment variables
2 | # specifically for the Promptfoo CLI.
3 | # You can also use the "harbor env" command to set these variables.
4 |
--------------------------------------------------------------------------------
/qdrant/override.env:
--------------------------------------------------------------------------------
1 | # This file can be used for additional environment variables
2 | # specifically for the "qdrant" service and CLI.
3 | # You can also use the "harbor env" command to set these variables.
4 |
--------------------------------------------------------------------------------
/qrgen/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM denoland/deno
2 |
3 | WORKDIR /app
4 |
5 | COPY ./gen.ts /app/gen.ts
6 | RUN deno cache /app/gen.ts
7 |
8 | # Use the explicit "run" subcommand: bare "deno gen.ts" is not a valid
9 | # invocation on older Deno releases, while "deno run" works everywhere.
10 | ENTRYPOINT [ "deno", "run", "gen.ts" ]
--------------------------------------------------------------------------------
/qrgen/gen.ts:
--------------------------------------------------------------------------------
1 | import qrcode from 'npm:qrcode-terminal';
2 |
3 | // Get the URL from the command line arguments
4 | const url = Deno.args[0];
5 |
6 | if (!url) {
7 |   // Usage goes to stderr so stdout stays clean; this is a Deno script,
8 |   // not Node, and the <url> placeholder was missing from the message.
9 |   console.error('Usage: deno run qrgen/gen.ts <url>');
10 |   Deno.exit(1);
11 | }
12 |
13 | console.log('QR Code:');
14 | qrcode.generate(url);
--------------------------------------------------------------------------------
/raglite/.gitignore:
--------------------------------------------------------------------------------
1 | data/
2 | chainlit/
--------------------------------------------------------------------------------
/raglite/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM python:3.11
2 |
3 | # Quote the extras spec so "[" is not treated as a shell glob pattern,
4 | # and skip pip's cache to keep the image smaller.
5 | RUN pip install --no-cache-dir "raglite[chainlit,pandoc,ragas]" && \
6 |     pip install --no-cache-dir --upgrade litellm
7 |
8 | # Exec form: the process runs without an sh wrapper and receives
9 | # container signals (e.g. SIGTERM) directly.
10 | CMD ["raglite", "chainlit"]
--------------------------------------------------------------------------------
/raglite/override.env:
--------------------------------------------------------------------------------
1 | # This file can be used for additional environment variables
2 | # specifically for the 'raglite' service.
3 | # You can also use the "harbor env" command to set these variables.
4 |
--------------------------------------------------------------------------------
/repopack/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM node:lts-slim
2 |
3 | RUN npm install repopack -g
4 |
5 | ENTRYPOINT ["repopack"]
6 |
--------------------------------------------------------------------------------
/repopack/override.env:
--------------------------------------------------------------------------------
1 | # This file can be used for additional environment variables
2 | # specifically for the "repopack" CLI.
3 | # You can also use the "harbor env" command to set these variables.
4 |
--------------------------------------------------------------------------------
/searxng/override.env:
--------------------------------------------------------------------------------
1 | # This file can be used for additional environment variables
2 | # specifically for the "searxng" service.
3 | # You can also use the "harbor env" command to set these variables.
4 |
--------------------------------------------------------------------------------
/shared/README.md:
--------------------------------------------------------------------------------
1 | # Shared
2 |
3 | This folder can contain artifacts that are abstract and can be added to services as additional utilities.
--------------------------------------------------------------------------------
/shared/harbor_entrypoint.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | set -eo pipefail
4 |
5 | # exec replaces this wrapper so harbor.sh becomes PID 1 and receives
6 | # container signals (e.g. SIGTERM on "docker stop") directly.
7 | exec /harbor/harbor.sh "$@"
--------------------------------------------------------------------------------
/speaches/override.env:
--------------------------------------------------------------------------------
1 | # This file can be used for additional environment variables
2 | # specifically for the "speaches" service.
3 | # You can also use the "harbor env" command to set these variables.
4 |
--------------------------------------------------------------------------------
/sqlchat/override.env:
--------------------------------------------------------------------------------
1 | # This file can be used for additional environment variables
2 | # specifically for the 'sqlchat' service.
3 | # You can also use the "harbor env" command to set these variables.
4 |
--------------------------------------------------------------------------------
/stt/override.env:
--------------------------------------------------------------------------------
1 | # This file can contain additional environment
2 | # variables for the "stt" (faster-whisper-server) service.
3 | # Config reference:
4 | # https://github.com/fedirz/faster-whisper-server/blob/master/src/faster_whisper_server/config.py
5 | # You can also use the "harbor env" command to set these variables.
6 |
--------------------------------------------------------------------------------
/supergateway/override.env:
--------------------------------------------------------------------------------
1 | # This file can be used for additional environment variables
2 | # specifically for the 'supergateway' service.
3 | # You can also use the "harbor env" command to set these variables.
4 |
--------------------------------------------------------------------------------
/tabbyapi/api_tokens.yml:
--------------------------------------------------------------------------------
1 | admin_key: ${HARBOR_TABBYAPI_ADMIN_KEY}
2 | api_key: ${HARBOR_TABBYAPI_API_KEY}
--------------------------------------------------------------------------------
/tabbyapi/override.env:
--------------------------------------------------------------------------------
1 | # This file can be used for additional environment variables
2 | # specifically for the "tabbyapi" service.
3 | # You can also use the "harbor env" command to set these variables.
4 |
--------------------------------------------------------------------------------
/tabbyapi/start_tabbyapi.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | echo "Harbor: Custom Tabby API Entrypoint"
4 | python --version
5 |
6 | python /app/yaml_config_merger.py --pattern ".yml" --output "/config.yml" --directory "/app/configs"
7 | python /app/yaml_config_merger.py --pattern ".yml" --output "/api_tokens.yml" --directory "/app/tokens"
8 |
9 | echo "Merged Configs:"
10 | cat /config.yml
11 |
12 | echo "Merged Tokens:"
13 | cat /api_tokens.yml
14 |
15 | # Original entrypoint. "$@" (quoted) preserves arguments that contain
16 | # spaces; exec lets the API process receive container signals directly.
17 | exec python3 /app/main.py "$@"
--------------------------------------------------------------------------------
/textgrad/Dockerfile:
--------------------------------------------------------------------------------
1 | # Reuses the base image shared by the parler/airllm/jupyter services
2 | FROM pytorch/pytorch:2.3.0-cuda12.1-cudnn8-runtime
3 |
4 | WORKDIR /app
5 | RUN pip install textgrad jupyterlab
--------------------------------------------------------------------------------
/textgrad/override.env:
--------------------------------------------------------------------------------
1 | # This file can be used for additional environment variables
2 | # specifically for the "textgrad" service.
3 | # You can also use the "harbor env" command to set these variables.
4 |
--------------------------------------------------------------------------------
/tgi/override.env:
--------------------------------------------------------------------------------
1 | # This file can be used for additional environment variables
2 | # specifically for the "tgi" service.
3 | # You can also use the "harbor env" command to set these variables.
4 |
--------------------------------------------------------------------------------
/tools/config.yaml:
--------------------------------------------------------------------------------
1 | tools:
2 | time:
3 | command: uvx
4 | args:
5 | - mcp-server-time
6 | openapi: true
7 | cwd: ~/code/harbor
8 |
9 |
10 |
11 |
--------------------------------------------------------------------------------
/traefik/acme.json:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/av/harbor/fa070dfb5c4050449a00375526e3ae1987978210/traefik/acme.json
--------------------------------------------------------------------------------
/traefik/override.env:
--------------------------------------------------------------------------------
1 | # This file can be used for additional environment variables
2 | # specifically for the "traefik" service.
3 | # You can also use the "harbor env" command to set these variables.
4 |
--------------------------------------------------------------------------------
/tts/override.env:
--------------------------------------------------------------------------------
1 | # This file can be used for additional environment variables
2 | # specifically for the "tts" (openedai speech) service.
3 | # You can also use the "harbor env" command to set these variables.
4 |
--------------------------------------------------------------------------------
/vllm/Dockerfile:
--------------------------------------------------------------------------------
1 | ARG HARBOR_VLLM_VERSION=latest
2 | FROM vllm/vllm-openai:${HARBOR_VLLM_VERSION}
3 |
4 | # Install:
5 | # - bitsandbytes for additional quantization support
6 | RUN pip install bitsandbytes
7 |
--------------------------------------------------------------------------------
/vllm/override.env:
--------------------------------------------------------------------------------
1 | # You can specify additional override environment variables
2 | # for vLLM here.
3 | # Official env vars reference:
4 | # https://docs.vllm.ai/en/latest/serving/env_vars.html
5 | # VLLM_RPC_GET_DATA_TIMEOUT_MS=120000
6 | # You can also use the "harbor env" command to set these variables.
7 |
--------------------------------------------------------------------------------
/webtop/.gitignore:
--------------------------------------------------------------------------------
1 | data/
--------------------------------------------------------------------------------
/webtop/init/fix_desktop_app.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # Update the desktop entry to use absolute path
4 | exec_patch="Exec=env PATH=$HOME/.local/bin:/config/.local/bin:$PATH harbor-app"
5 |
6 | # Escape characters that are special in a sed replacement ("&", "\")
7 | # or clash with the "|" delimiter — the expanded $PATH may contain any
8 | # of them and would otherwise break or corrupt the substitution.
9 | exec_patch_escaped=$(printf '%s' "$exec_patch" | sed -e 's/[&\\|]/\\&/g')
10 |
11 | echo "Updating Harbor.desktop"
12 |
13 | # Update original desktop entry
14 | sed -i "s|^Exec=harbor-app$|Exec=$exec_patch_escaped|" "/usr/share/applications/Harbor.desktop"
15 |
--------------------------------------------------------------------------------
/webtop/override.env:
--------------------------------------------------------------------------------
1 | # This file can be used for additional environment variables
2 | # specifically for the Webtop service.
3 | # You can also use the "harbor env" command to set these variables.
4 |
--------------------------------------------------------------------------------