├── .env.example ├── .gitattributes ├── .github └── ISSUE_TEMPLATE │ ├── bug_report.md │ ├── config.yml │ └── feature_request.md ├── .gitignore ├── Caddyfile ├── LICENSE ├── Local_RAG_AI_Agent_n8n_Workflow.json ├── README.md ├── assets └── n8n-demo.gif ├── docker-compose.yml ├── flowise ├── Web Search + n8n Agent Chatflow.json ├── create_google_doc-CustomTool.json ├── get_postgres_tables-CustomTool.json ├── send_slack_message_through_n8n-CustomTool.json └── summarize_slack_conversation-CustomTool.json ├── n8n-tool-workflows ├── Create_Google_Doc.json ├── Get_Postgres_Tables.json ├── Post_Message_to_Slack.json └── Summarize_Slack_Conversation.json ├── n8n └── backup │ └── workflows │ ├── V1_Local_RAG_AI_Agent.json │ ├── V2_Local_Supabase_RAG_AI_Agent.json │ └── V3_Local_Agentic_RAG_AI_Agent.json ├── n8n_pipe.py ├── searxng └── settings-base.yml └── start_services.py /.env.example: -------------------------------------------------------------------------------- 1 | # Change the name of this file to .env after updating it! 2 | 3 | ############ 4 | # [required] 5 | # n8n credentials - you set this to whatever you want, just make it a long and secure string for both! 6 | ############ 7 | 8 | N8N_ENCRYPTION_KEY=super-secret-key 9 | N8N_USER_MANAGEMENT_JWT_SECRET=even-more-secret 10 | 11 | 12 | ############ 13 | # [required] 14 | # Supabase Secrets 15 | 16 | # YOU MUST CHANGE THESE BEFORE GOING INTO PRODUCTION 17 | # Read these docs for any help: https://supabase.com/docs/guides/self-hosting/docker 18 | # For the JWT Secret and keys, see: https://supabase.com/docs/guides/self-hosting/docker#generate-api-keys 19 | # For the other secrets, see: https://supabase.com/docs/guides/self-hosting/docker#update-secrets 20 | # You can really decide any value for POOLER_TENANT_ID like 1000. 
21 | ############ 22 | 23 | POSTGRES_PASSWORD=your-super-secret-and-long-postgres-password 24 | JWT_SECRET=your-super-secret-jwt-token-with-at-least-32-characters-long 25 | ANON_KEY=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyAgCiAgICAicm9sZSI6ICJhbm9uIiwKICAgICJpc3MiOiAic3VwYWJhc2UtZGVtbyIsCiAgICAiaWF0IjogMTY0MTc2OTIwMCwKICAgICJleHAiOiAxNzk5NTM1NjAwCn0.dc_X5iR_VP_qT0zsiyj_I_OZ2T9FtRU2BBNWN8Bu4GE 26 | SERVICE_ROLE_KEY=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyAgCiAgICAicm9sZSI6ICJzZXJ2aWNlX3JvbGUiLAogICAgImlzcyI6ICJzdXBhYmFzZS1kZW1vIiwKICAgICJpYXQiOiAxNjQxNzY5MjAwLAogICAgImV4cCI6IDE3OTk1MzU2MDAKfQ.DaYlNEoUrrEn2Ig7tqibS-PHK5vgusbcbo7X36XVt4Q 27 | DASHBOARD_USERNAME=supabase 28 | DASHBOARD_PASSWORD=this_password_is_insecure_and_should_be_updated 29 | POOLER_TENANT_ID=your-tenant-id 30 | 31 | ############ 32 | # [required] 33 | # Langfuse credentials 34 | # Each of the secret keys you can set to whatever you want, just make it secure! 35 | # For the encryption key, use the command `openssl rand -hex 32` 36 | # openssl is available by default on Linux/Mac 37 | # For Windows, you can use the 'Git Bash' terminal installed with git 38 | ############ 39 | 40 | CLICKHOUSE_PASSWORD=super-secret-key-1 41 | MINIO_ROOT_PASSWORD=super-secret-key-2 42 | LANGFUSE_SALT=super-secret-key-3 43 | NEXTAUTH_SECRET=super-secret-key-4 44 | ENCRYPTION_KEY=generate-with-openssl # generate via `openssl rand -hex 32` 45 | 46 | ############ 47 | # [required for prod] 48 | # Caddy Config 49 | 50 | # By default listen on https://localhost:[service port] and don't use an email for SSL 51 | # To change this for production: 52 | # Uncomment all of these environment variables for the services you want exposed 53 | # Note that you might not want to expose Ollama or SearXNG since they aren't secured by default 54 | # Replace the placeholder value with the host for each service (like n8n.yourdomain.com) 55 | # Replace internal by your email (required to create a Let's Encrypt certificate) 56 | ############ 57 | 
58 | # N8N_HOSTNAME=n8n.yourdomain.com 59 | # WEBUI_HOSTNAME=openwebui.yourdomain.com 60 | # FLOWISE_HOSTNAME=flowise.yourdomain.com 61 | # SUPABASE_HOSTNAME=supabase.yourdomain.com 62 | # LANGFUSE_HOSTNAME=langfuse.yourdomain.com 63 | # OLLAMA_HOSTNAME=ollama.yourdomain.com 64 | # SEARXNG_HOSTNAME=searxng.yourdomain.com 65 | # LETSENCRYPT_EMAIL=internal 66 | 67 | 68 | 69 | # Everything below this point is optional. 70 | # Default values will suffice unless you need more features/customization. 71 | 72 | # 73 | # 74 | ####### 75 | ##### 76 | # 77 | 78 | ############ 79 | # Optional SearXNG Config 80 | # If you run a very small or a very large instance, you might want to change the amount of used uwsgi workers and threads per worker 81 | # More workers (= processes) means that more search requests can be handled at the same time, but it also causes more resource usage 82 | ############ 83 | 84 | # SEARXNG_UWSGI_WORKERS=4 85 | # SEARXNG_UWSGI_THREADS=4 86 | 87 | ############ 88 | # Database - You can change these to any PostgreSQL database that has logical replication enabled. 89 | ############ 90 | 91 | POSTGRES_HOST=db 92 | POSTGRES_DB=postgres 93 | POSTGRES_PORT=5432 94 | # default user is postgres 95 | 96 | 97 | ############ 98 | # Supavisor -- Database pooler and others that can be left as default values 99 | ############ 100 | POOLER_PROXY_PORT_TRANSACTION=6543 101 | POOLER_DEFAULT_POOL_SIZE=20 102 | POOLER_MAX_CLIENT_CONN=100 103 | SECRET_KEY_BASE=UpNVntn3cDxHJpq99YMc1T1AQgQpc8kfYTuRgBiYa15BLrx8etQoXz3gZv1/u2oq 104 | VAULT_ENC_KEY=your-32-character-encryption-key 105 | 106 | 107 | ############ 108 | # API Proxy - Configuration for the Kong Reverse proxy. 109 | ############ 110 | 111 | KONG_HTTP_PORT=8000 112 | KONG_HTTPS_PORT=8443 113 | 114 | 115 | ############ 116 | # API - Configuration for PostgREST. 
117 | ############ 118 | 119 | PGRST_DB_SCHEMAS=public,storage,graphql_public 120 | 121 | 122 | ############ 123 | # Auth - Configuration for the GoTrue authentication server. 124 | ############ 125 | 126 | ## General 127 | SITE_URL=http://localhost:3000 128 | ADDITIONAL_REDIRECT_URLS= 129 | JWT_EXPIRY=3600 130 | DISABLE_SIGNUP=false 131 | API_EXTERNAL_URL=http://localhost:8000 132 | 133 | ## Mailer Config 134 | MAILER_URLPATHS_CONFIRMATION="/auth/v1/verify" 135 | MAILER_URLPATHS_INVITE="/auth/v1/verify" 136 | MAILER_URLPATHS_RECOVERY="/auth/v1/verify" 137 | MAILER_URLPATHS_EMAIL_CHANGE="/auth/v1/verify" 138 | 139 | ## Email auth 140 | ENABLE_EMAIL_SIGNUP=true 141 | ENABLE_EMAIL_AUTOCONFIRM=false 142 | SMTP_ADMIN_EMAIL=admin@example.com 143 | SMTP_HOST=supabase-mail 144 | SMTP_PORT=2500 145 | SMTP_USER=fake_mail_user 146 | SMTP_PASS=fake_mail_password 147 | SMTP_SENDER_NAME=fake_sender 148 | ENABLE_ANONYMOUS_USERS=false 149 | 150 | ## Phone auth 151 | ENABLE_PHONE_SIGNUP=true 152 | ENABLE_PHONE_AUTOCONFIRM=true 153 | 154 | 155 | ############ 156 | # Studio - Configuration for the Dashboard 157 | ############ 158 | 159 | STUDIO_DEFAULT_ORGANIZATION=Default Organization 160 | STUDIO_DEFAULT_PROJECT=Default Project 161 | 162 | STUDIO_PORT=3000 163 | # replace if you intend to use Studio outside of localhost 164 | SUPABASE_PUBLIC_URL=http://localhost:8000 165 | 166 | # Enable webp support 167 | IMGPROXY_ENABLE_WEBP_DETECTION=true 168 | 169 | # Add your OpenAI API key to enable SQL Editor Assistant 170 | OPENAI_API_KEY= 171 | 172 | 173 | ############ 174 | # Functions - Configuration for Functions 175 | ############ 176 | # NOTE: VERIFY_JWT applies to all functions. Per-function VERIFY_JWT is not supported yet. 
177 | FUNCTIONS_VERIFY_JWT=false 178 | 179 | 180 | ############ 181 | # Logs - Configuration for Logflare 182 | # Please refer to https://supabase.com/docs/reference/self-hosting-analytics/introduction 183 | ############ 184 | 185 | LOGFLARE_LOGGER_BACKEND_API_KEY=your-super-secret-and-long-logflare-key 186 | 187 | # Change vector.toml sinks to reflect this change 188 | LOGFLARE_API_KEY=your-super-secret-and-long-logflare-key 189 | 190 | # Docker socket location - this value will differ depending on your OS 191 | DOCKER_SOCKET_LOCATION=/var/run/docker.sock 192 | 193 | # Google Cloud Project details 194 | GOOGLE_PROJECT_ID=GOOGLE_PROJECT_ID 195 | GOOGLE_PROJECT_NUMBER=GOOGLE_PROJECT_NUMBER -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | # Auto detect text files and perform LF normalization 2 | * text=auto 3 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/bug_report.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Bug Report 3 | about: Create a report to help improve the Local AI Package 4 | title: '[BUG] ' 5 | labels: bug 6 | assignees: '' 7 | --- 8 | 9 | ## Description 10 | A clear and concise description of the issue. 11 | 12 | ## Steps to Reproduce 13 | 1. Go to '...' 14 | 2. Click on '....' 15 | 3. Scroll down to '....' 16 | 4. See error 17 | 18 | ## Expected Behavior 19 | A clear and concise description of what you expected to happen. 20 | 21 | ## Actual Behavior 22 | A clear and concise description of what actually happened. 23 | 24 | ## Screenshots 25 | If applicable, add screenshots to help explain your problem. 26 | 27 | ## Environment 28 | - OS: [e.g. Windows 10, macOS Monterey, Ubuntu 22.04] 29 | - Using Docker Desktop, WSL, etc. 
30 | 31 | ## Additional Context 32 | Add any other context about the problem here, such as: 33 | - Does this happen consistently or intermittently? 34 | - Were there any recent changes that might be related? 35 | - Any workarounds you've discovered? 36 | 37 | ## Possible Solution 38 | If you have suggestions on how to fix the issue or what might be causing it. -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/config.yml: -------------------------------------------------------------------------------- 1 | blank_issues_enabled: false 2 | contact_links: 3 | - name: Local AI Community 4 | url: https://thinktank.ottomator.ai/c/local-ai/18 5 | about: Please ask questions and start conversations about the Local AI Package here in the oTTomator Think Tank! -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature_request.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Feature Request 3 | about: Suggest an idea for the Local AI Package 4 | title: '[FEATURE] ' 5 | labels: enhancement 6 | assignees: '' 7 | --- 8 | 9 | ## Describe the feature you'd like and why 10 | A clear and concise description of what you want to happen. 11 | 12 | ## User Impact 13 | Who would benefit from this feature and how? 14 | 15 | ## Implementation Details (optional) 16 | Any thoughts on how this might be implemented? 17 | 18 | ## Additional context 19 | Add any other screenshots, mockups, or context about the feature request here. 
-------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .env 2 | .env.test 3 | volumes/ 4 | shared/ 5 | supabase/ 6 | searxng/uwsgi.ini 7 | searxng/settings.yml -------------------------------------------------------------------------------- /Caddyfile: -------------------------------------------------------------------------------- 1 | { 2 | # Global options - works for both environments 3 | email {$LETSENCRYPT_EMAIL} 4 | } 5 | 6 | # N8N 7 | {$N8N_HOSTNAME} { 8 | # For domains, Caddy will automatically use Let's Encrypt 9 | # For localhost/port addresses, HTTPS won't be enabled 10 | reverse_proxy localhost:5678 11 | } 12 | 13 | # Open WebUI 14 | {$WEBUI_HOSTNAME} { 15 | reverse_proxy localhost:3000 16 | } 17 | 18 | # Flowise 19 | {$FLOWISE_HOSTNAME} { 20 | reverse_proxy localhost:3001 21 | } 22 | 23 | # Langfuse 24 | {$LANGFUSE_HOSTNAME} { 25 | reverse_proxy localhost:3002 26 | } 27 | 28 | # Ollama API 29 | {$OLLAMA_HOSTNAME} { 30 | reverse_proxy localhost:11434 31 | } 32 | 33 | # Supabase 34 | {$SUPABASE_HOSTNAME} { 35 | reverse_proxy localhost:8000 36 | } 37 | 38 | # SearXNG 39 | {$SEARXNG_HOSTNAME} { 40 | encode zstd gzip 41 | 42 | @api { 43 | path /config 44 | path /healthz 45 | path /stats/errors 46 | path /stats/checker 47 | } 48 | @search { 49 | path /search 50 | } 51 | @imageproxy { 52 | path /image_proxy 53 | } 54 | @static { 55 | path /static/* 56 | } 57 | 58 | header { 59 | # CSP (https://content-security-policy.com) 60 | Content-Security-Policy "upgrade-insecure-requests; default-src 'none'; script-src 'self'; style-src 'self' 'unsafe-inline'; form-action 'self' https://github.com/searxng/searxng/issues/new; font-src 'self'; frame-ancestors 'self'; base-uri 'self'; connect-src 'self' https://overpass-api.de; img-src * data:; frame-src https://www.youtube-nocookie.com https://player.vimeo.com https://www.dailymotion.com 
https://www.deezer.com https://www.mixcloud.com https://w.soundcloud.com https://embed.spotify.com;" 61 | # Disable some browser features 62 | Permissions-Policy "accelerometer=(),camera=(),geolocation=(),gyroscope=(),magnetometer=(),microphone=(),payment=(),usb=()" 63 | # Set referrer policy 64 | Referrer-Policy "no-referrer" 65 | # Force clients to use HTTPS 66 | Strict-Transport-Security "max-age=31536000" 67 | # Prevent MIME type sniffing from the declared Content-Type 68 | X-Content-Type-Options "nosniff" 69 | # X-Robots-Tag (comment to allow site indexing) 70 | X-Robots-Tag "noindex, noarchive, nofollow" 71 | # Remove "Server" header 72 | -Server 73 | } 74 | 75 | header @api { 76 | Access-Control-Allow-Methods "GET, OPTIONS" 77 | Access-Control-Allow-Origin "*" 78 | } 79 | 80 | route { 81 | # Cache policy 82 | header Cache-Control "max-age=0, no-store" 83 | header @search Cache-Control "max-age=5, private" 84 | header @imageproxy Cache-Control "max-age=604800, public" 85 | header @static Cache-Control "max-age=31536000, public, immutable" 86 | } 87 | 88 | # SearXNG (uWSGI) 89 | reverse_proxy localhost:8080 { 90 | header_up X-Forwarded-Port {http.request.port} 91 | header_up X-Real-IP {http.request.remote.host} 92 | # https://github.com/searx/searx-docker/issues/24 93 | header_up Connection "close" 94 | } 95 | } -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 
14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 
47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. 
Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. 
In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. 
We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright 2025-present Cole Medin and Contributors 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /Local_RAG_AI_Agent_n8n_Workflow.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "Local RAG AI Agent", 3 | "nodes": [ 4 | { 5 | "parameters": {}, 6 | "id": "99b30fd7-b36c-44ba-9daa-408585aaaee9", 7 | "name": "Postgres Chat Memory", 8 | "type": "@n8n/n8n-nodes-langchain.memoryPostgresChat", 9 | "typeVersion": 1.1, 10 | "position": [ 11 | 1040, 12 | 560 13 | ], 14 | "credentials": { 15 | "postgres": { 16 | "id": "iN7fO2CgatVwq73z", 17 | "name": "Postgres account" 18 | } 19 | } 20 | }, 21 | { 22 | "parameters": { 23 | "model": "llama3.1:latest", 24 | "options": {} 25 | }, 26 | "id": "c7632a7c-2661-492e-bd6f-aab994818998", 27 | "name": "Ollama Chat Model", 28 | "type": "@n8n/n8n-nodes-langchain.lmChatOllama", 29 | "typeVersion": 1, 30 | "position": [ 31 | 920, 32 | 560 33 | ], 34 | "credentials": { 35 | "ollamaApi": { 36 | "id": "eOwAotC7AUgJlvHM", 37 | "name": "Ollama account" 38 | } 39 | } 40 | }, 41 | { 42 | "parameters": { 43 | "model": "llama3.1:latest", 44 | "options": {} 45 | 
}, 46 | "id": "73d773a4-5c72-4af3-a52d-144f0e417823", 47 | "name": "Ollama Model", 48 | "type": "@n8n/n8n-nodes-langchain.lmOllama", 49 | "typeVersion": 1, 50 | "position": [ 51 | 1960, 52 | 500 53 | ], 54 | "credentials": { 55 | "ollamaApi": { 56 | "id": "eOwAotC7AUgJlvHM", 57 | "name": "Ollama account" 58 | } 59 | } 60 | }, 61 | { 62 | "parameters": { 63 | "name": "documents", 64 | "topK": 3 65 | }, 66 | "id": "3f882fa7-c8ed-4531-b236-a34c16c55838", 67 | "name": "Vector Store Tool", 68 | "type": "@n8n/n8n-nodes-langchain.toolVectorStore", 69 | "typeVersion": 1, 70 | "position": [ 71 | 1740, 72 | 340 73 | ] 74 | }, 75 | { 76 | "parameters": { 77 | "model": "nomic-embed-text:latest" 78 | }, 79 | "id": "3a8e3fa0-3997-4bce-985c-975fb5ad4013", 80 | "name": "Embeddings Ollama", 81 | "type": "@n8n/n8n-nodes-langchain.embeddingsOllama", 82 | "typeVersion": 1, 83 | "position": [ 84 | 1840, 85 | 600 86 | ], 87 | "credentials": { 88 | "ollamaApi": { 89 | "id": "eOwAotC7AUgJlvHM", 90 | "name": "Ollama account" 91 | } 92 | } 93 | }, 94 | { 95 | "parameters": { 96 | "pollTimes": { 97 | "item": [ 98 | { 99 | "mode": "everyMinute" 100 | } 101 | ] 102 | }, 103 | "triggerOn": "specificFolder", 104 | "folderToWatch": { 105 | "__rl": true, 106 | "value": "1914m3M7kRzkd5RJqAfzRY9EBcJrKemZC", 107 | "mode": "list", 108 | "cachedResultName": "Meeting Notes", 109 | "cachedResultUrl": "https://drive.google.com/drive/folders/1914m3M7kRzkd5RJqAfzRY9EBcJrKemZC" 110 | }, 111 | "event": "fileCreated", 112 | "options": {} 113 | }, 114 | "id": "41fb71dd-236a-48bc-9761-5841d52ca1b3", 115 | "name": "File Created", 116 | "type": "n8n-nodes-base.googleDriveTrigger", 117 | "typeVersion": 1, 118 | "position": [ 119 | 600, 120 | 880 121 | ], 122 | "credentials": { 123 | "googleDriveOAuth2Api": { 124 | "id": "vzcL2pD7uQzqDpdK", 125 | "name": "Google Drive account" 126 | } 127 | } 128 | }, 129 | { 130 | "parameters": { 131 | "pollTimes": { 132 | "item": [ 133 | { 134 | "mode": "everyMinute" 135 | } 136 | 
] 137 | }, 138 | "triggerOn": "specificFolder", 139 | "folderToWatch": { 140 | "__rl": true, 141 | "value": "1914m3M7kRzkd5RJqAfzRY9EBcJrKemZC", 142 | "mode": "list", 143 | "cachedResultName": "Meeting Notes", 144 | "cachedResultUrl": "https://drive.google.com/drive/folders/1914m3M7kRzkd5RJqAfzRY9EBcJrKemZC" 145 | }, 146 | "event": "fileUpdated", 147 | "options": {} 148 | }, 149 | "id": "7b904686-89ae-4722-9ce5-a9da1b13b1a1", 150 | "name": "File Updated", 151 | "type": "n8n-nodes-base.googleDriveTrigger", 152 | "typeVersion": 1, 153 | "position": [ 154 | 600, 155 | 1100 156 | ], 157 | "credentials": { 158 | "googleDriveOAuth2Api": { 159 | "id": "vzcL2pD7uQzqDpdK", 160 | "name": "Google Drive account" 161 | } 162 | } 163 | }, 164 | { 165 | "parameters": { 166 | "assignments": { 167 | "assignments": [ 168 | { 169 | "id": "10646eae-ae46-4327-a4dc-9987c2d76173", 170 | "name": "file_id", 171 | "value": "={{ $json.id }}", 172 | "type": "string" 173 | }, 174 | { 175 | "id": "dd0aa081-79e7-4714-8a67-1e898285554c", 176 | "name": "folder_id", 177 | "value": "={{ $json.parents[0] }}", 178 | "type": "string" 179 | } 180 | ] 181 | }, 182 | "options": {} 183 | }, 184 | "id": "87f8bbb0-92c5-4b25-be63-7a9d91fc46f8", 185 | "name": "Set File ID", 186 | "type": "n8n-nodes-base.set", 187 | "typeVersion": 3.4, 188 | "position": [ 189 | 860, 190 | 880 191 | ] 192 | }, 193 | { 194 | "parameters": { 195 | "operation": "download", 196 | "fileId": { 197 | "__rl": true, 198 | "value": "={{ $('Set File ID').item.json.file_id }}", 199 | "mode": "id" 200 | }, 201 | "options": { 202 | "googleFileConversion": { 203 | "conversion": { 204 | "docsToFormat": "text/plain" 205 | } 206 | } 207 | } 208 | }, 209 | "id": "9f1e08fb-4ef3-4c4d-9473-5a7a1608b8e3", 210 | "name": "Download File", 211 | "type": "n8n-nodes-base.googleDrive", 212 | "typeVersion": 3, 213 | "position": [ 214 | 1300, 215 | 880 216 | ], 217 | "executeOnce": true, 218 | "credentials": { 219 | "googleDriveOAuth2Api": { 220 | "id": 
"vzcL2pD7uQzqDpdK", 221 | "name": "Google Drive account" 222 | } 223 | } 224 | }, 225 | { 226 | "parameters": { 227 | "operation": "text", 228 | "options": {} 229 | }, 230 | "id": "7efee822-68ad-4fe2-a616-ba19fd127684", 231 | "name": "Extract Document Text", 232 | "type": "n8n-nodes-base.extractFromFile", 233 | "typeVersion": 1, 234 | "position": [ 235 | 1540, 236 | 880 237 | ], 238 | "alwaysOutputData": true 239 | }, 240 | { 241 | "parameters": { 242 | "options": { 243 | "metadata": { 244 | "metadataValues": [ 245 | { 246 | "name": "file_id", 247 | "value": "={{ $('Set File ID').item.json.file_id }}" 248 | }, 249 | { 250 | "name": "folder_id", 251 | "value": "={{ $('Set File ID').item.json.folder_id }}" 252 | } 253 | ] 254 | } 255 | } 256 | }, 257 | "id": "da4c8b29-4944-43c4-9df3-e380366c594a", 258 | "name": "Default Data Loader", 259 | "type": "@n8n/n8n-nodes-langchain.documentDefaultDataLoader", 260 | "typeVersion": 1, 261 | "position": [ 262 | 1860, 263 | 1100 264 | ] 265 | }, 266 | { 267 | "parameters": { 268 | "chunkSize": 100, 269 | "options": {} 270 | }, 271 | "id": "d11c39b9-3fa7-4d5d-838f-da0d258c67c5", 272 | "name": "Recursive Character Text Splitter", 273 | "type": "@n8n/n8n-nodes-langchain.textSplitterRecursiveCharacterTextSplitter", 274 | "typeVersion": 1, 275 | "position": [ 276 | 1860, 277 | 1320 278 | ] 279 | }, 280 | { 281 | "parameters": { 282 | "model": "nomic-embed-text:latest" 283 | }, 284 | "id": "8a04559c-dfe8-479f-8998-a2e9bc994a0a", 285 | "name": "Embeddings Ollama1", 286 | "type": "@n8n/n8n-nodes-langchain.embeddingsOllama", 287 | "typeVersion": 1, 288 | "position": [ 289 | 1700, 290 | 1100 291 | ], 292 | "credentials": { 293 | "ollamaApi": { 294 | "id": "eOwAotC7AUgJlvHM", 295 | "name": "Ollama account" 296 | } 297 | } 298 | }, 299 | { 300 | "parameters": { 301 | "content": "## Local RAG AI Agent with Chat Interface", 302 | "height": 527.3027193303974, 303 | "width": 969.0343804425795 304 | }, 305 | "id": 
"a18773ae-1eb3-46b8-91cf-4184c66cf14f", 306 | "name": "Sticky Note2", 307 | "type": "n8n-nodes-base.stickyNote", 308 | "typeVersion": 1, 309 | "position": [ 310 | 560, 311 | 220 312 | ] 313 | }, 314 | { 315 | "parameters": { 316 | "content": "## Agent Tools for Local RAG", 317 | "height": 528.85546469693, 318 | "width": 583.4552380860637, 319 | "color": 4 320 | }, 321 | "id": "fa010a11-3dda-4bd5-b261-463a3a6b88d9", 322 | "name": "Sticky Note", 323 | "type": "n8n-nodes-base.stickyNote", 324 | "typeVersion": 1, 325 | "position": [ 326 | 1540, 327 | 220 328 | ] 329 | }, 330 | { 331 | "parameters": { 332 | "content": "## Workflow to Create Local Knowledgebase from Google Drive Folder", 333 | "height": 705.2695614889159, 334 | "width": 1568.9362829025763, 335 | "color": 5 336 | }, 337 | "id": "f29e6cc7-015e-47cb-a4fd-fecd6ffb0d24", 338 | "name": "Sticky Note1", 339 | "type": "n8n-nodes-base.stickyNote", 340 | "typeVersion": 1, 341 | "position": [ 342 | 560, 343 | 760 344 | ] 345 | }, 346 | { 347 | "parameters": { 348 | "options": {} 349 | }, 350 | "id": "5da52326-dfbd-4350-919c-843461f58913", 351 | "name": "When chat message received", 352 | "type": "@n8n/n8n-nodes-langchain.chatTrigger", 353 | "typeVersion": 1.1, 354 | "position": [ 355 | 620, 356 | 340 357 | ], 358 | "webhookId": "4b3b1838-d6b3-447e-9d79-d0931eddb9f8" 359 | }, 360 | { 361 | "parameters": { 362 | "qdrantCollection": { 363 | "__rl": true, 364 | "value": "documents", 365 | "mode": "list", 366 | "cachedResultName": "documents" 367 | }, 368 | "options": {} 369 | }, 370 | "id": "355370e0-2174-4e5b-830b-dd0f123b2e40", 371 | "name": "Qdrant Vector Store", 372 | "type": "@n8n/n8n-nodes-langchain.vectorStoreQdrant", 373 | "typeVersion": 1, 374 | "position": [ 375 | 1560, 376 | 480 377 | ], 378 | "credentials": { 379 | "qdrantApi": { 380 | "id": "VOnegFP8eijBkbNO", 381 | "name": "QdrantApi account" 382 | } 383 | } 384 | }, 385 | { 386 | "parameters": { 387 | "code": { 388 | "execute": { 389 | "code": "const { 
QdrantVectorStore } = require(\"@langchain/qdrant\");\nconst { OllamaEmbeddings } = require(\"@langchain/community/embeddings/ollama\");\n\nconst embeddings = new OllamaEmbeddings({\n model: \"nomic-embed-text\",\n baseUrl: \"http://ollama:11434\"\n});\n\nconst vectorStore = await QdrantVectorStore.fromExistingCollection(\n embeddings,\n {\n url: \"http://qdrant:6333\",\n collectionName: \"documents\",\n }\n);\n\nconst fileIdToDelete = this.getInputData()[0].json.file_id;\n\nconst filter = {\n must: [\n {\n key: \"metadata.file_id\",\n match: {\n value: fileIdToDelete,\n },\n },\n ],\n }\n\n// const results = await vectorStore.similaritySearch(\"this\", 10, filter);\n// const idsToDelete = results.map((doc) => doc.id);\n\n// NOT IMPLEMENTED!\n// await vectorStore.delete({ ids: idsToDelete });\n\nvectorStore.client.delete(\"documents\", {\n filter\n});\n\nreturn [ {json: { file_id: fileIdToDelete } } ];\n" 390 | } 391 | }, 392 | "inputs": { 393 | "input": [ 394 | { 395 | "type": "main", 396 | "required": true 397 | } 398 | ] 399 | }, 400 | "outputs": { 401 | "output": [ 402 | { 403 | "type": "main" 404 | } 405 | ] 406 | } 407 | }, 408 | "id": "b93bd001-0c4d-42fe-939a-eb441f354917", 409 | "name": "Clear Old Vectors", 410 | "type": "@n8n/n8n-nodes-langchain.code", 411 | "typeVersion": 1, 412 | "position": [ 413 | 1080, 414 | 880 415 | ], 416 | "alwaysOutputData": false 417 | }, 418 | { 419 | "parameters": { 420 | "mode": "insert", 421 | "qdrantCollection": { 422 | "__rl": true, 423 | "value": "documents", 424 | "mode": "list", 425 | "cachedResultName": "documents" 426 | }, 427 | "options": {} 428 | }, 429 | "id": "97ec4618-c0ea-445b-9406-5d41784d7836", 430 | "name": "Qdrant Vector Store Insert", 431 | "type": "@n8n/n8n-nodes-langchain.vectorStoreQdrant", 432 | "typeVersion": 1, 433 | "position": [ 434 | 1760, 435 | 880 436 | ], 437 | "credentials": { 438 | "qdrantApi": { 439 | "id": "VOnegFP8eijBkbNO", 440 | "name": "QdrantApi account" 441 | } 442 | } 443 | }, 444 | { 
445 | "parameters": { 446 | "options": {} 447 | }, 448 | "id": "e537544a-37d5-4b00-b5ff-bc71f041f4bb", 449 | "name": "Respond to Webhook", 450 | "type": "n8n-nodes-base.respondToWebhook", 451 | "typeVersion": 1.1, 452 | "position": [ 453 | 1340, 454 | 340 455 | ] 456 | }, 457 | { 458 | "parameters": { 459 | "httpMethod": "POST", 460 | "path": "invoke_n8n_agent", 461 | "responseMode": "responseNode", 462 | "options": {} 463 | }, 464 | "id": "2b8cd01f-30a8-4aab-b0dd-56d2b658f059", 465 | "name": "Webhook", 466 | "type": "n8n-nodes-base.webhook", 467 | "typeVersion": 2, 468 | "position": [ 469 | 620, 470 | 520 471 | ], 472 | "webhookId": "4a839da9-b8a2-45f8-bcaf-c484f9a5912d" 473 | }, 474 | { 475 | "parameters": { 476 | "options": {} 477 | }, 478 | "id": "c9dfe906-178b-4375-8bda-f9290f35f222", 479 | "name": "AI Agent", 480 | "type": "@n8n/n8n-nodes-langchain.agent", 481 | "typeVersion": 1.6, 482 | "position": [ 483 | 1000, 484 | 340 485 | ] 486 | }, 487 | { 488 | "parameters": { 489 | "assignments": { 490 | "assignments": [ 491 | { 492 | "id": "75ebfdef-c8e2-4c3e-b716-1479d0cc2a73", 493 | "name": "chatInput", 494 | "value": "={{ $json?.chatInput || $json.body.chatInput }}", 495 | "type": "string" 496 | }, 497 | { 498 | "id": "59b7a20f-0626-4861-93e2-015d430c266e", 499 | "name": "sessionId", 500 | "value": "={{ $json?.sessionId || $json.body.sessionId}}", 501 | "type": "string" 502 | } 503 | ] 504 | }, 505 | "options": {} 506 | }, 507 | "id": "8f974a15-aa2f-4525-8278-ad58ad296076", 508 | "name": "Edit Fields", 509 | "type": "n8n-nodes-base.set", 510 | "typeVersion": 3.4, 511 | "position": [ 512 | 820, 513 | 340 514 | ] 515 | } 516 | ], 517 | "pinData": {}, 518 | "connections": { 519 | "Postgres Chat Memory": { 520 | "ai_memory": [ 521 | [ 522 | { 523 | "node": "AI Agent", 524 | "type": "ai_memory", 525 | "index": 0 526 | } 527 | ] 528 | ] 529 | }, 530 | "Ollama Chat Model": { 531 | "ai_languageModel": [ 532 | [ 533 | { 534 | "node": "AI Agent", 535 | "type": 
"ai_languageModel", 536 | "index": 0 537 | } 538 | ] 539 | ] 540 | }, 541 | "Ollama Model": { 542 | "ai_languageModel": [ 543 | [ 544 | { 545 | "node": "Vector Store Tool", 546 | "type": "ai_languageModel", 547 | "index": 0 548 | } 549 | ] 550 | ] 551 | }, 552 | "Embeddings Ollama": { 553 | "ai_embedding": [ 554 | [ 555 | { 556 | "node": "Qdrant Vector Store", 557 | "type": "ai_embedding", 558 | "index": 0 559 | } 560 | ] 561 | ] 562 | }, 563 | "File Created": { 564 | "main": [ 565 | [ 566 | { 567 | "node": "Set File ID", 568 | "type": "main", 569 | "index": 0 570 | } 571 | ] 572 | ] 573 | }, 574 | "File Updated": { 575 | "main": [ 576 | [ 577 | { 578 | "node": "Set File ID", 579 | "type": "main", 580 | "index": 0 581 | } 582 | ] 583 | ] 584 | }, 585 | "Set File ID": { 586 | "main": [ 587 | [ 588 | { 589 | "node": "Clear Old Vectors", 590 | "type": "main", 591 | "index": 0 592 | } 593 | ] 594 | ] 595 | }, 596 | "Download File": { 597 | "main": [ 598 | [ 599 | { 600 | "node": "Extract Document Text", 601 | "type": "main", 602 | "index": 0 603 | } 604 | ] 605 | ] 606 | }, 607 | "Extract Document Text": { 608 | "main": [ 609 | [ 610 | { 611 | "node": "Qdrant Vector Store Insert", 612 | "type": "main", 613 | "index": 0 614 | } 615 | ] 616 | ] 617 | }, 618 | "Default Data Loader": { 619 | "ai_document": [ 620 | [ 621 | { 622 | "node": "Qdrant Vector Store Insert", 623 | "type": "ai_document", 624 | "index": 0 625 | } 626 | ] 627 | ] 628 | }, 629 | "Recursive Character Text Splitter": { 630 | "ai_textSplitter": [ 631 | [ 632 | { 633 | "node": "Default Data Loader", 634 | "type": "ai_textSplitter", 635 | "index": 0 636 | } 637 | ] 638 | ] 639 | }, 640 | "Embeddings Ollama1": { 641 | "ai_embedding": [ 642 | [ 643 | { 644 | "node": "Qdrant Vector Store Insert", 645 | "type": "ai_embedding", 646 | "index": 0 647 | } 648 | ] 649 | ] 650 | }, 651 | "When chat message received": { 652 | "main": [ 653 | [ 654 | { 655 | "node": "Edit Fields", 656 | "type": "main", 657 | "index": 
0 658 | } 659 | ] 660 | ] 661 | }, 662 | "Qdrant Vector Store": { 663 | "ai_vectorStore": [ 664 | [ 665 | { 666 | "node": "Vector Store Tool", 667 | "type": "ai_vectorStore", 668 | "index": 0 669 | } 670 | ] 671 | ] 672 | }, 673 | "Clear Old Vectors": { 674 | "main": [ 675 | [ 676 | { 677 | "node": "Download File", 678 | "type": "main", 679 | "index": 0 680 | } 681 | ] 682 | ] 683 | }, 684 | "Webhook": { 685 | "main": [ 686 | [ 687 | { 688 | "node": "Edit Fields", 689 | "type": "main", 690 | "index": 0 691 | } 692 | ] 693 | ] 694 | }, 695 | "AI Agent": { 696 | "main": [ 697 | [ 698 | { 699 | "node": "Respond to Webhook", 700 | "type": "main", 701 | "index": 0 702 | } 703 | ] 704 | ] 705 | }, 706 | "Edit Fields": { 707 | "main": [ 708 | [ 709 | { 710 | "node": "AI Agent", 711 | "type": "main", 712 | "index": 0 713 | } 714 | ] 715 | ] 716 | }, 717 | "Vector Store Tool": { 718 | "ai_tool": [ 719 | [ 720 | { 721 | "node": "AI Agent", 722 | "type": "ai_tool", 723 | "index": 0 724 | } 725 | ] 726 | ] 727 | } 728 | }, 729 | "active": true, 730 | "settings": { 731 | "executionOrder": "v1" 732 | }, 733 | "versionId": "19f9691c-4682-4704-81f2-33fdec9d0be2", 734 | "meta": { 735 | "templateCredsSetupCompleted": true, 736 | "instanceId": "f722e3e1e81e942a38faa434ad0aee8699371bbff9f883b9d5c59a7c726605af" 737 | }, 738 | "id": "vTN9y2dLXqTiDfPT", 739 | "tags": [] 740 | } -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Self-hosted AI Package 2 | 3 | **Self-hosted AI Package** is an open, docker compose template that 4 | quickly bootstraps a fully featured Local AI and Low Code development 5 | environment including Ollama for your local LLMs, Open WebUI for an interface to chat with your N8N agents, and Supabase for your database, vector store, and authentication. 
6 | 7 | This is Cole's version with a couple of improvements and the addition of Supabase, Open WebUI, Flowise, Langfuse, SearXNG, and Caddy! 8 | Also, the local RAG AI Agent workflows from the video will be automatically in your 9 | n8n instance if you use this setup instead of the base one provided by n8n! 10 | 11 | ## Important Links 12 | 13 | - [Local AI community](https://thinktank.ottomator.ai/c/local-ai/18) forum over in the oTTomator Think Tank 14 | 15 | - [GitHub Kanban board](https://github.com/users/coleam00/projects/2/views/1) for feature implementation and bug squashing. 16 | 17 | - [Original Local AI Starter Kit](https://github.com/n8n-io/self-hosted-ai-starter-kit) by the n8n team 18 | 19 | - Download my N8N + OpenWebUI integration [directly on the Open WebUI site.](https://openwebui.com/f/coleam/n8n_pipe/) (more instructions below) 20 | 21 | ![n8n.io - Screenshot](https://raw.githubusercontent.com/n8n-io/self-hosted-ai-starter-kit/main/assets/n8n-demo.gif) 22 | 23 | Curated by and , it combines the self-hosted n8n 24 | platform with a curated list of compatible AI products and components to 25 | quickly get started with building self-hosted AI workflows. 26 | 27 | ### What’s included 28 | 29 | ✅ [**Self-hosted n8n**](https://n8n.io/) - Low-code platform with over 400 30 | integrations and advanced AI components 31 | 32 | ✅ [**Supabase**](https://supabase.com/) - Open source database as a service - 33 | most widely used database for AI agents 34 | 35 | ✅ [**Ollama**](https://ollama.com/) - Cross-platform LLM platform to install 36 | and run the latest local LLMs 37 | 38 | ✅ [**Open WebUI**](https://openwebui.com/) - ChatGPT-like interface to 39 | privately interact with your local models and N8N agents 40 | 41 | ✅ [**Flowise**](https://flowiseai.com/) - No/low code AI agent 42 | builder that pairs very well with n8n 43 | 44 | ✅ [**Qdrant**](https://qdrant.tech/) - Open source, high performance vector 45 | store with an comprehensive API. 
Even though you can use Supabase for RAG, Qdrant was 46 | kept (unlike Postgres) since it's faster than Supabase, so it is sometimes the better option.
Set the following required environment variables: 75 | ```bash 76 | ############ 77 | # N8N Configuration 78 | ############ 79 | N8N_ENCRYPTION_KEY= 80 | N8N_USER_MANAGEMENT_JWT_SECRET= 81 | 82 | ############ 83 | # Supabase Secrets 84 | ############ 85 | POSTGRES_PASSWORD= 86 | JWT_SECRET= 87 | ANON_KEY= 88 | SERVICE_ROLE_KEY= 89 | DASHBOARD_USERNAME= 90 | DASHBOARD_PASSWORD= 91 | POOLER_TENANT_ID= 92 | 93 | ############ 94 | # Langfuse credentials 95 | ############ 96 | 97 | CLICKHOUSE_PASSWORD= 98 | MINIO_ROOT_PASSWORD= 99 | LANGFUSE_SALT= 100 | NEXTAUTH_SECRET= 101 | ENCRYPTION_KEY= 102 | ``` 103 | 104 | > [!IMPORTANT] 105 | > Make sure to generate secure random values for all secrets. Never use the example values in production. 106 | 107 | 3. Set the following environment variables if deploying to production, otherwise leave commented: 108 | ```bash 109 | ############ 110 | # Caddy Config 111 | ############ 112 | 113 | N8N_HOSTNAME=n8n.yourdomain.com 114 | WEBUI_HOSTNAME=:openwebui.yourdomain.com 115 | FLOWISE_HOSTNAME=:flowise.yourdomain.com 116 | SUPABASE_HOSTNAME=:supabase.yourdomain.com 117 | OLLAMA_HOSTNAME=:ollama.yourdomain.com 118 | SEARXNG_HOSTNAME=searxng.yourdomain.com 119 | LETSENCRYPT_EMAIL=your-email-address 120 | ``` 121 | 122 | --- 123 | 124 | The project includes a `start_services.py` script that handles starting both the Supabase and local AI services. The script accepts a `--profile` flag to specify which GPU configuration to use. 125 | 126 | ### For Nvidia GPU users 127 | 128 | ```bash 129 | python start_services.py --profile gpu-nvidia 130 | ``` 131 | 132 | > [!NOTE] 133 | > If you have not used your Nvidia GPU with Docker before, please follow the 134 | > [Ollama Docker instructions](https://github.com/ollama/ollama/blob/main/docs/docker.md). 
135 | 136 | ### For AMD GPU users on Linux 137 | 138 | ```bash 139 | python start_services.py --profile gpu-amd 140 | ``` 141 | 142 | ### For Mac / Apple Silicon users 143 | 144 | If you're using a Mac with an M1 or newer processor, you can't expose your GPU to the Docker instance, unfortunately. There are two options in this case: 145 | 146 | 1. Run the starter kit fully on CPU: 147 | ```bash 148 | python start_services.py --profile cpu 149 | ``` 150 | 151 | 2. Run Ollama on your Mac for faster inference, and connect to that from the n8n instance: 152 | ```bash 153 | python start_services.py --profile none 154 | ``` 155 | 156 | If you want to run Ollama on your mac, check the [Ollama homepage](https://ollama.com/) for installation instructions. 157 | 158 | #### For Mac users running OLLAMA locally 159 | 160 | If you're running OLLAMA locally on your Mac (not in Docker), you need to modify the OLLAMA_HOST environment variable in the n8n service configuration. Update the x-n8n section in your Docker Compose file as follows: 161 | 162 | ```yaml 163 | x-n8n: &service-n8n 164 | # ... other configurations ... 165 | environment: 166 | # ... other environment variables ... 167 | - OLLAMA_HOST=host.docker.internal:11434 168 | ``` 169 | 170 | Additionally, after you see "Editor is now accessible via: http://localhost:5678/": 171 | 172 | 1. Head to http://localhost:5678/home/credentials 173 | 2. Click on "Local Ollama service" 174 | 3. Change the base URL to "http://host.docker.internal:11434/" 175 | 176 | ### For everyone else 177 | 178 | ```bash 179 | python start_services.py --profile cpu 180 | ``` 181 | 182 | ## Deploying to the Cloud 183 | 184 | ### Prerequisites for the below steps 185 | 186 | - Linux machine (preferably Unbuntu) with Nano, Git, and Docker installed 187 | 188 | ### Extra steps 189 | 190 | Before running the above commands to pull the repo and install everything: 191 | 192 | 1. 
Run the commands as root to open up the necessary ports: 193 | - ufw enable 194 | - ufw allow 8000 && ufw allow 3000 && ufw allow 5678 && ufw allow 3002 && ufw allow 80 && ufw allow 443 195 | - ufw allow 3001 (if you want to expose Flowise, you will have to set up the [environment variables](https://docs.flowiseai.com/configuration/environment-variables) to enable authentication) 196 | - ufw allow 8080 (if you want to expose SearXNG) 197 | - ufw allow 11434 (if you want to expose Ollama) 198 | - ufw reload 199 | 200 | 2. Set up A records for your DNS provider to point your subdomains you'll set up in the .env file for Caddy 201 | to the IP address of your cloud instance. 202 | 203 | For example, A record to point n8n to [cloud instance IP] for n8n.yourdomain.com 204 | 205 | ## ⚡️ Quick start and usage 206 | 207 | The main component of the self-hosted AI starter kit is a docker compose file 208 | pre-configured with network and disk so there isn’t much else you need to 209 | install. After completing the installation steps above, follow the steps below 210 | to get started. 211 | 212 | 1. Open in your browser to set up n8n. You’ll only 213 | have to do this once. You are NOT creating an account with n8n in the setup here, 214 | it is only a local account for your instance! 215 | 2. Open the included workflow: 216 | 217 | 3. Create credentials for every service: 218 | 219 | Ollama URL: http://ollama:11434 220 | 221 | Postgres (through Supabase): use DB, username, and password from .env. IMPORTANT: Host is 'db' 222 | Since that is the name of the service running Supabase 223 | 224 | Qdrant URL: http://qdrant:6333 (API key can be whatever since this is running locally) 225 | 226 | Google Drive: Follow [this guide from n8n](https://docs.n8n.io/integrations/builtin/credentials/google/). 227 | Don't use localhost for the redirect URI, just use another domain you have, it will still work! 
228 | Alternatively, you can set up [local file triggers](https://docs.n8n.io/integrations/builtin/core-nodes/n8n-nodes-base.localfiletrigger/). 229 | 4. Select **Test workflow** to start running the workflow. 230 | 5. If this is the first time you’re running the workflow, you may need to wait 231 | until Ollama finishes downloading Llama3.1. You can inspect the docker 232 | console logs to check on the progress. 233 | 6. Make sure to toggle the workflow as active and copy the "Production" webhook URL! 234 | 7. Open in your browser to set up Open WebUI. 235 | You’ll only have to do this once. You are NOT creating an account with Open WebUI in the 236 | setup here, it is only a local account for your instance! 237 | 8. Go to Workspace -> Functions -> Add Function -> Give name + description then paste in 238 | the code from `n8n_pipe.py` 239 | 240 | The function is also [published here on Open WebUI's site](https://openwebui.com/f/coleam/n8n_pipe/). 241 | 242 | 9. Click on the gear icon and set the n8n_url to the production URL for the webhook 243 | you copied in a previous step. 244 | 10. Toggle the function on and now it will be available in your model dropdown in the top left! 245 | 246 | To open n8n at any time, visit in your browser. 247 | To open Open WebUI at any time, visit . 248 | 249 | With your n8n instance, you’ll have access to over 400 integrations and a 250 | suite of basic and advanced AI nodes such as 251 | [AI Agent](https://docs.n8n.io/integrations/builtin/cluster-nodes/root-nodes/n8n-nodes-langchain.agent/), 252 | [Text classifier](https://docs.n8n.io/integrations/builtin/cluster-nodes/root-nodes/n8n-nodes-langchain.text-classifier/), 253 | and [Information Extractor](https://docs.n8n.io/integrations/builtin/cluster-nodes/root-nodes/n8n-nodes-langchain.information-extractor/) 254 | nodes. To keep everything local, just remember to use the Ollama node for your 255 | language model and Qdrant as your vector store. 
256 | 257 | > [!NOTE] 258 | > This starter kit is designed to help you get started with self-hosted AI 259 | > workflows. While it’s not fully optimized for production environments, it 260 | > combines robust components that work well together for proof-of-concept 261 | > projects. You can customize it to meet your specific needs 262 | 263 | ## Upgrading 264 | 265 | To update all containers to their latest versions (n8n, Open WebUI, etc.), run these commands: 266 | 267 | ```bash 268 | # Stop all services 269 | docker compose -p localai --profile -f docker-compose.yml -f supabase/docker/docker-compose.yml down 270 | 271 | # Pull latest versions of all containers 272 | docker compose -p localai --profile -f docker-compose.yml -f supabase/docker/docker-compose.yml pull 273 | 274 | # Start services again with your desired profile 275 | python start_services.py --profile 276 | ``` 277 | 278 | Replace `` with one of: `cpu`, `gpu-nvidia`, `gpu-amd`, or `none`. 279 | 280 | Note: The `start_services.py` script itself does not update containers - it only restarts them or pulls them if you are downloading these containers for the first time. To get the latest versions, you must explicitly run the commands above. 281 | 282 | ## Troubleshooting 283 | 284 | Here are solutions to common issues you might encounter: 285 | 286 | ### Supabase Issues 287 | 288 | - **Supabase Pooler Restarting**: If the supabase-pooler container keeps restarting itself, follow the instructions in [this GitHub issue](https://github.com/supabase/supabase/issues/30210#issuecomment-2456955578). 289 | 290 | - **Supabase Analytics Startup Failure**: If the supabase-analytics container fails to start after changing your Postgres password, delete the folder `supabase/docker/volumes/db/data`. 
291 | 292 | - **If using Docker Desktop**: Go into the Docker settings and make sure "Expose daemon on tcp://localhost:2375 without TLS" is turned on 293 | 294 | - **Supabase Service Unavailable** - Make sure you don't have an "@" character in your Postgres password! If the connection to the kong container is working (the container logs say it is receiving requests from n8n) but n8n says it cannot connect, this is generally the problem from what the community has shared. Other characters might not be allowed too, the @ symbol is just the one I know for sure! 295 | 296 | ### GPU Support Issues 297 | 298 | - **Windows GPU Support**: If you're having trouble running Ollama with GPU support on Windows with Docker Desktop: 299 | 1. Open Docker Desktop settings 300 | 2. Enable WSL 2 backend 301 | 3. See the [Docker GPU documentation](https://docs.docker.com/desktop/features/gpu/) for more details 302 | 303 | - **Linux GPU Support**: If you're having trouble running Ollama with GPU support on Linux, follow the [Ollama Docker instructions](https://github.com/ollama/ollama/blob/main/docs/docker.md). 304 | 305 | ## 👓 Recommended reading 306 | 307 | n8n is full of useful content for getting started quickly with its AI concepts 308 | and nodes. If you run into an issue, go to [support](#support). 
309 | 310 | - [AI agents for developers: from theory to practice with n8n](https://blog.n8n.io/ai-agents/) 311 | - [Tutorial: Build an AI workflow in n8n](https://docs.n8n.io/advanced-ai/intro-tutorial/) 312 | - [Langchain Concepts in n8n](https://docs.n8n.io/advanced-ai/langchain/langchain-n8n/) 313 | - [Demonstration of key differences between agents and chains](https://docs.n8n.io/advanced-ai/examples/agent-chain-comparison/) 314 | - [What are vector databases?](https://docs.n8n.io/advanced-ai/examples/understand-vector-databases/) 315 | 316 | ## 🎥 Video walkthrough 317 | 318 | - [Cole's Guide to the Local AI Starter Kit](https://youtu.be/pOsO40HSbOo) 319 | 320 | ## 🛍️ More AI templates 321 | 322 | For more AI workflow ideas, visit the [**official n8n AI template 323 | gallery**](https://n8n.io/workflows/?categories=AI). From each workflow, 324 | select the **Use workflow** button to automatically import the workflow into 325 | your local n8n instance. 326 | 327 | ### Learn AI key concepts 328 | 329 | - [AI Agent Chat](https://n8n.io/workflows/1954-ai-agent-chat/) 330 | - [AI chat with any data source (using the n8n workflow too)](https://n8n.io/workflows/2026-ai-chat-with-any-data-source-using-the-n8n-workflow-tool/) 331 | - [Chat with OpenAI Assistant (by adding a memory)](https://n8n.io/workflows/2098-chat-with-openai-assistant-by-adding-a-memory/) 332 | - [Use an open-source LLM (via HuggingFace)](https://n8n.io/workflows/1980-use-an-open-source-llm-via-huggingface/) 333 | - [Chat with PDF docs using AI (quoting sources)](https://n8n.io/workflows/2165-chat-with-pdf-docs-using-ai-quoting-sources/) 334 | - [AI agent that can scrape webpages](https://n8n.io/workflows/2006-ai-agent-that-can-scrape-webpages/) 335 | 336 | ### Local AI templates 337 | 338 | - [Tax Code Assistant](https://n8n.io/workflows/2341-build-a-tax-code-assistant-with-qdrant-mistralai-and-openai/) 339 | - [Breakdown Documents into Study Notes with MistralAI and 
Qdrant](https://n8n.io/workflows/2339-breakdown-documents-into-study-notes-using-templating-mistralai-and-qdrant/) 340 | - [Financial Documents Assistant using Qdrant and](https://n8n.io/workflows/2335-build-a-financial-documents-assistant-using-qdrant-and-mistralai/) [ Mistral.ai](http://mistral.ai/) 341 | - [Recipe Recommendations with Qdrant and Mistral](https://n8n.io/workflows/2333-recipe-recommendations-with-qdrant-and-mistral/) 342 | 343 | ## Tips & tricks 344 | 345 | ### Accessing local files 346 | 347 | The self-hosted AI starter kit will create a shared folder (by default, 348 | located in the same directory) which is mounted to the n8n container and 349 | allows n8n to access files on disk. This folder within the n8n container is 350 | located at `/data/shared` -- this is the path you’ll need to use in nodes that 351 | interact with the local filesystem. 352 | 353 | **Nodes that interact with the local filesystem** 354 | 355 | - [Read/Write Files from Disk](https://docs.n8n.io/integrations/builtin/core-nodes/n8n-nodes-base.filesreadwrite/) 356 | - [Local File Trigger](https://docs.n8n.io/integrations/builtin/core-nodes/n8n-nodes-base.localfiletrigger/) 357 | - [Execute Command](https://docs.n8n.io/integrations/builtin/core-nodes/n8n-nodes-base.executecommand/) 358 | 359 | ## 📜 License 360 | 361 | This project (originally created by the n8n team, link at the top of the README) is licensed under the Apache License 2.0 - see the 362 | [LICENSE](LICENSE) file for details. 
363 | -------------------------------------------------------------------------------- /assets/n8n-demo.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/coleam00/local-ai-packaged/afc5dad48c8e662875d1c99107b207440509fa35/assets/n8n-demo.gif -------------------------------------------------------------------------------- /docker-compose.yml: -------------------------------------------------------------------------------- 1 | volumes: 2 | n8n_storage: 3 | ollama_storage: 4 | qdrant_storage: 5 | open-webui: 6 | flowise: 7 | caddy-data: 8 | caddy-config: 9 | valkey-data: 10 | langfuse_postgres_data: 11 | langfuse_clickhouse_data: 12 | langfuse_clickhouse_logs: 13 | langfuse_minio_data: 14 | 15 | x-n8n: &service-n8n 16 | image: n8nio/n8n:latest 17 | environment: 18 | - DB_TYPE=postgresdb 19 | - DB_POSTGRESDB_HOST=db 20 | - DB_POSTGRESDB_USER=postgres 21 | - DB_POSTGRESDB_PASSWORD=${POSTGRES_PASSWORD} 22 | - DB_POSTGRESDB_DATABASE=postgres 23 | - N8N_DIAGNOSTICS_ENABLED=false 24 | - N8N_PERSONALIZATION_ENABLED=false 25 | - N8N_ENCRYPTION_KEY 26 | - N8N_USER_MANAGEMENT_JWT_SECRET 27 | - WEBHOOK_URL=${N8N_HOSTNAME:+https://}${N8N_HOSTNAME:-http://localhost:5678} 28 | 29 | x-ollama: &service-ollama 30 | image: ollama/ollama:latest 31 | container_name: ollama 32 | restart: unless-stopped 33 | ports: 34 | - 11434:11434 35 | environment: 36 | - OLLAMA_CONTEXT_LENGTH=8192 37 | volumes: 38 | - ollama_storage:/root/.ollama 39 | 40 | x-init-ollama: &init-ollama 41 | image: ollama/ollama:latest 42 | container_name: ollama-pull-llama 43 | volumes: 44 | - ollama_storage:/root/.ollama 45 | entrypoint: /bin/sh 46 | command: 47 | - "-c" 48 | - "sleep 3; OLLAMA_HOST=ollama:11434 ollama pull qwen2.5:7b-instruct-q4_K_M; OLLAMA_HOST=ollama:11434 ollama pull nomic-embed-text" 49 | # For a larger context length verison of the model, run these commands: 50 | # echo "FROM qwen2.5:7b-instruct-q4_K_M\n\nPARAMETER num_ctx 8096" 
> Modelfile 51 | # ollama create qwen2.5:7b-8k -f ./Modelfile 52 | # Change the name of the LLM and num_ctx as you see fit. 53 | 54 | services: 55 | flowise: 56 | image: flowiseai/flowise 57 | restart: unless-stopped 58 | container_name: flowise 59 | environment: 60 | - PORT=3001 61 | ports: 62 | - 3001:3001 63 | extra_hosts: 64 | - "host.docker.internal:host-gateway" 65 | volumes: 66 | - ~/.flowise:/root/.flowise 67 | entrypoint: /bin/sh -c "sleep 3; flowise start" 68 | 69 | open-webui: 70 | image: ghcr.io/open-webui/open-webui:main 71 | restart: unless-stopped 72 | container_name: open-webui 73 | ports: 74 | - "3000:8080" 75 | extra_hosts: 76 | - "host.docker.internal:host-gateway" 77 | volumes: 78 | - open-webui:/app/backend/data 79 | 80 | n8n-import: 81 | <<: *service-n8n 82 | container_name: n8n-import 83 | entrypoint: /bin/sh 84 | command: 85 | - "-c" 86 | - "n8n import:credentials --separate --input=/backup/credentials && n8n import:workflow --separate --input=/backup/workflows" 87 | volumes: 88 | - ./n8n/backup:/backup 89 | 90 | n8n: 91 | <<: *service-n8n 92 | container_name: n8n 93 | restart: unless-stopped 94 | ports: 95 | - 5678:5678 96 | volumes: 97 | - n8n_storage:/home/node/.n8n 98 | - ./n8n/backup:/backup 99 | - ./shared:/data/shared 100 | depends_on: 101 | n8n-import: 102 | condition: service_completed_successfully 103 | 104 | qdrant: 105 | image: qdrant/qdrant 106 | container_name: qdrant 107 | restart: unless-stopped 108 | ports: 109 | - 6333:6333 110 | volumes: 111 | - qdrant_storage:/qdrant/storage 112 | 113 | caddy: 114 | container_name: caddy 115 | image: docker.io/library/caddy:2-alpine 116 | network_mode: host 117 | restart: unless-stopped 118 | volumes: 119 | - ./Caddyfile:/etc/caddy/Caddyfile:ro 120 | - caddy-data:/data:rw 121 | - caddy-config:/config:rw 122 | environment: 123 | - N8N_HOSTNAME=${N8N_HOSTNAME:-":8001"} 124 | - WEBUI_HOSTNAME=${WEBUI_HOSTNAME:-":8002"} 125 | - FLOWISE_HOSTNAME=${FLOWISE_HOSTNAME:-":8003"} 126 | - 
OLLAMA_HOSTNAME=${OLLAMA_HOSTNAME:-":8004"} 127 | - SUPABASE_HOSTNAME=${SUPABASE_HOSTNAME:-":8005"} 128 | - SEARXNG_HOSTNAME=${SEARXNG_HOSTNAME:-":8006"} 129 | - LANGFUSE_HOSTNAME=${LANGFUSE_HOSTNAME:-":8007"} 130 | - LETSENCRYPT_EMAIL=${LETSENCRYPT_EMAIL:-internal} 131 | cap_drop: 132 | - ALL 133 | cap_add: 134 | - NET_BIND_SERVICE 135 | logging: 136 | driver: "json-file" 137 | options: 138 | max-size: "1m" 139 | max-file: "1" 140 | 141 | langfuse-worker: 142 | image: langfuse/langfuse-worker:3 143 | restart: always 144 | depends_on: &langfuse-depends-on 145 | postgres: 146 | condition: service_healthy 147 | minio: 148 | condition: service_healthy 149 | redis: 150 | condition: service_healthy 151 | clickhouse: 152 | condition: service_healthy 153 | ports: 154 | - 127.0.0.1:3030:3030 155 | environment: &langfuse-worker-env 156 | DATABASE_URL: postgresql://postgres:${POSTGRES_PASSWORD}@postgres:5432/postgres 157 | SALT: ${LANGFUSE_SALT} 158 | ENCRYPTION_KEY: ${ENCRYPTION_KEY} 159 | TELEMETRY_ENABLED: ${TELEMETRY_ENABLED:-true} 160 | LANGFUSE_ENABLE_EXPERIMENTAL_FEATURES: ${LANGFUSE_ENABLE_EXPERIMENTAL_FEATURES:-true} 161 | CLICKHOUSE_MIGRATION_URL: ${CLICKHOUSE_MIGRATION_URL:-clickhouse://clickhouse:9000} 162 | CLICKHOUSE_URL: ${CLICKHOUSE_URL:-http://clickhouse:8123} 163 | CLICKHOUSE_USER: ${CLICKHOUSE_USER:-clickhouse} 164 | CLICKHOUSE_PASSWORD: ${CLICKHOUSE_PASSWORD} 165 | CLICKHOUSE_CLUSTER_ENABLED: ${CLICKHOUSE_CLUSTER_ENABLED:-false} 166 | LANGFUSE_S3_EVENT_UPLOAD_BUCKET: ${LANGFUSE_S3_EVENT_UPLOAD_BUCKET:-langfuse} 167 | LANGFUSE_S3_EVENT_UPLOAD_REGION: ${LANGFUSE_S3_EVENT_UPLOAD_REGION:-auto} 168 | LANGFUSE_S3_EVENT_UPLOAD_ACCESS_KEY_ID: ${LANGFUSE_S3_EVENT_UPLOAD_ACCESS_KEY_ID:-minio} 169 | LANGFUSE_S3_EVENT_UPLOAD_SECRET_ACCESS_KEY: ${MINIO_ROOT_PASSWORD} 170 | LANGFUSE_S3_EVENT_UPLOAD_ENDPOINT: ${LANGFUSE_S3_EVENT_UPLOAD_ENDPOINT:-http://minio:9000} 171 | LANGFUSE_S3_EVENT_UPLOAD_FORCE_PATH_STYLE: ${LANGFUSE_S3_EVENT_UPLOAD_FORCE_PATH_STYLE:-true} 172 | 
LANGFUSE_S3_EVENT_UPLOAD_PREFIX: ${LANGFUSE_S3_EVENT_UPLOAD_PREFIX:-events/} 173 | LANGFUSE_S3_MEDIA_UPLOAD_BUCKET: ${LANGFUSE_S3_MEDIA_UPLOAD_BUCKET:-langfuse} 174 | LANGFUSE_S3_MEDIA_UPLOAD_REGION: ${LANGFUSE_S3_MEDIA_UPLOAD_REGION:-auto} 175 | LANGFUSE_S3_MEDIA_UPLOAD_ACCESS_KEY_ID: ${LANGFUSE_S3_MEDIA_UPLOAD_ACCESS_KEY_ID:-minio} 176 | LANGFUSE_S3_MEDIA_UPLOAD_SECRET_ACCESS_KEY: ${MINIO_ROOT_PASSWORD} 177 | LANGFUSE_S3_MEDIA_UPLOAD_ENDPOINT: ${LANGFUSE_S3_MEDIA_UPLOAD_ENDPOINT:-http://localhost:9090} 178 | LANGFUSE_S3_MEDIA_UPLOAD_FORCE_PATH_STYLE: ${LANGFUSE_S3_MEDIA_UPLOAD_FORCE_PATH_STYLE:-true} 179 | LANGFUSE_S3_MEDIA_UPLOAD_PREFIX: ${LANGFUSE_S3_MEDIA_UPLOAD_PREFIX:-media/} 180 | LANGFUSE_S3_BATCH_EXPORT_ENABLED: ${LANGFUSE_S3_BATCH_EXPORT_ENABLED:-false} 181 | LANGFUSE_S3_BATCH_EXPORT_BUCKET: ${LANGFUSE_S3_BATCH_EXPORT_BUCKET:-langfuse} 182 | LANGFUSE_S3_BATCH_EXPORT_PREFIX: ${LANGFUSE_S3_BATCH_EXPORT_PREFIX:-exports/} 183 | LANGFUSE_S3_BATCH_EXPORT_REGION: ${LANGFUSE_S3_BATCH_EXPORT_REGION:-auto} 184 | LANGFUSE_S3_BATCH_EXPORT_ENDPOINT: ${LANGFUSE_S3_BATCH_EXPORT_ENDPOINT:-http://minio:9000} 185 | LANGFUSE_S3_BATCH_EXPORT_EXTERNAL_ENDPOINT: ${LANGFUSE_S3_BATCH_EXPORT_EXTERNAL_ENDPOINT:-http://localhost:9090} 186 | LANGFUSE_S3_BATCH_EXPORT_ACCESS_KEY_ID: ${LANGFUSE_S3_BATCH_EXPORT_ACCESS_KEY_ID:-minio} 187 | LANGFUSE_S3_BATCH_EXPORT_SECRET_ACCESS_KEY: ${MINIO_ROOT_PASSWORD} 188 | LANGFUSE_S3_BATCH_EXPORT_FORCE_PATH_STYLE: ${LANGFUSE_S3_BATCH_EXPORT_FORCE_PATH_STYLE:-true} 189 | LANGFUSE_INGESTION_QUEUE_DELAY_MS: ${LANGFUSE_INGESTION_QUEUE_DELAY_MS:-} 190 | LANGFUSE_INGESTION_CLICKHOUSE_WRITE_INTERVAL_MS: ${LANGFUSE_INGESTION_CLICKHOUSE_WRITE_INTERVAL_MS:-} 191 | REDIS_HOST: ${REDIS_HOST:-redis} 192 | REDIS_PORT: ${REDIS_PORT:-6379} 193 | REDIS_AUTH: ${REDIS_AUTH:-LOCALONLYREDIS} 194 | REDIS_TLS_ENABLED: ${REDIS_TLS_ENABLED:-false} 195 | REDIS_TLS_CA: ${REDIS_TLS_CA:-/certs/ca.crt} 196 | REDIS_TLS_CERT: ${REDIS_TLS_CERT:-/certs/redis.crt} 197 | 
REDIS_TLS_KEY: ${REDIS_TLS_KEY:-/certs/redis.key} 198 | 199 | langfuse-web: 200 | image: langfuse/langfuse:3 201 | restart: always 202 | depends_on: *langfuse-depends-on 203 | ports: 204 | - 3002:3000 205 | environment: 206 | <<: *langfuse-worker-env 207 | NEXTAUTH_URL: http://localhost:3002 208 | NEXTAUTH_SECRET: ${NEXTAUTH_SECRET} 209 | LANGFUSE_INIT_ORG_ID: ${LANGFUSE_INIT_ORG_ID:-} 210 | LANGFUSE_INIT_ORG_NAME: ${LANGFUSE_INIT_ORG_NAME:-} 211 | LANGFUSE_INIT_PROJECT_ID: ${LANGFUSE_INIT_PROJECT_ID:-} 212 | LANGFUSE_INIT_PROJECT_NAME: ${LANGFUSE_INIT_PROJECT_NAME:-} 213 | LANGFUSE_INIT_PROJECT_PUBLIC_KEY: ${LANGFUSE_INIT_PROJECT_PUBLIC_KEY:-} 214 | LANGFUSE_INIT_PROJECT_SECRET_KEY: ${LANGFUSE_INIT_PROJECT_SECRET_KEY:-} 215 | LANGFUSE_INIT_USER_EMAIL: ${LANGFUSE_INIT_USER_EMAIL:-} 216 | LANGFUSE_INIT_USER_NAME: ${LANGFUSE_INIT_USER_NAME:-} 217 | LANGFUSE_INIT_USER_PASSWORD: ${LANGFUSE_INIT_USER_PASSWORD:-} 218 | 219 | clickhouse: 220 | image: clickhouse/clickhouse-server 221 | restart: always 222 | user: "101:101" 223 | environment: 224 | CLICKHOUSE_DB: default 225 | CLICKHOUSE_USER: clickhouse 226 | CLICKHOUSE_PASSWORD: ${CLICKHOUSE_PASSWORD} 227 | volumes: 228 | - langfuse_clickhouse_data:/var/lib/clickhouse 229 | - langfuse_clickhouse_logs:/var/log/clickhouse-server 230 | ports: 231 | - 127.0.0.1:8123:8123 232 | - 127.0.0.1:9000:9000 233 | healthcheck: 234 | test: wget --no-verbose --tries=1 --spider http://localhost:8123/ping || exit 1 235 | interval: 5s 236 | timeout: 5s 237 | retries: 10 238 | start_period: 1s 239 | 240 | minio: 241 | image: minio/minio 242 | restart: always 243 | entrypoint: sh 244 | # create the 'langfuse' bucket before starting the service 245 | command: -c 'mkdir -p /data/langfuse && minio server --address ":9000" --console-address ":9001" /data' 246 | environment: 247 | MINIO_ROOT_USER: minio 248 | MINIO_ROOT_PASSWORD: ${MINIO_ROOT_PASSWORD} 249 | ports: 250 | - 9090:9000 251 | - 127.0.0.1:9091:9001 252 | volumes: 253 | - 
langfuse_minio_data:/data 254 | healthcheck: 255 | test: ["CMD", "mc", "ready", "local"] 256 | interval: 1s 257 | timeout: 5s 258 | retries: 5 259 | start_period: 1s 260 | 261 | postgres: 262 | image: postgres:${POSTGRES_VERSION:-latest} 263 | restart: unless-stopped 264 | healthcheck: 265 | test: ["CMD-SHELL", "pg_isready -U postgres"] 266 | interval: 3s 267 | timeout: 3s 268 | retries: 10 269 | environment: 270 | POSTGRES_USER: postgres 271 | POSTGRES_PASSWORD: ${POSTGRES_PASSWORD} 272 | POSTGRES_DB: postgres 273 | ports: 274 | - 127.0.0.1:5433:5432 275 | volumes: 276 | - langfuse_postgres_data:/var/lib/postgresql/data 277 | 278 | redis: 279 | container_name: redis 280 | image: docker.io/valkey/valkey:8-alpine 281 | command: valkey-server --save 30 1 --loglevel warning 282 | restart: unless-stopped 283 | volumes: 284 | - valkey-data:/data 285 | cap_drop: 286 | - ALL 287 | cap_add: 288 | - SETGID 289 | - SETUID 290 | - DAC_OVERRIDE 291 | logging: 292 | driver: "json-file" 293 | options: 294 | max-size: "1m" 295 | max-file: "1" 296 | healthcheck: 297 | test: ["CMD", "redis-cli", "ping"] 298 | interval: 3s 299 | timeout: 10s 300 | retries: 10 301 | 302 | searxng: 303 | container_name: searxng 304 | image: docker.io/searxng/searxng:latest 305 | restart: unless-stopped 306 | ports: 307 | - 8080:8080 308 | volumes: 309 | - ./searxng:/etc/searxng:rw 310 | environment: 311 | - SEARXNG_BASE_URL=https://${SEARXNG_HOSTNAME:-localhost}/ 312 | - UWSGI_WORKERS=${SEARXNG_UWSGI_WORKERS:-4} 313 | - UWSGI_THREADS=${SEARXNG_UWSGI_THREADS:-4} 314 | cap_drop: 315 | - ALL 316 | cap_add: 317 | - CHOWN 318 | - SETGID 319 | - SETUID 320 | logging: 321 | driver: "json-file" 322 | options: 323 | max-size: "1m" 324 | max-file: "1" 325 | 326 | ollama-cpu: 327 | profiles: ["cpu"] 328 | <<: *service-ollama 329 | 330 | ollama-gpu: 331 | profiles: ["gpu-nvidia"] 332 | <<: *service-ollama 333 | deploy: 334 | resources: 335 | reservations: 336 | devices: 337 | - driver: nvidia 338 | count: 1 339 | 
capabilities: [gpu] 340 | 341 | ollama-gpu-amd: 342 | profiles: ["gpu-amd"] 343 | <<: *service-ollama 344 | image: ollama/ollama:rocm 345 | devices: 346 | - "/dev/kfd" 347 | - "/dev/dri" 348 | 349 | ollama-pull-llama-cpu: 350 | profiles: ["cpu"] 351 | <<: *init-ollama 352 | depends_on: 353 | - ollama-cpu 354 | 355 | ollama-pull-llama-gpu: 356 | profiles: ["gpu-nvidia"] 357 | <<: *init-ollama 358 | depends_on: 359 | - ollama-gpu 360 | 361 | ollama-pull-llama-gpu-amd: 362 | profiles: [gpu-amd] 363 | <<: *init-ollama 364 | image: ollama/ollama:rocm 365 | depends_on: 366 | - ollama-gpu-amd 367 | -------------------------------------------------------------------------------- /flowise/Web Search + n8n Agent Chatflow.json: -------------------------------------------------------------------------------- 1 | { 2 | "nodes": [ 3 | { 4 | "id": "toolAgent_0", 5 | "position": { 6 | "x": 656, 7 | "y": 156 8 | }, 9 | "type": "customNode", 10 | "data": { 11 | "id": "toolAgent_0", 12 | "label": "Tool Agent", 13 | "version": 2, 14 | "name": "toolAgent", 15 | "type": "AgentExecutor", 16 | "baseClasses": [ 17 | "AgentExecutor", 18 | "BaseChain", 19 | "Runnable" 20 | ], 21 | "category": "Agents", 22 | "description": "Agent that uses Function Calling to pick the tools and args to call", 23 | "inputParams": [ 24 | { 25 | "label": "System Message", 26 | "name": "systemMessage", 27 | "type": "string", 28 | "default": "You are a helpful AI assistant.", 29 | "description": "If Chat Prompt Template is provided, this will be ignored", 30 | "rows": 4, 31 | "optional": true, 32 | "additionalParams": true, 33 | "id": "toolAgent_0-input-systemMessage-string" 34 | }, 35 | { 36 | "label": "Max Iterations", 37 | "name": "maxIterations", 38 | "type": "number", 39 | "optional": true, 40 | "additionalParams": true, 41 | "id": "toolAgent_0-input-maxIterations-number" 42 | } 43 | ], 44 | "inputAnchors": [ 45 | { 46 | "label": "Tools", 47 | "name": "tools", 48 | "type": "Tool", 49 | "list": true, 50 | 
"id": "toolAgent_0-input-tools-Tool" 51 | }, 52 | { 53 | "label": "Memory", 54 | "name": "memory", 55 | "type": "BaseChatMemory", 56 | "id": "toolAgent_0-input-memory-BaseChatMemory" 57 | }, 58 | { 59 | "label": "Tool Calling Chat Model", 60 | "name": "model", 61 | "type": "BaseChatModel", 62 | "description": "Only compatible with models that are capable of function calling: ChatOpenAI, ChatMistral, ChatAnthropic, ChatGoogleGenerativeAI, ChatVertexAI, GroqChat", 63 | "id": "toolAgent_0-input-model-BaseChatModel" 64 | }, 65 | { 66 | "label": "Chat Prompt Template", 67 | "name": "chatPromptTemplate", 68 | "type": "ChatPromptTemplate", 69 | "description": "Override existing prompt with Chat Prompt Template. Human Message must includes {input} variable", 70 | "optional": true, 71 | "id": "toolAgent_0-input-chatPromptTemplate-ChatPromptTemplate" 72 | }, 73 | { 74 | "label": "Input Moderation", 75 | "description": "Detect text that could generate harmful output and prevent it from being sent to the language model", 76 | "name": "inputModeration", 77 | "type": "Moderation", 78 | "optional": true, 79 | "list": true, 80 | "id": "toolAgent_0-input-inputModeration-Moderation" 81 | } 82 | ], 83 | "inputs": { 84 | "tools": [ 85 | "{{braveSearchAPI_0.data.instance}}", 86 | "{{customTool_0.data.instance}}", 87 | "{{customTool_1.data.instance}}", 88 | "{{customTool_2.data.instance}}", 89 | "{{customTool_3.data.instance}}" 90 | ], 91 | "memory": "{{bufferMemory_0.data.instance}}", 92 | "model": "{{chatOllama_0.data.instance}}", 93 | "chatPromptTemplate": "", 94 | "systemMessage": "You are a helpful AI assistant.", 95 | "inputModeration": "", 96 | "maxIterations": "" 97 | }, 98 | "outputAnchors": [ 99 | { 100 | "id": "toolAgent_0-output-toolAgent-AgentExecutor|BaseChain|Runnable", 101 | "name": "toolAgent", 102 | "label": "AgentExecutor", 103 | "description": "Agent that uses Function Calling to pick the tools and args to call", 104 | "type": "AgentExecutor | BaseChain | Runnable" 
105 | } 106 | ], 107 | "outputs": {}, 108 | "selected": false 109 | }, 110 | "width": 300, 111 | "height": 484, 112 | "selected": false, 113 | "positionAbsolute": { 114 | "x": 656, 115 | "y": 156 116 | } 117 | }, 118 | { 119 | "id": "bufferMemory_0", 120 | "position": { 121 | "x": 232.6843615160352, 122 | "y": -162.57529096209916 123 | }, 124 | "type": "customNode", 125 | "data": { 126 | "id": "bufferMemory_0", 127 | "label": "Buffer Memory", 128 | "version": 2, 129 | "name": "bufferMemory", 130 | "type": "BufferMemory", 131 | "baseClasses": [ 132 | "BufferMemory", 133 | "BaseChatMemory", 134 | "BaseMemory" 135 | ], 136 | "category": "Memory", 137 | "description": "Retrieve chat messages stored in database", 138 | "inputParams": [ 139 | { 140 | "label": "Session Id", 141 | "name": "sessionId", 142 | "type": "string", 143 | "description": "If not specified, a random id will be used. Learn more", 144 | "default": "", 145 | "additionalParams": true, 146 | "optional": true, 147 | "id": "bufferMemory_0-input-sessionId-string" 148 | }, 149 | { 150 | "label": "Memory Key", 151 | "name": "memoryKey", 152 | "type": "string", 153 | "default": "chat_history", 154 | "additionalParams": true, 155 | "id": "bufferMemory_0-input-memoryKey-string" 156 | } 157 | ], 158 | "inputAnchors": [], 159 | "inputs": { 160 | "sessionId": "", 161 | "memoryKey": "chat_history" 162 | }, 163 | "outputAnchors": [ 164 | { 165 | "id": "bufferMemory_0-output-bufferMemory-BufferMemory|BaseChatMemory|BaseMemory", 166 | "name": "bufferMemory", 167 | "label": "BufferMemory", 168 | "description": "Retrieve chat messages stored in database", 169 | "type": "BufferMemory | BaseChatMemory | BaseMemory" 170 | } 171 | ], 172 | "outputs": {}, 173 | "selected": false 174 | }, 175 | "width": 300, 176 | "height": 251, 177 | "selected": false, 178 | "positionAbsolute": { 179 | "x": 232.6843615160352, 180 | "y": -162.57529096209916 181 | }, 182 | "dragging": false 183 | }, 184 | { 185 | "id": "chatOllama_0", 186 | 
"position": { 187 | "x": 203.62742857142882, 188 | "y": 133.58191020408157 189 | }, 190 | "type": "customNode", 191 | "data": { 192 | "id": "chatOllama_0", 193 | "label": "ChatOllama", 194 | "version": 5, 195 | "name": "chatOllama", 196 | "type": "ChatOllama", 197 | "baseClasses": [ 198 | "ChatOllama", 199 | "ChatOllama", 200 | "BaseChatModel", 201 | "BaseLanguageModel", 202 | "Runnable" 203 | ], 204 | "category": "Chat Models", 205 | "description": "Chat completion using open-source LLM on Ollama", 206 | "inputParams": [ 207 | { 208 | "label": "Base URL", 209 | "name": "baseUrl", 210 | "type": "string", 211 | "default": "http://localhost:11434", 212 | "id": "chatOllama_0-input-baseUrl-string" 213 | }, 214 | { 215 | "label": "Model Name", 216 | "name": "modelName", 217 | "type": "string", 218 | "placeholder": "llama2", 219 | "id": "chatOllama_0-input-modelName-string" 220 | }, 221 | { 222 | "label": "Temperature", 223 | "name": "temperature", 224 | "type": "number", 225 | "description": "The temperature of the model. Increasing the temperature will make the model answer more creatively. (Default: 0.8). Refer to docs for more details", 226 | "step": 0.1, 227 | "default": 0.9, 228 | "optional": true, 229 | "id": "chatOllama_0-input-temperature-number" 230 | }, 231 | { 232 | "label": "Allow Image Uploads", 233 | "name": "allowImageUploads", 234 | "type": "boolean", 235 | "description": "Allow image input. Refer to the docs for more details.", 236 | "default": false, 237 | "optional": true, 238 | "id": "chatOllama_0-input-allowImageUploads-boolean" 239 | }, 240 | { 241 | "label": "Streaming", 242 | "name": "streaming", 243 | "type": "boolean", 244 | "default": true, 245 | "optional": true, 246 | "additionalParams": true, 247 | "id": "chatOllama_0-input-streaming-boolean" 248 | }, 249 | { 250 | "label": "JSON Mode", 251 | "name": "jsonMode", 252 | "type": "boolean", 253 | "description": "Coerces model outputs to only return JSON. 
Specify in the system prompt to return JSON. Ex: Format all responses as JSON object", 254 | "optional": true, 255 | "additionalParams": true, 256 | "id": "chatOllama_0-input-jsonMode-boolean" 257 | }, 258 | { 259 | "label": "Keep Alive", 260 | "name": "keepAlive", 261 | "type": "string", 262 | "description": "How long to keep connection alive. A duration string (such as \"10m\" or \"24h\")", 263 | "default": "5m", 264 | "optional": true, 265 | "additionalParams": true, 266 | "id": "chatOllama_0-input-keepAlive-string" 267 | }, 268 | { 269 | "label": "Top P", 270 | "name": "topP", 271 | "type": "number", 272 | "description": "Works together with top-k. A higher value (e.g., 0.95) will lead to more diverse text, while a lower value (e.g., 0.5) will generate more focused and conservative text. (Default: 0.9). Refer to docs for more details", 273 | "step": 0.1, 274 | "optional": true, 275 | "additionalParams": true, 276 | "id": "chatOllama_0-input-topP-number" 277 | }, 278 | { 279 | "label": "Top K", 280 | "name": "topK", 281 | "type": "number", 282 | "description": "Reduces the probability of generating nonsense. A higher value (e.g. 100) will give more diverse answers, while a lower value (e.g. 10) will be more conservative. (Default: 40). Refer to docs for more details", 283 | "step": 1, 284 | "optional": true, 285 | "additionalParams": true, 286 | "id": "chatOllama_0-input-topK-number" 287 | }, 288 | { 289 | "label": "Mirostat", 290 | "name": "mirostat", 291 | "type": "number", 292 | "description": "Enable Mirostat sampling for controlling perplexity. (default: 0, 0 = disabled, 1 = Mirostat, 2 = Mirostat 2.0). Refer to docs for more details", 293 | "step": 1, 294 | "optional": true, 295 | "additionalParams": true, 296 | "id": "chatOllama_0-input-mirostat-number" 297 | }, 298 | { 299 | "label": "Mirostat ETA", 300 | "name": "mirostatEta", 301 | "type": "number", 302 | "description": "Influences how quickly the algorithm responds to feedback from the generated text. 
A lower learning rate will result in slower adjustments, while a higher learning rate will make the algorithm more responsive. (Default: 0.1) Refer to docs for more details", 303 | "step": 0.1, 304 | "optional": true, 305 | "additionalParams": true, 306 | "id": "chatOllama_0-input-mirostatEta-number" 307 | }, 308 | { 309 | "label": "Mirostat TAU", 310 | "name": "mirostatTau", 311 | "type": "number", 312 | "description": "Controls the balance between coherence and diversity of the output. A lower value will result in more focused and coherent text. (Default: 5.0) Refer to docs for more details", 313 | "step": 0.1, 314 | "optional": true, 315 | "additionalParams": true, 316 | "id": "chatOllama_0-input-mirostatTau-number" 317 | }, 318 | { 319 | "label": "Context Window Size", 320 | "name": "numCtx", 321 | "type": "number", 322 | "description": "Sets the size of the context window used to generate the next token. (Default: 2048) Refer to docs for more details", 323 | "step": 1, 324 | "optional": true, 325 | "additionalParams": true, 326 | "id": "chatOllama_0-input-numCtx-number" 327 | }, 328 | { 329 | "label": "Number of GPU", 330 | "name": "numGpu", 331 | "type": "number", 332 | "description": "The number of layers to send to the GPU(s). On macOS it defaults to 1 to enable metal support, 0 to disable. Refer to docs for more details", 333 | "step": 1, 334 | "optional": true, 335 | "additionalParams": true, 336 | "id": "chatOllama_0-input-numGpu-number" 337 | }, 338 | { 339 | "label": "Number of Thread", 340 | "name": "numThread", 341 | "type": "number", 342 | "description": "Sets the number of threads to use during computation. By default, Ollama will detect this for optimal performance. It is recommended to set this value to the number of physical CPU cores your system has (as opposed to the logical number of cores). 
Refer to docs for more details", 343 | "step": 1, 344 | "optional": true, 345 | "additionalParams": true, 346 | "id": "chatOllama_0-input-numThread-number" 347 | }, 348 | { 349 | "label": "Repeat Last N", 350 | "name": "repeatLastN", 351 | "type": "number", 352 | "description": "Sets how far back for the model to look back to prevent repetition. (Default: 64, 0 = disabled, -1 = num_ctx). Refer to docs for more details", 353 | "step": 1, 354 | "optional": true, 355 | "additionalParams": true, 356 | "id": "chatOllama_0-input-repeatLastN-number" 357 | }, 358 | { 359 | "label": "Repeat Penalty", 360 | "name": "repeatPenalty", 361 | "type": "number", 362 | "description": "Sets how strongly to penalize repetitions. A higher value (e.g., 1.5) will penalize repetitions more strongly, while a lower value (e.g., 0.9) will be more lenient. (Default: 1.1). Refer to docs for more details", 363 | "step": 0.1, 364 | "optional": true, 365 | "additionalParams": true, 366 | "id": "chatOllama_0-input-repeatPenalty-number" 367 | }, 368 | { 369 | "label": "Stop Sequence", 370 | "name": "stop", 371 | "type": "string", 372 | "rows": 4, 373 | "placeholder": "AI assistant:", 374 | "description": "Sets the stop sequences to use. Use comma to seperate different sequences. Refer to docs for more details", 375 | "optional": true, 376 | "additionalParams": true, 377 | "id": "chatOllama_0-input-stop-string" 378 | }, 379 | { 380 | "label": "Tail Free Sampling", 381 | "name": "tfsZ", 382 | "type": "number", 383 | "description": "Tail free sampling is used to reduce the impact of less probable tokens from the output. A higher value (e.g., 2.0) will reduce the impact more, while a value of 1.0 disables this setting. (Default: 1). 
Refer to docs for more details", 384 | "step": 0.1, 385 | "optional": true, 386 | "additionalParams": true, 387 | "id": "chatOllama_0-input-tfsZ-number" 388 | } 389 | ], 390 | "inputAnchors": [ 391 | { 392 | "label": "Cache", 393 | "name": "cache", 394 | "type": "BaseCache", 395 | "optional": true, 396 | "id": "chatOllama_0-input-cache-BaseCache" 397 | } 398 | ], 399 | "inputs": { 400 | "cache": "{{inMemoryCache_0.data.instance}}", 401 | "baseUrl": "http://host.docker.internal:11434", 402 | "modelName": "qwen2.5-coder:32b", 403 | "temperature": "0.5", 404 | "allowImageUploads": "", 405 | "streaming": true, 406 | "jsonMode": "", 407 | "keepAlive": "5m", 408 | "topP": "", 409 | "topK": "", 410 | "mirostat": "", 411 | "mirostatEta": "", 412 | "mirostatTau": "", 413 | "numCtx": "32768", 414 | "numGpu": "", 415 | "numThread": "", 416 | "repeatLastN": "", 417 | "repeatPenalty": "", 418 | "stop": "", 419 | "tfsZ": "" 420 | }, 421 | "outputAnchors": [ 422 | { 423 | "id": "chatOllama_0-output-chatOllama-ChatOllama|ChatOllama|BaseChatModel|BaseLanguageModel|Runnable", 424 | "name": "chatOllama", 425 | "label": "ChatOllama", 426 | "description": "Chat completion using open-source LLM on Ollama", 427 | "type": "ChatOllama | ChatOllama | BaseChatModel | BaseLanguageModel | Runnable" 428 | } 429 | ], 430 | "outputs": {}, 431 | "selected": false 432 | }, 433 | "width": 300, 434 | "height": 675, 435 | "selected": false, 436 | "positionAbsolute": { 437 | "x": 203.62742857142882, 438 | "y": 133.58191020408157 439 | }, 440 | "dragging": false 441 | }, 442 | { 443 | "id": "inMemoryCache_0", 444 | "position": { 445 | "x": -222.16839650145752, 446 | "y": 87.76136209912531 447 | }, 448 | "type": "customNode", 449 | "data": { 450 | "id": "inMemoryCache_0", 451 | "label": "InMemory Cache", 452 | "version": 1, 453 | "name": "inMemoryCache", 454 | "type": "InMemoryCache", 455 | "baseClasses": [ 456 | "InMemoryCache", 457 | "BaseCache" 458 | ], 459 | "category": "Cache", 460 | "description": 
"Cache LLM response in memory, will be cleared once app restarted", 461 | "inputParams": [], 462 | "inputAnchors": [], 463 | "inputs": {}, 464 | "outputAnchors": [ 465 | { 466 | "id": "inMemoryCache_0-output-inMemoryCache-InMemoryCache|BaseCache", 467 | "name": "inMemoryCache", 468 | "label": "InMemoryCache", 469 | "description": "Cache LLM response in memory, will be cleared once app restarted", 470 | "type": "InMemoryCache | BaseCache" 471 | } 472 | ], 473 | "outputs": {}, 474 | "selected": false 475 | }, 476 | "width": 300, 477 | "height": 143, 478 | "selected": false, 479 | "positionAbsolute": { 480 | "x": -222.16839650145752, 481 | "y": 87.76136209912531 482 | }, 483 | "dragging": false 484 | }, 485 | { 486 | "id": "braveSearchAPI_0", 487 | "position": { 488 | "x": 557.8984956268223, 489 | "y": -202.80796734693882 490 | }, 491 | "type": "customNode", 492 | "data": { 493 | "id": "braveSearchAPI_0", 494 | "label": "BraveSearch API", 495 | "version": 1, 496 | "name": "braveSearchAPI", 497 | "type": "BraveSearchAPI", 498 | "baseClasses": [ 499 | "BraveSearchAPI", 500 | "Tool", 501 | "StructuredTool", 502 | "Runnable" 503 | ], 504 | "category": "Tools", 505 | "description": "Wrapper around BraveSearch API - a real-time API to access Brave search results", 506 | "inputParams": [ 507 | { 508 | "label": "Connect Credential", 509 | "name": "credential", 510 | "type": "credential", 511 | "credentialNames": [ 512 | "braveSearchApi" 513 | ], 514 | "id": "braveSearchAPI_0-input-credential-credential" 515 | } 516 | ], 517 | "inputAnchors": [], 518 | "inputs": {}, 519 | "outputAnchors": [ 520 | { 521 | "id": "braveSearchAPI_0-output-braveSearchAPI-BraveSearchAPI|Tool|StructuredTool|Runnable", 522 | "name": "braveSearchAPI", 523 | "label": "BraveSearchAPI", 524 | "description": "Wrapper around BraveSearch API - a real-time API to access Brave search results", 525 | "type": "BraveSearchAPI | Tool | StructuredTool | Runnable" 526 | } 527 | ], 528 | "outputs": {}, 529 | 
"selected": false 530 | }, 531 | "width": 300, 532 | "height": 275, 533 | "selected": false, 534 | "positionAbsolute": { 535 | "x": 557.8984956268223, 536 | "y": -202.80796734693882 537 | }, 538 | "dragging": false 539 | }, 540 | { 541 | "id": "customTool_0", 542 | "position": { 543 | "x": 919.9367100136845, 544 | "y": -303.9223829475814 545 | }, 546 | "type": "customNode", 547 | "data": { 548 | "id": "customTool_0", 549 | "label": "Custom Tool", 550 | "version": 2, 551 | "name": "customTool", 552 | "type": "CustomTool", 553 | "baseClasses": [ 554 | "CustomTool", 555 | "Tool", 556 | "StructuredTool", 557 | "Runnable" 558 | ], 559 | "category": "Tools", 560 | "description": "Use custom tool you've created in Flowise within chatflow", 561 | "inputParams": [ 562 | { 563 | "label": "Select Tool", 564 | "name": "selectedTool", 565 | "type": "asyncOptions", 566 | "loadMethod": "listTools", 567 | "id": "customTool_0-input-selectedTool-asyncOptions" 568 | }, 569 | { 570 | "label": "Return Direct", 571 | "name": "returnDirect", 572 | "description": "Return the output of the tool directly to the user", 573 | "type": "boolean", 574 | "optional": true, 575 | "id": "customTool_0-input-returnDirect-boolean" 576 | } 577 | ], 578 | "inputAnchors": [], 579 | "inputs": { 580 | "selectedTool": "cbc24643-ad81-4769-911f-089c8e4c87ab", 581 | "returnDirect": "" 582 | }, 583 | "outputAnchors": [ 584 | { 585 | "id": "customTool_0-output-customTool-CustomTool|Tool|StructuredTool|Runnable", 586 | "name": "customTool", 587 | "label": "CustomTool", 588 | "description": "Use custom tool you've created in Flowise within chatflow", 589 | "type": "CustomTool | Tool | StructuredTool | Runnable" 590 | } 591 | ], 592 | "outputs": {}, 593 | "selected": false 594 | }, 595 | "width": 300, 596 | "height": 371, 597 | "selected": false, 598 | "positionAbsolute": { 599 | "x": 919.9367100136845, 600 | "y": -303.9223829475814 601 | }, 602 | "dragging": false 603 | }, 604 | { 605 | "id": "customTool_1", 606 | 
"position": { 607 | "x": 1055.4606486069163, 608 | "y": 157.78311597410726 609 | }, 610 | "type": "customNode", 611 | "data": { 612 | "id": "customTool_1", 613 | "label": "Custom Tool", 614 | "version": 2, 615 | "name": "customTool", 616 | "type": "CustomTool", 617 | "baseClasses": [ 618 | "CustomTool", 619 | "Tool", 620 | "StructuredTool", 621 | "Runnable" 622 | ], 623 | "category": "Tools", 624 | "description": "Use custom tool you've created in Flowise within chatflow", 625 | "inputParams": [ 626 | { 627 | "label": "Select Tool", 628 | "name": "selectedTool", 629 | "type": "asyncOptions", 630 | "loadMethod": "listTools", 631 | "id": "customTool_1-input-selectedTool-asyncOptions" 632 | }, 633 | { 634 | "label": "Return Direct", 635 | "name": "returnDirect", 636 | "description": "Return the output of the tool directly to the user", 637 | "type": "boolean", 638 | "optional": true, 639 | "id": "customTool_1-input-returnDirect-boolean" 640 | } 641 | ], 642 | "inputAnchors": [], 643 | "inputs": { 644 | "selectedTool": "54bc0754-c127-416c-8cd0-db2902f2fce8", 645 | "returnDirect": "" 646 | }, 647 | "outputAnchors": [ 648 | { 649 | "id": "customTool_1-output-customTool-CustomTool|Tool|StructuredTool|Runnable", 650 | "name": "customTool", 651 | "label": "CustomTool", 652 | "description": "Use custom tool you've created in Flowise within chatflow", 653 | "type": "CustomTool | Tool | StructuredTool | Runnable" 654 | } 655 | ], 656 | "outputs": {}, 657 | "selected": false 658 | }, 659 | "width": 300, 660 | "height": 371, 661 | "selected": false, 662 | "dragging": false, 663 | "positionAbsolute": { 664 | "x": 1055.4606486069163, 665 | "y": 157.78311597410726 666 | } 667 | }, 668 | { 669 | "id": "customTool_2", 670 | "position": { 671 | "x": 1058.5329191281724, 672 | "y": 583.5403469198069 673 | }, 674 | "type": "customNode", 675 | "data": { 676 | "id": "customTool_2", 677 | "label": "Custom Tool", 678 | "version": 2, 679 | "name": "customTool", 680 | "type": "CustomTool", 681 
| "baseClasses": [ 682 | "CustomTool", 683 | "Tool", 684 | "StructuredTool", 685 | "Runnable" 686 | ], 687 | "category": "Tools", 688 | "description": "Use custom tool you've created in Flowise within chatflow", 689 | "inputParams": [ 690 | { 691 | "label": "Select Tool", 692 | "name": "selectedTool", 693 | "type": "asyncOptions", 694 | "loadMethod": "listTools", 695 | "id": "customTool_2-input-selectedTool-asyncOptions" 696 | }, 697 | { 698 | "label": "Return Direct", 699 | "name": "returnDirect", 700 | "description": "Return the output of the tool directly to the user", 701 | "type": "boolean", 702 | "optional": true, 703 | "id": "customTool_2-input-returnDirect-boolean" 704 | } 705 | ], 706 | "inputAnchors": [], 707 | "inputs": { 708 | "selectedTool": "83ef936c-4579-48a3-b95f-fbefc6926b65", 709 | "returnDirect": "" 710 | }, 711 | "outputAnchors": [ 712 | { 713 | "id": "customTool_2-output-customTool-CustomTool|Tool|StructuredTool|Runnable", 714 | "name": "customTool", 715 | "label": "CustomTool", 716 | "description": "Use custom tool you've created in Flowise within chatflow", 717 | "type": "CustomTool | Tool | StructuredTool | Runnable" 718 | } 719 | ], 720 | "outputs": {}, 721 | "selected": false 722 | }, 723 | "width": 300, 724 | "height": 371, 725 | "selected": false, 726 | "dragging": false, 727 | "positionAbsolute": { 728 | "x": 1058.5329191281724, 729 | "y": 583.5403469198069 730 | } 731 | }, 732 | { 733 | "id": "customTool_3", 734 | "position": { 735 | "x": 650.5308240717925, 736 | "y": 747.2264330934981 737 | }, 738 | "type": "customNode", 739 | "data": { 740 | "id": "customTool_3", 741 | "label": "Custom Tool", 742 | "version": 2, 743 | "name": "customTool", 744 | "type": "CustomTool", 745 | "baseClasses": [ 746 | "CustomTool", 747 | "Tool", 748 | "StructuredTool", 749 | "Runnable" 750 | ], 751 | "category": "Tools", 752 | "description": "Use custom tool you've created in Flowise within chatflow", 753 | "inputParams": [ 754 | { 755 | "label": "Select 
Tool", 756 | "name": "selectedTool", 757 | "type": "asyncOptions", 758 | "loadMethod": "listTools", 759 | "id": "customTool_3-input-selectedTool-asyncOptions" 760 | }, 761 | { 762 | "label": "Return Direct", 763 | "name": "returnDirect", 764 | "description": "Return the output of the tool directly to the user", 765 | "type": "boolean", 766 | "optional": true, 767 | "id": "customTool_3-input-returnDirect-boolean" 768 | } 769 | ], 770 | "inputAnchors": [], 771 | "inputs": { 772 | "selectedTool": "59ace78e-575c-4f38-958d-a80e46be1e64", 773 | "returnDirect": "" 774 | }, 775 | "outputAnchors": [ 776 | { 777 | "id": "customTool_3-output-customTool-CustomTool|Tool|StructuredTool|Runnable", 778 | "name": "customTool", 779 | "label": "CustomTool", 780 | "description": "Use custom tool you've created in Flowise within chatflow", 781 | "type": "CustomTool | Tool | StructuredTool | Runnable" 782 | } 783 | ], 784 | "outputs": {}, 785 | "selected": false 786 | }, 787 | "width": 300, 788 | "height": 371, 789 | "selected": false, 790 | "positionAbsolute": { 791 | "x": 650.5308240717925, 792 | "y": 747.2264330934981 793 | }, 794 | "dragging": false 795 | } 796 | ], 797 | "edges": [ 798 | { 799 | "source": "bufferMemory_0", 800 | "sourceHandle": "bufferMemory_0-output-bufferMemory-BufferMemory|BaseChatMemory|BaseMemory", 801 | "target": "toolAgent_0", 802 | "targetHandle": "toolAgent_0-input-memory-BaseChatMemory", 803 | "type": "buttonedge", 804 | "id": "bufferMemory_0-bufferMemory_0-output-bufferMemory-BufferMemory|BaseChatMemory|BaseMemory-toolAgent_0-toolAgent_0-input-memory-BaseChatMemory" 805 | }, 806 | { 807 | "source": "inMemoryCache_0", 808 | "sourceHandle": "inMemoryCache_0-output-inMemoryCache-InMemoryCache|BaseCache", 809 | "target": "chatOllama_0", 810 | "targetHandle": "chatOllama_0-input-cache-BaseCache", 811 | "type": "buttonedge", 812 | "id": 
"inMemoryCache_0-inMemoryCache_0-output-inMemoryCache-InMemoryCache|BaseCache-chatOllama_0-chatOllama_0-input-cache-BaseCache" 813 | }, 814 | { 815 | "source": "chatOllama_0", 816 | "sourceHandle": "chatOllama_0-output-chatOllama-ChatOllama|ChatOllama|BaseChatModel|BaseLanguageModel|Runnable", 817 | "target": "toolAgent_0", 818 | "targetHandle": "toolAgent_0-input-model-BaseChatModel", 819 | "type": "buttonedge", 820 | "id": "chatOllama_0-chatOllama_0-output-chatOllama-ChatOllama|ChatOllama|BaseChatModel|BaseLanguageModel|Runnable-toolAgent_0-toolAgent_0-input-model-BaseChatModel" 821 | }, 822 | { 823 | "source": "braveSearchAPI_0", 824 | "sourceHandle": "braveSearchAPI_0-output-braveSearchAPI-BraveSearchAPI|Tool|StructuredTool|Runnable", 825 | "target": "toolAgent_0", 826 | "targetHandle": "toolAgent_0-input-tools-Tool", 827 | "type": "buttonedge", 828 | "id": "braveSearchAPI_0-braveSearchAPI_0-output-braveSearchAPI-BraveSearchAPI|Tool|StructuredTool|Runnable-toolAgent_0-toolAgent_0-input-tools-Tool" 829 | }, 830 | { 831 | "source": "customTool_0", 832 | "sourceHandle": "customTool_0-output-customTool-CustomTool|Tool|StructuredTool|Runnable", 833 | "target": "toolAgent_0", 834 | "targetHandle": "toolAgent_0-input-tools-Tool", 835 | "type": "buttonedge", 836 | "id": "customTool_0-customTool_0-output-customTool-CustomTool|Tool|StructuredTool|Runnable-toolAgent_0-toolAgent_0-input-tools-Tool" 837 | }, 838 | { 839 | "source": "customTool_1", 840 | "sourceHandle": "customTool_1-output-customTool-CustomTool|Tool|StructuredTool|Runnable", 841 | "target": "toolAgent_0", 842 | "targetHandle": "toolAgent_0-input-tools-Tool", 843 | "type": "buttonedge", 844 | "id": "customTool_1-customTool_1-output-customTool-CustomTool|Tool|StructuredTool|Runnable-toolAgent_0-toolAgent_0-input-tools-Tool" 845 | }, 846 | { 847 | "source": "customTool_2", 848 | "sourceHandle": "customTool_2-output-customTool-CustomTool|Tool|StructuredTool|Runnable", 849 | "target": "toolAgent_0", 850 | 
"targetHandle": "toolAgent_0-input-tools-Tool", 851 | "type": "buttonedge", 852 | "id": "customTool_2-customTool_2-output-customTool-CustomTool|Tool|StructuredTool|Runnable-toolAgent_0-toolAgent_0-input-tools-Tool" 853 | }, 854 | { 855 | "source": "customTool_3", 856 | "sourceHandle": "customTool_3-output-customTool-CustomTool|Tool|StructuredTool|Runnable", 857 | "target": "toolAgent_0", 858 | "targetHandle": "toolAgent_0-input-tools-Tool", 859 | "type": "buttonedge", 860 | "id": "customTool_3-customTool_3-output-customTool-CustomTool|Tool|StructuredTool|Runnable-toolAgent_0-toolAgent_0-input-tools-Tool" 861 | } 862 | ] 863 | } -------------------------------------------------------------------------------- /flowise/create_google_doc-CustomTool.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "create_google_doc", 3 | "description": "Use this tool to create a Google Doc.", 4 | "color": "linear-gradient(rgb(148,138,24), rgb(211,202,27))", 5 | "iconSrc": "", 6 | "schema": "[{\"id\":0,\"property\":\"document_text\",\"description\":\"The text to put in the Google Doc\",\"type\":\"string\",\"required\":true},{\"id\":1,\"property\":\"document_title\",\"description\":\"The title for the document\",\"type\":\"string\",\"required\":true}]", 7 | "func": "/*\n* You can use any libraries imported in Flowise\n* You can use properties specified in Input Schema as variables. 
Ex: Property = userid, Variable = $userid\n* You can get default flow config: $flow.sessionId, $flow.chatId, $flow.chatflowId, $flow.input, $flow.state\n* You can get custom variables: $vars.\n* Must return a string value at the end of function\n*/\n\nconst fetch = require('node-fetch');\nconst url = 'Your n8n Webhook URL';\nconst options = {\n method: 'POST',\n headers: {\n 'Content-Type': 'application/json',\n 'Authorization': `Bearer ${$vars.headerauth}`\n },\n body: JSON.stringify({\n 'document_title': $document_title,\n 'document_text': $document_text\n })\n};\ntry {\n const response = await fetch(url, options);\n const text = await response.text();\n return text;\n} catch (error) {\n console.error(error);\n return '';\n}" 8 | } -------------------------------------------------------------------------------- /flowise/get_postgres_tables-CustomTool.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "get_postgres_tables", 3 | "description": "Use this tool to get the Postgres table.", 4 | "color": "linear-gradient(rgb(233,65,152), rgb(71,142,232))", 5 | "iconSrc": "", 6 | "schema": "[{\"id\":0,\"property\":\"database\",\"description\":\"the database name\",\"type\":\"string\",\"required\":true}]", 7 | "func": "/*\n* You can use any libraries imported in Flowise\n* You can use properties specified in Input Schema as variables. 
Ex: Property = userid, Variable = $userid\n* You can get default flow config: $flow.sessionId, $flow.chatId, $flow.chatflowId, $flow.input, $flow.state\n* You can get custom variables: $vars.\n* Must return a string value at the end of function\n*/\n\nconst fetch = require('node-fetch');\nconst url = 'http://n8n:5678/webhook/d8db9fa3-04fe-43c8-9acf-e1912463477f';\nconst options = {\n method: 'GET',\n headers: {\n 'Content-Type': 'application/json',\n 'Authorization': `Bearer ${$vars.headerauth}`\n }\n};\ntry {\n const response = await fetch(url, options);\n const text = await response.text();\n return text;\n} catch (error) {\n console.error(error);\n return '';\n}" 8 | } -------------------------------------------------------------------------------- /flowise/send_slack_message_through_n8n-CustomTool.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "send_slack_message_through_n8n", 3 | "description": "Use this tool to Send a message in Slack.", 4 | "color": "linear-gradient(rgb(25,248,134), rgb(46,226,32))", 5 | "iconSrc": "", 6 | "schema": "[{\"id\":0,\"property\":\"message\",\"description\":\"The message to send in Slack\",\"type\":\"string\",\"required\":true}]", 7 | "func": "/*\n* You can use any libraries imported in Flowise\n* You can use properties specified in Input Schema as variables. 
Ex: Property = userid, Variable = $userid\n* You can get default flow config: $flow.sessionId, $flow.chatId, $flow.chatflowId, $flow.input, $flow.state\n* You can get custom variables: $vars.\n* Must return a string value at the end of function\n*/\n\nconst fetch = require('node-fetch');\nconst url = 'Your n8n Webhook URL';\nconst options = {\n method: 'POST',\n headers: {\n 'Content-Type': 'application/json',\n 'Authorization': `Bearer ${$vars.headerauth}`\n },\n body: JSON.stringify({\n 'message': $message\n })\n};\ntry {\n const response = await fetch(url, options);\n const text = await response.text();\n return text;\n} catch (error) {\n console.error(error);\n return '';\n}" 8 | } -------------------------------------------------------------------------------- /flowise/summarize_slack_conversation-CustomTool.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "summarize_slack_conversation", 3 | "description": "Use this tool to get a summary of a Slack conversation in a channel.", 4 | "color": "linear-gradient(rgb(235,70,180), rgb(87,203,87))", 5 | "iconSrc": "", 6 | "schema": "[{\"id\":0,\"property\":\"channel\",\"description\":\"The channel name to summarize\",\"type\":\"string\",\"required\":false}]", 7 | "func": "/*\n* You can use any libraries imported in Flowise\n* You can use properties specified in Input Schema as variables. 
Ex: Property = userid, Variable = $userid\n* You can get default flow config: $flow.sessionId, $flow.chatId, $flow.chatflowId, $flow.input, $flow.state\n* You can get custom variables: $vars.\n* Must return a string value at the end of function\n*/\n\nconst fetch = require('node-fetch');\nconst url = 'Your n8n Webhook URL';\nconst options = {\n method: 'GET',\n headers: {\n 'Content-Type': 'application/json',\n 'Authorization': `Bearer ${$vars.headerauth}`\n }\n};\ntry {\n const response = await fetch(url, options);\n const text = await response.text();\n return text;\n} catch (error) {\n console.error(error);\n return '';\n}" 8 | } -------------------------------------------------------------------------------- /n8n-tool-workflows/Create_Google_Doc.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "Create Google Doc", 3 | "nodes": [ 4 | { 5 | "parameters": { 6 | "operation": "createFromText", 7 | "content": "={{ $json.body.document_text }}", 8 | "name": "={{ $json.body.document_title }}", 9 | "driveId": { 10 | "__rl": true, 11 | "mode": "list", 12 | "value": "My Drive" 13 | }, 14 | "folderId": { 15 | "__rl": true, 16 | "value": "1914m3M7kRzkd5RJqAfzRY9EBcJrKemZC", 17 | "mode": "list", 18 | "cachedResultName": "Meeting Notes", 19 | "cachedResultUrl": "https://drive.google.com/drive/folders/1914m3M7kRzkd5RJqAfzRY9EBcJrKemZC" 20 | }, 21 | "options": { 22 | "convertToGoogleDocument": true 23 | } 24 | }, 25 | "id": "abb2ee3f-7dd0-4d6e-96f0-6cc91eb64a5e", 26 | "name": "Google Drive", 27 | "type": "n8n-nodes-base.googleDrive", 28 | "typeVersion": 3, 29 | "position": [ 30 | 1040, 31 | 360 32 | ], 33 | "credentials": { 34 | "googleDriveOAuth2Api": { 35 | "id": "cfNochbuJikPwwl2", 36 | "name": "Google Drive account" 37 | } 38 | } 39 | }, 40 | { 41 | "parameters": { 42 | "options": {} 43 | }, 44 | "id": "9904e7b7-c9f6-49b5-ab72-6b199c6e2f46", 45 | "name": "Respond to Webhook", 46 | "type": 
"n8n-nodes-base.respondToWebhook", 47 | "typeVersion": 1.1, 48 | "position": [ 49 | 1260, 50 | 360 51 | ] 52 | }, 53 | { 54 | "parameters": { 55 | "httpMethod": "POST", 56 | "path": "d8db9fa3-04fe-43c8-9acf-e1912463477f", 57 | "authentication": "headerAuth", 58 | "responseMode": "responseNode", 59 | "options": {} 60 | }, 61 | "id": "7e761bed-99f6-4a8a-959b-d1b3542b6071", 62 | "name": "Webhook", 63 | "type": "n8n-nodes-base.webhook", 64 | "typeVersion": 2, 65 | "position": [ 66 | 820, 67 | 360 68 | ], 69 | "webhookId": "d8db9fa3-04fe-43c8-9acf-e1912463477f", 70 | "credentials": { 71 | "httpHeaderAuth": { 72 | "id": "PGr0hc0kn43Di1sz", 73 | "name": "testauth" 74 | } 75 | } 76 | } 77 | ], 78 | "pinData": {}, 79 | "connections": { 80 | "Google Drive": { 81 | "main": [ 82 | [ 83 | { 84 | "node": "Respond to Webhook", 85 | "type": "main", 86 | "index": 0 87 | } 88 | ] 89 | ] 90 | }, 91 | "Webhook": { 92 | "main": [ 93 | [ 94 | { 95 | "node": "Google Drive", 96 | "type": "main", 97 | "index": 0 98 | } 99 | ] 100 | ] 101 | } 102 | }, 103 | "active": true, 104 | "settings": { 105 | "executionOrder": "v1" 106 | }, 107 | "versionId": "c1f66b1c-c7dc-48ba-9197-6a0e3ac3e8f4", 108 | "meta": { 109 | "templateCredsSetupCompleted": true, 110 | "instanceId": "620f0d7e3114cb344761d7d45a21ef2a32096f91d8696e7057756042e1999e2c" 111 | }, 112 | "id": "LYrReDbpmqK3eX2P", 113 | "tags": [] 114 | } -------------------------------------------------------------------------------- /n8n-tool-workflows/Get_Postgres_Tables.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "Get Postgres Tables", 3 | "nodes": [ 4 | { 5 | "parameters": { 6 | "options": {} 7 | }, 8 | "id": "74a8e48c-c13f-450c-835e-1702d87f894c", 9 | "name": "Respond to Webhook", 10 | "type": "n8n-nodes-base.respondToWebhook", 11 | "typeVersion": 1.1, 12 | "position": [ 13 | 880, 14 | 60 15 | ] 16 | }, 17 | { 18 | "parameters": { 19 | "operation": "executeQuery", 20 | "query": 
"SELECT table_name \nFROM information_schema.tables\nWHERE table_schema = 'public'\n AND table_type = 'BASE TABLE'\nORDER BY table_name;", 21 | "options": {} 22 | }, 23 | "id": "2cd15801-ea79-4a53-b1be-c0470429966a", 24 | "name": "Postgres", 25 | "type": "n8n-nodes-base.postgres", 26 | "typeVersion": 2.5, 27 | "position": [ 28 | 400, 29 | 60 30 | ], 31 | "credentials": { 32 | "postgres": { 33 | "id": "AXJoQJaFRsoL9Qk8", 34 | "name": "Postgres account" 35 | } 36 | } 37 | }, 38 | { 39 | "parameters": { 40 | "fieldsToSummarize": { 41 | "values": [ 42 | { 43 | "aggregation": "concatenate", 44 | "field": "table_name" 45 | } 46 | ] 47 | }, 48 | "options": {} 49 | }, 50 | "id": "8b66e814-e553-451c-818a-fc93699b341c", 51 | "name": "Summarize", 52 | "type": "n8n-nodes-base.summarize", 53 | "typeVersion": 1, 54 | "position": [ 55 | 640, 56 | 60 57 | ] 58 | }, 59 | { 60 | "parameters": { 61 | "path": "d8db9fa3-04fe-43c8-9acf-e1912463477f", 62 | "authentication": "headerAuth", 63 | "responseMode": "responseNode", 64 | "options": {} 65 | }, 66 | "id": "3eb54a82-034c-4f3f-aa99-3b6aeb744ce2", 67 | "name": "Webhook", 68 | "type": "n8n-nodes-base.webhook", 69 | "typeVersion": 2, 70 | "position": [ 71 | 180, 72 | 60 73 | ], 74 | "webhookId": "d8db9fa3-04fe-43c8-9acf-e1912463477f", 75 | "credentials": { 76 | "httpHeaderAuth": { 77 | "id": "upxO7NGaOTeIP4XU", 78 | "name": "testauth" 79 | } 80 | } 81 | } 82 | ], 83 | "pinData": {}, 84 | "connections": { 85 | "Postgres": { 86 | "main": [ 87 | [ 88 | { 89 | "node": "Summarize", 90 | "type": "main", 91 | "index": 0 92 | } 93 | ] 94 | ] 95 | }, 96 | "Summarize": { 97 | "main": [ 98 | [ 99 | { 100 | "node": "Respond to Webhook", 101 | "type": "main", 102 | "index": 0 103 | } 104 | ] 105 | ] 106 | }, 107 | "Webhook": { 108 | "main": [ 109 | [ 110 | { 111 | "node": "Postgres", 112 | "type": "main", 113 | "index": 0 114 | } 115 | ] 116 | ] 117 | } 118 | }, 119 | "active": true, 120 | "settings": { 121 | "executionOrder": "v1" 122 | }, 123 | 
"versionId": "bd8d96b6-53e1-4cb9-8ee7-0c8be9b86dc9", 124 | "meta": { 125 | "templateCredsSetupCompleted": true, 126 | "instanceId": "73cb7a3e883df514bb47e8d1b34526d30e2abb8f56cd99f10d5948a1e11b25aa" 127 | }, 128 | "id": "t15NIcuhUMXOE8DM", 129 | "tags": [] 130 | } -------------------------------------------------------------------------------- /n8n-tool-workflows/Post_Message_to_Slack.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "Post Message to Slack", 3 | "nodes": [ 4 | { 5 | "parameters": { 6 | "authentication": "oAuth2", 7 | "select": "channel", 8 | "channelId": { 9 | "__rl": true, 10 | "value": "C083QQBQTAM", 11 | "mode": "list", 12 | "cachedResultName": "flowise-n8n" 13 | }, 14 | "text": "={{ $json.body.message }}", 15 | "otherOptions": { 16 | "includeLinkToWorkflow": false 17 | } 18 | }, 19 | "id": "4882859e-bf35-475b-9fd2-a068ac4fc602", 20 | "name": "Slack", 21 | "type": "n8n-nodes-base.slack", 22 | "typeVersion": 2.2, 23 | "position": [ 24 | 1040, 25 | 360 26 | ], 27 | "credentials": { 28 | "slackOAuth2Api": { 29 | "id": "XtpcBxLD5axuhMm8", 30 | "name": "Slack account" 31 | } 32 | } 33 | }, 34 | { 35 | "parameters": { 36 | "options": {} 37 | }, 38 | "id": "a9e7aa8a-04ba-4dc6-8a14-f2c435df4ad8", 39 | "name": "Respond to Webhook", 40 | "type": "n8n-nodes-base.respondToWebhook", 41 | "typeVersion": 1.1, 42 | "position": [ 43 | 1260, 44 | 360 45 | ] 46 | }, 47 | { 48 | "parameters": { 49 | "httpMethod": "POST", 50 | "path": "f17f77e5-51dc-4589-8b51-4c8adc23c3c0", 51 | "authentication": "headerAuth", 52 | "responseMode": "responseNode", 53 | "options": {} 54 | }, 55 | "id": "db4d3557-e423-43e9-8f0c-b4309f304567", 56 | "name": "Webhook", 57 | "type": "n8n-nodes-base.webhook", 58 | "typeVersion": 2, 59 | "position": [ 60 | 820, 61 | 360 62 | ], 63 | "webhookId": "f17f77e5-51dc-4589-8b51-4c8adc23c3c0", 64 | "credentials": { 65 | "httpHeaderAuth": { 66 | "id": "PGr0hc0kn43Di1sz", 67 | "name": "testauth" 68 
| } 69 | } 70 | } 71 | ], 72 | "pinData": {}, 73 | "connections": { 74 | "Slack": { 75 | "main": [ 76 | [ 77 | { 78 | "node": "Respond to Webhook", 79 | "type": "main", 80 | "index": 0 81 | } 82 | ] 83 | ] 84 | }, 85 | "Webhook": { 86 | "main": [ 87 | [ 88 | { 89 | "node": "Slack", 90 | "type": "main", 91 | "index": 0 92 | } 93 | ] 94 | ] 95 | } 96 | }, 97 | "active": true, 98 | "settings": { 99 | "executionOrder": "v1" 100 | }, 101 | "versionId": "74bd88b9-5eb7-4a1a-8bc3-be2636a3639c", 102 | "meta": { 103 | "templateCredsSetupCompleted": true, 104 | "instanceId": "620f0d7e3114cb344761d7d45a21ef2a32096f91d8696e7057756042e1999e2c" 105 | }, 106 | "id": "dBTFcNDVqjuQ619T", 107 | "tags": [] 108 | } -------------------------------------------------------------------------------- /n8n-tool-workflows/Summarize_Slack_Conversation.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "Summarize Slack Conversation", 3 | "nodes": [ 4 | { 5 | "parameters": { 6 | "authentication": "oAuth2", 7 | "resource": "channel", 8 | "operation": "history", 9 | "channelId": { 10 | "__rl": true, 11 | "value": "C083QQBQTAM", 12 | "mode": "list", 13 | "cachedResultName": "flowise-n8n" 14 | }, 15 | "limit": 10, 16 | "filters": {} 17 | }, 18 | "id": "d572a7b3-311e-4864-a604-11d679ecc855", 19 | "name": "Slack", 20 | "type": "n8n-nodes-base.slack", 21 | "typeVersion": 2.2, 22 | "position": [ 23 | 1040, 24 | 360 25 | ], 26 | "credentials": { 27 | "slackOAuth2Api": { 28 | "id": "XtpcBxLD5axuhMm8", 29 | "name": "Slack account" 30 | } 31 | } 32 | }, 33 | { 34 | "parameters": { 35 | "options": {} 36 | }, 37 | "id": "c30ce367-cb19-4ef9-b8e7-0e03982960f6", 38 | "name": "Respond to Webhook", 39 | "type": "n8n-nodes-base.respondToWebhook", 40 | "typeVersion": 1.1, 41 | "position": [ 42 | 1840, 43 | 360 44 | ] 45 | }, 46 | { 47 | "parameters": { 48 | "fieldsToAggregate": { 49 | "fieldToAggregate": [ 50 | { 51 | "fieldToAggregate": "text" 52 | } 53 | ] 54 | 
}, 55 | "options": {} 56 | }, 57 | "id": "fdb91643-0f0f-4ad5-b333-2294029f0ae7", 58 | "name": "Aggregate", 59 | "type": "n8n-nodes-base.aggregate", 60 | "typeVersion": 1, 61 | "position": [ 62 | 1260, 63 | 360 64 | ] 65 | }, 66 | { 67 | "parameters": { 68 | "model": "gpt-4o-mini", 69 | "options": {} 70 | }, 71 | "id": "bf2c887e-bb34-44fc-89ea-fad409db0317", 72 | "name": "OpenAI Chat Model", 73 | "type": "@n8n/n8n-nodes-langchain.lmChatOpenAi", 74 | "typeVersion": 1, 75 | "position": [ 76 | 1480, 77 | 580 78 | ], 79 | "credentials": { 80 | "openAiApi": { 81 | "id": "JJjD91oisPv9cs01", 82 | "name": "OpenAi account" 83 | } 84 | } 85 | }, 86 | { 87 | "parameters": { 88 | "promptType": "define", 89 | "text": "=Summarize the following conversation, outputting just the summary and nothing else - no preabmle or explanation. \n\nKeep in mind the latest message is the first in the list: \n\n{{ $json.text }}" 90 | }, 91 | "id": "90984360-3628-494d-a077-08ec69fd88e5", 92 | "name": "Basic LLM Chain", 93 | "type": "@n8n/n8n-nodes-langchain.chainLlm", 94 | "typeVersion": 1.4, 95 | "position": [ 96 | 1480, 97 | 360 98 | ] 99 | }, 100 | { 101 | "parameters": { 102 | "path": "66a5755c-5030-4a6b-9b5e-2e09a21456d6", 103 | "authentication": "headerAuth", 104 | "responseMode": "responseNode", 105 | "options": {} 106 | }, 107 | "id": "930860ba-d339-4668-a13c-4e2277308161", 108 | "name": "Webhook", 109 | "type": "n8n-nodes-base.webhook", 110 | "typeVersion": 2, 111 | "position": [ 112 | 820, 113 | 360 114 | ], 115 | "webhookId": "66a5755c-5030-4a6b-9b5e-2e09a21456d6", 116 | "credentials": { 117 | "httpHeaderAuth": { 118 | "id": "PGr0hc0kn43Di1sz", 119 | "name": "testauth" 120 | } 121 | } 122 | } 123 | ], 124 | "pinData": {}, 125 | "connections": { 126 | "Slack": { 127 | "main": [ 128 | [ 129 | { 130 | "node": "Aggregate", 131 | "type": "main", 132 | "index": 0 133 | } 134 | ] 135 | ] 136 | }, 137 | "Aggregate": { 138 | "main": [ 139 | [ 140 | { 141 | "node": "Basic LLM Chain", 142 | 
"type": "main", 143 | "index": 0 144 | } 145 | ] 146 | ] 147 | }, 148 | "OpenAI Chat Model": { 149 | "ai_languageModel": [ 150 | [ 151 | { 152 | "node": "Basic LLM Chain", 153 | "type": "ai_languageModel", 154 | "index": 0 155 | } 156 | ] 157 | ] 158 | }, 159 | "Basic LLM Chain": { 160 | "main": [ 161 | [ 162 | { 163 | "node": "Respond to Webhook", 164 | "type": "main", 165 | "index": 0 166 | } 167 | ] 168 | ] 169 | }, 170 | "Webhook": { 171 | "main": [ 172 | [ 173 | { 174 | "node": "Slack", 175 | "type": "main", 176 | "index": 0 177 | } 178 | ] 179 | ] 180 | } 181 | }, 182 | "active": true, 183 | "settings": { 184 | "executionOrder": "v1" 185 | }, 186 | "versionId": "039f8fd7-6a15-4070-81ae-1f2851183c86", 187 | "meta": { 188 | "templateCredsSetupCompleted": true, 189 | "instanceId": "620f0d7e3114cb344761d7d45a21ef2a32096f91d8696e7057756042e1999e2c" 190 | }, 191 | "id": "vchuPxsgQU32o0v1", 192 | "tags": [] 193 | } -------------------------------------------------------------------------------- /n8n/backup/workflows/V1_Local_RAG_AI_Agent.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "V1 ocal RAG AI Agent", 3 | "nodes": [ 4 | { 5 | "parameters": {}, 6 | "id": "99b30fd7-b36c-44ba-9daa-408585aaaee9", 7 | "name": "Postgres Chat Memory", 8 | "type": "@n8n/n8n-nodes-langchain.memoryPostgresChat", 9 | "typeVersion": 1.1, 10 | "position": [ 11 | 1040, 12 | 560 13 | ], 14 | "credentials": { 15 | "postgres": { 16 | "id": "iN7fO2CgatVwq73z", 17 | "name": "Postgres account" 18 | } 19 | } 20 | }, 21 | { 22 | "parameters": { 23 | "model": "llama3.1:latest", 24 | "options": {} 25 | }, 26 | "id": "c7632a7c-2661-492e-bd6f-aab994818998", 27 | "name": "Ollama Chat Model", 28 | "type": "@n8n/n8n-nodes-langchain.lmChatOllama", 29 | "typeVersion": 1, 30 | "position": [ 31 | 920, 32 | 560 33 | ], 34 | "credentials": { 35 | "ollamaApi": { 36 | "id": "eOwAotC7AUgJlvHM", 37 | "name": "Ollama account" 38 | } 39 | } 40 | }, 41 | { 42 | 
"parameters": { 43 | "model": "llama3.1:latest", 44 | "options": {} 45 | }, 46 | "id": "73d773a4-5c72-4af3-a52d-144f0e417823", 47 | "name": "Ollama Model", 48 | "type": "@n8n/n8n-nodes-langchain.lmOllama", 49 | "typeVersion": 1, 50 | "position": [ 51 | 1960, 52 | 500 53 | ], 54 | "credentials": { 55 | "ollamaApi": { 56 | "id": "eOwAotC7AUgJlvHM", 57 | "name": "Ollama account" 58 | } 59 | } 60 | }, 61 | { 62 | "parameters": { 63 | "name": "documents", 64 | "topK": 3 65 | }, 66 | "id": "3f882fa7-c8ed-4531-b236-a34c16c55838", 67 | "name": "Vector Store Tool", 68 | "type": "@n8n/n8n-nodes-langchain.toolVectorStore", 69 | "typeVersion": 1, 70 | "position": [ 71 | 1740, 72 | 340 73 | ] 74 | }, 75 | { 76 | "parameters": { 77 | "model": "nomic-embed-text:latest" 78 | }, 79 | "id": "3a8e3fa0-3997-4bce-985c-975fb5ad4013", 80 | "name": "Embeddings Ollama", 81 | "type": "@n8n/n8n-nodes-langchain.embeddingsOllama", 82 | "typeVersion": 1, 83 | "position": [ 84 | 1840, 85 | 600 86 | ], 87 | "credentials": { 88 | "ollamaApi": { 89 | "id": "eOwAotC7AUgJlvHM", 90 | "name": "Ollama account" 91 | } 92 | } 93 | }, 94 | { 95 | "parameters": { 96 | "pollTimes": { 97 | "item": [ 98 | { 99 | "mode": "everyMinute" 100 | } 101 | ] 102 | }, 103 | "triggerOn": "specificFolder", 104 | "folderToWatch": { 105 | "__rl": true, 106 | "value": "1914m3M7kRzkd5RJqAfzRY9EBcJrKemZC", 107 | "mode": "list", 108 | "cachedResultName": "Meeting Notes", 109 | "cachedResultUrl": "https://drive.google.com/drive/folders/1914m3M7kRzkd5RJqAfzRY9EBcJrKemZC" 110 | }, 111 | "event": "fileCreated", 112 | "options": {} 113 | }, 114 | "id": "41fb71dd-236a-48bc-9761-5841d52ca1b3", 115 | "name": "File Created", 116 | "type": "n8n-nodes-base.googleDriveTrigger", 117 | "typeVersion": 1, 118 | "position": [ 119 | 600, 120 | 880 121 | ], 122 | "credentials": { 123 | "googleDriveOAuth2Api": { 124 | "id": "vzcL2pD7uQzqDpdK", 125 | "name": "Google Drive account" 126 | } 127 | } 128 | }, 129 | { 130 | "parameters": { 131 | 
"pollTimes": { 132 | "item": [ 133 | { 134 | "mode": "everyMinute" 135 | } 136 | ] 137 | }, 138 | "triggerOn": "specificFolder", 139 | "folderToWatch": { 140 | "__rl": true, 141 | "value": "1914m3M7kRzkd5RJqAfzRY9EBcJrKemZC", 142 | "mode": "list", 143 | "cachedResultName": "Meeting Notes", 144 | "cachedResultUrl": "https://drive.google.com/drive/folders/1914m3M7kRzkd5RJqAfzRY9EBcJrKemZC" 145 | }, 146 | "event": "fileUpdated", 147 | "options": {} 148 | }, 149 | "id": "7b904686-89ae-4722-9ce5-a9da1b13b1a1", 150 | "name": "File Updated", 151 | "type": "n8n-nodes-base.googleDriveTrigger", 152 | "typeVersion": 1, 153 | "position": [ 154 | 600, 155 | 1100 156 | ], 157 | "credentials": { 158 | "googleDriveOAuth2Api": { 159 | "id": "vzcL2pD7uQzqDpdK", 160 | "name": "Google Drive account" 161 | } 162 | } 163 | }, 164 | { 165 | "parameters": { 166 | "assignments": { 167 | "assignments": [ 168 | { 169 | "id": "10646eae-ae46-4327-a4dc-9987c2d76173", 170 | "name": "file_id", 171 | "value": "={{ $json.id }}", 172 | "type": "string" 173 | }, 174 | { 175 | "id": "dd0aa081-79e7-4714-8a67-1e898285554c", 176 | "name": "folder_id", 177 | "value": "={{ $json.parents[0] }}", 178 | "type": "string" 179 | } 180 | ] 181 | }, 182 | "options": {} 183 | }, 184 | "id": "87f8bbb0-92c5-4b25-be63-7a9d91fc46f8", 185 | "name": "Set File ID", 186 | "type": "n8n-nodes-base.set", 187 | "typeVersion": 3.4, 188 | "position": [ 189 | 860, 190 | 880 191 | ] 192 | }, 193 | { 194 | "parameters": { 195 | "operation": "download", 196 | "fileId": { 197 | "__rl": true, 198 | "value": "={{ $('Set File ID').item.json.file_id }}", 199 | "mode": "id" 200 | }, 201 | "options": { 202 | "googleFileConversion": { 203 | "conversion": { 204 | "docsToFormat": "text/plain" 205 | } 206 | } 207 | } 208 | }, 209 | "id": "9f1e08fb-4ef3-4c4d-9473-5a7a1608b8e3", 210 | "name": "Download File", 211 | "type": "n8n-nodes-base.googleDrive", 212 | "typeVersion": 3, 213 | "position": [ 214 | 1300, 215 | 880 216 | ], 217 | 
"executeOnce": true, 218 | "credentials": { 219 | "googleDriveOAuth2Api": { 220 | "id": "vzcL2pD7uQzqDpdK", 221 | "name": "Google Drive account" 222 | } 223 | } 224 | }, 225 | { 226 | "parameters": { 227 | "operation": "text", 228 | "options": {} 229 | }, 230 | "id": "7efee822-68ad-4fe2-a616-ba19fd127684", 231 | "name": "Extract Document Text", 232 | "type": "n8n-nodes-base.extractFromFile", 233 | "typeVersion": 1, 234 | "position": [ 235 | 1540, 236 | 880 237 | ], 238 | "alwaysOutputData": true 239 | }, 240 | { 241 | "parameters": { 242 | "options": { 243 | "metadata": { 244 | "metadataValues": [ 245 | { 246 | "name": "file_id", 247 | "value": "={{ $('Set File ID').item.json.file_id }}" 248 | }, 249 | { 250 | "name": "folder_id", 251 | "value": "={{ $('Set File ID').item.json.folder_id }}" 252 | } 253 | ] 254 | } 255 | } 256 | }, 257 | "id": "da4c8b29-4944-43c4-9df3-e380366c594a", 258 | "name": "Default Data Loader", 259 | "type": "@n8n/n8n-nodes-langchain.documentDefaultDataLoader", 260 | "typeVersion": 1, 261 | "position": [ 262 | 1860, 263 | 1100 264 | ] 265 | }, 266 | { 267 | "parameters": { 268 | "chunkSize": 100, 269 | "options": {} 270 | }, 271 | "id": "d11c39b9-3fa7-4d5d-838f-da0d258c67c5", 272 | "name": "Recursive Character Text Splitter", 273 | "type": "@n8n/n8n-nodes-langchain.textSplitterRecursiveCharacterTextSplitter", 274 | "typeVersion": 1, 275 | "position": [ 276 | 1860, 277 | 1320 278 | ] 279 | }, 280 | { 281 | "parameters": { 282 | "model": "nomic-embed-text:latest" 283 | }, 284 | "id": "8a04559c-dfe8-479f-8998-a2e9bc994a0a", 285 | "name": "Embeddings Ollama1", 286 | "type": "@n8n/n8n-nodes-langchain.embeddingsOllama", 287 | "typeVersion": 1, 288 | "position": [ 289 | 1700, 290 | 1100 291 | ], 292 | "credentials": { 293 | "ollamaApi": { 294 | "id": "eOwAotC7AUgJlvHM", 295 | "name": "Ollama account" 296 | } 297 | } 298 | }, 299 | { 300 | "parameters": { 301 | "content": "## Local RAG AI Agent with Chat Interface", 302 | "height": 
527.3027193303974, 303 | "width": 969.0343804425795 304 | }, 305 | "id": "a18773ae-1eb3-46b8-91cf-4184c66cf14f", 306 | "name": "Sticky Note2", 307 | "type": "n8n-nodes-base.stickyNote", 308 | "typeVersion": 1, 309 | "position": [ 310 | 560, 311 | 220 312 | ] 313 | }, 314 | { 315 | "parameters": { 316 | "content": "## Agent Tools for Local RAG", 317 | "height": 528.85546469693, 318 | "width": 583.4552380860637, 319 | "color": 4 320 | }, 321 | "id": "fa010a11-3dda-4bd5-b261-463a3a6b88d9", 322 | "name": "Sticky Note", 323 | "type": "n8n-nodes-base.stickyNote", 324 | "typeVersion": 1, 325 | "position": [ 326 | 1540, 327 | 220 328 | ] 329 | }, 330 | { 331 | "parameters": { 332 | "content": "## Workflow to Create Local Knowledgebase from Google Drive Folder", 333 | "height": 705.2695614889159, 334 | "width": 1568.9362829025763, 335 | "color": 5 336 | }, 337 | "id": "f29e6cc7-015e-47cb-a4fd-fecd6ffb0d24", 338 | "name": "Sticky Note1", 339 | "type": "n8n-nodes-base.stickyNote", 340 | "typeVersion": 1, 341 | "position": [ 342 | 560, 343 | 760 344 | ] 345 | }, 346 | { 347 | "parameters": { 348 | "options": {} 349 | }, 350 | "id": "5da52326-dfbd-4350-919c-843461f58913", 351 | "name": "When chat message received", 352 | "type": "@n8n/n8n-nodes-langchain.chatTrigger", 353 | "typeVersion": 1.1, 354 | "position": [ 355 | 620, 356 | 340 357 | ], 358 | "webhookId": "4b3b1838-d6b3-447e-9d79-d0931eddb9f8" 359 | }, 360 | { 361 | "parameters": { 362 | "qdrantCollection": { 363 | "__rl": true, 364 | "value": "documents", 365 | "mode": "list", 366 | "cachedResultName": "documents" 367 | }, 368 | "options": {} 369 | }, 370 | "id": "355370e0-2174-4e5b-830b-dd0f123b2e40", 371 | "name": "Qdrant Vector Store", 372 | "type": "@n8n/n8n-nodes-langchain.vectorStoreQdrant", 373 | "typeVersion": 1, 374 | "position": [ 375 | 1560, 376 | 480 377 | ], 378 | "credentials": { 379 | "qdrantApi": { 380 | "id": "VOnegFP8eijBkbNO", 381 | "name": "QdrantApi account" 382 | } 383 | } 384 | }, 385 | { 386 | 
"parameters": { 387 | "code": { 388 | "execute": { 389 | "code": "const { QdrantVectorStore } = require(\"@langchain/qdrant\");\nconst { OllamaEmbeddings } = require(\"@langchain/community/embeddings/ollama\");\n\nconst embeddings = new OllamaEmbeddings({\n model: \"nomic-embed-text\",\n baseUrl: \"http://ollama:11434\"\n});\n\nconst vectorStore = await QdrantVectorStore.fromExistingCollection(\n embeddings,\n {\n url: \"http://qdrant:6333\",\n collectionName: \"documents\",\n }\n);\n\nconst fileIdToDelete = this.getInputData()[0].json.file_id;\n\nconst filter = {\n must: [\n {\n key: \"metadata.file_id\",\n match: {\n value: fileIdToDelete,\n },\n },\n ],\n }\n\n// const results = await vectorStore.similaritySearch(\"this\", 10, filter);\n// const idsToDelete = results.map((doc) => doc.id);\n\n// NOT IMPLEMENTED!\n// await vectorStore.delete({ ids: idsToDelete });\n\nvectorStore.client.delete(\"documents\", {\n filter\n});\n\nreturn [ {json: { file_id: fileIdToDelete } } ];\n" 390 | } 391 | }, 392 | "inputs": { 393 | "input": [ 394 | { 395 | "type": "main", 396 | "required": true 397 | } 398 | ] 399 | }, 400 | "outputs": { 401 | "output": [ 402 | { 403 | "type": "main" 404 | } 405 | ] 406 | } 407 | }, 408 | "id": "b93bd001-0c4d-42fe-939a-eb441f354917", 409 | "name": "Clear Old Vectors", 410 | "type": "@n8n/n8n-nodes-langchain.code", 411 | "typeVersion": 1, 412 | "position": [ 413 | 1080, 414 | 880 415 | ], 416 | "alwaysOutputData": false 417 | }, 418 | { 419 | "parameters": { 420 | "mode": "insert", 421 | "qdrantCollection": { 422 | "__rl": true, 423 | "value": "documents", 424 | "mode": "list", 425 | "cachedResultName": "documents" 426 | }, 427 | "options": {} 428 | }, 429 | "id": "97ec4618-c0ea-445b-9406-5d41784d7836", 430 | "name": "Qdrant Vector Store Insert", 431 | "type": "@n8n/n8n-nodes-langchain.vectorStoreQdrant", 432 | "typeVersion": 1, 433 | "position": [ 434 | 1760, 435 | 880 436 | ], 437 | "credentials": { 438 | "qdrantApi": { 439 | "id": 
"VOnegFP8eijBkbNO", 440 | "name": "QdrantApi account" 441 | } 442 | } 443 | }, 444 | { 445 | "parameters": { 446 | "options": {} 447 | }, 448 | "id": "e537544a-37d5-4b00-b5ff-bc71f041f4bb", 449 | "name": "Respond to Webhook", 450 | "type": "n8n-nodes-base.respondToWebhook", 451 | "typeVersion": 1.1, 452 | "position": [ 453 | 1340, 454 | 340 455 | ] 456 | }, 457 | { 458 | "parameters": { 459 | "httpMethod": "POST", 460 | "path": "invoke_n8n_agent", 461 | "responseMode": "responseNode", 462 | "options": {} 463 | }, 464 | "id": "2b8cd01f-30a8-4aab-b0dd-56d2b658f059", 465 | "name": "Webhook", 466 | "type": "n8n-nodes-base.webhook", 467 | "typeVersion": 2, 468 | "position": [ 469 | 620, 470 | 520 471 | ], 472 | "webhookId": "4a839da9-b8a2-45f8-bcaf-c484f9a5912d" 473 | }, 474 | { 475 | "parameters": { 476 | "options": {} 477 | }, 478 | "id": "c9dfe906-178b-4375-8bda-f9290f35f222", 479 | "name": "AI Agent", 480 | "type": "@n8n/n8n-nodes-langchain.agent", 481 | "typeVersion": 1.6, 482 | "position": [ 483 | 1000, 484 | 340 485 | ] 486 | }, 487 | { 488 | "parameters": { 489 | "assignments": { 490 | "assignments": [ 491 | { 492 | "id": "75ebfdef-c8e2-4c3e-b716-1479d0cc2a73", 493 | "name": "chatInput", 494 | "value": "={{ $json?.chatInput || $json.body.chatInput }}", 495 | "type": "string" 496 | }, 497 | { 498 | "id": "59b7a20f-0626-4861-93e2-015d430c266e", 499 | "name": "sessionId", 500 | "value": "={{ $json?.sessionId || $json.body.sessionId}}", 501 | "type": "string" 502 | } 503 | ] 504 | }, 505 | "options": {} 506 | }, 507 | "id": "8f974a15-aa2f-4525-8278-ad58ad296076", 508 | "name": "Edit Fields", 509 | "type": "n8n-nodes-base.set", 510 | "typeVersion": 3.4, 511 | "position": [ 512 | 820, 513 | 340 514 | ] 515 | } 516 | ], 517 | "pinData": {}, 518 | "connections": { 519 | "Postgres Chat Memory": { 520 | "ai_memory": [ 521 | [ 522 | { 523 | "node": "AI Agent", 524 | "type": "ai_memory", 525 | "index": 0 526 | } 527 | ] 528 | ] 529 | }, 530 | "Ollama Chat Model": { 531 | 
"ai_languageModel": [ 532 | [ 533 | { 534 | "node": "AI Agent", 535 | "type": "ai_languageModel", 536 | "index": 0 537 | } 538 | ] 539 | ] 540 | }, 541 | "Ollama Model": { 542 | "ai_languageModel": [ 543 | [ 544 | { 545 | "node": "Vector Store Tool", 546 | "type": "ai_languageModel", 547 | "index": 0 548 | } 549 | ] 550 | ] 551 | }, 552 | "Embeddings Ollama": { 553 | "ai_embedding": [ 554 | [ 555 | { 556 | "node": "Qdrant Vector Store", 557 | "type": "ai_embedding", 558 | "index": 0 559 | } 560 | ] 561 | ] 562 | }, 563 | "File Created": { 564 | "main": [ 565 | [ 566 | { 567 | "node": "Set File ID", 568 | "type": "main", 569 | "index": 0 570 | } 571 | ] 572 | ] 573 | }, 574 | "File Updated": { 575 | "main": [ 576 | [ 577 | { 578 | "node": "Set File ID", 579 | "type": "main", 580 | "index": 0 581 | } 582 | ] 583 | ] 584 | }, 585 | "Set File ID": { 586 | "main": [ 587 | [ 588 | { 589 | "node": "Clear Old Vectors", 590 | "type": "main", 591 | "index": 0 592 | } 593 | ] 594 | ] 595 | }, 596 | "Download File": { 597 | "main": [ 598 | [ 599 | { 600 | "node": "Extract Document Text", 601 | "type": "main", 602 | "index": 0 603 | } 604 | ] 605 | ] 606 | }, 607 | "Extract Document Text": { 608 | "main": [ 609 | [ 610 | { 611 | "node": "Qdrant Vector Store Insert", 612 | "type": "main", 613 | "index": 0 614 | } 615 | ] 616 | ] 617 | }, 618 | "Default Data Loader": { 619 | "ai_document": [ 620 | [ 621 | { 622 | "node": "Qdrant Vector Store Insert", 623 | "type": "ai_document", 624 | "index": 0 625 | } 626 | ] 627 | ] 628 | }, 629 | "Recursive Character Text Splitter": { 630 | "ai_textSplitter": [ 631 | [ 632 | { 633 | "node": "Default Data Loader", 634 | "type": "ai_textSplitter", 635 | "index": 0 636 | } 637 | ] 638 | ] 639 | }, 640 | "Embeddings Ollama1": { 641 | "ai_embedding": [ 642 | [ 643 | { 644 | "node": "Qdrant Vector Store Insert", 645 | "type": "ai_embedding", 646 | "index": 0 647 | } 648 | ] 649 | ] 650 | }, 651 | "When chat message received": { 652 | "main": [ 653 
| [ 654 | { 655 | "node": "Edit Fields", 656 | "type": "main", 657 | "index": 0 658 | } 659 | ] 660 | ] 661 | }, 662 | "Qdrant Vector Store": { 663 | "ai_vectorStore": [ 664 | [ 665 | { 666 | "node": "Vector Store Tool", 667 | "type": "ai_vectorStore", 668 | "index": 0 669 | } 670 | ] 671 | ] 672 | }, 673 | "Clear Old Vectors": { 674 | "main": [ 675 | [ 676 | { 677 | "node": "Download File", 678 | "type": "main", 679 | "index": 0 680 | } 681 | ] 682 | ] 683 | }, 684 | "Webhook": { 685 | "main": [ 686 | [ 687 | { 688 | "node": "Edit Fields", 689 | "type": "main", 690 | "index": 0 691 | } 692 | ] 693 | ] 694 | }, 695 | "AI Agent": { 696 | "main": [ 697 | [ 698 | { 699 | "node": "Respond to Webhook", 700 | "type": "main", 701 | "index": 0 702 | } 703 | ] 704 | ] 705 | }, 706 | "Edit Fields": { 707 | "main": [ 708 | [ 709 | { 710 | "node": "AI Agent", 711 | "type": "main", 712 | "index": 0 713 | } 714 | ] 715 | ] 716 | }, 717 | "Vector Store Tool": { 718 | "ai_tool": [ 719 | [ 720 | { 721 | "node": "AI Agent", 722 | "type": "ai_tool", 723 | "index": 0 724 | } 725 | ] 726 | ] 727 | } 728 | }, 729 | "active": true, 730 | "settings": { 731 | "executionOrder": "v1" 732 | }, 733 | "versionId": "19f9691c-4682-4704-81f2-33fdec9d0be2", 734 | "meta": { 735 | "templateCredsSetupCompleted": true, 736 | "instanceId": "f722e3e1e81e942a38faa434ad0aee8699371bbff9f883b9d5c59a7c726605af" 737 | }, 738 | "id": "vTN9y2dLXqTiDfPT", 739 | "tags": [] 740 | } -------------------------------------------------------------------------------- /n8n/backup/workflows/V2_Local_Supabase_RAG_AI_Agent.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "V2 Supabase RAG AI Agent", 3 | "nodes": [ 4 | { 5 | "parameters": {}, 6 | "id": "3e70b57d-49fb-4cb0-8f9f-29d39adf6a65", 7 | "name": "Postgres Chat Memory", 8 | "type": "@n8n/n8n-nodes-langchain.memoryPostgresChat", 9 | "typeVersion": 1.1, 10 | "position": [ 11 | 480, 12 | 340 13 | ], 14 | 
"credentials": { 15 | "postgres": { 16 | "id": "UaTmh0frrACTMPxG", 17 | "name": "Postgres account" 18 | } 19 | } 20 | }, 21 | { 22 | "parameters": { 23 | "model": "qwen2.5:7b-instruct-q4_K_M", 24 | "options": {} 25 | }, 26 | "id": "8d61de27-45d8-4d10-97cc-3c36d224f865", 27 | "name": "Ollama Chat Model", 28 | "type": "@n8n/n8n-nodes-langchain.lmChatOllama", 29 | "typeVersion": 1, 30 | "position": [ 31 | 360, 32 | 340 33 | ], 34 | "credentials": { 35 | "ollamaApi": { 36 | "id": "GwjiKiEsG5HnTaAf", 37 | "name": "Ollama account" 38 | } 39 | } 40 | }, 41 | { 42 | "parameters": { 43 | "model": "qwen2.5:7b-instruct-q4_K_M", 44 | "options": {} 45 | }, 46 | "id": "cbd5b56f-7afc-4e83-a221-6be4d348374e", 47 | "name": "Ollama Model", 48 | "type": "@n8n/n8n-nodes-langchain.lmOllama", 49 | "typeVersion": 1, 50 | "position": [ 51 | 1400, 52 | 280 53 | ], 54 | "credentials": { 55 | "ollamaApi": { 56 | "id": "GwjiKiEsG5HnTaAf", 57 | "name": "Ollama account" 58 | } 59 | } 60 | }, 61 | { 62 | "parameters": { 63 | "name": "documents", 64 | "topK": 3 65 | }, 66 | "id": "c15bd0a8-286a-4076-8ce8-8c0e54f73e2a", 67 | "name": "Vector Store Tool", 68 | "type": "@n8n/n8n-nodes-langchain.toolVectorStore", 69 | "typeVersion": 1, 70 | "position": [ 71 | 1180, 72 | 100 73 | ] 74 | }, 75 | { 76 | "parameters": { 77 | "operation": "text", 78 | "destinationKey": "=data", 79 | "options": {} 80 | }, 81 | "id": "ab9fb44a-85c6-486b-b5de-0f9d768d91b2", 82 | "name": "Extract Document Text", 83 | "type": "n8n-nodes-base.extractFromFile", 84 | "typeVersion": 1, 85 | "position": [ 86 | 920, 87 | 640 88 | ], 89 | "alwaysOutputData": true 90 | }, 91 | { 92 | "parameters": { 93 | "options": { 94 | "metadata": { 95 | "metadataValues": [ 96 | { 97 | "name": "file_id", 98 | "value": "={{ $('Local File Trigger').item.json.path }}" 99 | } 100 | ] 101 | } 102 | } 103 | }, 104 | "id": "b5abf0a5-f62a-49a6-bf81-bf002ba4bb90", 105 | "name": "Default Data Loader", 106 | "type": 
"@n8n/n8n-nodes-langchain.documentDefaultDataLoader", 107 | "typeVersion": 1, 108 | "position": [ 109 | 1300, 110 | 880 111 | ] 112 | }, 113 | { 114 | "parameters": { 115 | "chunkSize": 100, 116 | "options": {} 117 | }, 118 | "id": "1e2791c1-86e7-4fe1-a76d-732f87e6f41a", 119 | "name": "Recursive Character Text Splitter", 120 | "type": "@n8n/n8n-nodes-langchain.textSplitterRecursiveCharacterTextSplitter", 121 | "typeVersion": 1, 122 | "position": [ 123 | 1300, 124 | 1100 125 | ] 126 | }, 127 | { 128 | "parameters": { 129 | "model": "nomic-embed-text:latest" 130 | }, 131 | "id": "909713ea-5be3-4916-b12e-a8e848c949cb", 132 | "name": "Embeddings Ollama1", 133 | "type": "@n8n/n8n-nodes-langchain.embeddingsOllama", 134 | "typeVersion": 1, 135 | "position": [ 136 | 1140, 137 | 880 138 | ], 139 | "credentials": { 140 | "ollamaApi": { 141 | "id": "GwjiKiEsG5HnTaAf", 142 | "name": "Ollama account" 143 | } 144 | } 145 | }, 146 | { 147 | "parameters": { 148 | "content": "## Local RAG AI Agent with Chat Interface", 149 | "height": 527.3027193303974, 150 | "width": 969.0343804425795 151 | }, 152 | "id": "de473f2f-b806-45bc-a0e0-2ef0d2aa9b55", 153 | "name": "Sticky Note2", 154 | "type": "n8n-nodes-base.stickyNote", 155 | "typeVersion": 1, 156 | "position": [ 157 | 0, 158 | 0 159 | ] 160 | }, 161 | { 162 | "parameters": { 163 | "content": "## Agent Tools for Local RAG", 164 | "height": 528.85546469693, 165 | "width": 583.4552380860637, 166 | "color": 4 167 | }, 168 | "id": "b23ef570-f642-4503-87b9-3494ffdbf768", 169 | "name": "Sticky Note", 170 | "type": "n8n-nodes-base.stickyNote", 171 | "typeVersion": 1, 172 | "position": [ 173 | 980, 174 | 0 175 | ] 176 | }, 177 | { 178 | "parameters": { 179 | "content": "## Workflow to Create Local Knowledgebase", 180 | "height": 705.2695614889159, 181 | "width": 1568.9362829025763, 182 | "color": 5 183 | }, 184 | "id": "836cd765-dae8-460e-951d-66e19d0cce77", 185 | "name": "Sticky Note1", 186 | "type": "n8n-nodes-base.stickyNote", 187 | 
"typeVersion": 1, 188 | "position": [ 189 | 0, 190 | 540 191 | ] 192 | }, 193 | { 194 | "parameters": { 195 | "options": {} 196 | }, 197 | "id": "f8b5039c-4b00-453a-b30e-31a59f5d36ad", 198 | "name": "When chat message received", 199 | "type": "@n8n/n8n-nodes-langchain.chatTrigger", 200 | "typeVersion": 1.1, 201 | "position": [ 202 | 60, 203 | 120 204 | ], 205 | "webhookId": "4b3b1838-d6b3-447e-9d79-d0931eddb9f8" 206 | }, 207 | { 208 | "parameters": { 209 | "options": {} 210 | }, 211 | "id": "2f64907b-42ef-4bd6-83ba-e97584271bc7", 212 | "name": "Respond to Webhook", 213 | "type": "n8n-nodes-base.respondToWebhook", 214 | "typeVersion": 1.1, 215 | "position": [ 216 | 780, 217 | 120 218 | ] 219 | }, 220 | { 221 | "parameters": { 222 | "httpMethod": "POST", 223 | "path": "invoke_n8n_agent", 224 | "responseMode": "responseNode", 225 | "options": {} 226 | }, 227 | "id": "9a30041f-0c14-41d8-a811-229b890bb1b7", 228 | "name": "Webhook", 229 | "type": "n8n-nodes-base.webhook", 230 | "typeVersion": 2, 231 | "position": [ 232 | 60, 233 | 300 234 | ], 235 | "webhookId": "4a839da9-b8a2-45f8-bcaf-c484f9a5912d" 236 | }, 237 | { 238 | "parameters": { 239 | "options": {} 240 | }, 241 | "id": "8311e22f-bddd-41f8-9d40-fde119126dc9", 242 | "name": "AI Agent", 243 | "type": "@n8n/n8n-nodes-langchain.agent", 244 | "typeVersion": 1.6, 245 | "position": [ 246 | 440, 247 | 120 248 | ] 249 | }, 250 | { 251 | "parameters": { 252 | "assignments": { 253 | "assignments": [ 254 | { 255 | "id": "75ebfdef-c8e2-4c3e-b716-1479d0cc2a73", 256 | "name": "chatInput", 257 | "value": "={{ $json?.chatInput || $json.body.chatInput }}", 258 | "type": "string" 259 | }, 260 | { 261 | "id": "59b7a20f-0626-4861-93e2-015d430c266e", 262 | "name": "sessionId", 263 | "value": "={{ $json?.sessionId || $json.body.sessionId}}", 264 | "type": "string" 265 | } 266 | ] 267 | }, 268 | "options": {} 269 | }, 270 | "id": "4988a14f-a2ea-4c4b-8f66-0f1773e11ea0", 271 | "name": "Edit Fields", 272 | "type": "n8n-nodes-base.set", 
273 | "typeVersion": 3.4, 274 | "position": [ 275 | 260, 276 | 120 277 | ] 278 | }, 279 | { 280 | "parameters": { 281 | "tableName": { 282 | "__rl": true, 283 | "value": "documents", 284 | "mode": "list", 285 | "cachedResultName": "documents" 286 | }, 287 | "options": { 288 | "queryName": "match_documents" 289 | } 290 | }, 291 | "type": "@n8n/n8n-nodes-langchain.vectorStoreSupabase", 292 | "typeVersion": 1, 293 | "position": [ 294 | 1000, 295 | 240 296 | ], 297 | "id": "9841944a-5a85-437d-a9ed-a3e7393d7a8d", 298 | "name": "Supabase Vector Store", 299 | "credentials": { 300 | "supabaseApi": { 301 | "id": "3tjCDwujGZ7BlK7R", 302 | "name": "Supabase account" 303 | } 304 | } 305 | }, 306 | { 307 | "parameters": { 308 | "model": "nomic-embed-text:latest" 309 | }, 310 | "type": "@n8n/n8n-nodes-langchain.embeddingsOllama", 311 | "typeVersion": 1, 312 | "position": [ 313 | 1180, 314 | 380 315 | ], 316 | "id": "24e6e703-33c2-426d-ab59-7c9cad0fded9", 317 | "name": "Embeddings Ollama2", 318 | "credentials": { 319 | "ollamaApi": { 320 | "id": "GwjiKiEsG5HnTaAf", 321 | "name": "Ollama account" 322 | } 323 | } 324 | }, 325 | { 326 | "parameters": { 327 | "mode": "insert", 328 | "tableName": { 329 | "__rl": true, 330 | "value": "documents", 331 | "mode": "list", 332 | "cachedResultName": "documents" 333 | }, 334 | "options": { 335 | "queryName": "match_documents" 336 | } 337 | }, 338 | "type": "@n8n/n8n-nodes-langchain.vectorStoreSupabase", 339 | "typeVersion": 1, 340 | "position": [ 341 | 1180, 342 | 640 343 | ], 344 | "id": "d72c6f38-7766-4a10-9897-362539a6bcc0", 345 | "name": "Supabase Vector Store1", 346 | "credentials": { 347 | "supabaseApi": { 348 | "id": "3tjCDwujGZ7BlK7R", 349 | "name": "Supabase account" 350 | } 351 | } 352 | }, 353 | { 354 | "parameters": { 355 | "operation": "delete", 356 | "tableId": "documents", 357 | "filterType": "string", 358 | "filterString": "=metadata->>file_id=like.*{{ $json.path }}*" 359 | }, 360 | "id": 
"ab0e0395-9f88-438f-bc47-ea3913b869fe", 361 | "name": "Delete Old Doc Rows", 362 | "type": "n8n-nodes-base.supabase", 363 | "typeVersion": 1, 364 | "position": [ 365 | 460, 366 | 840 367 | ], 368 | "alwaysOutputData": true, 369 | "credentials": { 370 | "supabaseApi": { 371 | "id": "3tjCDwujGZ7BlK7R", 372 | "name": "Supabase account" 373 | } 374 | } 375 | }, 376 | { 377 | "parameters": { 378 | "triggerOn": "folder", 379 | "path": "/data/shared", 380 | "events": [ 381 | "add", 382 | "change" 383 | ], 384 | "options": { 385 | "followSymlinks": true, 386 | "usePolling": true 387 | } 388 | }, 389 | "type": "n8n-nodes-base.localFileTrigger", 390 | "typeVersion": 1, 391 | "position": [ 392 | 60, 393 | 840 394 | ], 395 | "id": "eba68fe1-738d-451b-a2a7-9ee2942dd727", 396 | "name": "Local File Trigger" 397 | }, 398 | { 399 | "parameters": { 400 | "fileSelector": "={{ $('Local File Trigger').item.json.path }}", 401 | "options": { 402 | "dataPropertyName": "=data" 403 | } 404 | }, 405 | "type": "n8n-nodes-base.readWriteFile", 406 | "typeVersion": 1, 407 | "position": [ 408 | 640, 409 | 640 410 | ], 411 | "id": "b9459d4d-836c-47c4-9651-ebf3129f8864", 412 | "name": "Read/Write Files from Disk", 413 | "executeOnce": true 414 | }, 415 | { 416 | "parameters": { 417 | "conditions": { 418 | "options": { 419 | "caseSensitive": true, 420 | "leftValue": "", 421 | "typeValidation": "strict", 422 | "version": 2 423 | }, 424 | "conditions": [ 425 | { 426 | "id": "e051736f-949a-4230-bf32-c9ade2674b12", 427 | "leftValue": "={{ $json.event }}", 428 | "rightValue": "add", 429 | "operator": { 430 | "type": "string", 431 | "operation": "equals", 432 | "name": "filter.operator.equals" 433 | } 434 | } 435 | ], 436 | "combinator": "and" 437 | }, 438 | "options": {} 439 | }, 440 | "type": "n8n-nodes-base.if", 441 | "typeVersion": 2.2, 442 | "position": [ 443 | 280, 444 | 660 445 | ], 446 | "id": "27123d6a-e27a-49a9-bd73-8a27235928ea", 447 | "name": "If" 448 | } 449 | ], 450 | "pinData": {}, 451 | 
"connections": { 452 | "Postgres Chat Memory": { 453 | "ai_memory": [ 454 | [ 455 | { 456 | "node": "AI Agent", 457 | "type": "ai_memory", 458 | "index": 0 459 | } 460 | ] 461 | ] 462 | }, 463 | "Ollama Chat Model": { 464 | "ai_languageModel": [ 465 | [ 466 | { 467 | "node": "AI Agent", 468 | "type": "ai_languageModel", 469 | "index": 0 470 | } 471 | ] 472 | ] 473 | }, 474 | "Ollama Model": { 475 | "ai_languageModel": [ 476 | [ 477 | { 478 | "node": "Vector Store Tool", 479 | "type": "ai_languageModel", 480 | "index": 0 481 | } 482 | ] 483 | ] 484 | }, 485 | "Extract Document Text": { 486 | "main": [ 487 | [ 488 | { 489 | "node": "Supabase Vector Store1", 490 | "type": "main", 491 | "index": 0 492 | } 493 | ] 494 | ] 495 | }, 496 | "Default Data Loader": { 497 | "ai_document": [ 498 | [ 499 | { 500 | "node": "Supabase Vector Store1", 501 | "type": "ai_document", 502 | "index": 0 503 | } 504 | ] 505 | ] 506 | }, 507 | "Recursive Character Text Splitter": { 508 | "ai_textSplitter": [ 509 | [ 510 | { 511 | "node": "Default Data Loader", 512 | "type": "ai_textSplitter", 513 | "index": 0 514 | } 515 | ] 516 | ] 517 | }, 518 | "Embeddings Ollama1": { 519 | "ai_embedding": [ 520 | [ 521 | { 522 | "node": "Supabase Vector Store1", 523 | "type": "ai_embedding", 524 | "index": 0 525 | } 526 | ] 527 | ] 528 | }, 529 | "When chat message received": { 530 | "main": [ 531 | [ 532 | { 533 | "node": "Edit Fields", 534 | "type": "main", 535 | "index": 0 536 | } 537 | ] 538 | ] 539 | }, 540 | "Webhook": { 541 | "main": [ 542 | [ 543 | { 544 | "node": "Edit Fields", 545 | "type": "main", 546 | "index": 0 547 | } 548 | ] 549 | ] 550 | }, 551 | "AI Agent": { 552 | "main": [ 553 | [ 554 | { 555 | "node": "Respond to Webhook", 556 | "type": "main", 557 | "index": 0 558 | } 559 | ] 560 | ] 561 | }, 562 | "Edit Fields": { 563 | "main": [ 564 | [ 565 | { 566 | "node": "AI Agent", 567 | "type": "main", 568 | "index": 0 569 | } 570 | ] 571 | ] 572 | }, 573 | "Vector Store Tool": { 574 | 
"ai_tool": [ 575 | [ 576 | { 577 | "node": "AI Agent", 578 | "type": "ai_tool", 579 | "index": 0 580 | } 581 | ] 582 | ] 583 | }, 584 | "Supabase Vector Store": { 585 | "ai_vectorStore": [ 586 | [ 587 | { 588 | "node": "Vector Store Tool", 589 | "type": "ai_vectorStore", 590 | "index": 0 591 | } 592 | ] 593 | ] 594 | }, 595 | "Embeddings Ollama2": { 596 | "ai_embedding": [ 597 | [ 598 | { 599 | "node": "Supabase Vector Store", 600 | "type": "ai_embedding", 601 | "index": 0 602 | } 603 | ] 604 | ] 605 | }, 606 | "Delete Old Doc Rows": { 607 | "main": [ 608 | [ 609 | { 610 | "node": "Read/Write Files from Disk", 611 | "type": "main", 612 | "index": 0 613 | } 614 | ] 615 | ] 616 | }, 617 | "Local File Trigger": { 618 | "main": [ 619 | [ 620 | { 621 | "node": "If", 622 | "type": "main", 623 | "index": 0 624 | } 625 | ] 626 | ] 627 | }, 628 | "Read/Write Files from Disk": { 629 | "main": [ 630 | [ 631 | { 632 | "node": "Extract Document Text", 633 | "type": "main", 634 | "index": 0 635 | } 636 | ] 637 | ] 638 | }, 639 | "If": { 640 | "main": [ 641 | [ 642 | { 643 | "node": "Read/Write Files from Disk", 644 | "type": "main", 645 | "index": 0 646 | } 647 | ], 648 | [ 649 | { 650 | "node": "Delete Old Doc Rows", 651 | "type": "main", 652 | "index": 0 653 | } 654 | ] 655 | ] 656 | } 657 | }, 658 | "active": false, 659 | "settings": { 660 | "executionOrder": "v1" 661 | }, 662 | "versionId": "ea9ff68c-8fc0-40b0-aa5d-48217cda89f3", 663 | "meta": { 664 | "instanceId": "73cb7a3e883df514bb47e8d1b34526d30e2abb8f56cd99f10d5948a1e11b25aa" 665 | }, 666 | "id": "hrnPh6dXgIbGVzIk", 667 | "tags": [] 668 | } -------------------------------------------------------------------------------- /n8n_pipe.py: -------------------------------------------------------------------------------- 1 | """ 2 | title: n8n Pipe Function 3 | author: Cole Medin 4 | author_url: https://www.youtube.com/@ColeMedin 5 | version: 0.1.0 6 | 7 | This module defines a Pipe class that utilizes N8N for an Agent 8 | """ 
from typing import Optional, Callable, Awaitable
from pydantic import BaseModel, Field
import os
import time
import requests

def extract_event_info(event_emitter) -> tuple[Optional[str], Optional[str]]:
    """Best-effort extraction of (chat_id, message_id) from an event emitter.

    Open WebUI passes the emitter as a closure; one of its closure cells holds
    a request-info dict carrying the chat/message identifiers. Returns
    (None, None) when the emitter is absent or no such dict cell exists.
    """
    if not event_emitter or not event_emitter.__closure__:
        return None, None
    for cell in event_emitter.__closure__:
        if isinstance(request_info := cell.cell_contents, dict):
            chat_id = request_info.get("chat_id")
            message_id = request_info.get("message_id")
            return chat_id, message_id
    return None, None

class Pipe:
    """Open WebUI pipe that forwards the latest user message to an n8n webhook."""

    class Valves(BaseModel):
        # URL of the n8n webhook that triggers the agent workflow.
        n8n_url: str = Field(
            default="https://n8n.[your domain].com/webhook/[your webhook URL]"
        )
        # Bearer token the n8n webhook expects in its Authorization header.
        n8n_bearer_token: str = Field(default="...")
        # JSON field name used to send the user question to n8n.
        input_field: str = Field(default="chatInput")
        # JSON field name in n8n's response that holds the agent's answer.
        response_field: str = Field(default="output")
        emit_interval: float = Field(
            default=2.0, description="Interval in seconds between status emissions"
        )
        enable_status_indicator: bool = Field(
            default=True, description="Enable or disable status indicator emissions"
        )

    def __init__(self):
        self.type = "pipe"
        self.id = "n8n_pipe"
        self.name = "N8N Pipe"
        self.valves = self.Valves()
        # Timestamp of the last status emission; used to throttle emit_status.
        self.last_emit_time = 0

    async def emit_status(
        self,
        __event_emitter__: Callable[[dict], Awaitable[None]],
        level: str,
        message: str,
        done: bool,
    ):
        """Emit a throttled status event; `done=True` always gets through."""
        current_time = time.time()
        if (
            __event_emitter__
            and self.valves.enable_status_indicator
            and (
                current_time - self.last_emit_time >= self.valves.emit_interval or done
            )
        ):
            await __event_emitter__(
                {
                    "type": "status",
                    "data": {
                        "status": "complete" if done else "in_progress",
                        "level": level,
                        "description": message,
                        "done": done,
                    },
                }
            )
            self.last_emit_time = current_time

    async def pipe(
        self,
        body: dict,
        __user__: Optional[dict] = None,
        __event_emitter__: Callable[[dict], Awaitable[None]] = None,
        __event_call__: Callable[[dict], Awaitable[dict]] = None,
    ) -> Optional[dict]:
        """Send the last user message to the n8n webhook and append the reply.

        Returns the n8n response value on success, an {"error": ...} dict on
        failure, or None when the request body carried no messages.
        """
        await self.emit_status(
            __event_emitter__, "info", "/Calling N8N Workflow...", False
        )
        chat_id, _ = extract_event_info(__event_emitter__)
        messages = body.get("messages", [])
        # Initialized up front so the final `return n8n_response` can never hit
        # an unbound name (the original raised NameError on the empty-messages
        # path, which fell through to the return without ever assigning it).
        n8n_response = None

        # Verify a message is available
        if messages:
            question = messages[-1]["content"]
            try:
                # Invoke N8N workflow
                headers = {
                    "Authorization": f"Bearer {self.valves.n8n_bearer_token}",
                    "Content-Type": "application/json",
                }
                payload = {"sessionId": f"{chat_id}"}
                payload[self.valves.input_field] = question
                response = requests.post(
                    self.valves.n8n_url, json=payload, headers=headers
                )
                if response.status_code == 200:
                    n8n_response = response.json()[self.valves.response_field]
                else:
                    raise Exception(f"Error: {response.status_code} - {response.text}")

                # Set assistant message with chain reply
                body["messages"].append({"role": "assistant", "content": n8n_response})
            except Exception as e:
                await self.emit_status(
                    __event_emitter__,
                    "error",
                    f"Error during sequence execution: {str(e)}",
                    True,
                )
                return {"error": str(e)}
        # If no message is available alert user
        else:
            await self.emit_status(
                __event_emitter__,
                "error",
                "No messages found in the request body",
                True,
            )
            body["messages"].append(
                {
                    "role": "assistant",
                    "content": "No messages found in the request body",
                }
            )

        await self.emit_status(__event_emitter__, "info", "Complete", True)
        return n8n_response
-------------------------------------------------------------------------------- 1 | # see https://docs.searxng.org/admin/settings/settings.html#settings-use-default-settings 2 | use_default_settings: true 3 | server: 4 | # base_url is defined in the SEARXNG_BASE_URL environment variable, see .env and docker-compose.yml 5 | secret_key: "ultrasecretkey" # change this! 6 | limiter: false 7 | image_proxy: true 8 | ui: 9 | static_use_hash: true 10 | redis: 11 | url: redis://redis:6379/0 12 | search: 13 | formats: 14 | - html 15 | - json -------------------------------------------------------------------------------- /start_services.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | """ 3 | start_services.py 4 | 5 | This script starts the Supabase stack first, waits for it to initialize, and then starts 6 | the local AI stack. Both stacks use the same Docker Compose project name ("localai") 7 | so they appear together in Docker Desktop. 
"""
start_services.py

This script starts the Supabase stack first, waits for it to initialize, and then starts
the local AI stack. Both stacks use the same Docker Compose project name ("localai")
so they appear together in Docker Desktop.
"""

import os
import subprocess
import shutil
import time
import argparse
import platform
import sys

def run_command(cmd, cwd=None):
    """Run a shell command (echoed first); raises CalledProcessError on failure."""
    print("Running:", " ".join(cmd))
    subprocess.run(cmd, cwd=cwd, check=True)

def clone_supabase_repo():
    """Clone (or update) the Supabase repo, sparse-checked-out to its docker/ dir.

    Uses run_command's `cwd` parameter instead of os.chdir pairs so the
    process's working directory is never left inside supabase/ if a git step
    fails partway through.
    """
    if not os.path.exists("supabase"):
        print("Cloning the Supabase repository...")
        run_command([
            "git", "clone", "--filter=blob:none", "--no-checkout",
            "https://github.com/supabase/supabase.git"
        ])
        run_command(["git", "sparse-checkout", "init", "--cone"], cwd="supabase")
        run_command(["git", "sparse-checkout", "set", "docker"], cwd="supabase")
        run_command(["git", "checkout", "master"], cwd="supabase")
    else:
        print("Supabase repository already exists, updating...")
        run_command(["git", "pull"], cwd="supabase")

def prepare_supabase_env():
    """Copy the root .env into supabase/docker/.env so both stacks share config."""
    # Note: the source really is the root `.env` (already populated), not
    # `.env.example` — the user is expected to have created it beforehand.
    src_env_path = os.path.join(".env")
    dst_env_path = os.path.join("supabase", "docker", ".env")
    print("Copying .env in root to .env in supabase/docker...")
    shutil.copyfile(src_env_path, dst_env_path)

def stop_existing_containers():
    """Stop and remove existing containers for our unified project ('localai')."""
    print("Stopping and removing existing containers for the unified project 'localai'...")
    run_command([
        "docker", "compose",
        "-p", "localai",
        "-f", "docker-compose.yml",
        "-f", "supabase/docker/docker-compose.yml",
        "down"
    ])

def start_supabase():
    """Start the Supabase services (using its compose file)."""
    print("Starting Supabase services...")
    run_command([
        "docker", "compose", "-p", "localai", "-f", "supabase/docker/docker-compose.yml", "up", "-d"
    ])
def start_local_ai(profile=None):
    """Start the local AI services (using its compose file).

    profile: optional Docker Compose profile ('cpu', 'gpu-nvidia', 'gpu-amd');
    None or 'none' starts only the profile-less services.
    """
    print("Starting local AI services...")
    cmd = ["docker", "compose", "-p", "localai"]
    if profile and profile != "none":
        cmd.extend(["--profile", profile])
    cmd.extend(["-f", "docker-compose.yml", "up", "-d"])
    run_command(cmd)

def generate_searxng_secret_key():
    """Ensure searxng/settings.yml exists and replace its placeholder secret key.

    Generates the key with Python's `secrets` module and rewrites the file in
    Python instead of shelling out to platform-specific openssl/sed/PowerShell
    commands, so one code path works identically on Windows, macOS and Linux.
    """
    print("Checking SearXNG settings...")

    # Define paths for SearXNG settings files
    settings_path = os.path.join("searxng", "settings.yml")
    settings_base_path = os.path.join("searxng", "settings-base.yml")

    # Without the base template there is nothing to create or patch.
    if not os.path.exists(settings_base_path):
        print(f"Warning: SearXNG base settings file not found at {settings_base_path}")
        return

    # Create settings.yml from settings-base.yml on first use.
    if not os.path.exists(settings_path):
        print(f"SearXNG settings.yml not found. Creating from {settings_base_path}...")
        try:
            shutil.copyfile(settings_base_path, settings_path)
            print(f"Created {settings_path} from {settings_base_path}")
        except Exception as e:
            print(f"Error creating settings.yml: {e}")
            return
    else:
        print(f"SearXNG settings.yml already exists at {settings_path}")

    print("Generating SearXNG secret key...")
    try:
        # Local import: only needed here; keeps the top-of-file imports untouched.
        import secrets

        random_key = secrets.token_hex(32)  # same entropy as `openssl rand -hex 32`
        with open(settings_path, "r") as f:
            content = f.read()
        # Idempotent: if the placeholder was already replaced on a previous
        # run, the file is rewritten unchanged.
        with open(settings_path, "w") as f:
            f.write(content.replace("ultrasecretkey", random_key))

        print("SearXNG secret key generated successfully.")
    except Exception as e:
        print(f"Error generating SearXNG secret key: {e}")
        print("You may need to manually replace 'ultrasecretkey' in searxng/settings.yml, e.g.:")
        print("  - Linux: sed -i \"s|ultrasecretkey|$(openssl rand -hex 32)|g\" searxng/settings.yml")
        print("  - macOS: sed -i '' \"s|ultrasecretkey|$(openssl rand -hex 32)|g\" searxng/settings.yml")

def _is_searxng_first_run():
    """Return True when no initialized SearXNG container can be found.

    First-run detection: look for a running container whose name contains
    'searxng' and check whether /etc/searxng/uwsgi.ini exists inside it.
    Any Docker error is treated as "first run" (the permissive default).
    """
    try:
        ps = subprocess.run(
            ["docker", "ps", "--filter", "name=searxng", "--format", "{{.Names}}"],
            capture_output=True, text=True, check=True
        )
        containers = [c for c in ps.stdout.strip().split('\n') if c]
        if not containers:
            print("No running SearXNG container found - assuming first run")
            return True

        container_name = containers[0]
        print(f"Found running SearXNG container: {container_name}")

        probe = subprocess.run(
            ["docker", "exec", container_name, "sh", "-c",
             "[ -f /etc/searxng/uwsgi.ini ] && echo 'found' || echo 'not_found'"],
            capture_output=True, text=True, check=True
        )
        # Exact comparison: the original `"found" in stdout` check also matched
        # 'not_found' (substring), so first run could never be detected once a
        # container was up.
        if probe.stdout.strip() == "found":
            print("Found uwsgi.ini inside the SearXNG container - not first run")
            return False
        print("uwsgi.ini not found inside the SearXNG container - first run")
        return True
    except Exception as e:
        print(f"Error checking Docker container: {e} - assuming first run")
        return True

def check_and_fix_docker_compose_for_searxng():
    """Toggle SearXNG's 'cap_drop: - ALL' directive in docker-compose.yml.

    SearXNG needs extra capabilities on its very first start (to create
    /etc/searxng/uwsgi.ini); the hardening directive is commented out for that
    run and restored once the container has initialized.
    """
    docker_compose_path = "docker-compose.yml"
    if not os.path.exists(docker_compose_path):
        print(f"Warning: Docker Compose file not found at {docker_compose_path}")
        return

    try:
        with open(docker_compose_path, 'r') as file:
            content = file.read()

        is_first_run = _is_searxng_first_run()

        # NOTE(review): this matches the single-line form "cap_drop: - ALL";
        # confirm docker-compose.yml uses that flow style rather than a
        # two-line block list, or the toggle never fires.
        disabled_marker = "# cap_drop: - ALL # Temporarily commented out for first run"

        # The `disabled_marker not in content` guard prevents re-commenting an
        # already-commented line (the marker itself contains the plain
        # directive as a substring, which fooled the original check).
        if is_first_run and disabled_marker not in content and "cap_drop: - ALL" in content:
            print("First run detected for SearXNG. Temporarily removing 'cap_drop: - ALL' directive...")
            modified_content = content.replace("cap_drop: - ALL", disabled_marker)
            with open(docker_compose_path, 'w') as file:
                file.write(modified_content)
            print("Note: After the first run completes successfully, you should re-add 'cap_drop: - ALL' to docker-compose.yml for security reasons.")
        elif not is_first_run and disabled_marker in content:
            print("SearXNG has been initialized. Re-enabling 'cap_drop: - ALL' directive for security...")
            modified_content = content.replace(disabled_marker, "cap_drop: - ALL")
            with open(docker_compose_path, 'w') as file:
                file.write(modified_content)

    except Exception as e:
        print(f"Error checking/modifying docker-compose.yml for SearXNG: {e}")

def main():
    """Parse CLI args, prepare Supabase and SearXNG, then bring up both stacks."""
    parser = argparse.ArgumentParser(description='Start the local AI and Supabase services.')
    parser.add_argument('--profile', choices=['cpu', 'gpu-nvidia', 'gpu-amd', 'none'], default='cpu',
                        help='Profile to use for Docker Compose (default: cpu)')
    args = parser.parse_args()

    clone_supabase_repo()
    prepare_supabase_env()

    # Generate SearXNG secret key and check docker-compose.yml
    generate_searxng_secret_key()
    check_and_fix_docker_compose_for_searxng()

    stop_existing_containers()

    # Start Supabase first
    start_supabase()

    # Give Supabase some time to initialize before starting dependents
    print("Waiting for Supabase to initialize...")
    time.sleep(10)

    # Then start the local AI services
    start_local_ai(args.profile)

if __name__ == "__main__":
    main()