├── .gitignore
├── poetry.lock
├── pyproject.toml
├── readme.md
├── setup-poetry.sh
├── setup.sh
├── slackgpt.py
└── start.sh
/.gitignore:
--------------------------------------------------------------------------------
*.env
--------------------------------------------------------------------------------
/poetry.lock:
--------------------------------------------------------------------------------
[[package]]
name = "aiohttp"
version = "3.8.4"
description = "Async http client/server framework (asyncio)"
category = "main"
optional = false
python-versions = ">=3.6"

[package.dependencies]
aiosignal = ">=1.1.2"
async-timeout = ">=4.0.0a3,<5.0"
attrs = ">=17.3.0"
charset-normalizer = ">=2.0,<4.0"
frozenlist = ">=1.1.1"
multidict = ">=4.5,<7.0"
yarl = ">=1.0,<2.0"

[package.extras]
speedups = ["aiodns", "brotli", "cchardet"]

[[package]]
name = "aiosignal"
version = "1.3.1"
description = "aiosignal: a list of registered asynchronous callbacks"
category = "main"
optional = false
python-versions = ">=3.7"

[package.dependencies]
frozenlist = ">=1.1.0"

[[package]]
name = "async-timeout"
version = "4.0.2"
description = "Timeout context manager for asyncio programs"
category = "main"
optional = false
python-versions = ">=3.6"

[[package]]
name = "attrs"
version = "22.2.0"
description = "Classes Without Boilerplate"
category = "main"
optional = false
python-versions = ">=3.6"

[package.extras]
cov = ["attrs", "coverage-enable-subprocess", "coverage[toml] (>=5.3)"]
dev = ["attrs"]
docs = ["furo", "sphinx", "myst-parser", "zope.interface", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier"]
tests = ["attrs", "zope.interface"]
tests-no-zope = ["hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist", "cloudpickle", "mypy (>=0.971,<0.990)", "pytest-mypy-plugins"]
tests_no_zope = ["hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist", "cloudpickle", "mypy (>=0.971,<0.990)", "pytest-mypy-plugins"]

[[package]]
name = "certifi"
version = "2022.12.7"
description = "Python package for providing Mozilla's CA Bundle."
category = "main"
optional = false
python-versions = ">=3.6"

[[package]]
name = "charset-normalizer"
version = "3.1.0"
description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
category = "main"
optional = false
python-versions = ">=3.7.0"

[[package]]
name = "colorama"
version = "0.4.6"
description = "Cross-platform colored terminal text."
category = "main"
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"

[[package]]
name = "frozenlist"
version = "1.3.3"
description = "A list-like structure which implements collections.abc.MutableSequence"
category = "main"
optional = false
python-versions = ">=3.7"

[[package]]
name = "idna"
version = "3.4"
description = "Internationalized Domain Names in Applications (IDNA)"
category = "main"
optional = false
python-versions = ">=3.5"

[[package]]
name = "multidict"
version = "6.0.4"
description = "multidict implementation"
category = "main"
optional = false
python-versions = ">=3.7"

[[package]]
name = "openai"
version = "0.27.2"
description = "Python client library for the OpenAI API"
category = "main"
optional = false
python-versions = ">=3.7.1"

[package.dependencies]
aiohttp = "*"
requests = ">=2.20"
tqdm = "*"

[package.extras]
datalib = ["numpy", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)", "openpyxl (>=3.0.7)"]
dev = ["black (>=21.6b0,<22.0.0)", "pytest (>=6.0.0,<7.0.0)", "pytest-asyncio", "pytest-mock"]
embeddings = ["scikit-learn (>=1.0.2)", "tenacity (>=8.0.1)", "matplotlib", "plotly", "numpy", "scipy", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)", "openpyxl (>=3.0.7)"]
wandb = ["wandb", "numpy", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)", "openpyxl (>=3.0.7)"]

[[package]]
name = "python-dotenv"
version = "1.0.0"
description = "Read key-value pairs from a .env file and set them as environment variables"
category = "main"
optional = false
python-versions = ">=3.8"

[package.extras]
cli = ["click (>=5.0)"]

[[package]]
name = "requests"
version = "2.28.2"
description = "Python HTTP for Humans."
category = "main"
optional = false
python-versions = ">=3.7, <4"

[package.dependencies]
certifi = ">=2017.4.17"
charset-normalizer = ">=2,<4"
idna = ">=2.5,<4"
urllib3 = ">=1.21.1,<1.27"

[package.extras]
socks = ["PySocks (>=1.5.6,!=1.5.7)"]
use_chardet_on_py3 = ["chardet (>=3.0.2,<6)"]

[[package]]
name = "slack-bolt"
version = "1.16.4"
description = "The Bolt Framework for Python"
category = "main"
optional = false
python-versions = ">=3.6"

[package.dependencies]
slack-sdk = ">=3.20.2,<4"

[package.extras]
adapter = ["boto3 (<=2)", "bottle (>=0.12,<1)", "chalice (<=1.27.3)", "CherryPy (>=18,<19)", "Django (>=3,<5)", "falcon (>=2,<4)", "fastapi (>=0.70.0,<1)", "Flask (>=1,<3)", "Werkzeug (>=2,<3)", "pyramid (>=1,<3)", "sanic (>=22,<23)", "starlette (>=0.14,<1)", "tornado (>=6,<7)", "uvicorn (<1)", "gunicorn (>=20,<21)", "websocket-client (>=1.2.3,<2)"]
adapter_testing = ["moto (>=3,<4)", "docker (>=5,<6)", "boddle (>=0.2,<0.3)", "Flask (>=1,<2)", "Werkzeug (>=1,<2)", "sanic-testing (>=0.7)", "requests (>=2,<3)"]
async = ["aiohttp (>=3,<4)", "websockets (>=10,<11)"]
testing = ["pytest (>=6.2.5,<7)", "pytest-cov (>=3,<4)", "Flask-Sockets (>=0.2,<1)", "Werkzeug (>=1,<2)", "itsdangerous (==2.0.1)", "Jinja2 (==3.0.3)", "black (==22.8.0)", "click (<=8.0.4)", "pytest-asyncio (>=0.18.2,<1)", "aiohttp (>=3,<4)"]
testing_without_asyncio = ["pytest (>=6.2.5,<7)", "pytest-cov (>=3,<4)", "Flask-Sockets (>=0.2,<1)", "Werkzeug (>=1,<2)", "itsdangerous (==2.0.1)", "Jinja2 (==3.0.3)", "black (==22.8.0)", "click (<=8.0.4)"]

[[package]]
name = "slack-sdk"
version = "3.20.2"
description = "The Slack API Platform SDK for Python"
category = "main"
optional = false
python-versions = ">=3.6.0"

[package.extras]
optional = ["aiodns (>1.0)", "aiohttp (>=3.7.3,<4)", "boto3 (<=2)", "SQLAlchemy (>=1.4,<3)", "websockets (>=10,<11)", "websocket-client (>=1,<2)"]
testing = ["pytest (>=6.2.5,<7)", "pytest-asyncio (<1)", "Flask-Sockets (>=0.2,<1)", "Flask (>=1,<2)", "Werkzeug (<2)", "itsdangerous (==1.1.0)", "Jinja2 (==3.0.3)", "pytest-cov (>=2,<3)", "codecov (>=2,<3)", "flake8 (>=5,<6)", "black (==22.8.0)", "click (==8.0.4)", "psutil (>=5,<6)", "databases (>=0.5)", "boto3 (<=2)", "moto (>=3,<4)"]

[[package]]
name = "tqdm"
version = "4.65.0"
description = "Fast, Extensible Progress Meter"
category = "main"
optional = false
python-versions = ">=3.7"

[package.dependencies]
colorama = {version = "*", markers = "platform_system == \"Windows\""}

[package.extras]
dev = ["py-make (>=0.1.0)", "twine", "wheel"]
notebook = ["ipywidgets (>=6)"]
slack = ["slack-sdk"]
telegram = ["requests"]

[[package]]
name = "urllib3"
version = "1.26.15"
description = "HTTP library with thread-safe connection pooling, file post, and more."
category = "main"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*"

[package.extras]
brotli = ["brotlicffi (>=0.8.0)", "brotli (>=1.0.9)", "brotlipy (>=0.6.0)"]
secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "urllib3-secure-extra", "ipaddress"]
socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"]

[[package]]
name = "yarl"
version = "1.8.2"
description = "Yet another URL library"
category = "main"
optional = false
python-versions = ">=3.7"

[package.dependencies]
idna = ">=2.0"
multidict = ">=4.0"

[metadata]
lock-version = "1.1"
python-versions = "^3.9"
content-hash = "45f5fc434ff44d22827f234617eb420dc7df38d0dcf9aeb01d141d090c2b8a5a"

[metadata.files]
aiohttp = []
aiosignal = []
async-timeout = []
attrs = []
certifi = []
charset-normalizer = []
colorama = []
frozenlist = []
idna = []
multidict = []
openai = []
python-dotenv = []
requests = []
slack-bolt = []
slack-sdk = []
tqdm = []
urllib3 = []
yarl = []
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
[tool.poetry]
name = "slack-gpt"
version = "0.1.0"
description = ""
authors = ["Aaron Ng "]

[tool.poetry.dependencies]
python = "^3.9"
slack-bolt = "^1.16.4"
slack-sdk = "^3.20.2"
openai = "^0.27.2"
python-dotenv = "^1.0.0"

[tool.poetry.dev-dependencies]

[build-system]
requires = ["poetry-core>=1.0.0"]
build-backend = "poetry.core.masonry.api"
--------------------------------------------------------------------------------
/readme.md:
--------------------------------------------------------------------------------
# Slack ChatGPT Bot

_This document was written by ChatGPT and directed by Aaron Ng ([@localghost](https://twitter.com/localghost))._

## Introduction

This script creates a Slack bot that uses ChatGPT to respond to direct messages and mentions in a Slack workspace. It functions as a general question-answering bot for your company.

## Environment Variables

### Required:

1. `OPENAI_API_KEY`: Your OpenAI API key, which starts with "sk-".
2. `SLACK_APP_TOKEN`: Your Slack App Token, which starts with "xapp-".
3. `SLACK_BOT_TOKEN`: Your Slack Bot Token, which starts with "xoxb-".

### Optional:

1. `MODEL`: The OpenAI model to use. Can be "gpt-3.5-turbo" or "gpt-4". Default is "gpt-3.5-turbo".
2. `PROMPT`: A custom prompt for the bot. Default is a predefined prompt for a friendly company assistant.
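
For local development with `python-dotenv` (see "Local Deployment" below), these variables can live in a `.env` file next to `slackgpt.py`; the repo's `.gitignore` already excludes `*.env`. A minimal example, with placeholder values:

```
OPENAI_API_KEY=sk-...
SLACK_APP_TOKEN=xapp-...
SLACK_BOT_TOKEN=xoxb-...
MODEL=gpt-3.5-turbo
```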

## Setup

1. Go to [https://api.slack.com/apps?new_app=1](https://api.slack.com/apps?new_app=1).
2. Click "Create New App".
3. Click "Basic", then name your Slack bot and select a workspace.

### Configuration

1. In "Settings" → "Socket Mode", enable both Socket Mode and Event Subscriptions.
2. In "Settings" → "Basic Information", install your app to the workspace by following the instructions.
3. In "Settings" → "Basic Information", scroll to "App-Level Tokens" and create one with the permission `connections:write`. Set the resulting token that starts with `xapp-` as your `SLACK_APP_TOKEN`.
4. In "Features" → "OAuth & Permissions", copy the "Bot User OAuth Token" and set it as the `SLACK_BOT_TOKEN` in your environment.
5. In "Features" → "OAuth & Permissions" → "Scopes", add the following bot token scopes: `app_mentions:read`, `channels:history`, `channels:read`, `chat:write`, `chat:write.public`, `groups:history`, `groups:read`, `im:history`, `im:read`, `mpim:history`, `mpim:read`, `users:read`.
6. In "Features" → "Event Subscriptions" → "Subscribe to Bot Events", add the following bot user events: `app_mention`, `message.im`.
7. In "Features" → "App Home", turn on the "Messages Tab" switch, and enable the `Allow users to send Slash commands and messages from the messages tab` option.

Now your Slack bot should be ready to use!

## Deployment

### Cloud Deployment:

1. If deploying to a cloud service, review and adapt `setup.sh` and `start.sh` (they were written for Render).

### Local Deployment:

1. If running locally, install dependencies with `poetry`.
2. Uncomment these two lines in the script so that your `.env` file is loaded:

```
# from dotenv import load_dotenv
# load_dotenv()
```

Start the bot and enjoy using it in your Slack workspace.
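
For reference, assuming Poetry itself is already installed, installing the dependencies and starting the bot locally comes down to:

```
poetry install
poetry run python slackgpt.py
```

This mirrors what `setup.sh` and `start.sh` do in the cloud setup.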
--------------------------------------------------------------------------------
/setup-poetry.sh:
--------------------------------------------------------------------------------
# Sets up using `poetry` on Render
# `source`d in all Render scripts

set -euxo pipefail

# Use pipx to install Poetry to avoid Render's outdated Poetry install

poetry() {
    # Uses `pipx run` instead of `pipx install` to avoid ~/.local/bin
    # - Increases stability as fewer directories are involved that could be affected
    #   by Render's environment
    # - Avoids having to add `~/.local/bin` to PATH which could have side effects if
    #   Render has/adds binaries to ~/.local/bin
    # - Ensures we're always using our own Poetry install
    # Assumes Render allows writing to `~/.local/pipx/`
    pipx run poetry "$@"
}

install-poetry() {
    pip install -U pip
    # Assumes `pip install` location is in PATH
    pip install -U pipx
    # Install and smoke-test Poetry
    # This keeps `poetry`'s performance consistent and forces the slow initial run
    # to happen during setup
    poetry --version
}
install-poetry
--------------------------------------------------------------------------------
/setup.sh:
--------------------------------------------------------------------------------
set -euxo pipefail

source setup-poetry.sh

poetry install --only main
--------------------------------------------------------------------------------
/slackgpt.py:
--------------------------------------------------------------------------------
# -----------------------------------------------------------------------------
# Directed by Aaron Ng (@localghost)
# Co-authored by ChatGPT powered by GPT-4
# -----------------------------------------------------------------------------

import openai
import os
import time
from slack_bolt import App
from slack_bolt.adapter.socket_mode import SocketModeHandler
from slack_sdk import WebClient

# Local dev only.
# from dotenv import load_dotenv
# load_dotenv()

# Configuration
DEFAULT_PROMPT = (
    "You are a friendly assistant for a company that can answer general questions "
    "about business, marketing, and programming. Your goal is to help the people in "
    "the company with any questions they might have. If you aren't sure about "
    "something or something seems inappropriate, you should say that you don't know."
)
# The OpenAI model to use. Can be gpt-3.5-turbo or gpt-4.
MODEL = os.environ.get("MODEL", "gpt-3.5-turbo")
# The approximate context budget (in tokens) for a request to OpenAI.
MAX_TOKENS = 8000 if MODEL == "gpt-4" else 4096
# The max length of a response from OpenAI.
MAX_RESPONSE_TOKENS = 1000
# Starts with "sk-", used for connecting to OpenAI.
OPENAI_API_KEY = os.environ["OPENAI_API_KEY"]
# Starts with "xapp-", used for connecting to Slack.
SLACK_APP_TOKEN = os.environ["SLACK_APP_TOKEN"]
# Starts with "xoxb-", used for connecting to Slack.
SLACK_BOT_TOKEN = os.environ["SLACK_BOT_TOKEN"]
# Tokens are ~4 characters each, but this script doesn't account for that yet
# (TOKEN_MULTIPLIER is currently unused).
TOKEN_MULTIPLIER = 4

# Initialize the Slack Bolt App and Slack Web Client
app = App()
slack_client = WebClient(token=SLACK_BOT_TOKEN)

# Set up the default prompt and OpenAI API
prompt = os.environ.get("PROMPT", DEFAULT_PROMPT)
openai.api_key = OPENAI_API_KEY


def generate_completion(prompt, messages):
    """Generate a completion using OpenAI API."""
    response = openai.ChatCompletion.create(
        model=MODEL,
        messages=[{"role": "system", "content": prompt}] + messages,
        max_tokens=MAX_RESPONSE_TOKENS,
        n=1,
        stop=None,
        temperature=0.7,
    )

    completion = response.choices[0].message["content"].strip()
    return completion


def get_message_history(channel_id, user_id, event_ts, limit, thread=False):
    """Fetch conversation or thread history and build a list of messages."""
    history = []

    # Fetch the message history
    if thread:
        result = slack_client.conversations_replies(
            channel=channel_id, ts=event_ts, limit=limit, latest=int(time.time())
        )
    else:
        result = slack_client.conversations_history(
            channel=channel_id, limit=limit
        )

    # "Tokens" are approximated by character count here.
    token_count = 0
    for message in result["messages"]:
        if message.get("user") == user_id:
            role = "user"
        elif message.get("subtype") == "bot_message" or message.get("bot_id"):
            role = "assistant"
        else:
            continue

        token_count += len(message["text"])
        if token_count > (MAX_TOKENS - MAX_RESPONSE_TOKENS):
            break
        else:
            history.append({"role": role, "content": message["text"]})

    # DMs are in reverse order while threads are not.
    if not thread:
        history.reverse()

    return history


def handle_message(event, thread=False):
    """Handle a direct message or mention."""
    channel_id = event["channel"]
    user_id = event["user"]
    event_ts = event["ts"]

    # Set up the payload for the "Typing a response..." message
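    # Posting this placeholder right away gives the user immediate feedback while
    # the OpenAI request runs; chat_update below swaps in the real answer.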
    payload = {"channel": channel_id, "text": "Typing a response..."}

    if thread:
        # Use the thread_ts as the event_ts when in a thread
        event_ts = event.get("thread_ts", event["ts"])
        payload["thread_ts"] = event_ts

    # Get message history
    history = get_message_history(
        channel_id, user_id, event_ts, limit=25, thread=thread
    )

    # Send "Typing a response..." message
    typing_message = slack_client.chat_postMessage(**payload)

    # Generate the completion
    try:
        completion_message = generate_completion(prompt, history)
    except Exception:
        completion_message = (
            "The call to OpenAI or another external service failed. Please try again later."
        )

    # Replace "Typing a response..." with the actual response
    slack_client.chat_update(
        channel=channel_id, ts=typing_message["ts"], text=completion_message
    )


@app.event("app_mention")
def mention_handler(body, say):
    """Handle app mention events."""
    event = body["event"]
    handle_message(event, thread=True)


@app.event("message")
def direct_message_handler(body, say):
    """Handle direct message events."""
    event = body["event"]
    if event.get("subtype") == "bot_message" or event.get("bot_id"):
        return
    handle_message(event)


if __name__ == "__main__":
    handler = SocketModeHandler(app, SLACK_APP_TOKEN)
    handler.start()
--------------------------------------------------------------------------------
/start.sh:
--------------------------------------------------------------------------------
#!/usr/bin/env bash

set -euxo pipefail

source setup-poetry.sh

poetry run python slackgpt.py
--------------------------------------------------------------------------------