├── .dockerignore
├── .env
├── .flake8
├── .github
│   ├── dependabot.yml
│   └── workflows
│       ├── automerge.yml
│       └── ci.yml
├── .gitignore
├── Dockerfile
├── LICENSE
├── Makefile
├── README.md
├── app
│   ├── __init__.py
│   ├── main.py
│   ├── services.py
│   ├── views.py
│   └── workers.py
├── art
│   ├── api_descr.png
│   ├── api_docs.png
│   ├── api_request.png
│   ├── api_request_2.png
│   ├── api_response.png
│   ├── api_response_2.png
│   ├── app_logs.png
│   ├── rq_monitor.png
│   ├── rq_monitor_2.png
│   ├── rq_monitor_3.png
│   ├── topology.png
│   ├── webhook_concept.png
│   ├── webhook_site.png
│   └── worker_logs.png
├── config.py
├── docker-compose-ci.yml
├── docker-compose.yml
├── examples
│   ├── send_webhook.py
│   └── send_webhook.sh
├── gh-md-toc
├── pyproject.toml
├── requirements-dev.in
├── requirements-dev.txt
├── requirements.in
├── requirements.txt
└── tests
    ├── __init__.py
    ├── test_integration.py
    ├── test_logging.py
    ├── test_main.py
    ├── test_services.py
    └── test_views.py
/.dockerignore:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/rednafi/hook-slinger/ec8f6c95e243fb1fc1eaca214cf5407cb33d9b98/.dockerignore
--------------------------------------------------------------------------------
/.env:
--------------------------------------------------------------------------------
1 | # Sample env.
2 |
3 | # Webhook post timeout in seconds.
4 | HTTP_TIMEOUT="30"
5 |
6 | # Redis.
7 | REDIS_URL="redis://redis:6380/1"
8 |
9 | # API token, SHA-256 key.
10 | API_TOKEN='$5$1O/inyTZhNvFt.GW$Zfckz9OL.lm2wh3IewTm8YJ914wjz5txFnXG5XW.wb4'
11 |
12 | # Retry parameters.
13 | MAX_RETRIES="3"
14 | INTERVAL="5"
15 |
16 | # Message queue configs.
17 | QUEUE_NAME="webhook_queue"
18 | WORKER_NAME_PREFIX="webhook_queue_consumer"
19 |
20 | # Log.
21 | LOG_LEVEL="INFO"
22 |
--------------------------------------------------------------------------------
/.flake8:
--------------------------------------------------------------------------------
1 | [flake8]
2 | extend-exclude =
3 | .git,
4 | __pycache__,
5 | docs/source/conf.py,
6 | old,
7 | build,
8 | dist,
9 | .venv,
10 | venv
11 |
12 | extend-ignore = E203, E266, E501, W605, W503
13 |
14 | # Black's default line length.
15 | max-line-length = 88
16 |
17 | max-complexity = 18
18 |
19 | # Specify the list of error codes you wish Flake8 to report.
20 | select = B,C,E,F,W,T4,B9
21 |
22 | # Parallelism
23 | jobs = 4
24 |
--------------------------------------------------------------------------------
/.github/dependabot.yml:
--------------------------------------------------------------------------------
1 | version: 2
2 | updates:
3 | - package-ecosystem: "pip" # See documentation for possible values
4 | directory: "/" # Location of package manifests
5 | schedule:
6 | interval: "monthly"
7 |
8 | # Maintain dependencies for GitHub Actions.
9 | - package-ecosystem: "github-actions"
10 | directory: "/"
11 | schedule:
12 | interval: "monthly"
13 |
--------------------------------------------------------------------------------
/.github/workflows/automerge.yml:
--------------------------------------------------------------------------------
1 | # .github/workflows/automerge.yml
2 |
3 | name: Dependabot auto-merge
4 |
5 | on: pull_request
6 |
7 | permissions:
8 | contents: write
9 |
10 | jobs:
11 | dependabot:
12 | runs-on: ubuntu-latest
13 | if: ${{ github.actor == 'dependabot[bot]' }}
14 | steps:
15 | - name: Enable auto-merge for Dependabot PRs
16 | run: gh pr merge --auto --merge "$PR_URL"
17 | env:
18 | PR_URL: ${{github.event.pull_request.html_url}}
19 | # GitHub provides this variable in the CI env. You don't
20 | # need to add anything to the secrets vault.
21 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
22 |
--------------------------------------------------------------------------------
/.github/workflows/ci.yml:
--------------------------------------------------------------------------------
1 | name: CI
2 | on:
3 | schedule:
4 | - cron: "37 0 * * 0" # At 00:37 on every Sunday.
5 |
6 | push:
7 | branches:
8 | - main
9 | pull_request:
10 | branches:
11 | - main
12 |
13 | # If you trigger a new workflow while the previous one is running,
14 | # this will cancel the previous one.
15 | concurrency:
16 | group: ${{ github.head_ref || github.run_id }}
17 | cancel-in-progress: true
18 |
19 |
20 | jobs:
21 | build:
22 | runs-on: ${{ matrix.os }}
23 | strategy:
24 | # Use matrix strategy to run the tests on multiple Py versions on multiple OSs.
25 | matrix:
26 | os: [ubuntu-latest, macos-latest]
27 | python-version: ["3.9", "3.10", "3.11"]
28 |
29 | steps:
30 | - uses: actions/checkout@v4
31 | - uses: actions/setup-python@v5
32 | with:
33 | python-version: ${{ matrix.python-version }}
34 | cache: 'pip'
35 | cache-dependency-path: |
36 | **/requirements*.txt
37 |
38 | - name: Install the Dependencies
39 | run: |
40 | echo "Installing the dependencies..."
41 | python -m pip install -r requirements.txt
42 | python -m pip install -r requirements-dev.txt
43 |
44 | - name: Check Linter
45 | run: |
46 | echo "Checking linter formatting..."
47 | make lint-check
48 |
49 | - name: Run Tests
50 | run: |
51 | echo "Running the tests..."
52 | export PYTHONWARNINGS="ignore" && pytest -v -s -k 'not integration'
53 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Distribution / packaging
10 | .Python
11 | build/
12 | develop-eggs/
13 | dist/
14 | downloads/
15 | eggs/
16 | .eggs/
17 | lib/
18 | lib64/
19 | parts/
20 | sdist/
21 | var/
22 | wheels/
23 | share/python-wheels/
24 | *.egg-info/
25 | .installed.cfg
26 | *.egg
27 | MANIFEST
28 |
29 | # PyInstaller
30 | # Usually these files are written by a python script from a template
31 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
32 | *.manifest
33 | *.spec
34 |
35 | # Installer logs
36 | pip-log.txt
37 | pip-delete-this-directory.txt
38 |
39 | # Unit test / coverage reports
40 | htmlcov/
41 | .tox/
42 | .nox/
43 | .coverage
44 | .coverage.*
45 | .cache
46 | nosetests.xml
47 | coverage.xml
48 | *.cover
49 | *.py,cover
50 | .hypothesis/
51 | .pytest_cache/
52 | cover/
53 |
54 | # Translations
55 | *.mo
56 | *.pot
57 |
58 | # Django stuff:
59 | *.log
60 | local_settings.py
61 | db.sqlite3
62 | db.sqlite3-journal
63 |
64 | # Flask stuff:
65 | instance/
66 | .webassets-cache
67 |
68 | # Scrapy stuff:
69 | .scrapy
70 |
71 | # Sphinx documentation
72 | docs/_build/
73 |
74 | # PyBuilder
75 | .pybuilder/
76 | target/
77 |
78 | # Jupyter Notebook
79 | .ipynb_checkpoints
80 |
81 | # IPython
82 | profile_default/
83 | ipython_config.py
84 |
85 | # pyenv
86 | # For a library or package, you might want to ignore these files since the code is
87 | # intended to run in multiple environments; otherwise, check them in:
88 | # .python-version
89 |
90 | # pipenv
91 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
92 | # However, in case of collaboration, if having platform-specific dependencies or dependencies
93 | # having no cross-platform support, pipenv may install dependencies that don't work, or not
94 | # install all needed dependencies.
95 | #Pipfile.lock
96 |
97 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow
98 | __pypackages__/
99 |
100 | # Celery stuff
101 | celerybeat-schedule
102 | celerybeat.pid
103 |
104 | # SageMath parsed files
105 | *.sage.py
106 |
107 | # Environments
108 | .venv
109 | env/
110 | venv/
111 | ENV/
112 | env.bak/
113 | venv.bak/
114 |
115 | # Spyder project settings
116 | .spyderproject
117 | .spyproject
118 |
119 | # Rope project settings
120 | .ropeproject
121 |
122 | # mkdocs documentation
123 | /site
124 |
125 | # mypy
126 | .mypy_cache/
127 | .dmypy.json
128 | dmypy.json
129 |
130 | # Pyre type checker
131 | .pyre/
132 |
133 | # pytype static type analyzer
134 | .pytype/
135 |
136 | # Cython debug symbols
137 | cython_debug/
138 |
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM python:3.11-slim-bullseye
2 | ENV PYTHONUNBUFFERED=1
3 |
4 |
5 | WORKDIR /code
6 | COPY requirements.txt /code/
7 | RUN pip install -r requirements.txt
8 | COPY . /code/
9 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2021 Redowan Delowar
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/Makefile:
--------------------------------------------------------------------------------
1 | path := .
2 |
3 | define Comment
4 | - Run `make help` to see all the available options.
5 | - Run `make lint` to run the linter.
6 | - Run `make lint-check` to check linter conformity.
7 | - Run `make dep-lock` to lock the deps in 'requirements.txt' and 'requirements-dev.txt'.
8 | - Run `make dep-sync` to sync the current environment with the locked deps.
9 | endef
10 |
11 |
12 | .PHONY: lint
13 | lint: black ruff mypy ## Apply all the linters.
14 |
15 |
16 | .PHONY: lint-check
17 | lint-check: ## Check whether the codebase satisfies the linter rules.
18 | @echo
19 | @echo "Checking linter rules..."
20 | @echo "========================"
21 | @echo
22 | @black --check $(path)
23 | @ruff $(path)
24 | @echo 'y' | mypy $(path) --install-types
25 |
26 |
27 | .PHONY: black
28 | black: ## Apply black.
29 | @echo
30 | @echo "Applying black..."
31 | @echo "================="
32 | @echo
33 | @black --fast $(path)
34 | @echo
35 |
36 |
37 | .PHONY: ruff
38 | ruff: ## Apply ruff.
39 | @echo "Applying ruff..."
40 | @echo "================"
41 | @echo
42 | @ruff --fix $(path)
43 |
44 |
45 | .PHONY: mypy
46 | mypy: ## Apply mypy.
47 | @echo
48 | @echo "Applying mypy..."
49 | @echo "================="
50 | @echo
51 | @mypy $(path)
52 |
53 |
54 | .PHONY: help
55 | help: ## Show this help message.
56 | @grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | sort | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-20s\033[0m %s\n", $$1, $$2}'
57 |
58 |
59 | .PHONY: dep-lock
60 | dep-lock: ## Freeze deps in 'requirements.txt' file.
61 | @pip-compile requirements.in -o requirements.txt --no-emit-options
62 | @pip-compile requirements-dev.in -o requirements-dev.txt --no-emit-options
63 |
64 |
65 | .PHONY: dep-sync
66 | dep-sync: ## Sync venv installation with 'requirements.txt' file.
67 | @pip-sync
68 |
69 |
70 | .PHONY: test
71 | test: ## Run the tests against the current version of Python.
72 | export PYTHONWARNINGS="ignore" && pytest -v -s -k 'not integration'
73 |
74 |
75 | .PHONY: create-topology
76 | create-topology: ## Creates topology diagram from docker compose file.
77 | @docker run \
78 | --rm -it \
79 | --name dcv \
80 | -v /home/rednafi/workspace/personal/hook-slinger:/input pmsipilot/docker-compose-viz \
81 | render -m image \
82 | --force docker-compose.yml \
83 | --output-file=topology.png \
84 | --no-volumes \
85 | --no-networks
86 |
87 |
88 | .PHONY: start-servers
89 | start-servers: ## Start the app, worker and monitor.
90 | docker compose up --build -d
91 |
92 |
93 | .PHONY: stop-servers
94 | stop-servers: ## Stop the app, worker and monitor.
95 | docker system prune
96 | docker compose down -t 1
97 |
98 |
99 | .PHONY: start-tests
100 | start-tests: ## Start the servers and execute the tests.
101 | docker compose -f docker-compose-ci.yml up --build -d
102 |
103 |
104 | .PHONY: app-logs
105 | app-logs: ## Explore the application server container logs.
106 | docker logs wh_app -f
107 |
108 |
109 | .PHONY: worker-logs
110 | worker-logs: ## Explore the worker instance container logs.
111 | docker logs hook-slinger_worker_1 -f
112 |
113 |
114 | .PHONY: worker-scale
115 | worker-scale: ## Scale the worker containers, e.g. 'make worker-scale n=3'.
116 | docker compose up -d --build --scale worker=$(n)
117 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | ![logo][logo]
5 |
6 | >> A generic service to send, retry, and manage webhooks. <<
7 |
8 |
9 |
10 | ## Description
11 |
12 | ### What?
13 |
14 | Hook Slinger acts as a simple service that lets you send, retry, and manage
15 | event-triggered POST requests, aka webhooks. It provides a fully self-contained docker
16 | image that is easy to orchestrate, manage, and scale.
17 |
18 | ### Why?
19 |
20 | Technically, a webhook is a mere POST request—triggered by a system—when a particular
21 | event occurs. The following diagram shows how a simple POST request takes the webhook
22 | nomenclature when invoked by an event trigger.
23 |
24 | ![Webhook Concept][webhook-concept]
25 |
26 | However, there are a few factors that make it tricky to manage the life cycle of a
27 | webhook, such as:
28 |
29 | * Dealing with server failures on both the sending and the receiving end.
30 | * Managing HTTP timeouts.
31 | * Retrying the requests gracefully without overloading the recipients.
32 | * Avoiding retry loops on the sending side.
33 | * Monitoring and providing scope for manual interventions.
34 | * Scaling them quickly; either vertically or horizontally.
35 | * Decoupling webhook management logic from your primary application logic.
36 |
37 | Properly dealing with these concerns can be cumbersome, especially when sending webhooks
38 | is just another small part of your application and you just want it to work without you
39 | having to deal with all the hairy details every time. Hook Slinger aims to alleviate
40 | this pain point.
41 |
42 | ### How?
43 |
44 | Hook Slinger exposes a single endpoint where you can post your webhook payload,
45 | destination URL, auth details, and it'll make the POST request for you asynchronously in
46 | the background. Under the hood, the service uses:
47 |
48 | * [FastAPI][fastapi] to provide a [Uvicorn][uvicorn] driven [ASGI][asgi] server.
49 |
50 | * [Redis][redis] and [RQ][rq] for implementing message queues that provide the
51 | asynchrony and robust failure handling mechanism.
52 |
53 | * [Rqmonitor][rqmonitor] to provide a dashboard for monitoring the status of the
54 | webhooks and manually retrying the failed jobs.
55 |
56 | * [Rich][rich] to make the container logs colorful and more human friendly.
57 |
58 | The simplified app architecture looks something like this:
59 |
60 | ![Topology][topology]
61 |
62 | In the above image, the webhook payload is first sent to the `app` and the `app`
63 | leverages the `worker` instance to make the POST request. Redis DB is used for fast
64 | bookkeeping and async message queue implementation. The `monitor` instance provides a
65 | GUI to monitor and manage the webhooks. Multiple `worker` instances can be spawned to
66 | achieve linear horizontal scale-up.
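
To make that flow concrete, here is a condensed, illustrative sketch of what the `app` and the `worker` do under the hood. The real logic lives in `app/services.py` and `app/workers.py`; the function names and hard-coded values below are simplified stand-ins, not the exact module API:

```python
# Illustrative sketch only; see app/services.py and app/workers.py for the real code.
import uuid

import httpx
import redis
from rq import Queue, Retry

redis_conn = redis.Redis.from_url("redis://redis:6380/1")  # REDIS_URL in .env
queue = Queue("webhook_queue", connection=redis_conn)      # QUEUE_NAME in .env


def post_webhook(to_url: str, payload: dict) -> None:
    # Runs later inside a worker container, not in the app process.
    response = httpx.post(to_url, json=payload, timeout=30)
    if response.status_code != 200:
        # A failed delivery raises, so RQ can retry the job.
        raise RuntimeError(f"Webhook delivery failed with code {response.status_code}")


def enqueue_webhook(to_url: str, payload: dict) -> None:
    # The FastAPI view only enqueues the job and immediately responds with HTTP 202.
    queue.enqueue(
        post_webhook,
        to_url,
        payload,
        retry=Retry(max=3, interval=5),  # MAX_RETRIES / INTERVAL in .env
        job_id=f"group_tag_{uuid.uuid4()}",
    )
```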
67 |
68 | ## Installation
69 |
70 | * Make sure you've got the latest version of [Docker][docker] and
71 | [Docker Compose V2][docker-compose] installed in your system.
72 |
73 | * Clone the repository and head over to the root directory.
74 |
75 | * To start the orchestra, run:
76 |
77 | ```
78 | make start-servers
79 | ```
80 |
81 | This will:
82 |
83 | * Start an `app` server that can be accessed from port `5000`.
84 |
85 | * Start an Alpine-based Redis server that exposes port `6380`.
86 |
87 | * Start a single `worker` that will carry out the actual tasks.
88 |
89 | * Start a `rqmonitor` instance that opens port `8899`.
90 |
91 | * To shut down everything, run:
92 |
93 | ```
94 | make stop-servers
95 | ```
96 |
97 | *TODO: Generalize it more before making it installable with a `docker pull` command.*
98 |
99 | ## Usage
100 |
101 | ### Exploring the interactive API docs
102 |
103 | To try out the entire workflow interactively, head over to the following URL on your
104 | browser:
105 |
106 | ```
107 | http://localhost:5000/docs
108 | ```
109 |
110 | You should see a panel like this:
111 |
112 | ![API Docs][api-docs]
113 |
114 | This app implements a rudimentary token-based authentication system where you're
115 | expected to send an API token by adding an `Authorization: Token <token>` field to
116 | your request header. To do that here, click the `POST /hook_slinger/` ribbon and that
117 | will reveal the API description like this:
118 |
119 | ![API Description][api-description]
120 |
121 | Copy the default token value from the description corpus, then click the green button on
122 | the top right that says **Authorize**, and paste the value in the prompt box. Click
123 | the **Authorize** button again and that'll conclude the login step. In your production
124 | application, you should implement a robust authentication system or at least change this
125 | default token.
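
One way to swap out the default token (illustrative; any sufficiently random secret will do) is to generate a new value, point `API_TOKEN` in the `.env` file at it, and restart the containers:

```sh
# Generate a random token; one option among many.
python -c "import secrets; print(secrets.token_hex(32))"

# Put the generated value into the .env file:
# API_TOKEN="<generated value>"

# Restart so the app and the workers pick up the new token.
make stop-servers
make start-servers
```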
126 |
127 | To send a webhook, you'll need a URL where you'll be able to make the POST request. For
128 | this demonstration, let's pick this [webhook site][webhook-site-url] service to
129 | monitor the received webhooks. It gives you a unique URL against which you'll be able to
130 | make the post requests and monitor them in a dashboard like this:
131 |
132 |
133 | ![Webhook Site][webhook-site]
134 |
135 | On the API docs page, click the **Try it out** button near the **request body** section:
136 |
137 | ![API Request][api-request]
138 |
139 | This should reveal a panel like the following one where you can make your request:
140 |
141 | ![API Request][api-request-2]
142 |
143 | Notice that the section is prefilled with an example request payload. You can use this
144 | exact payload to make a request. Go ahead and click the execute button. If you scroll
145 | down a little, you'll notice the HTTP response:
146 |
147 |
148 | ![API Response][api-response]
149 |
150 | Now, if you head over to the [webhook site][webhook-site-url-detail] URL, you should be
151 | able to see your API payload:
152 |
153 |
154 | ![API Response][api-response-2]
155 |
156 | To monitor the webhook tasks, head over to the following URL:
157 |
158 | ```
159 | http://localhost:8899/
160 | ```
161 |
162 | You should be presented with a GUI like this:
163 |
164 | ![RQ Monitor][rq-monitor]
165 |
166 | If you click **Workers** on the left panel, you'll be presented with a panel where you
167 | can monitor all the workers:
168 |
169 | ![RQ Monitor][rq-monitor-2]
170 |
171 |
172 | The **Jobs** panel lists all the tasks, and from there you'll be able to requeue a
173 | failed job. By default, Hook Slinger retries a failed job 3 times with a 5-second linear
174 | backoff. However, this can be configured via environment variables in the `.env` file, as shown below.
175 |
176 | ![RQ Monitor][rq-monitor-3]
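
For instance, to retry up to 5 times with a 10-second interval (the values here are illustrative), set the following in the `.env` file and restart the services:

```
MAX_RETRIES="5"
INTERVAL="10"
```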
177 |
178 |
179 | ### Sending a webhook via cURL
180 |
181 | Run the following command on your terminal; this assumes that you haven't changed the
182 | auth token (you should):
183 |
184 | ```sh
185 | curl -X 'POST' \
186 | 'http://localhost:5000/hook_slinger/' \
187 | -H 'accept: application/json' \
188 | -H 'Authorization: Token $5$1O/inyTZhNvFt.GW$Zfckz9OL.lm2wh3IewTm8YJ914wjz5txFnXG5XW.wb4' \
189 | -H 'Content-Type: application/json' \
190 | -d '{
191 | "to_url": "https://webhook.site/b30da7ce-c3cc-47e2-b2ae-68747b3d7789",
192 | "to_auth": "",
193 | "tag": "Dhaka",
194 | "group": "Bangladesh",
195 | "payload": {
196 | "greetings": "Hello, world!"
197 | }
198 | }' | python -m json.tool
199 | ```
200 |
201 | You should expect the following output:
202 |
203 | ```json
204 | {
205 | "status": "queued",
206 | "ok": true,
207 | "message": "Webhook registration successful.",
208 | "job_id": "Bangladesh_Dhaka_a07ca786-0b7a-4029-bac0-9a7c6eb68a98",
209 | "queued_at": "2021-11-06T16:54:54.728999"
210 | }
211 | ```
212 |
213 | ### Sending a webhook via Python
214 |
215 | For this purpose, you can use an HTTP library like [httpx][httpx].
216 |
217 | Make the request with the following script:
218 |
219 | ```python
220 | import asyncio
221 | from http import HTTPStatus
222 | from pprint import pprint
223 |
224 | import httpx
225 |
226 |
227 | async def send_webhook() -> None:
228 | wh_payload = {
229 | "to_url": "https://webhook.site/b30da7ce-c3cc-47e2-b2ae-68747b3d7789",
230 | "to_auth": "",
231 | "tag": "Dhaka",
232 | "group": "Bangladesh",
233 | "payload": {"greetings": "Hello, world!"},
234 | }
235 |
236 | async with httpx.AsyncClient(http2=True) as session:
237 | headers = {
238 | "Content-Type": "application/json",
239 | "Authorization": (
240 | "Token $5$1O/inyTZhNvFt.GW$Zfckz9OL.lm2wh3IewTm8YJ914wjz5txFnXG5XW.wb4"
241 | ),
242 | }
243 |
244 | response = await session.post(
245 | "http://localhost:5000/hook_slinger",
246 | headers=headers,
247 | json=wh_payload,
248 | follow_redirects=True,
249 | )
250 |
251 | # Hook Slinger returns http code 202, accepted, for a successful request.
252 | assert response.status_code == HTTPStatus.ACCEPTED
253 | result = response.json()
254 | pprint(result)
255 |
256 |
257 | if __name__ == "__main__":
258 | asyncio.run(send_webhook())
259 | ```
260 |
261 | This should return a similar response as before:
262 |
263 | ```
264 | {
265 | 'job_id': 'Bangladesh_Dhaka_139fc35a-d2a5-4d01-a6af-e980c52f55bc',
266 | 'message': 'Webhook registration successful.',
267 | 'ok': True,
268 | 'queued_at': '2021-07-23T20:15:04.389690',
269 | 'status': 'queued'
270 | }
271 | ```
272 |
273 | ### Exploring the container logs
274 |
275 | Hook Slinger overrides the Python root logger to give you a colorized and user-friendly
276 | logging experience. To explore the logging messages of the application server, run:
277 |
278 | ```
279 | make app-logs
280 | ```
281 |
282 | Notice the colorful logs cascading down from the app server:
283 |
284 | ![App Logs][app-logs]
285 |
286 | Now, to explore the worker instance logs, in a separate terminal, run:
287 |
288 | ```
289 | make worker-logs
290 | ```
291 |
292 | You should see something like this:
293 |
294 | ![Worker Logs][worker-logs]
295 |
296 |
297 | ### Scaling up the service
298 |
299 | Hook Slinger offers easy horizontal scale-up, powered by Docker Compose's `--scale`
300 | flag. In this case, scaling up means spawning new workers in separate containers.
301 | Let's spawn 3 worker containers this time. To do so, first shut down the orchestra by
302 | running:
303 |
304 | ```
305 | make stop-servers
306 | ```
307 |
308 | Now, run:
309 |
310 | ```
311 | make worker-scale n=3
312 | ```
313 |
314 | This will start the **App server**, **Redis DB**, **RQmonitor**, and 3 **Worker**
315 | instances. Spawning multiple worker instances is a great way to achieve job concurrency
316 | with the least amount of hassle.
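
Under the hood, `make worker-scale n=3` runs the following Docker Compose command (see the `Makefile`), so you can also invoke it directly with any worker count:

```
docker compose up -d --build --scale worker=3
```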
317 |
318 | ### Troubleshooting
319 |
320 | On the Rqmonitor dashboard, if you see that your webhooks aren't reaching the
321 | destination, make sure that the destination URL in the webhook payload can accept the
322 | POST requests sent by the workers. Your webhook payload looks like this:
323 |
324 | ```
325 | {
326 | "to_url": "https://webhook.site/f864d28d-9162-4ad5-9205-458e2b561c07",
327 | "to_auth": "",
328 | "tag": "Dhaka",
329 | "group": "Bangladesh",
330 | "payload": {"greetings": "Hello, world!"},
331 | }
332 |
333 | ```
334 |
335 | Here, `to_url` must be able to receive the payload and return HTTP code 200; any other status makes the worker treat the delivery as failed and retry it.
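
If you need a quick local target to test against, here is a minimal sketch of a compatible receiver. It assumes FastAPI purely for illustration; any endpoint that accepts the POST and responds with a 200 works just as well:

```python
# receiver.py -- illustrative only; run with: uvicorn receiver:app --port 9000
from typing import Any

from fastapi import FastAPI

app = FastAPI()


@app.post("/webhook")
async def receive_webhook(payload: dict[str, Any]) -> dict[str, str]:
    # Hook Slinger's worker treats a 200 response as a successful delivery.
    print("Received:", payload)
    return {"status": "received"}
```

Keep in mind that the worker runs inside a container, so `to_url` must be reachable from that container; on Docker Desktop, for example, `http://host.docker.internal:9000/webhook` typically works, whereas `localhost` from inside the container points at the container itself.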
336 |
337 | ## Philosophy & limitations
338 |
339 | Hooks Slinger is designed to be simple, transparent, upgradable, and easily extensible
340 | to cater to your specific needs. It's not built around AMQP compliant message queues
341 | with all the niceties and complexities that come with them—this is intentional.
342 |
343 | Also, if you scrutinize the end-to-end workflow, you'll notice that it requires making
344 | HTTP requests from the sending service to the Hook Slinger. This inevitably adds another
345 | point of failure. However, from the sending service's POV, it's sending the HTTP
346 | requests to a single service, and the target service is responsible for fanning out the
347 | webhooks to the destinations. The developers are expected to have control over both
348 | services, which theoretically should mitigate the failures. The goal is to transfer
349 | some of the code complexity around managing webhooks from the sending service over to
350 | the Hook Slinger. Also, I'm playing around with some of the alternatives to using HTTP
351 | POST requests to send the payloads from the sending end to the Hook Slinger. Suggestions
352 | are always appreciated.
353 |
354 |
355 |
356 | ✨ 🍰 ✨
357 |
358 |
359 | [logo]: https://user-images.githubusercontent.com/30027932/126405827-8b859b4c-89cd-40c8-a7d3-fe6e9fc64770.png
360 | [forthebadge]: https://forthebadge.com
361 | [black-magic-badge]: https://forthebadge.com/images/badges/powered-by-black-magic.svg
362 | [build-with-love-badge]: https://forthebadge.com/images/badges/built-with-love.svg
363 | [made-with-python-badge]: https://forthebadge.com/images/badges/made-with-python.svg
364 | [webhook-concept]: ./art/webhook_concept.png
365 | [fastapi]: https://fastapi.tiangolo.com/
366 | [uvicorn]: https://www.uvicorn.org/
367 | [asgi]: https://asgi.readthedocs.io/en/latest/#
368 | [redis]: https://redis.io/
369 | [rq]: https://python-rq.org/docs/jobs/
370 | [rqmonitor]: https://github.com/pranavgupta1234/rqmonitor
371 | [rich]: https://github.com/willmcgugan/rich
372 | [topology]: ./art/topology.png
373 | [docker]: https://www.docker.com/
374 | [docker-compose]: https://docs.docker.com/compose/cli-command/
375 | [api-docs]: ./art/api_docs.png
376 | [api-description]: ./art/api_descr.png
377 | [webhook-site]: ./art/webhook_site.png
378 | [webhook-site-url]: https://webhook.site/
379 | [webhook-site-url-detail]: https://webhook.site/#!/f864d28d-9162-4ad5-9205-458e2b561c07
380 | [api-request]: ./art/api_request.png
381 | [api-request-2]: ./art/api_request_2.png
382 | [api-response]: ./art/api_response.png
383 | [api-response-2]: ./art/api_response_2.png
384 | [rq-monitor]: ./art/rq_monitor.png
385 | [rq-monitor-2]: ./art/rq_monitor_2.png
386 | [rq-monitor-3]: ./art/rq_monitor_3.png
387 | [app-logs]: ./art/app_logs.png
388 | [worker-logs]: ./art/worker_logs.png
389 | [httpx]: https://www.python-httpx.org
390 |
--------------------------------------------------------------------------------
/app/__init__.py:
--------------------------------------------------------------------------------
1 | """Here, the root logger is overriden to achieve ubiquitous custom log messages."""
2 |
3 | import logging
4 |
5 | from rich.console import Console
6 | from rich.logging import RichHandler
7 | from rich.traceback import install
8 |
9 | import config
10 |
11 | LOG_LEVEL = logging.getLevelName(config.LOG_LEVEL)
12 |
13 |
14 | console = Console(
15 | color_system="standard",
16 | force_terminal=True,
17 | tab_size=4,
18 | width=90,
19 | )
20 |
21 | install(console=console)
22 |
23 | logHandler = RichHandler(
24 | rich_tracebacks=True,
25 | console=console,
26 | tracebacks_width=88,
27 | show_time=False,
28 | )
29 |
30 |
31 | # Intercept everything at the root logger.
32 | logging.root.handlers = [logHandler]
33 | logging.root.setLevel(LOG_LEVEL)
34 |
35 | # Remove every other logger's handlers
36 | # and propagate to root logger.
37 | for name in logging.root.manager.loggerDict.keys():
38 | logging.getLogger(name).handlers = []
39 | logging.getLogger(name).propagate = True
40 |
--------------------------------------------------------------------------------
/app/main.py:
--------------------------------------------------------------------------------
1 | from fastapi import FastAPI
2 | from starlette.middleware.cors import CORSMiddleware
3 |
4 | from . import views
5 |
6 | app = FastAPI()
7 |
8 | # Set all CORS enabled origins
9 | app.add_middleware(
10 | CORSMiddleware,
11 | allow_origins=["*"],
12 | allow_credentials=True,
13 | allow_methods=["*"],
14 | allow_headers=["*"],
15 | )
16 |
17 | app.include_router(views.router)
18 |
--------------------------------------------------------------------------------
/app/services.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import re
4 | import typing
5 | import uuid
6 | from http import HTTPStatus
7 |
8 | import httpx
9 | import redis
10 | from rq import Queue, Retry
11 |
12 | import config
13 |
14 | if typing.TYPE_CHECKING:
15 | from rq.job import Job
16 |
17 | from .views import SlingerRequestPayload
18 |
19 | __all__ = ("send_webhook", "validate_url")
20 |
21 |
22 | class WebhookPostFailedError(Exception):
23 | """Raises this when sending the webhook post request fails due to
24 | some HTTP error."""
25 |
26 |
27 | def validate_url(url: str) -> str:
28 | # This was shamelessly copied from old Django source code.
29 | # https://github.com/django/django/blob/stable/1.3.x/django/core/validators.py#L45
30 | regex = re.compile(
31 | r"^(?:http|ftp)s?://" # http:// or https://
32 | r"(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|" # domain...
33 | r"localhost|" # localhost...
34 | r"\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})" # ...or ip
35 | r"(?::\d+)?" # optional port
36 | r"(?:/?|[/?]\S+)$",
37 | re.IGNORECASE,
38 | )
39 |
40 | if re.match(regex, url) is None:
41 | raise ValueError("Value of 'url' is not a valid URL.")
42 |
43 | return url
44 |
45 |
46 | def send_post_request(webhook_payload: SlingerRequestPayload) -> None:
47 | to_url = webhook_payload.to_url
48 | to_auth = webhook_payload.to_auth
49 | payload = webhook_payload.payload
50 |
51 | if to_auth:
52 | headers = {
53 | "Content-Type": "application/json",
54 | "Authorization": to_auth,
55 | }
56 |
57 | else:
58 | headers = {
59 | "Content-Type": "application/json",
60 | }
61 |
62 | with httpx.Client(http2=True) as session:
63 | response = session.post(
64 | to_url,
65 | headers=headers,
66 | json=payload,
67 | timeout=config.HTTP_TIMEOUT,
68 | )
69 |
70 |         if response.status_code != HTTPStatus.OK:
71 | raise WebhookPostFailedError(
72 | f"Sending webhook failed.\n"
73 | f"to_url: {to_url}\n"
74 | f"payload: {payload}\n, code: {response.status_code}"
75 | )
76 |
77 |
78 | redis_conn = redis.Redis.from_url(config.REDIS_URL)
79 | queue = Queue(config.QUEUE_NAME, connection=redis_conn)
80 |
81 |
82 | def send_webhook(*, webhook_payload: SlingerRequestPayload) -> Job:
83 | return queue.enqueue(
84 | send_post_request,
85 | webhook_payload,
86 | retry=Retry(max=config.MAX_RETRIES, interval=config.INTERVAL),
87 | job_id=f"{webhook_payload.group}_{webhook_payload.tag}_{str(uuid.uuid4())}",
88 | )
89 |
--------------------------------------------------------------------------------
/app/views.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | from datetime import datetime
4 | from http import HTTPStatus
5 | from typing import Any, Optional
6 |
7 | from fastapi import APIRouter, Depends, Security
8 | from fastapi.security import APIKeyHeader
9 | from pydantic import BaseModel, Field
10 | from rq.job import JobStatus
11 | from starlette.exceptions import HTTPException
12 |
13 | import config
14 |
15 | from .services import send_webhook, validate_url
16 |
17 |
18 | class SlingerRequestPayload(BaseModel):
19 | """Pydantic model to declare and validate webhook payload."""
20 |
21 | to_url: str # Webhook callback url
22 | to_auth: Optional[str] # Webhook callback auth
23 | tag: Optional[str] # Add a type tag
24 | group: Optional[str] # Which group/section/schema the webhook belongs to
25 | payload: dict[str, Any] # The actual payload to be sent to 'to_url'
26 |
27 | class Config:
28 | schema_extra = {
29 | "example": {
30 | "to_url": "https://webhook.site/37ad9530-59c3-430d-9db6-e68317321a9f",
31 | "to_auth": "",
32 | "tag": "Dhaka",
33 | "group": "Bangladesh",
34 | "payload": {
35 | "greetings": "Hello, world!",
36 | },
37 | }
38 | }
39 |
40 |
41 | class SlingerResponsePayload(BaseModel):
42 | """Pydantic model to declare the response json shape of hook slinger API."""
43 |
44 | status: JobStatus
45 | ok: bool
46 | message: str
47 | job_id: Optional[str]
48 |     queued_at: str = Field(default_factory=lambda: datetime.utcnow().isoformat())
49 |
50 | class Config:
51 | schema_extra = {
52 | "example": {
53 | "status": "registered",
54 | "ok": True,
55 | "message": "Webhook registration successful.",
56 | "job_id": "Bangladesh_Dhaka_0f8346f4-8b84-4dc1-9df3-a5c09024e45c",
57 | "queued_at": "2021-07-23T19:38:41.061838",
58 | },
59 | }
60 |
61 |
62 | router = APIRouter()
63 |
64 | SECRET_KEY_NAME = "Authorization"
65 | secret_header = APIKeyHeader(
66 | name=SECRET_KEY_NAME,
67 | scheme_name="Secret header",
68 | auto_error=False,
69 | )
70 | SECRET = f"Token {config.API_TOKEN}"
71 |
72 |
73 | async def secret_based_security(header_param: str = Security(secret_header)):
74 | """
75 | Args:
76 | header_param: parsed header field secret_header
77 | Returns:
78 | True if the authentication was successful
79 | Raises:
80 | HTTPException if the authentication failed
81 | """
82 |
83 | if header_param == SECRET:
84 | return True
85 | if not header_param:
86 | error = (
87 | "Did you forget to add 'Authorization' field to the request header? "
88 |             "You can find the auth key in the '.env' file as 'API_TOKEN'. "
89 |             "Also, you have to prepend the auth protocol before the token. "
90 |             "For example: 'Authorization: Token <token>' "
91 | )
92 | else:
93 | error = (
94 | "Wrong API auth key. "
95 | "Did you forget to add 'Authorization' field to the request header? "
96 |             "You can find the auth key in the '.env' file as 'API_TOKEN'. Also, you have to prepend the auth protocol before the token. "
97 |             "For example: 'Authorization: Token <token>' "
98 | )
99 |
100 | raise HTTPException(status_code=HTTPStatus.FORBIDDEN, detail=error)
101 |
102 |
103 | @router.post(
104 | "/hook_slinger/",
105 | tags=["hook"],
106 | dependencies=[Depends(secret_based_security)],
107 | status_code=HTTPStatus.ACCEPTED,
108 | response_model=SlingerResponsePayload,
109 | )
110 | async def hook_slinger_view(
111 | webhook_payload: SlingerRequestPayload,
112 | ) -> SlingerResponsePayload:
113 |
114 | """
115 | # Hook Slinger Router API
116 |
117 | ## Description
118 |
119 | Send, retry, and manage webhooks with Redis Queue.
120 |
121 | Click the Authorize lock button and add the following API token from the `.env` file:
122 |
123 | ```
124 | Token $5$1O/inyTZhNvFt.GW$Zfckz9OL.lm2wh3IewTm8YJ914wjz5txFnXG5XW.wb4
125 | ```
126 |
127 | Make a `POST` request to the following endpoint:
128 |
129 | ```
130 | http://localhost:5000/hook_slinger/
131 | ```
132 |
133 | The API Payload should have the following schema:
134 |
135 | ```
136 | {
137 | "to_url": "https://webhook.site/37ad9530-59c3-430d-9db6-e68317321a9f",
138 | "to_auth": "",
139 | "tag": "Dhaka",
140 | "group": "Bangladesh",
141 | "payload": {
142 | "greetings": "Hello, world!"
143 | }
144 | }
145 | ```
146 | Here:
147 | * `to_url` is the destination URL where the webhook is intended to be sent.
148 | * `to_auth` is the auth token expected by the webhook destination server,
149 | can be an empty string if the server doesn't require any authentication.
150 | * `tag` is any identifier string, can be empty.
151 | * `group` is another identifier string, can be empty.
152 | * `payload` the payload that is intended to be sent to `to_url`, can be an empty dict.
153 |
154 | """
155 |
156 | try:
157 | validate_url(webhook_payload.to_url)
158 | except ValueError:
159 | raise HTTPException(
160 | status_code=HTTPStatus.BAD_REQUEST,
161 | detail="Parameter 'to_url' is not a valid URL.",
162 | )
163 |
164 | try:
165 | job = send_webhook(webhook_payload=webhook_payload)
166 | return SlingerResponsePayload(
167 | status=job.get_status(),
168 | ok=True,
169 | message="Webhook registration successful.",
170 | job_id=job.get_id(),
171 | )
172 |
173 | except Exception:
174 | raise HTTPException(
175 | status_code=HTTPStatus.INTERNAL_SERVER_ERROR,
176 | detail="Webhook registration failed.",
177 | )
178 |
--------------------------------------------------------------------------------
/app/workers.py:
--------------------------------------------------------------------------------
1 | """Module dedicated to spawn rq workers."""
2 |
3 | from redis import Redis
4 | from rq import Connection, Queue
5 | from rq.worker import Worker
6 |
7 | import config
8 |
9 | listen = [config.QUEUE_NAME]
10 | redis_conn = Redis.from_url(config.REDIS_URL)
11 |
12 |
13 | if __name__ == "__main__":
14 | with Connection(redis_conn):
15 | worker = Worker(map(Queue, listen), name=config.WORKER_NAME)
16 | worker.work(with_scheduler=True)
17 |
--------------------------------------------------------------------------------
/art/api_descr.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/rednafi/hook-slinger/ec8f6c95e243fb1fc1eaca214cf5407cb33d9b98/art/api_descr.png
--------------------------------------------------------------------------------
/art/api_docs.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/rednafi/hook-slinger/ec8f6c95e243fb1fc1eaca214cf5407cb33d9b98/art/api_docs.png
--------------------------------------------------------------------------------
/art/api_request.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/rednafi/hook-slinger/ec8f6c95e243fb1fc1eaca214cf5407cb33d9b98/art/api_request.png
--------------------------------------------------------------------------------
/art/api_request_2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/rednafi/hook-slinger/ec8f6c95e243fb1fc1eaca214cf5407cb33d9b98/art/api_request_2.png
--------------------------------------------------------------------------------
/art/api_response.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/rednafi/hook-slinger/ec8f6c95e243fb1fc1eaca214cf5407cb33d9b98/art/api_response.png
--------------------------------------------------------------------------------
/art/api_response_2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/rednafi/hook-slinger/ec8f6c95e243fb1fc1eaca214cf5407cb33d9b98/art/api_response_2.png
--------------------------------------------------------------------------------
/art/app_logs.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/rednafi/hook-slinger/ec8f6c95e243fb1fc1eaca214cf5407cb33d9b98/art/app_logs.png
--------------------------------------------------------------------------------
/art/rq_monitor.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/rednafi/hook-slinger/ec8f6c95e243fb1fc1eaca214cf5407cb33d9b98/art/rq_monitor.png
--------------------------------------------------------------------------------
/art/rq_monitor_2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/rednafi/hook-slinger/ec8f6c95e243fb1fc1eaca214cf5407cb33d9b98/art/rq_monitor_2.png
--------------------------------------------------------------------------------
/art/rq_monitor_3.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/rednafi/hook-slinger/ec8f6c95e243fb1fc1eaca214cf5407cb33d9b98/art/rq_monitor_3.png
--------------------------------------------------------------------------------
/art/topology.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/rednafi/hook-slinger/ec8f6c95e243fb1fc1eaca214cf5407cb33d9b98/art/topology.png
--------------------------------------------------------------------------------
/art/webhook_concept.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/rednafi/hook-slinger/ec8f6c95e243fb1fc1eaca214cf5407cb33d9b98/art/webhook_concept.png
--------------------------------------------------------------------------------
/art/webhook_site.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/rednafi/hook-slinger/ec8f6c95e243fb1fc1eaca214cf5407cb33d9b98/art/webhook_site.png
--------------------------------------------------------------------------------
/art/worker_logs.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/rednafi/hook-slinger/ec8f6c95e243fb1fc1eaca214cf5407cb33d9b98/art/worker_logs.png
--------------------------------------------------------------------------------
/config.py:
--------------------------------------------------------------------------------
1 | import os
2 | import uuid
3 |
4 | from dotenv import load_dotenv
5 |
6 | load_dotenv(".env")
7 |
8 |
9 | # Webhook post timeout in seconds.
10 | HTTP_TIMEOUT: int = int(os.environ.get("HTTP_TIMEOUT", 30))
11 |
12 | # Redis.
13 | REDIS_URL: str = os.environ.get("REDIS_URL", "redis://redis:6380/1")
14 |
15 | # API token, SHA-256 key.
16 | API_TOKEN: str = os.environ.get(
17 | "API_TOKEN", "$5$1O/inyTZhNvFt.GW$Zfckz9OL.lm2wh3IewTm8YJ914wjz5txFnXG5XW.wb4"
18 | )
19 |
20 | # Retry parameters.
21 | MAX_RETRIES: int = int(os.environ.get("MAX_RETRIES", 3))
22 | INTERVAL: int = int(os.environ.get("INTERVAL", 5))
23 |
24 | # Message queue configs.
25 | QUEUE_NAME: str = os.environ.get("QUEUE_NAME", "webhook_queue")
26 | WORKER_NAME_PREFIX: str = os.environ.get(
27 | "WORKER_NAME_PREFIX",
28 | "webhook_queue_consumer",
29 | )
30 | WORKER_NAME = f"{WORKER_NAME_PREFIX}_{str(uuid.uuid4())}"
31 |
32 |
33 | # Log.
34 | LOG_LEVEL = os.environ.get("LOG_LEVEL", "INFO")
35 |
--------------------------------------------------------------------------------
/docker-compose-ci.yml:
--------------------------------------------------------------------------------
1 | version: "3.9"
2 |
3 | services:
4 | redis:
5 | image: redis:alpine
6 | container_name: wh_redis
7 | environment:
8 | - ALLOW_EMPTY_PASSWORD=yes
9 | ports:
10 | - "6380:6380"
11 | command: redis-server --port 6380
12 |
13 | app:
14 | build: .
15 | container_name: wh_app
16 | entrypoint: ["/bin/sh","-c"]
17 | command:
18 | - |
19 | uvicorn app.main:app --port 5000 --host app
20 |
21 | volumes:
22 | - ./app:/code/app
23 | ports:
24 | - "5000:5000"
25 | env_file:
26 | - ".env"
27 | depends_on:
28 | - redis
29 |
30 | worker:
31 | build: .
32 | entrypoint: ["/bin/sh","-c"]
33 | command:
34 | - |
35 | python -m app.workers
36 | volumes:
37 | - ./app:/code/app
38 |
39 | env_file:
40 | - ".env"
41 | depends_on:
42 | - redis
43 | - app
44 |
45 | monitor:
46 | build: .
47 | entrypoint: ["/bin/sh","-c"]
48 | command:
49 | - |
50 | rqmonitor -b monitor -p 8899 -u ${REDIS_URL}
51 |
52 | volumes:
53 | - ./app:/code/app
54 | ports:
55 | - "8899:8899"
56 | env_file:
57 | - ".env"
58 | depends_on:
59 | - redis
60 | - app
61 | - worker
62 |
63 | test:
64 | build: .
65 | entrypoint: ["/bin/sh","-c"]
66 | command:
67 | - |
68 | pip install -r requirements-dev.txt
69 | black . --check
70 | isort . --check
71 | pytest -s -v
72 |
73 | volumes:
74 | - ./:/code/
75 |
76 | env_file:
77 | - ".env"
78 | depends_on:
79 | - redis
80 | - app
81 | - worker
82 | - monitor
83 |
--------------------------------------------------------------------------------
/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: "3.9"
2 |
3 | services:
4 | redis:
5 | image: redis:alpine
6 | container_name: wh_redis
7 | environment:
8 | - ALLOW_EMPTY_PASSWORD=yes
9 | ports:
10 | - "6380:6380"
11 | command: redis-server --port 6380
12 |
13 | app:
14 | build: .
15 | container_name: wh_app
16 | entrypoint: ["/bin/sh","-c"]
17 | command:
18 | - |
19 | uvicorn app.main:app --port 5000 --host app
20 |
21 | volumes:
22 | - ./app:/code/app
23 | ports:
24 | - "5000:5000"
25 | env_file:
26 | - ".env"
27 | depends_on:
28 | - redis
29 | init: true
30 |
31 | worker:
32 | build: .
33 | entrypoint: ["/bin/sh","-c"]
34 | command:
35 | - |
36 | python -m app.workers
37 | volumes:
38 | - ./app:/code/app
39 |
40 | env_file:
41 | - ".env"
42 | depends_on:
43 | - redis
44 | - app
45 | init: true
46 |
47 | monitor:
48 | build: .
49 | entrypoint: ["/bin/sh","-c"]
50 | command:
51 | - |
52 | rqmonitor -b monitor -p 8899 -u ${REDIS_URL}
53 |
54 | volumes:
55 | - ./app:/code/app
56 | ports:
57 | - "8899:8899"
58 | env_file:
59 | - ".env"
60 | depends_on:
61 | - redis
62 | - app
63 | - worker
64 |
65 | init: true
66 |
--------------------------------------------------------------------------------
/examples/send_webhook.py:
--------------------------------------------------------------------------------
1 | # Requires python3.8 and up!
2 |
3 | import asyncio
4 | from http import HTTPStatus
5 | from pprint import pprint
6 |
7 | import httpx
8 |
9 |
10 | async def send_webhook() -> None:
11 |
12 | wh_payload = {
13 | "to_url": "https://webhook.site/f864d28d-9162-4ad5-9205-458e2b561c07",
14 | "to_auth": "",
15 | "tag": "Dhaka",
16 | "group": "Bangladesh",
17 | "payload": {"greetings": "Hello, world!"},
18 | }
19 |
20 | async with httpx.AsyncClient(http2=True) as session:
21 | headers = {
22 | "Content-Type": "application/json",
23 | "Authorization": (
24 | "Token $5$1O/inyTZhNvFt.GW$Zfckz9OL.lm2wh3IewTm8YJ914wjz5txFnXG5XW.wb4"
25 | ),
26 | }
27 |
28 | response = await session.post(
29 | "http://localhost:5000/hook_slinger",
30 | headers=headers,
31 | json=wh_payload,
32 | follow_redirects=True,
33 | )
34 |
35 | # Hook Slinger returns http code 202, accepted, for a successful request.
36 | assert response.status_code == HTTPStatus.ACCEPTED
37 | result = response.json()
38 | pprint(result)
39 |
40 |
41 | if __name__ == "__main__":
42 | asyncio.run(send_webhook())
43 |
--------------------------------------------------------------------------------
/examples/send_webhook.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | set -euo pipefail
4 |
5 | curl -X 'POST' \
6 | 'http://localhost:5000/hook_slinger/' \
7 | -H 'accept: application/json' \
8 | -H 'Authorization: Token $5$1O/inyTZhNvFt.GW$Zfckz9OL.lm2wh3IewTm8YJ914wjz5txFnXG5XW.wb4' \
9 | -H 'Content-Type: application/json' \
10 | -d '{
11 | "to_url": "https://webhook.site/37ad9530-59c3-430d-9db6-e68317321a9f",
12 | "to_auth": "",
13 | "tag": "Dhaka",
14 | "group": "Bangladesh",
15 | "payload": {
16 | "greetings": "Hello, world!"
17 | }
18 | }' | python -m json.tool
19 |
--------------------------------------------------------------------------------
/gh-md-toc:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | #
4 | # Steps:
5 | #
6 | # 1. Download corresponding html file for some README.md:
7 | # curl -s $1
8 | #
9 | # 2. Discard rows where no substring 'user-content-' (github's markup):
10 | # awk '/user-content-/ { ...
11 | #
12 | # 3.1 Get last number in each row like ' ... sitemap.js.*<\/h/)+2, RLENGTH-5)
21 | #
22 | # 5. Find anchor and insert it inside "(...)":
23 | # substr($0, match($0, "href=\"[^\"]+?\" ")+6, RLENGTH-8)
24 | #
25 |
26 | gh_toc_version="0.7.0"
27 |
28 | gh_user_agent="gh-md-toc v$gh_toc_version"
29 |
30 | #
31 | # Download rendered into html README.md by its url.
32 | #
33 | #
34 | gh_toc_load() {
35 | local gh_url=$1
36 |
37 | if type curl &>/dev/null; then
38 | curl --user-agent "$gh_user_agent" -s "$gh_url"
39 | elif type wget &>/dev/null; then
40 | wget --user-agent="$gh_user_agent" -qO- "$gh_url"
41 | else
42 | echo "Please, install 'curl' or 'wget' and try again."
43 | exit 1
44 | fi
45 | }
46 |
47 | #
48 | # Converts local md file into html by GitHub
49 | #
50 | # -> curl -X POST --data '{"text": "Hello world github/linguist#1 **cool**, and #1!"}' https://api.github.com/markdown
51 | # Hello world github/linguist#1 cool, and #1!
'"
52 | gh_toc_md2html() {
53 | local gh_file_md=$1
54 | URL=https://api.github.com/markdown/raw
55 |
56 | if [ ! -z "$GH_TOC_TOKEN" ]; then
57 | TOKEN=$GH_TOC_TOKEN
58 | else
59 | TOKEN_FILE="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)/token.txt"
60 | if [ -f "$TOKEN_FILE" ]; then
61 | TOKEN="$(cat $TOKEN_FILE)"
62 | fi
63 | fi
64 | if [ ! -z "${TOKEN}" ]; then
65 | AUTHORIZATION="Authorization: token ${TOKEN}"
66 | fi
67 |
68 | # echo $URL 1>&2
69 | OUTPUT=$(curl -s \
70 | --user-agent "$gh_user_agent" \
71 | --data-binary @"$gh_file_md" \
72 | -H "Content-Type:text/plain" \
73 | -H "$AUTHORIZATION" \
74 | "$URL")
75 |
76 | if [ "$?" != "0" ]; then
77 | echo "XXNetworkErrorXX"
78 | fi
79 | if [ "$(echo "${OUTPUT}" | awk '/API rate limit exceeded/')" != "" ]; then
80 | echo "XXRateLimitXX"
81 | else
82 | echo "${OUTPUT}"
83 | fi
84 | }
85 |
86 |
87 | #
88 | # Is passed string url
89 | #
90 | gh_is_url() {
91 | case $1 in
92 | https* | http*)
93 | echo "yes";;
94 | *)
95 | echo "no";;
96 | esac
97 | }
98 |
99 | #
100 | # TOC generator
101 | #
102 | gh_toc(){
103 | local gh_src=$1
104 | local gh_src_copy=$1
105 | local gh_ttl_docs=$2
106 | local need_replace=$3
107 | local no_backup=$4
108 | local no_footer=$5
109 |
110 | if [ "$gh_src" = "" ]; then
111 | echo "Please, enter URL or local path for a README.md"
112 | exit 1
113 | fi
114 |
115 |
116 | # Show "TOC" string only if working with one document
117 | if [ "$gh_ttl_docs" = "1" ]; then
118 |
119 | echo "Table of Contents"
120 | echo "================="
121 | echo ""
122 | gh_src_copy=""
123 |
124 | fi
125 |
126 | if [ "$(gh_is_url "$gh_src")" == "yes" ]; then
127 | gh_toc_load "$gh_src" | gh_toc_grab "$gh_src_copy"
128 | if [ "${PIPESTATUS[0]}" != "0" ]; then
129 | echo "Could not load remote document."
130 | echo "Please check your url or network connectivity"
131 | exit 1
132 | fi
133 | if [ "$need_replace" = "yes" ]; then
134 | echo
135 | echo "!! '$gh_src' is not a local file"
136 | echo "!! Can't insert the TOC into it."
137 | echo
138 | fi
139 | else
140 | local rawhtml=$(gh_toc_md2html "$gh_src")
141 | if [ "$rawhtml" == "XXNetworkErrorXX" ]; then
142 | echo "Parsing local markdown file requires access to github API"
143 | echo "Please make sure curl is installed and check your network connectivity"
144 | exit 1
145 | fi
146 | if [ "$rawhtml" == "XXRateLimitXX" ]; then
147 | echo "Parsing local markdown file requires access to github API"
148 | echo "Error: You exceeded the hourly limit. See: https://developer.github.com/v3/#rate-limiting"
149 | TOKEN_FILE="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)/token.txt"
150 | echo "or place GitHub auth token here: ${TOKEN_FILE}"
151 | exit 1
152 | fi
153 | local toc=`echo "$rawhtml" | gh_toc_grab "$gh_src_copy"`
154 | echo "$toc"
155 | if [ "$need_replace" = "yes" ]; then
156 |         if grep -Fxq "<!--ts-->" $gh_src && grep -Fxq "<!--te-->" $gh_src; then
157 | echo "Found markers"
158 | else
159 |             echo "You don't have <!--ts--> or <!--te--> in your file...exiting"
160 | exit 1
161 | fi
162 | local ts="<\!--ts-->"
163 | local te="<\!--te-->"
164 | local dt=`date +'%F_%H%M%S'`
165 | local ext=".orig.${dt}"
166 | local toc_path="${gh_src}.toc.${dt}"
167 | local toc_footer=""
168 | # http://fahdshariff.blogspot.ru/2012/12/sed-mutli-line-replacement-between-two.html
169 | # clear old TOC
170 | sed -i${ext} "/${ts}/,/${te}/{//!d;}" "$gh_src"
171 | # create toc file
172 | echo "${toc}" > "${toc_path}"
173 | if [ "${no_footer}" != "yes" ]; then
174 | echo -e "\n${toc_footer}\n" >> "$toc_path"
175 | fi
176 |
177 | # insert toc file
178 | if [[ "`uname`" == "Darwin" ]]; then
179 | sed -i "" "/${ts}/r ${toc_path}" "$gh_src"
180 | else
181 | sed -i "/${ts}/r ${toc_path}" "$gh_src"
182 | fi
183 | echo
184 | if [ "${no_backup}" = "yes" ]; then
185 | rm ${toc_path} ${gh_src}${ext}
186 | fi
187 | echo "!! TOC was added into: '$gh_src'"
188 | if [ -z "${no_backup}" ]; then
189 | echo "!! Origin version of the file: '${gh_src}${ext}'"
190 | echo "!! TOC added into a separate file: '${toc_path}'"
191 | fi
192 | echo
193 | fi
194 | fi
195 | }
196 |
197 | #
198 | # Grabber of the TOC from rendered html
199 | #
200 | # $1 - a source url of document.
201 | # It's need if TOC is generated for multiple documents.
202 | #
203 | gh_toc_grab() {
204 | common_awk_script='
205 | modified_href = ""
206 | split(href, chars, "")
207 | for (i=1;i <= length(href); i++) {
208 | c = chars[i]
209 | res = ""
210 | if (c == "+") {
211 | res = " "
212 | } else {
213 | if (c == "%") {
214 | res = "\\x"
215 | } else {
216 | res = c ""
217 | }
218 | }
219 | modified_href = modified_href res
220 | }
221 | print sprintf("%*s", (level-1)*3, "") "* [" text "](" gh_url modified_href ")"
222 | '
223 | if [ `uname -s` == "OS/390" ]; then
224 | grepcmd="pcregrep -o"
225 | echoargs=""
226 | awkscript='{
227 | level = substr($0, length($0), 1)
228 | text = substr($0, match($0, /a>.*<\/h/)+2, RLENGTH-5)
229 | href = substr($0, match($0, "href=\"([^\"]+)?\"")+6, RLENGTH-7)
230 | '"$common_awk_script"'
231 | }'
232 | else
233 | grepcmd="grep -Eo"
234 | echoargs="-e"
235 | awkscript='{
236 | level = substr($0, length($0), 1)
237 | text = substr($0, match($0, /a>.*<\/h/)+2, RLENGTH-5)
238 | href = substr($0, match($0, "href=\"[^\"]+?\"")+6, RLENGTH-7)
239 | '"$common_awk_script"'
240 | }'
241 | fi
242 | href_regex='href=\"[^\"]+?\"'
243 |
244 | # if closed is on the new line, then move it on the prev line
245 | # for example:
246 | # was: The command foo1
247 | #
248 | # became: The command foo1
249 | sed -e ':a' -e 'N' -e '$!ba' -e 's/\n<\/h/<\/h/g' |
250 |
251 | # find strings that corresponds to template
252 | $grepcmd '//g' | sed 's/<\/code>//g' |
256 |
257 | # remove g-emoji
258 | sed 's/]*[^<]*<\/g-emoji> //g' |
259 |
260 | # now all rows are like:
261 | # ... / placeholders"
290 | echo " $app_name - Create TOC for markdown from STDIN"
291 | echo " $app_name --help Show help"
292 | echo " $app_name --version Show version"
293 | return
294 | fi
295 |
296 | if [ "$1" = '--version' ]; then
297 | echo "$gh_toc_version"
298 | echo
299 | echo "os: `lsb_release -d | cut -f 2`"
300 | echo "kernel: `cat /proc/version`"
301 | echo "shell: `$SHELL --version`"
302 | echo
303 | for tool in curl wget grep awk sed; do
304 | printf "%-5s: " $tool
305 | echo `$tool --version | head -n 1`
306 | done
307 | return
308 | fi
309 |
310 | if [ "$1" = "-" ]; then
311 | if [ -z "$TMPDIR" ]; then
312 | TMPDIR="/tmp"
313 | elif [ -n "$TMPDIR" -a ! -d "$TMPDIR" ]; then
314 | mkdir -p "$TMPDIR"
315 | fi
316 | local gh_tmp_md
317 | if [ `uname -s` == "OS/390" ]; then
318 | local timestamp=$(date +%m%d%Y%H%M%S)
319 | gh_tmp_md="$TMPDIR/tmp.$timestamp"
320 | else
321 | gh_tmp_md=$(mktemp $TMPDIR/tmp.XXXXXX)
322 | fi
323 | while read input; do
324 | echo "$input" >> "$gh_tmp_md"
325 | done
326 | gh_toc_md2html "$gh_tmp_md" | gh_toc_grab ""
327 | return
328 | fi
329 |
330 | if [ "$1" = '--insert' ]; then
331 | need_replace="yes"
332 | shift
333 | fi
334 |
335 | if [ "$1" = '--no-backup' ]; then
336 | need_replace="yes"
337 | no_backup="yes"
338 | shift
339 | fi
340 |
341 | if [ "$1" = '--hide-footer' ]; then
342 | need_replace="yes"
343 | no_footer="yes"
344 | shift
345 | fi
346 |
347 | for md in "$@"
348 | do
349 | echo ""
350 | gh_toc "$md" "$#" "$need_replace" "$no_backup" "$no_footer"
351 | done
352 |
353 | echo ""
354 | echo "Created by [gh-md-toc](https://github.com/ekalinin/github-markdown-toc)"
355 | }
356 |
357 | #
358 | # Entry point
359 | #
360 | gh_toc_app "$@"
361 |
362 |
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [tool.mypy]
2 | follow_imports = "skip"
3 | ignore_missing_imports = true
4 | warn_no_return = false
5 | warn_unused_ignores = true
6 | allow_untyped_globals = true
7 | allow_redefinition = true
8 | pretty = true
9 |
10 | [[tool.mypy.overrides]]
11 | module = "tests.*"
12 | ignore_errors = true
13 |
14 |
15 | [tool.ruff]
16 | line-length = 88
17 |
18 | # Enable Pyflakes `E` and `F` codes by default.
19 | select = ["E", "F", "I001", "PT", "C4"]
20 | ignore = ["E501"]
21 | respect-gitignore = true
22 |
23 | per-file-ignores = {}
24 |
25 | # Allow unused variables when underscore-prefixed.
26 | dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$"
27 |
28 | # Assume Python 3.11.
29 | target-version = "py311"
30 |
31 |
32 | [tool.ruff.mccabe]
33 | # Unlike Flake8, default to a complexity level of 10.
34 | max-complexity = 10
35 |
--------------------------------------------------------------------------------
/requirements-dev.in:
--------------------------------------------------------------------------------
1 | black
2 | ruff
3 | mypy
4 | pip-tools
5 | types-redis
6 | pytest
7 | pytest-asyncio
8 |
--------------------------------------------------------------------------------
/requirements-dev.txt:
--------------------------------------------------------------------------------
1 | #
2 | # This file is autogenerated by pip-compile with Python 3.11
3 | # by the following command:
4 | #
5 | # pip-compile --no-emit-options --output-file=requirements-dev.txt requirements-dev.in
6 | #
7 | black==22.12.0
8 | # via -r requirements-dev.in
9 | build==1.1.1
10 | # via pip-tools
11 | cffi==1.15.1
12 | # via cryptography
13 | click==8.1.3
14 | # via
15 | # black
16 | # pip-tools
17 | cryptography==39.0.0
18 | # via
19 | # types-pyopenssl
20 | # types-redis
21 | iniconfig==2.0.0
22 | # via pytest
23 | mypy==1.15.0
24 | # via -r requirements-dev.in
25 | mypy-extensions==1.0.0
26 | # via
27 | # black
28 | # mypy
29 | packaging==23.0
30 | # via
31 | # build
32 | # pytest
33 | pathspec==0.10.3
34 | # via black
35 | pip-tools==7.4.1
36 | # via -r requirements-dev.in
37 | platformdirs==2.6.2
38 | # via black
39 | pluggy==1.5.0
40 | # via pytest
41 | pycparser==2.21
42 | # via cffi
43 | pyproject-hooks==1.0.0
44 | # via
45 | # build
46 | # pip-tools
47 | pytest==8.3.5
48 | # via
49 | # -r requirements-dev.in
50 | # pytest-asyncio
51 | pytest-asyncio==0.26.0
52 | # via -r requirements-dev.in
53 | ruff==0.4.7
54 | # via -r requirements-dev.in
55 | types-pyopenssl==23.0.0.2
56 | # via types-redis
57 | types-redis==4.6.0.20241004
58 | # via -r requirements-dev.in
59 | typing-extensions==4.12.2
60 | # via mypy
61 | wheel==0.38.4
62 | # via pip-tools
63 |
64 | # The following packages are considered to be unsafe in a requirements file:
65 | # pip
66 | # setuptools
67 |
--------------------------------------------------------------------------------
/requirements.in:
--------------------------------------------------------------------------------
1 | rq
2 | fastapi
3 | uvicorn
4 | httpx[http2]
5 | toml
6 | redis
7 | python-dotenv
8 | rich
9 | rqmonitor
10 |
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | #
2 | # This file is autogenerated by pip-compile with Python 3.11
3 | # by the following command:
4 | #
5 | # pip-compile --no-emit-options --output-file=requirements.txt requirements.in
6 | #
7 | anyio==3.6.2
8 | # via
9 | # httpx
10 | # starlette
11 | bcrypt==4.0.1
12 | # via paramiko
13 | certifi==2022.12.7
14 | # via
15 | # httpcore
16 | # httpx
17 | cffi==1.15.1
18 | # via
19 | # cryptography
20 | # pynacl
21 | click==8.1.3
22 | # via
23 | # flask
24 | # rq
25 | # rqmonitor
26 | # uvicorn
27 | cryptography==39.0.0
28 | # via paramiko
29 | fabric==2.7.1
30 | # via rqmonitor
31 | fastapi==0.95.2
32 | # via -r requirements.in
33 | flask==2.2.2
34 | # via rqmonitor
35 | h11==0.14.0
36 | # via
37 | # httpcore
38 | # uvicorn
39 | h2==4.1.0
40 | # via httpx
41 | hpack==4.0.0
42 | # via h2
43 | httpcore==1.0.2
44 | # via httpx
45 | httpx[http2]==0.28.1
46 | # via -r requirements.in
47 | humanize==4.4.0
48 | # via rqmonitor
49 | hyperframe==6.0.1
50 | # via h2
51 | idna==3.4
52 | # via
53 | # anyio
54 | # httpx
55 | invoke==1.7.3
56 | # via
57 | # fabric
58 | # rqmonitor
59 | itsdangerous==2.1.2
60 | # via flask
61 | jinja2==3.1.2
62 | # via flask
63 | markdown-it-py==2.2.0
64 | # via rich
65 | markupsafe==2.1.1
66 | # via
67 | # jinja2
68 | # werkzeug
69 | mdurl==0.1.2
70 | # via markdown-it-py
71 | paramiko==2.12.0
72 | # via fabric
73 | pathlib2==2.3.7.post1
74 | # via fabric
75 | pycparser==2.21
76 | # via cffi
77 | pydantic==1.10.4
78 | # via fastapi
79 | pygments==2.14.0
80 | # via rich
81 | pynacl==1.5.0
82 | # via paramiko
83 | python-dotenv==1.1.0
84 | # via -r requirements.in
85 | redis==6.2.0
86 | # via
87 | # -r requirements.in
88 | # rq
89 | # rqmonitor
90 | rich==14.0.0
91 | # via -r requirements.in
92 | rq==2.3.2
93 | # via
94 | # -r requirements.in
95 | # rqmonitor
96 | rqmonitor==1.0.6
97 | # via -r requirements.in
98 | six==1.16.0
99 | # via
100 | # paramiko
101 | # pathlib2
102 | # rqmonitor
103 | sniffio==1.3.0
104 | # via anyio
105 | starlette==0.27.0
106 | # via fastapi
107 | toml==0.10.2
108 | # via -r requirements.in
109 | typing-extensions==4.4.0
110 | # via pydantic
111 | uvicorn==0.34.2
112 | # via -r requirements.in
113 | werkzeug==2.2.2
114 | # via
115 | # flask
116 | # rqmonitor
117 |
--------------------------------------------------------------------------------
/tests/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/rednafi/hook-slinger/ec8f6c95e243fb1fc1eaca214cf5407cb33d9b98/tests/__init__.py
--------------------------------------------------------------------------------
/tests/test_integration.py:
--------------------------------------------------------------------------------
1 | """
2 | Integration tests.
3 | These are only expected to run inside a Docker container.
4 | Run `make start_tests` to execute the tests.
5 | """
6 |
7 | import logging
8 | import time
9 | from http import HTTPStatus
10 | from pprint import pformat
11 |
12 | import httpx
13 | from redis import Redis
14 | from rq.job import Job, JobStatus
15 |
16 | import config
17 |
18 | redis_conn = Redis.from_url(config.REDIS_URL)
19 |
20 |
21 | def test_webhook_throw():
22 | # Payload that is sent from the sending service to Hook Slinger.
23 | # Here, the 'test_webhook_throw' function acts as the service that sends
24 | # the webhook payload to the Hook Slinger container.
25 | webhook_request = {
26 | "to_url": "https://webhook.site/f864d28d-9162-4ad5-9205-458e2b561c07",
27 | "to_auth": "",
28 | "tag": "Dhaka",
29 | "group": "Bangladesh",
30 | "payload": {"greetings": "Hello, world!"},
31 | }
32 |
33 | with httpx.Client(http2=True) as session:
34 | # In this case, we're using the default API token.
35 | headers = {
36 | "Content-Type": "application/json",
37 | "Authorization": f"Token {config.API_TOKEN}",
38 | }
39 |
40 | response = session.post(
41 | "http://app:5000/hook_slinger",
42 | headers=headers,
43 | json=webhook_request,
44 | follow_redirects=True,
45 | )
46 |
47 | # Inspecting the HTTP response status code.
48 | assert response.status_code == HTTPStatus.ACCEPTED
49 | logging.info(f"Got the expected HTTP status code: {response.status_code}.")
50 |
51 | # Inspecting the HTTP response payload.
52 | result = response.json()
53 | logging.info(f"HTTP response payload: {pprint(result)}\n")
54 |
55 | # This section first looks for the 'job_id' in the response payload.
56 | # Using the 'job_id', it queries the Redis server for the job status.
57 | # The job is expected to be 'queued', 'started', or 'scheduled'.
58 | job_id = result["job_id"]
59 | job = Job.fetch(job_id, connection=redis_conn)
60 | maybe_queued = job.get_status()
61 |
62 | assert maybe_queued in (
63 | JobStatus.QUEUED,
64 | JobStatus.STARTED,
65 | JobStatus.SCHEDULED,
66 | )
67 | logging.info(f"Got the expected Job Status: {maybe_queued}\n")
68 |
69 | # This section polls the Redis server 10 times with a 1-second interval
70 | # between requests. It waits to see if the 'job_status' has changed
71 | # from 'queued' to 'finished'. The test passes if the transition
72 | # happens within 10 seconds.
73 | counter = 1
74 | logging.info("Started polling to see if the Job finishes...")
75 | while True:
76 | maybe_finished = job.get_status()
77 | if maybe_finished == JobStatus.FINISHED:
78 | logging.info(
79 | f"Current Job Status: {maybe_finished} \n"
80 | f"Expected Job Status: {JobStatus.FINISHED}\n"
81 | )
82 | logging.info("Successfully passed the integration test.")
83 | return None
84 |
85 | logging.info(
86 | f"Current Job Status: {maybe_finished} \n"
87 | f"Expected Job Status: {JobStatus.FINISHED}\n"
88 | )
89 | counter += 1
90 | time.sleep(1)
91 | if counter > 10:
92 | raise TimeoutError("HTTP response took too long.")
93 |
--------------------------------------------------------------------------------
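
The integration test above doubles as a description of the client-side flow: register a webhook with the API, read the `job_id` from the response, then poll the RQ job in Redis until a worker finishes it. A minimal sketch of that flow follows; the base URL is an assumption (the test targets `http://app:5000` from inside the Compose network, so adjust the host for your environment), and the destination URL is just the throwaway webhook.site address used in the test.

```python
import time

import httpx
from redis import Redis
from rq.job import Job, JobStatus

import config

BASE_URL = "http://app:5000"  # assumption: same host/port the integration test uses

webhook_request = {
    "to_url": "https://webhook.site/f864d28d-9162-4ad5-9205-458e2b561c07",
    "to_auth": "",
    "tag": "Dhaka",
    "group": "Bangladesh",
    "payload": {"greetings": "Hello, world!"},
}

headers = {
    "Content-Type": "application/json",
    "Authorization": f"Token {config.API_TOKEN}",
}

with httpx.Client(http2=True) as client:
    # Register the webhook; the API replies with 202 Accepted and a job_id.
    response = client.post(
        f"{BASE_URL}/hook_slinger",
        headers=headers,
        json=webhook_request,
        follow_redirects=True,
    )
    job_id = response.json()["job_id"]

# Poll the RQ job via Redis until a worker marks it as finished.
job = Job.fetch(job_id, connection=Redis.from_url(config.REDIS_URL))
for _ in range(10):
    if job.get_status() == JobStatus.FINISHED:
        print("Webhook delivered.")
        break
    time.sleep(1)
```
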
/tests/test_logging.py:
--------------------------------------------------------------------------------
1 | import logging
2 | from unittest.mock import patch
3 |
4 | from rich.logging import RichHandler
5 |
6 | import app
7 |
8 |
9 | @patch("app.LOG_LEVEL", "DEBUG")
10 | def test_logging(capsys):
11 | assert app.LOG_LEVEL == "DEBUG"
12 | assert isinstance(app.logging.root.handlers[0], RichHandler)
13 |
14 | app.logging.root.setLevel(logging.DEBUG)
15 | assert app.logging.root.getEffectiveLevel() == logging.DEBUG
16 |
17 | for name in logging.root.manager.loggerDict.keys():
18 | assert logging.getLogger(name).handlers == []
19 | assert logging.getLogger(name).propagate is True
20 |
21 | logging.debug("This is a debug message.")
22 | logging.info("This is an info message.")
23 | logging.warning("This is a warning message.")
24 | logging.error("This is an error message.")
25 | logging.critical("This is a critical message.")
26 |
27 | out, err = capsys.readouterr()
28 | assert err == ""
29 |     for message in ("debug", "info", "warning", "error", "critical"):
30 | assert message in out
31 |
--------------------------------------------------------------------------------
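
The logging test asserts a particular shape of configuration: a single `RichHandler` attached to the root logger, with every named logger left handler-free so records propagate upward. The sketch below shows one way to produce that shape; it is an assumption for illustration, not a copy of `app/__init__.py`, which is not shown in this section.

```python
import logging

from rich.logging import RichHandler

LOG_LEVEL = "INFO"  # assumption: mirrors the LOG_LEVEL env setting

# Attach a single RichHandler to the root logger.
logging.basicConfig(
    level=LOG_LEVEL,
    format="%(message)s",
    handlers=[RichHandler()],
)

# Strip handlers from any pre-configured named loggers and let them propagate
# to the root, which is exactly what test_logging checks.
for name in logging.root.manager.loggerDict:
    logging.getLogger(name).handlers = []
    logging.getLogger(name).propagate = True

logging.info("Hello from the root logger via RichHandler.")
```
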
/tests/test_main.py:
--------------------------------------------------------------------------------
1 | from fastapi.routing import APIRouter
2 | from starlette.middleware import Middleware
3 | from starlette.middleware.cors import CORSMiddleware
4 |
5 | from app import main
6 |
7 |
8 | def test_app_router():
9 | """Test Fastapi app router registration."""
10 |
11 | # Check that the API router has been registered properly.
12 | assert isinstance(main.app.router, APIRouter)
13 |
14 |
15 | def test_app_middleware():
16 | """Test Fastapi app middleware config. The expected
17 | 'main.app.user_middleware' should look like this:
18 |
19 | ```
20 | >>> main.app.user_middleware
21 | >>> Middleware(CORSMiddleware, allow_origins=['*'], allow_credentials=True, allow_methods=['*'], allow_headers=['*'])
22 | ```
23 | """
24 |
25 | # Check that the middleware settings were properly registered to the app.
26 |
27 | user_middleware = main.app.user_middleware.pop()
28 |
29 | assert user_middleware.__class__ == Middleware
30 | assert user_middleware.__dict__ == {
31 | "cls": CORSMiddleware,
32 | "options": {
33 | "allow_origins": ["*"],
34 | "allow_credentials": True,
35 | "allow_methods": ["*"],
36 | "allow_headers": ["*"],
37 | },
38 | }
39 |
--------------------------------------------------------------------------------
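
`test_app_middleware` inspects `main.app.user_middleware`, which FastAPI populates once per `add_middleware()` call. A minimal sketch of a registration that would satisfy the asserted options follows; treat it as an illustration of the expected CORS settings, not a copy of `app/main.py`.

```python
from fastapi import FastAPI
from starlette.middleware.cors import CORSMiddleware

app = FastAPI()

# Each add_middleware() call is recorded in app.user_middleware,
# which is what the test pops and inspects.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)
```
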
/tests/test_services.py:
--------------------------------------------------------------------------------
1 | from http import HTTPStatus
2 | from unittest.mock import patch
3 |
4 | import httpx
5 | import pytest
6 |
7 | from app import services, views
8 |
9 |
10 | def test_public_methods():
11 | assert services.__all__ == ("send_webhook", "validate_url")
12 |
13 |
14 | def test_webhook_post_failed_error():
15 | assert issubclass(services.WebhookPostFailedError, Exception)
16 |
17 | # Test docstring.
18 | expected_docstring = (
19 | "Raises this when sending the webhook post request fails "
20 | "due to some HTTP error."
21 | )
22 | expected_docstring = expected_docstring.lower()
23 | expected_docstring = " ".join(expected_docstring.split())
24 | expected_docstring = " ".join(expected_docstring.splitlines())
25 |
26 | current_docstring = services.WebhookPostFailedError.__doc__
27 | current_docstring = current_docstring.lower()
28 | current_docstring = " ".join(current_docstring.splitlines())
29 | current_docstring = " ".join(current_docstring.split())
30 |
31 | assert current_docstring == expected_docstring
32 |
33 | # Test traceback.
34 | assert services.WebhookPostFailedError("failed").__traceback__ is None
35 |
36 | # Test exception arguments.
37 | assert services.WebhookPostFailedError("failed").args == ("failed",)
38 |
39 | # Raise error.
40 | with pytest.raises(services.WebhookPostFailedError):
41 | raise services.WebhookPostFailedError("webhook failed")
42 |
43 |
44 | def test_validate_url():
45 | urls = (
46 | "https:sfsdfdsff",
47 | "http:google.com",
48 | "https://sfsdfdsff",
49 | )
50 |
51 | for url in urls:
52 | with pytest.raises(ValueError, match="Value of 'url' is not a valid URL."):
53 | services.validate_url(url)
54 |
55 | assert services.validate_url("http://localhost:8000") == "http://localhost:8000"
56 | assert services.validate_url("https://google.com") == "https://google.com"
57 |
58 |
59 | @patch("app.services.httpx.Client.post", autospec=True)
60 | def test_send_post_request(mock_post):
61 | webhook_request = {
62 | "to_url": "https://webhook.site/37ad9530-59c3-430d-9db6-e68317321a9f",
63 | "to_auth": "",
64 | "tag": "Dhaka",
65 | "group": "Bangladesh",
66 | "payload": {
67 | "greetings": "Hello, world!",
68 | },
69 | }
70 |
71 | webhook_payload = views.SlingerRequestPayload(**webhook_request)
72 | mock_post.return_value = httpx.Response(
73 | status_code=HTTPStatus.OK,
74 | json=webhook_request,
75 | )
76 | mock_post.return_value.status_code = HTTPStatus.OK
77 |
78 | assert services.send_post_request(webhook_payload=webhook_payload) is None
79 |
80 |
81 | @pytest.mark.dummy()
82 | @patch("app.services.redis_conn")
83 | def test_redis_conn(mock_redis_conn):
84 | """Dummy testing Redis connection, this doesn't do anything."""
85 |
86 | mock_redis_conn.return_value = 42
87 | assert mock_redis_conn() == 42
88 |
89 |
90 | @pytest.mark.dummy()
91 | @patch("app.services.queue")
92 | def test_redis_queue(mock_redis_queue):
93 | """Dummy testing Redis queue, this doesn't do anything."""
94 |
95 | mock_redis_queue.return_value = 42
96 | assert mock_redis_queue() == 42
97 |
98 |
99 | @patch("app.services.send_webhook", autospec=True)
100 | def test_send_webhook(mock_send_webhook):
101 | webhook_request = {
102 | "to_url": "https://webhook.site/37ad9530-59c3-430d-9db6-e68317321a9f",
103 | "to_auth": "",
104 | "tag": "Dhaka",
105 | "group": "Bangladesh",
106 | "payload": {
107 | "greetings": "Hello, world!",
108 | },
109 | }
110 |
111 | webhook_payload = views.SlingerRequestPayload(**webhook_request)
112 | webhook_response = {
113 | "job_id": "Bangladesh_Dhaka_139fc35a-d2a5-4d01-a6af-e980c52f55bc",
114 | "message": "Webhook registration successful.",
115 | "ok": True,
116 | "queued_at": "2021-07-23T20:15:04.389690",
117 | "status": "queued",
118 | }
119 |
120 | mock_send_webhook.return_value = httpx.Response(
121 | status_code=HTTPStatus.OK, json=webhook_response
122 | )
123 |
124 | response = services.send_webhook(webhook_payload=webhook_payload)
125 | assert response.status_code == HTTPStatus.OK
126 | assert response.json() == webhook_response
127 |
--------------------------------------------------------------------------------
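
`test_validate_url` pins down the observable behavior of `services.validate_url`: URLs without a host part ("https:sfsdfdsff", "http:google.com") and bare dot-less hosts ("https://sfsdfdsff") raise `ValueError`, while "http://localhost:8000" and "https://google.com" are returned unchanged. A minimal sketch consistent with those cases is below; the real implementation lives in `app/services.py` and may differ.

```python
from urllib.parse import urlparse


def validate_url(url: str) -> str:
    """Return the URL unchanged if it looks valid, otherwise raise ValueError."""
    parsed = urlparse(url)
    host = parsed.hostname or ""
    looks_valid = (
        parsed.scheme in ("http", "https")
        and bool(parsed.netloc)
        and ("." in host or host == "localhost")
    )
    if not looks_valid:
        raise ValueError("Value of 'url' is not a valid URL.")
    return url


assert validate_url("https://google.com") == "https://google.com"
assert validate_url("http://localhost:8000") == "http://localhost:8000"
```
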
/tests/test_views.py:
--------------------------------------------------------------------------------
1 | from http import HTTPStatus
2 | from unittest.mock import patch
3 |
4 | import httpx
5 | import pytest
6 | from rq.job import JobStatus
7 | from starlette.exceptions import HTTPException
8 |
9 | from app import views
10 |
11 |
12 | def test_slinger_request_payload():
13 | webhook_request_payload = {
14 | "to_url": "https://webhook.site/37ad9530-59c3-430d-9db6-e68317321a9f",
15 | "to_auth": "",
16 | "tag": "Dhaka",
17 | "group": "Bangladesh",
18 | "payload": {
19 | "greetings": "Hello, world!",
20 | },
21 | }
22 |
23 | slinger_request_payload_obj = views.SlingerRequestPayload(
24 | **webhook_request_payload,
25 | )
26 | assert slinger_request_payload_obj.to_url == webhook_request_payload["to_url"]
27 | assert slinger_request_payload_obj.to_auth == webhook_request_payload["to_auth"]
28 | assert slinger_request_payload_obj.tag == webhook_request_payload["tag"]
29 | assert slinger_request_payload_obj.group == webhook_request_payload["group"]
30 | assert slinger_request_payload_obj.payload == webhook_request_payload["payload"]
31 |
32 |
33 | def test_slinger_response_payload():
34 | webhook_response_payload = {
35 | "status": JobStatus.QUEUED,
36 | "ok": True,
37 | "message": "Webhook registration successful.",
38 | "job_id": "Bangladesh_Dhaka_0f8346f4-8b84-4dc1-9df3-a5c09024e45c",
39 | "queued_at": "2021-07-23T19:38:41.061838",
40 | }
41 |
42 | slinger_response_payload_obj = views.SlingerResponsePayload(
43 | **webhook_response_payload,
44 | )
45 | assert slinger_response_payload_obj.status == webhook_response_payload["status"]
46 | assert slinger_response_payload_obj.ok == webhook_response_payload["ok"]
47 | assert slinger_response_payload_obj.message == webhook_response_payload["message"]
48 | assert slinger_response_payload_obj.job_id == webhook_response_payload["job_id"]
49 | assert (
50 | slinger_response_payload_obj.queued_at == webhook_response_payload["queued_at"]
51 | )
52 |
53 |
54 | @pytest.mark.asyncio()
55 | @pytest.mark.parametrize(
56 | ("header_param", "return_value"),
57 | [
58 | ("Token $5$1O/inyTZhNvFt.GW$Zfckz9OL.lm2wh3IewTm8YJ914wjz5txFnXG5XW.wb4", True),
59 | ("abcd", pytest.raises(HTTPException)),
60 | ("dummy", pytest.raises(HTTPException)),
61 | ],
62 | )
63 | async def test_secret_based_security(header_param, return_value):
64 | if isinstance(return_value, bool):
65 | assert (
66 | await views.secret_based_security(header_param=header_param) == return_value
67 | )
68 |
69 | else:
70 | with return_value:
71 | assert (
72 | await views.secret_based_security(header_param=header_param)
73 | == return_value
74 | )
75 |
76 |
77 | @pytest.mark.dummy()
78 | @patch("httpx.Client.post", autospec=True)
79 | def test_hook_slinger_view(mock_post):
80 |
81 | # Define HTTP request attributes
82 | # -------------------------------
83 | webhook_request = {
84 | "to_url": "https://webhook.site/37ad9530-59c3-430d-9db6-e68317321a9f",
85 | "to_auth": "",
86 | "tag": "Dhaka",
87 | "group": "Bangladesh",
88 | "payload": {
89 | "greetings": "Hello, world!",
90 | },
91 | }
92 |
93 | webhook_response = {
94 | "job_id": "Bangladesh_Dhaka_139fc35a-d2a5-4d01-a6af-e980c52f55bc",
95 | "message": "Webhook registration successful.",
96 | "ok": True,
97 | "queued_at": "2021-07-23T20:15:04.389690",
98 | "status": "queued",
99 | }
100 |
101 | headers = {
102 | "Content-Type": "application/json",
103 | "Authorization": "Token 1234",
104 | }
105 |
106 | # Mock client.post
107 | # ----------------
108 | mock_post.return_value = httpx.Response(
109 | status_code=HTTPStatus.OK, json=webhook_response
110 | )
111 |
112 | # Make HTTP request
113 | # -----------------
114 | with httpx.Client(http2=True) as session:
115 | response = session.post(
116 | url="hook_slinger/",
117 | headers=headers,
118 | json=webhook_request,
119 | )
120 |
121 | # Assert
122 | # ------
123 | assert response.status_code == HTTPStatus.OK
124 | assert response.json() == webhook_response
125 |
--------------------------------------------------------------------------------
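
The view tests construct `SlingerRequestPayload` and `SlingerResponsePayload` from plain dicts and then read the same fields back, which fixes the field names the models must expose. The sketch below shows models consistent with those payloads; the field types are assumptions inferred from the test data (pydantic 1.x style, per the pinned pydantic==1.10.4), and the actual definitions live in `app/views.py`.

```python
from typing import Any

from pydantic import BaseModel


class SlingerRequestPayload(BaseModel):
    to_url: str
    to_auth: str
    tag: str
    group: str
    payload: dict[str, Any]


class SlingerResponsePayload(BaseModel):
    status: str    # e.g. "queued"; rq's JobStatus values are strings
    ok: bool
    message: str
    job_id: str
    queued_at: str  # ISO-8601 timestamp string in the tests


req = SlingerRequestPayload(
    to_url="https://webhook.site/37ad9530-59c3-430d-9db6-e68317321a9f",
    to_auth="",
    tag="Dhaka",
    group="Bangladesh",
    payload={"greetings": "Hello, world!"},
)
print(req.json())
```

Keeping `queued_at` and `status` as strings matches the equality assertions in `test_slinger_response_payload`, which compare the model attributes directly against the raw string values.
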