├── .gitignore
├── Pipfile
├── Pipfile.lock
├── README.md
├── file_read_backwards
├── __init__.py
├── buffer_work_space.py
└── file_read_backwards.py
└── interview_notify.py
/.gitignore:
--------------------------------------------------------------------------------
1 | .DS_Store
2 | # Byte-compiled / optimized / DLL files
3 | __pycache__/
4 | *.py[cod]
5 | *$py.class
6 |
7 | # C extensions
8 | *.so
9 |
10 | # Distribution / packaging
11 | .Python
12 | build/
13 | develop-eggs/
14 | dist/
15 | downloads/
16 | eggs/
17 | .eggs/
18 | lib/
19 | lib64/
20 | parts/
21 | sdist/
22 | var/
23 | wheels/
24 | share/python-wheels/
25 | *.egg-info/
26 | .installed.cfg
27 | *.egg
28 | MANIFEST
29 |
30 | # PyInstaller
31 | # Usually these files are written by a python script from a template
32 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
33 | *.manifest
34 | *.spec
35 |
36 | # Installer logs
37 | pip-log.txt
38 | pip-delete-this-directory.txt
39 |
40 | # Unit test / coverage reports
41 | htmlcov/
42 | .tox/
43 | .nox/
44 | .coverage
45 | .coverage.*
46 | .cache
47 | nosetests.xml
48 | coverage.xml
49 | *.cover
50 | *.py,cover
51 | .hypothesis/
52 | .pytest_cache/
53 | cover/
54 |
55 | # Translations
56 | *.mo
57 | *.pot
58 |
59 | # Django stuff:
60 | *.log
61 | local_settings.py
62 | db.sqlite3
63 | db.sqlite3-journal
64 |
65 | # Flask stuff:
66 | instance/
67 | .webassets-cache
68 |
69 | # Scrapy stuff:
70 | .scrapy
71 |
72 | # Sphinx documentation
73 | docs/_build/
74 |
75 | # PyBuilder
76 | .pybuilder/
77 | target/
78 |
79 | # Jupyter Notebook
80 | .ipynb_checkpoints
81 |
82 | # IPython
83 | profile_default/
84 | ipython_config.py
85 |
86 | # pyenv
87 | # For a library or package, you might want to ignore these files since the code is
88 | # intended to run in multiple environments; otherwise, check them in:
89 | # .python-version
90 |
91 | # pipenv
92 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
93 | # However, in case of collaboration, if having platform-specific dependencies or dependencies
94 | # having no cross-platform support, pipenv may install dependencies that don't work, or not
95 | # install all needed dependencies.
96 | #Pipfile.lock
97 |
98 | # poetry
99 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
100 | # This is especially recommended for binary packages to ensure reproducibility, and is more
101 | # commonly ignored for libraries.
102 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
103 | #poetry.lock
104 |
105 | # pdm
106 | # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
107 | #pdm.lock
108 | # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
109 | # in version control.
110 | # https://pdm.fming.dev/#use-with-ide
111 | .pdm.toml
112 |
113 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
114 | __pypackages__/
115 |
116 | # Celery stuff
117 | celerybeat-schedule
118 | celerybeat.pid
119 |
120 | # SageMath parsed files
121 | *.sage.py
122 |
123 | # Environments
124 | .env
125 | .venv
126 | env/
127 | venv/
128 | ENV/
129 | env.bak/
130 | venv.bak/
131 |
132 | # Spyder project settings
133 | .spyderproject
134 | .spyproject
135 |
136 | # Rope project settings
137 | .ropeproject
138 |
139 | # mkdocs documentation
140 | /site
141 |
142 | # mypy
143 | .mypy_cache/
144 | .dmypy.json
145 | dmypy.json
146 |
147 | # Pyre type checker
148 | .pyre/
149 |
150 | # pytype static type analyzer
151 | .pytype/
152 |
153 | # Cython debug symbols
154 | cython_debug/
155 |
156 | # PyCharm
157 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can
158 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
159 | # and can be added to the global gitignore or merged into this file. For a more nuclear
160 | # option (not recommended) you can uncomment the following to ignore the entire idea folder.
161 | #.idea/
--------------------------------------------------------------------------------
/Pipfile:
--------------------------------------------------------------------------------
1 | [[source]]
2 | url = "https://pypi.org/simple"
3 | verify_ssl = true
4 | name = "pypi"
5 |
6 | [packages]
7 | requests = "*"
8 |
9 | [dev-packages]
10 |
11 | [requires]
12 | python_version = "3.13"
13 |
--------------------------------------------------------------------------------
/Pipfile.lock:
--------------------------------------------------------------------------------
1 | {
2 | "_meta": {
3 | "hash": {
4 | "sha256": "1841259a6294939b3dde91acf33c2e02af40cae97022c9b3da3f7f2bb07c77c5"
5 | },
6 | "pipfile-spec": 6,
7 | "requires": {
8 | "python_version": "3.13"
9 | },
10 | "sources": [
11 | {
12 | "name": "pypi",
13 | "url": "https://pypi.org/simple",
14 | "verify_ssl": true
15 | }
16 | ]
17 | },
18 | "default": {
19 | "certifi": {
20 | "hashes": [
21 | "sha256:0a816057ea3cdefcef70270d2c515e4506bbc954f417fa5ade2021213bb8f0c6",
22 | "sha256:30350364dfe371162649852c63336a15c70c6510c2ad5015b21c2345311805f3"
23 | ],
24 | "markers": "python_version >= '3.6'",
25 | "version": "==2025.4.26"
26 | },
27 | "charset-normalizer": {
28 | "hashes": [
29 | "sha256:005fa3432484527f9732ebd315da8da8001593e2cf46a3d817669f062c3d9ed4",
30 | "sha256:046595208aae0120559a67693ecc65dd75d46f7bf687f159127046628178dc45",
31 | "sha256:0c29de6a1a95f24b9a1aa7aefd27d2487263f00dfd55a77719b530788f75cff7",
32 | "sha256:0c8c57f84ccfc871a48a47321cfa49ae1df56cd1d965a09abe84066f6853b9c0",
33 | "sha256:0f5d9ed7f254402c9e7d35d2f5972c9bbea9040e99cd2861bd77dc68263277c7",
34 | "sha256:18dd2e350387c87dabe711b86f83c9c78af772c748904d372ade190b5c7c9d4d",
35 | "sha256:1b1bde144d98e446b056ef98e59c256e9294f6b74d7af6846bf5ffdafd687a7d",
36 | "sha256:1c95a1e2902a8b722868587c0e1184ad5c55631de5afc0eb96bc4b0d738092c0",
37 | "sha256:1cad5f45b3146325bb38d6855642f6fd609c3f7cad4dbaf75549bf3b904d3184",
38 | "sha256:21b2899062867b0e1fde9b724f8aecb1af14f2778d69aacd1a5a1853a597a5db",
39 | "sha256:24498ba8ed6c2e0b56d4acbf83f2d989720a93b41d712ebd4f4979660db4417b",
40 | "sha256:25a23ea5c7edc53e0f29bae2c44fcb5a1aa10591aae107f2a2b2583a9c5cbc64",
41 | "sha256:289200a18fa698949d2b39c671c2cc7a24d44096784e76614899a7ccf2574b7b",
42 | "sha256:28a1005facc94196e1fb3e82a3d442a9d9110b8434fc1ded7a24a2983c9888d8",
43 | "sha256:32fc0341d72e0f73f80acb0a2c94216bd704f4f0bce10aedea38f30502b271ff",
44 | "sha256:36b31da18b8890a76ec181c3cf44326bf2c48e36d393ca1b72b3f484113ea344",
45 | "sha256:3c21d4fca343c805a52c0c78edc01e3477f6dd1ad7c47653241cf2a206d4fc58",
46 | "sha256:3fddb7e2c84ac87ac3a947cb4e66d143ca5863ef48e4a5ecb83bd48619e4634e",
47 | "sha256:43e0933a0eff183ee85833f341ec567c0980dae57c464d8a508e1b2ceb336471",
48 | "sha256:4a476b06fbcf359ad25d34a057b7219281286ae2477cc5ff5e3f70a246971148",
49 | "sha256:4e594135de17ab3866138f496755f302b72157d115086d100c3f19370839dd3a",
50 | "sha256:50bf98d5e563b83cc29471fa114366e6806bc06bc7a25fd59641e41445327836",
51 | "sha256:5a9979887252a82fefd3d3ed2a8e3b937a7a809f65dcb1e068b090e165bbe99e",
52 | "sha256:5baececa9ecba31eff645232d59845c07aa030f0c81ee70184a90d35099a0e63",
53 | "sha256:5bf4545e3b962767e5c06fe1738f951f77d27967cb2caa64c28be7c4563e162c",
54 | "sha256:6333b3aa5a12c26b2a4d4e7335a28f1475e0e5e17d69d55141ee3cab736f66d1",
55 | "sha256:65c981bdbd3f57670af8b59777cbfae75364b483fa8a9f420f08094531d54a01",
56 | "sha256:68a328e5f55ec37c57f19ebb1fdc56a248db2e3e9ad769919a58672958e8f366",
57 | "sha256:6a0289e4589e8bdfef02a80478f1dfcb14f0ab696b5a00e1f4b8a14a307a3c58",
58 | "sha256:6b66f92b17849b85cad91259efc341dce9c1af48e2173bf38a85c6329f1033e5",
59 | "sha256:6c9379d65defcab82d07b2a9dfbfc2e95bc8fe0ebb1b176a3190230a3ef0e07c",
60 | "sha256:6fc1f5b51fa4cecaa18f2bd7a003f3dd039dd615cd69a2afd6d3b19aed6775f2",
61 | "sha256:70f7172939fdf8790425ba31915bfbe8335030f05b9913d7ae00a87d4395620a",
62 | "sha256:721c76e84fe669be19c5791da68232ca2e05ba5185575086e384352e2c309597",
63 | "sha256:7222ffd5e4de8e57e03ce2cef95a4c43c98fcb72ad86909abdfc2c17d227fc1b",
64 | "sha256:75d10d37a47afee94919c4fab4c22b9bc2a8bf7d4f46f87363bcf0573f3ff4f5",
65 | "sha256:76af085e67e56c8816c3ccf256ebd136def2ed9654525348cfa744b6802b69eb",
66 | "sha256:770cab594ecf99ae64c236bc9ee3439c3f46be49796e265ce0cc8bc17b10294f",
67 | "sha256:7a6ab32f7210554a96cd9e33abe3ddd86732beeafc7a28e9955cdf22ffadbab0",
68 | "sha256:7c48ed483eb946e6c04ccbe02c6b4d1d48e51944b6db70f697e089c193404941",
69 | "sha256:7f56930ab0abd1c45cd15be65cc741c28b1c9a34876ce8c17a2fa107810c0af0",
70 | "sha256:8075c35cd58273fee266c58c0c9b670947c19df5fb98e7b66710e04ad4e9ff86",
71 | "sha256:8272b73e1c5603666618805fe821edba66892e2870058c94c53147602eab29c7",
72 | "sha256:82d8fd25b7f4675d0c47cf95b594d4e7b158aca33b76aa63d07186e13c0e0ab7",
73 | "sha256:844da2b5728b5ce0e32d863af26f32b5ce61bc4273a9c720a9f3aa9df73b1455",
74 | "sha256:8755483f3c00d6c9a77f490c17e6ab0c8729e39e6390328e42521ef175380ae6",
75 | "sha256:915f3849a011c1f593ab99092f3cecfcb4d65d8feb4a64cf1bf2d22074dc0ec4",
76 | "sha256:926ca93accd5d36ccdabd803392ddc3e03e6d4cd1cf17deff3b989ab8e9dbcf0",
77 | "sha256:982bb1e8b4ffda883b3d0a521e23abcd6fd17418f6d2c4118d257a10199c0ce3",
78 | "sha256:98f862da73774290f251b9df8d11161b6cf25b599a66baf087c1ffe340e9bfd1",
79 | "sha256:9cbfacf36cb0ec2897ce0ebc5d08ca44213af24265bd56eca54bee7923c48fd6",
80 | "sha256:a370b3e078e418187da8c3674eddb9d983ec09445c99a3a263c2011993522981",
81 | "sha256:a955b438e62efdf7e0b7b52a64dc5c3396e2634baa62471768a64bc2adb73d5c",
82 | "sha256:aa6af9e7d59f9c12b33ae4e9450619cf2488e2bbe9b44030905877f0b2324980",
83 | "sha256:aa88ca0b1932e93f2d961bf3addbb2db902198dca337d88c89e1559e066e7645",
84 | "sha256:aaeeb6a479c7667fbe1099af9617c83aaca22182d6cf8c53966491a0f1b7ffb7",
85 | "sha256:aaf27faa992bfee0264dc1f03f4c75e9fcdda66a519db6b957a3f826e285cf12",
86 | "sha256:b2680962a4848b3c4f155dc2ee64505a9c57186d0d56b43123b17ca3de18f0fa",
87 | "sha256:b2d318c11350e10662026ad0eb71bb51c7812fc8590825304ae0bdd4ac283acd",
88 | "sha256:b33de11b92e9f75a2b545d6e9b6f37e398d86c3e9e9653c4864eb7e89c5773ef",
89 | "sha256:b3daeac64d5b371dea99714f08ffc2c208522ec6b06fbc7866a450dd446f5c0f",
90 | "sha256:be1e352acbe3c78727a16a455126d9ff83ea2dfdcbc83148d2982305a04714c2",
91 | "sha256:bee093bf902e1d8fc0ac143c88902c3dfc8941f7ea1d6a8dd2bcb786d33db03d",
92 | "sha256:c72fbbe68c6f32f251bdc08b8611c7b3060612236e960ef848e0a517ddbe76c5",
93 | "sha256:c9e36a97bee9b86ef9a1cf7bb96747eb7a15c2f22bdb5b516434b00f2a599f02",
94 | "sha256:cddf7bd982eaa998934a91f69d182aec997c6c468898efe6679af88283b498d3",
95 | "sha256:cf713fe9a71ef6fd5adf7a79670135081cd4431c2943864757f0fa3a65b1fafd",
96 | "sha256:d11b54acf878eef558599658b0ffca78138c8c3655cf4f3a4a673c437e67732e",
97 | "sha256:d41c4d287cfc69060fa91cae9683eacffad989f1a10811995fa309df656ec214",
98 | "sha256:d524ba3f1581b35c03cb42beebab4a13e6cdad7b36246bd22541fa585a56cccd",
99 | "sha256:daac4765328a919a805fa5e2720f3e94767abd632ae410a9062dff5412bae65a",
100 | "sha256:db4c7bf0e07fc3b7d89ac2a5880a6a8062056801b83ff56d8464b70f65482b6c",
101 | "sha256:dc7039885fa1baf9be153a0626e337aa7ec8bf96b0128605fb0d77788ddc1681",
102 | "sha256:dccab8d5fa1ef9bfba0590ecf4d46df048d18ffe3eec01eeb73a42e0d9e7a8ba",
103 | "sha256:dedb8adb91d11846ee08bec4c8236c8549ac721c245678282dcb06b221aab59f",
104 | "sha256:e45ba65510e2647721e35323d6ef54c7974959f6081b58d4ef5d87c60c84919a",
105 | "sha256:e53efc7c7cee4c1e70661e2e112ca46a575f90ed9ae3fef200f2a25e954f4b28",
106 | "sha256:e635b87f01ebc977342e2697d05b56632f5f879a4f15955dfe8cef2448b51691",
107 | "sha256:e70e990b2137b29dc5564715de1e12701815dacc1d056308e2b17e9095372a82",
108 | "sha256:e8082b26888e2f8b36a042a58307d5b917ef2b1cacab921ad3323ef91901c71a",
109 | "sha256:e8323a9b031aa0393768b87f04b4164a40037fb2a3c11ac06a03ffecd3618027",
110 | "sha256:e92fca20c46e9f5e1bb485887d074918b13543b1c2a1185e69bb8d17ab6236a7",
111 | "sha256:eb30abc20df9ab0814b5a2524f23d75dcf83cde762c161917a2b4b7b55b1e518",
112 | "sha256:eba9904b0f38a143592d9fc0e19e2df0fa2e41c3c3745554761c5f6447eedabf",
113 | "sha256:ef8de666d6179b009dce7bcb2ad4c4a779f113f12caf8dc77f0162c29d20490b",
114 | "sha256:efd387a49825780ff861998cd959767800d54f8308936b21025326de4b5a42b9",
115 | "sha256:f0aa37f3c979cf2546b73e8222bbfa3dc07a641585340179d768068e3455e544",
116 | "sha256:f4074c5a429281bf056ddd4c5d3b740ebca4d43ffffe2ef4bf4d2d05114299da",
117 | "sha256:f69a27e45c43520f5487f27627059b64aaf160415589230992cec34c5e18a509",
118 | "sha256:fb707f3e15060adf5b7ada797624a6c6e0138e2a26baa089df64c68ee98e040f",
119 | "sha256:fcbe676a55d7445b22c10967bceaaf0ee69407fbe0ece4d032b6eb8d4565982a",
120 | "sha256:fdb20a30fe1175ecabed17cbf7812f7b804b8a315a25f24678bcdf120a90077f"
121 | ],
122 | "markers": "python_version >= '3.7'",
123 | "version": "==3.4.2"
124 | },
125 | "idna": {
126 | "hashes": [
127 | "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9",
128 | "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"
129 | ],
130 | "markers": "python_version >= '3.6'",
131 | "version": "==3.10"
132 | },
133 | "requests": {
134 | "hashes": [
135 | "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760",
136 | "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"
137 | ],
138 | "index": "pypi",
139 | "markers": "python_version >= '3.8'",
140 | "version": "==2.32.3"
141 | },
142 | "urllib3": {
143 | "hashes": [
144 | "sha256:414bc6535b787febd7567804cc015fee39daab8ad86268f1310a9250697de466",
145 | "sha256:4e16665048960a0900c702d4a66415956a584919c03361cac9f1df5c5dd7e813"
146 | ],
147 | "markers": "python_version >= '3.9'",
148 | "version": "==2.4.0"
149 | }
150 | },
151 | "develop": {}
152 | }
153 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # interview-notify
2 | Push notifications from IRC for your private tracker interviews
3 |
4 |
5 |
6 | ## features
7 |
8 | this script parses log files from your irc client and attempts to be client-agnostic.
9 |
10 | it sends push notifications when:
11 | - interviews are happening
12 | - YOUR interview is happening!
13 | - someone mentions you
14 | - you lose your spot in the queue due to a netsplit
15 | - you get kicked
16 |
17 | ## installing
18 |
19 | - install python3. i suggest homebrew, winget, or just use the installer: https://www.python.org/downloads/
20 | - _this script might require python3.11_
21 | - install the `requests` module with `pip3 install requests` (or use `pipenv install` to automatically install dependencies)
22 | - clone this repo
23 | - `git clone https://github.com/ftc2/interview-notify.git`
24 | - `python3 interview_notify.py`
25 |
26 | ## using
27 |
28 | pretty self explanatory if you read the help:
29 |
30 | ```
31 | ./interview_notify.py -h
32 |
usage: interview_notify.py [-h] --topic TOPIC [--server SERVER] --log-dir PATH --nick NICK [--check-bot-nicks | --no-check-bot-nicks] [--bot-nicks NICKS] [--mode {red,ops}] [-v] [--version]
34 |
35 | IRC Interview Notifier v1.2.10
36 | https://github.com/ftc2/interview-notify
37 |
38 | options:
39 | -h, --help show this help message and exit
40 | --topic TOPIC ntfy topic name to POST notifications to
41 | --server SERVER ntfy server to POST notifications to – default: https://ntfy.sh
42 | --log-dir PATH path to IRC logs (continuously checks for newest file to parse)
43 | --nick NICK your IRC nick
44 | --check-bot-nicks, --no-check-bot-nicks
45 | attempt to parse bot's nick. disable if your log files are not like ' message' – default: enabled
46 | --bot-nicks NICKS comma-separated list of bot nicks to watch – default: Gatekeeper
  --mode {red,ops}  interview mode (affects triggers) – default: red
48 | -v verbose (invoke multiple times for more verbosity)
49 | --version show program's version number and exit
50 |
51 | Sends a push notification with https://ntfy.sh/ when it's your turn to interview.
52 | They have a web client and mobile clients. You can have multiple clients subscribed to this.
53 | Wherever you want notifications: open the client, 'Subscribe to topic', pick a unique topic
54 | name for this script, and use that everywhere.
55 | On mobile, I suggest enabling the 'Instant delivery' feature as well as 'Keep alerting for
56 | highest priority'. These will enable fastest and most reliable delivery of the
57 | notification, and your phone will continuously alarm when your interview is ready.
58 | ```
59 |
60 | ## testing/troubleshooting
61 |
62 | first, use `-v` and make sure you can see new messages from IRC showing up:
63 |
64 | `interview_notify.py --topic your_topic --log-dir /path/to/logs --nick your_nick -v`
65 |
66 | ### testing notifications
67 |
68 | `interview_notify.py --topic your_topic --log-dir /path/to/logs --nick your_nick --bot-nicks Gatekeeper,your_nick -v`
69 |
70 | then type `Currently interviewing: your_nick` in IRC.
71 |
72 | if it doesn't work, maybe you have a wonky log file format. try with `--no-check-bot-nicks`:
73 |
74 | `interview_notify.py --topic your_topic --log-dir /path/to/logs --nick your_nick --no-check-bot-nicks -v`
75 |
--------------------------------------------------------------------------------
/file_read_backwards/__init__.py:
--------------------------------------------------------------------------------
# -*- coding: utf-8 -*-

"""Top-level package for file_read_backwards.

Re-exports `FileReadBackwards`, which reads a file line by line starting
from the last line.
"""

from .file_read_backwards import FileReadBackwards  # noqa: F401

# Package metadata.
__author__ = """Robin Robin"""
__email__ = 'robinsquare42@gmail.com'
__version__ = '3.0.0'
--------------------------------------------------------------------------------
/file_read_backwards/buffer_work_space.py:
--------------------------------------------------------------------------------
#!/usr/bin/env python
# -*- coding: utf-8 -*-

"""BufferWorkSpace module."""

import os

# Newline separators recognised when splitting lines, kept both as str and as
# pre-encoded bytes (the file is scanned in binary mode).  Longest-match-first
# handling of "\r\n" vs "\n"/"\r" is done at the use sites.
new_lines = ["\r\n", "\n", "\r"]
new_lines_bytes = [n.encode("ascii") for n in new_lines]  # we only support encodings that's backward compat with ascii
10 |
11 |
class BufferWorkSpace:

    """Internal helper for FileReadBackwards: accumulates chunks read backwards."""

    def __init__(self, fp, chunk_size):
        """Set up an empty workspace over an opened binary file.

        Convention for the data: while `read_buffer` is not None it holds the
        file contents from `read_position` onwards that have not yet been
        returned; `read_position` is the file offset already read into the
        buffer, initialised to just past the end of the file.
        """
        self.fp = fp
        self.read_position = _get_file_size(self.fp)  # nothing read yet: start just past EOF
        self.read_buffer = None
        self.chunk_size = chunk_size

    def add_to_buffer(self, content, read_position):
        """Prepend `content` (bytes read starting at file offset `read_position`)."""
        self.read_position = read_position
        # Chunks arrive in reverse file order, so new content goes in front.
        self.read_buffer = content if self.read_buffer is None else content + self.read_buffer

    def yieldable(self):
        """Return True if at least one complete line can be returned now."""
        if self.read_buffer is None:
            return False

        trimmed = _remove_trailing_new_line(self.read_buffer)
        if _find_furthest_new_line(trimmed) >= 0:
            return True

        # Entire file consumed but some unprocessed content remains: the
        # leftover is the first line of the file and may be yielded.
        return self.read_position == 0

    def return_line(self):
        """Pop and return the next (i.e. previous-in-file) line.

        Precondition: self.yieldable() must be True
        """
        assert self.yieldable()

        trimmed = _remove_trailing_new_line(self.read_buffer)
        newline_at = _find_furthest_new_line(trimmed)

        if newline_at < 0:
            # Whole file read and this is the very first line: drain the buffer.
            self.read_buffer = None
            return trimmed

        cut = newline_at + 1
        line = trimmed[cut:]
        self.read_buffer = trimmed[:cut]  # keep everything up to and including the newline
        return line

    def read_until_yieldable(self):
        """Keep pulling earlier chunks from the file until a line is available."""
        while not self.yieldable():
            data, position = _get_next_chunk(self.fp, self.read_position, self.chunk_size)
            self.add_to_buffer(data, position)

    def has_returned_every_line(self):
        """Return True once the whole file has been read and drained."""
        return self.read_position == 0 and self.read_buffer is None
89 |
90 |
91 | def _get_file_size(fp):
92 | return os.fstat(fp.fileno()).st_size
93 |
94 |
def _get_next_chunk(fp, previously_read_position, chunk_size):
    """Read the next (earlier-in-file) chunk, working backwards from a position.

    Args:
        fp: file-like object opened in binary mode
        previously_read_position: file offset already consumed by earlier reads
        chunk_size: desired read chunk_size

    Returns:
        (bytestring, int): the data read, and the file offset it was read from
    """
    seek_to, amount = _get_what_to_read_next(fp, previously_read_position, chunk_size)
    fp.seek(seek_to)
    return fp.read(amount), seek_to
111 |
112 |
def _get_what_to_read_next(fp, previously_read_position, chunk_size):
    """Return information on which file pointer position to read from and how many bytes.

    Args:
        fp: file-like object opened in binary mode
        previously_read_position (int): The file pointer position that has been read previously
        chunk_size (int): ideal io chunk_size

    Returns:
        (int, int): The next seek position, how many bytes to read next
    """
    # Tentatively read one chunk_size ending exactly at the previously read position.
    seek_position = max(previously_read_position - chunk_size, 0)
    read_size = chunk_size

    # examples: say, our new_lines are potentially "\r\n", "\n", "\r"
    # find a reading point where it is not "\n", rewind further if necessary
    # if we have "\r\n" and we read in "\n",
    # the next iteration would treat "\r" as a different new line.
    # Q: why don't I just check if it is b"\n", but use a function ?
    # A: so that we can potentially expand this into generic sets of separators, later on.
    while seek_position > 0:
        fp.seek(seek_position)
        if _is_partially_read_new_line(fp.read(1)):
            seek_position -= 1
            read_size += 1  # as we rewind further, let's make sure we read more to compensate
        else:
            break

    # take care of special case when we are back to the beginning of the file:
    # never read past previously_read_position (that data was already consumed).
    read_size = min(previously_read_position - seek_position, read_size)
    return seek_position, read_size
144 |
145 |
def _remove_trailing_new_line(line):
    """Strip at most one newline sequence from the end of *line* if present.

    Separators are tried longest-first so that b"\\r\\n" is removed as a unit
    rather than as a bare b"\\n".

    Returns:
        bytestring
    """
    for sep in sorted(new_lines_bytes, key=len, reverse=True):
        if line.endswith(sep):
            return line[:-len(sep)]
    return line
159 |
160 |
def _find_furthest_new_line(read_buffer):
    """Return the rightmost index of any newline separator in read_buffer, or -1.

    Args:
        read_buffer (bytestring)

    Returns:
        int: position of the right-most newline byte sequence, -1 when absent
    """
    return max(read_buffer.rfind(sep) for sep in new_lines_bytes)
172 |
173 |
def _is_partially_read_new_line(b):
    """Return True when *b* occurs at index >= 1 inside some newline separator.

    Used to detect that a backwards seek may have landed in the middle of a
    multi-byte separator such as b"\\r\\n".

    Args:
        b (bytestring)

    Returns:
        bool
    """
    return any(sep.find(b) >= 1 for sep in new_lines_bytes)
187 |
--------------------------------------------------------------------------------
/file_read_backwards/file_read_backwards.py:
--------------------------------------------------------------------------------
#!/usr/bin/env python
# -*- coding: utf-8 -*-

"""FileReadBackwards module."""

import io
import os

from .buffer_work_space import BufferWorkSpace

# Only encodings whose byte streams are backward compatible with ascii are
# supported, because lines are located by scanning for ascii newline bytes.
supported_encodings = ["utf-8", "ascii", "latin-1"]  # any encodings that are backward compatible with ascii should work
12 |
13 |
class FileReadBackwards:

    """Read a file line by line, starting from the last line.

    On construction this opens the file and spawns a `FileReadBackwardsIterator`
    over the handle.  It can be used as a context manager, in which case the
    handle is closed on exit; in any mode, `close()` closes the handle directly.
    """

    def __init__(self, path, encoding="utf-8", chunk_size=io.DEFAULT_BUFFER_SIZE):
        """Open *path* for backwards reading.

        Args:
            path: Path to the file to be read
            encoding (str): Encoding (must be in `supported_encodings`)
            chunk_size (int): How many bytes to read at a time

        Raises:
            NotImplementedError: when the encoding is not supported/tested.
        """
        if encoding.lower() not in supported_encodings:
            raise NotImplementedError(
                "{0} encoding was not supported/tested."
                "Supported encodings are '{1}'".format(encoding, ",".join(supported_encodings)))

        self.path = path
        self.encoding = encoding.lower()
        self.chunk_size = chunk_size
        # Binary mode: line boundaries are found on raw bytes, decoding happens per line.
        self.iterator = FileReadBackwardsIterator(io.open(self.path, mode="rb"), self.encoding, self.chunk_size)

    def __iter__(self):
        """Return the underlying backwards iterator."""
        return self.iterator

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        """Close the file handler; returning False propagates any exception."""
        self.close()
        return False

    def close(self):
        """Close the opened file handler."""
        self.iterator.close()

    def readline(self):
        """Return the next line with a trailing `os.linesep`, or '' when exhausted."""
        try:
            return next(self.iterator) + os.linesep
        except StopIteration:
            return ""
67 |
68 |
class FileReadBackwardsIterator:
    """Iterator for `FileReadBackwards`.

    Yields the lines of a file from last to first, holding an opened file
    handler that it closes once exhausted.
    """
    def __init__(self, fp, encoding, chunk_size):
        """Constructor for FileReadBackwardsIterator.

        Args:
            fp (File): an opened binary-mode file to read backwards from
            encoding (str): Encoding of the file
            chunk_size (int): How many bytes to read at a time
        """
        self.path = fp.name
        self.encoding = encoding
        self.chunk_size = chunk_size
        self.__fp = fp
        self.__buf = BufferWorkSpace(self.__fp, self.chunk_size)

    def __iter__(self):
        return self

    def next(self):
        """Return the next line (unicode), moving from the end towards the start.

        Gets exhausted if::

            * already reached the beginning of the file on previous iteration
            * the file got closed

        When it gets exhausted, it closes the file handler.
        """
        # The file is kept in binary mode because variable-width encodings such
        # as utf-8 make it unsafe to seek to arbitrary character boundaries;
        # decoding only happens once a whole line has been assembled.
        if self.closed:
            raise StopIteration
        if self.__buf.has_returned_every_line():
            self.close()
            raise StopIteration
        self.__buf.read_until_yieldable()
        return self.__buf.return_line().decode(self.encoding)

    # Keep the py2-style alias so both `next(it)` and `it.next()` work.
    __next__ = next

    @property
    def closed(self):
        """True when the underlying file handler has been closed."""
        return self.__fp.closed

    def close(self):
        """Closes the file handler."""
        self.__fp.close()
127 |
--------------------------------------------------------------------------------
/interview_notify.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | import argparse, sys, threading, logging, re, requests
4 | from pathlib import Path
5 | from time import sleep
6 | from file_read_backwards import FileReadBackwards
7 | from hashlib import sha256
8 | from urllib.parse import urljoin
9 |
# Script version (shown in --help/--version) and the public ntfy instance used
# when --server is not given.
VERSION = '1.2.10'
default_server = 'https://ntfy.sh/'

# Command-line interface.  RawDescriptionHelpFormatter keeps the manual line
# breaks of the multi-line epilog intact in --help output.
parser = argparse.ArgumentParser(prog='interview_notify.py',
  description='IRC Interview Notifier v{}\nhttps://github.com/ftc2/interview-notify'.format(VERSION),
  epilog='''Sends a push notification with https://ntfy.sh/ when it's your turn to interview.
They have a web client and mobile clients. You can have multiple clients subscribed to this.
Wherever you want notifications: open the client, 'Subscribe to topic', pick a unique topic
name for this script, and use that everywhere.
On mobile, I suggest enabling the 'Instant delivery' feature as well as 'Keep alerting for
highest priority'. These will enable fastest and most reliable delivery of the
notification, and your phone will continuously alarm when your interview is ready.''',
  formatter_class=argparse.RawDescriptionHelpFormatter)
parser.add_argument('--topic', required=True, help='ntfy topic name to POST notifications to')
parser.add_argument('--server', default=default_server, help='ntfy server to POST notifications to – default: {}'.format(default_server))
parser.add_argument('--log-dir', required=True, dest='path', type=Path, help='path to IRC logs (continuously checks for newest file to parse)')
parser.add_argument('--nick', required=True, help='your IRC nick')
# NOTE(review): the help wording "not like ' message'" looks like it lost a
# '<nick>' token (angle brackets stripped somewhere) — confirm intended text.
parser.add_argument('--check-bot-nicks', default=True, action=argparse.BooleanOptionalAction, help="attempt to parse bot's nick. disable if your log files are not like ' message' – default: enabled")
parser.add_argument('--bot-nicks', metavar='NICKS', default='Gatekeeper', help='comma-separated list of bot nicks to watch – default: Gatekeeper')
parser.add_argument('--mode', choices=['red', 'ops'], default='red', help='interview mode (affects triggers) – default: red')
# NOTE(review): default=5 with action='count' means verbosity starts at 5 even
# without any -v flag; the consumer of args.verbose is not visible here —
# confirm this is intentional (0 would be the conventional default).
parser.add_argument('-v', action='count', default=5, dest='verbose', help='verbose (invoke multiple times for more verbosity)')
parser.add_argument('--version', action='version', version='{} v{}'.format(parser.prog, VERSION))
32 |
def log_scan():
    """Watch the log dir for the newest file and keep one parser thread on it.

    Polls every 0.5s; whenever a newer log file appears, the current parser
    thread is signalled to stop, joined, and replaced by one on the new file.
    """
    logging.info('scanner: watching logs in "{}"'.format(args.path))
    current = find_latest_log()
    logging.debug('scanner: current log: "{}"'.format(current.name))
    worker, stop_event = spawn_parser(current)
    worker.start()
    while True:
        sleep(0.5)  # polling delay for checking for newer logfile
        newest = find_latest_log()
        if newest == current:
            continue
        current = newest
        logging.info('scanner: newer log found: "{}"'.format(current.name))
        stop_event.set()
        worker.join()
        worker, stop_event = spawn_parser(current)
        worker.start()
50 |
def find_latest_log():
    """Return the most recently modified log file in the configured directory.

    Exits via crit_quit() when the directory contains no log files at all.
    """
    candidates = [p for p in args.path.iterdir()
                  if p.is_file() and p.name not in ('.DS_Store', 'thumbs.db')]
    if not candidates:
        crit_quit('no log files found')
    return max(candidates, key=lambda p: p.stat().st_mtime)
57 |
58 | def spawn_parser(log_path):
59 | """Spawn new parser thread"""
60 | logging.debug('spawning new parser')
61 | parser_stop = threading.Event()
62 | thread = threading.Thread(target=log_parse, args=(log_path, parser_stop))
63 | return thread, parser_stop
64 |
65 | def log_parse(log_path, parser_stop):
66 | """Parse log file and notify on triggers (parser thread)"""
67 | logging.info('parser: using "{}"'.format(log_path.name))
68 | for line in tail(log_path, parser_stop):
69 | logging.debug(line)
70 | if check_trigger(line, 'Currently interviewing: {}'.format(args.nick)):
71 | logging.info('YOUR INTERVIEW IS HAPPENING ❗')
72 | notify(line, title='Your interview is happening❗', tags='rotating_light', priority=5)
73 | elif check_trigger(line, 'Currently interviewing:'):
74 | logging.info('interview detected ⚠️')
75 | notify(line, title='Interview detected', tags='warning')
76 | elif check_trigger(line, '{}:'.format(args.nick), disregard_bot_nicks=True):
77 | logging.info('mention detected ⚠️')
78 | notify(line, title="You've been mentioned", tags='wave')
79 | elif check_words(line, triggers=['quit', 'disconnect', 'part', 'left', 'leave']):
80 | logging.info('netsplit detected ⚠️')
81 | notify(line, title="Netsplit detected – requeue within 10min!", tags='electric_plug', priority=5)
82 | elif check_words(line, triggers=['kick'], check_nick=True):
83 | logging.info('kick detected ⚠️')
84 | notify(line, title="You've been kicked – rejoin & requeue ASAP!", tags='anger', priority=5)
85 |
86 | def tail(path, parser_stop):
87 | """Poll file and yield lines as they appear"""
88 | with FileReadBackwards(path) as f:
89 | last_line = f.readline()
90 | if last_line:
91 | yield last_line
92 | with open(path) as f:
93 | f.seek(0, 2) # os.SEEK_END
94 | while not parser_stop.is_set():
95 | line = f.readline()
96 | if not line:
97 | sleep(0.1) # polling delay for checking for new lines
98 | continue
99 | yield line
100 |
101 | def check_trigger(line, trigger, disregard_bot_nicks=False):
102 | """Check for a trigger in a line"""
103 | if disregard_bot_nicks or not args.check_bot_nicks:
104 | return trigger in remove_html_tags(line)
105 | else:
106 | triggers = bot_nick_prefix(trigger)
107 | return any(trigger in line for trigger in triggers)
108 |
109 | def check_words(line, triggers, check_nick=False):
110 | """Check if a trigger & a bot nick & (optionally) user nick all appear in a string"""
111 | for trigger in triggers:
112 | for bot in args.bot_nicks.split(','):
113 | if check_nick:
114 | if args.nick in line and bot in line and trigger.lower() in line.lower():
115 | return True
116 | else:
117 | if bot in line and trigger.lower() in line.lower():
118 | return True
119 | return False
120 |
121 | def remove_html_tags(text):
122 | """Remove html tags from a string"""
123 | clean = re.compile('<.*?>')
124 | return re.sub(clean, '', text)
125 |
126 | def bot_nick_prefix(trigger):
127 | """Prefix a trigger with bot nick(s) to reduce false positives"""
128 | nicks = args.bot_nicks.split(',')
129 | return ['{}> {}'.format(nick, trigger) for nick in nicks]
130 |
131 | def notify(data, topic=None, server=None, **kwargs):
132 | """Send notification via ntfy"""
133 | if topic is None: topic=args.topic
134 | if server is None: server=args.server
135 | if server[-1] != '/': server += '/'
136 | target = urljoin(server, topic, allow_fragments=False)
137 | headers = {k.capitalize():str(v).encode('utf-8') for (k,v) in kwargs.items()}
138 | requests.post(target,
139 | data=data.encode(encoding='utf-8'),
140 | headers=headers)
141 |
142 | def anon_telemetry():
143 | """Send anonymous telemetry
144 |
145 | Why? I won't bother working on it if I don't see people using it!
146 | I can't get your nick or IP or anything.
147 |
148 | sends: anon id based on nick, script mode, script version
149 | """
150 | seed = 'H6IhIkah11ee1AxnDKClsujZ6gX9zHf8'
151 | nick_sha = sha256(args.nick.encode('utf-8')).hexdigest()
152 | anon_id = sha256('{}{}'.format(nick_sha, seed).encode('utf-8')).hexdigest()
153 | notify('anon_id={}, mode={}, version={}'.format(anon_id, args.mode, VERSION),
154 | server=default_server,
155 | title='Anonymous Telemetry', topic='interview-notify-telemetry', tags='telephone_receiver')
156 |
157 | def crit_quit(msg):
158 | logging.critical(msg)
159 | sys.exit()
160 |
161 | # ----------
162 |
163 | # Parse CLI args once; all functions above read this module-level namespace.
163 | args = parser.parse_args()
164 |
165 | # Map the -v count to a logging level: with the default count of 5 this is
165 | # 70-50 = 20 (INFO); each additional -v lowers the level by 10 (more verbose).
165 | args.verbose = 70 - (10*args.verbose) if args.verbose > 0 else 0
166 | logging.basicConfig(level=args.verbose, format='%(asctime)s %(levelname)s: %(message)s', datefmt='%Y-%m-%d %H:%M:%S')
167 |
168 | # 'ops' is accepted by argparse but not implemented yet — bail out early.
168 | if args.mode != 'red':
169 |     crit_quit('"{}" mode not implemented'.format(args.mode))
170 |
171 | # --log-dir must be an existing directory (not a file, not missing).
171 | if args.path.is_file():
172 |     crit_quit('log path invalid: dir expected, got file')
173 | elif not args.path.is_dir():
174 |     crit_quit('log path invalid')
175 |
176 | # Non-daemon scanner thread runs forever and keeps the process alive.
176 | scanner = threading.Thread(target=log_scan)
177 | scanner.start()
178 |
179 | # One-shot anonymous usage ping (see anon_telemetry docstring).
179 | anon_telemetry()
180 |
--------------------------------------------------------------------------------