├── .github └── workflows │ └── ci.yml ├── .gitignore ├── Pipfile ├── Pipfile.lock ├── README.md ├── pytest.ini ├── scripts ├── compare.py ├── count_subreddits.py ├── database_sandbox.py ├── explain_parse.py ├── find_cakedays.py ├── find_remind_me.py ├── iterate_backups.py ├── iterate_backups_longest.py ├── migration_1.py ├── migration_2.py ├── migration_4.py ├── pushshift.py ├── pushshift_beta_integrity.py ├── pushshift_lag.py └── update_wiki.py ├── src ├── __init__.py ├── classes │ ├── comment.py │ ├── key_value.py │ ├── reminder.py │ ├── stat.py │ ├── subreddit.py │ └── user.py ├── comments.py ├── counters.py ├── database │ ├── UtcDateTime.py │ ├── __init__.py │ ├── _comments.py │ ├── _keystore.py │ ├── _reminders.py │ ├── _stats.py │ ├── _subreddits.py │ └── _users.py ├── main.py ├── messages.py ├── notifications.py ├── static.py ├── stats.py └── utils.py ├── test ├── comment_test.py ├── conftest.py ├── dateparsing_test.py ├── message_test.py ├── reminder_test.py └── stat_test.py ├── timezones.txt └── todo.txt /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: ci 2 | 3 | on: 4 | push: 5 | branches: 6 | - master 7 | pull_request: 8 | 9 | jobs: 10 | build: 11 | runs-on: ubuntu-latest 12 | steps: 13 | - name: Checkout 14 | uses: actions/checkout@v2 15 | - name: Set up Python 16 | uses: actions/setup-python@v1 17 | with: 18 | python-version: 3.9 19 | - name: Test with pytest 20 | run: | 21 | export PATH=/home/runner/.local/bin:$PATH 22 | pip install --user pipenv 23 | pipenv install --dev 24 | pipenv run pytest 25 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .idea/* 2 | logs/* 3 | games/* 4 | __pycache__/* 5 | *.db 6 | *.ini 7 | src/__pycache__/ 8 | src/classes/__pycache__/ 9 | src/database/__pycache__/ 10 | src/dateparser/ 11 | src/praw-wrapper/ 12 | 
test/__pycache__/ 13 | migration_3.py -------------------------------------------------------------------------------- /Pipfile: -------------------------------------------------------------------------------- 1 | [[source]] 2 | name = "pypi" 3 | url = "https://pypi.org/simple" 4 | verify_ssl = true 5 | 6 | [dev-packages] 7 | pytest = "*" 8 | 9 | [packages] 10 | pytz = "*" 11 | discord-logging = {git = "https://github.com/Watchful1/DiscordLogging.git"} 12 | parsedatetime = "*" 13 | python-dateutil = "*" 14 | sqlalchemy = "*" 15 | dateparser = {git = "https://github.com/Watchful1/dateparser.git",ref = "features"} 16 | prometheus-client = "*" 17 | praw-wrapper = {editable = true, git = "https://github.com/Watchful1/PrawWrapper.git"} 18 | 19 | [requires] 20 | python_version = "3.9" 21 | -------------------------------------------------------------------------------- /Pipfile.lock: -------------------------------------------------------------------------------- 1 | { 2 | "_meta": { 3 | "hash": { 4 | "sha256": "0c7f2e3d8ee9cbbd177b2ab3ca0fbbb9c97d206f2021d28eed69b7310eac578d" 5 | }, 6 | "pipfile-spec": 6, 7 | "requires": { 8 | "python_version": "3.9" 9 | }, 10 | "sources": [ 11 | { 12 | "name": "pypi", 13 | "url": "https://pypi.org/simple", 14 | "verify_ssl": true 15 | } 16 | ] 17 | }, 18 | "default": { 19 | "certifi": { 20 | "hashes": [ 21 | "sha256:0a816057ea3cdefcef70270d2c515e4506bbc954f417fa5ade2021213bb8f0c6", 22 | "sha256:30350364dfe371162649852c63336a15c70c6510c2ad5015b21c2345311805f3" 23 | ], 24 | "markers": "python_version >= '3.6'", 25 | "version": "==2025.4.26" 26 | }, 27 | "charset-normalizer": { 28 | "hashes": [ 29 | "sha256:005fa3432484527f9732ebd315da8da8001593e2cf46a3d817669f062c3d9ed4", 30 | "sha256:046595208aae0120559a67693ecc65dd75d46f7bf687f159127046628178dc45", 31 | "sha256:0c29de6a1a95f24b9a1aa7aefd27d2487263f00dfd55a77719b530788f75cff7", 32 | "sha256:0c8c57f84ccfc871a48a47321cfa49ae1df56cd1d965a09abe84066f6853b9c0", 33 | 
"sha256:0f5d9ed7f254402c9e7d35d2f5972c9bbea9040e99cd2861bd77dc68263277c7", 34 | "sha256:18dd2e350387c87dabe711b86f83c9c78af772c748904d372ade190b5c7c9d4d", 35 | "sha256:1b1bde144d98e446b056ef98e59c256e9294f6b74d7af6846bf5ffdafd687a7d", 36 | "sha256:1c95a1e2902a8b722868587c0e1184ad5c55631de5afc0eb96bc4b0d738092c0", 37 | "sha256:1cad5f45b3146325bb38d6855642f6fd609c3f7cad4dbaf75549bf3b904d3184", 38 | "sha256:21b2899062867b0e1fde9b724f8aecb1af14f2778d69aacd1a5a1853a597a5db", 39 | "sha256:24498ba8ed6c2e0b56d4acbf83f2d989720a93b41d712ebd4f4979660db4417b", 40 | "sha256:25a23ea5c7edc53e0f29bae2c44fcb5a1aa10591aae107f2a2b2583a9c5cbc64", 41 | "sha256:289200a18fa698949d2b39c671c2cc7a24d44096784e76614899a7ccf2574b7b", 42 | "sha256:28a1005facc94196e1fb3e82a3d442a9d9110b8434fc1ded7a24a2983c9888d8", 43 | "sha256:32fc0341d72e0f73f80acb0a2c94216bd704f4f0bce10aedea38f30502b271ff", 44 | "sha256:36b31da18b8890a76ec181c3cf44326bf2c48e36d393ca1b72b3f484113ea344", 45 | "sha256:3c21d4fca343c805a52c0c78edc01e3477f6dd1ad7c47653241cf2a206d4fc58", 46 | "sha256:3fddb7e2c84ac87ac3a947cb4e66d143ca5863ef48e4a5ecb83bd48619e4634e", 47 | "sha256:43e0933a0eff183ee85833f341ec567c0980dae57c464d8a508e1b2ceb336471", 48 | "sha256:4a476b06fbcf359ad25d34a057b7219281286ae2477cc5ff5e3f70a246971148", 49 | "sha256:4e594135de17ab3866138f496755f302b72157d115086d100c3f19370839dd3a", 50 | "sha256:50bf98d5e563b83cc29471fa114366e6806bc06bc7a25fd59641e41445327836", 51 | "sha256:5a9979887252a82fefd3d3ed2a8e3b937a7a809f65dcb1e068b090e165bbe99e", 52 | "sha256:5baececa9ecba31eff645232d59845c07aa030f0c81ee70184a90d35099a0e63", 53 | "sha256:5bf4545e3b962767e5c06fe1738f951f77d27967cb2caa64c28be7c4563e162c", 54 | "sha256:6333b3aa5a12c26b2a4d4e7335a28f1475e0e5e17d69d55141ee3cab736f66d1", 55 | "sha256:65c981bdbd3f57670af8b59777cbfae75364b483fa8a9f420f08094531d54a01", 56 | "sha256:68a328e5f55ec37c57f19ebb1fdc56a248db2e3e9ad769919a58672958e8f366", 57 | "sha256:6a0289e4589e8bdfef02a80478f1dfcb14f0ab696b5a00e1f4b8a14a307a3c58", 58 | 
"sha256:6b66f92b17849b85cad91259efc341dce9c1af48e2173bf38a85c6329f1033e5", 59 | "sha256:6c9379d65defcab82d07b2a9dfbfc2e95bc8fe0ebb1b176a3190230a3ef0e07c", 60 | "sha256:6fc1f5b51fa4cecaa18f2bd7a003f3dd039dd615cd69a2afd6d3b19aed6775f2", 61 | "sha256:70f7172939fdf8790425ba31915bfbe8335030f05b9913d7ae00a87d4395620a", 62 | "sha256:721c76e84fe669be19c5791da68232ca2e05ba5185575086e384352e2c309597", 63 | "sha256:7222ffd5e4de8e57e03ce2cef95a4c43c98fcb72ad86909abdfc2c17d227fc1b", 64 | "sha256:75d10d37a47afee94919c4fab4c22b9bc2a8bf7d4f46f87363bcf0573f3ff4f5", 65 | "sha256:76af085e67e56c8816c3ccf256ebd136def2ed9654525348cfa744b6802b69eb", 66 | "sha256:770cab594ecf99ae64c236bc9ee3439c3f46be49796e265ce0cc8bc17b10294f", 67 | "sha256:7a6ab32f7210554a96cd9e33abe3ddd86732beeafc7a28e9955cdf22ffadbab0", 68 | "sha256:7c48ed483eb946e6c04ccbe02c6b4d1d48e51944b6db70f697e089c193404941", 69 | "sha256:7f56930ab0abd1c45cd15be65cc741c28b1c9a34876ce8c17a2fa107810c0af0", 70 | "sha256:8075c35cd58273fee266c58c0c9b670947c19df5fb98e7b66710e04ad4e9ff86", 71 | "sha256:8272b73e1c5603666618805fe821edba66892e2870058c94c53147602eab29c7", 72 | "sha256:82d8fd25b7f4675d0c47cf95b594d4e7b158aca33b76aa63d07186e13c0e0ab7", 73 | "sha256:844da2b5728b5ce0e32d863af26f32b5ce61bc4273a9c720a9f3aa9df73b1455", 74 | "sha256:8755483f3c00d6c9a77f490c17e6ab0c8729e39e6390328e42521ef175380ae6", 75 | "sha256:915f3849a011c1f593ab99092f3cecfcb4d65d8feb4a64cf1bf2d22074dc0ec4", 76 | "sha256:926ca93accd5d36ccdabd803392ddc3e03e6d4cd1cf17deff3b989ab8e9dbcf0", 77 | "sha256:982bb1e8b4ffda883b3d0a521e23abcd6fd17418f6d2c4118d257a10199c0ce3", 78 | "sha256:98f862da73774290f251b9df8d11161b6cf25b599a66baf087c1ffe340e9bfd1", 79 | "sha256:9cbfacf36cb0ec2897ce0ebc5d08ca44213af24265bd56eca54bee7923c48fd6", 80 | "sha256:a370b3e078e418187da8c3674eddb9d983ec09445c99a3a263c2011993522981", 81 | "sha256:a955b438e62efdf7e0b7b52a64dc5c3396e2634baa62471768a64bc2adb73d5c", 82 | "sha256:aa6af9e7d59f9c12b33ae4e9450619cf2488e2bbe9b44030905877f0b2324980", 83 | 
"sha256:aa88ca0b1932e93f2d961bf3addbb2db902198dca337d88c89e1559e066e7645", 84 | "sha256:aaeeb6a479c7667fbe1099af9617c83aaca22182d6cf8c53966491a0f1b7ffb7", 85 | "sha256:aaf27faa992bfee0264dc1f03f4c75e9fcdda66a519db6b957a3f826e285cf12", 86 | "sha256:b2680962a4848b3c4f155dc2ee64505a9c57186d0d56b43123b17ca3de18f0fa", 87 | "sha256:b2d318c11350e10662026ad0eb71bb51c7812fc8590825304ae0bdd4ac283acd", 88 | "sha256:b33de11b92e9f75a2b545d6e9b6f37e398d86c3e9e9653c4864eb7e89c5773ef", 89 | "sha256:b3daeac64d5b371dea99714f08ffc2c208522ec6b06fbc7866a450dd446f5c0f", 90 | "sha256:be1e352acbe3c78727a16a455126d9ff83ea2dfdcbc83148d2982305a04714c2", 91 | "sha256:bee093bf902e1d8fc0ac143c88902c3dfc8941f7ea1d6a8dd2bcb786d33db03d", 92 | "sha256:c72fbbe68c6f32f251bdc08b8611c7b3060612236e960ef848e0a517ddbe76c5", 93 | "sha256:c9e36a97bee9b86ef9a1cf7bb96747eb7a15c2f22bdb5b516434b00f2a599f02", 94 | "sha256:cddf7bd982eaa998934a91f69d182aec997c6c468898efe6679af88283b498d3", 95 | "sha256:cf713fe9a71ef6fd5adf7a79670135081cd4431c2943864757f0fa3a65b1fafd", 96 | "sha256:d11b54acf878eef558599658b0ffca78138c8c3655cf4f3a4a673c437e67732e", 97 | "sha256:d41c4d287cfc69060fa91cae9683eacffad989f1a10811995fa309df656ec214", 98 | "sha256:d524ba3f1581b35c03cb42beebab4a13e6cdad7b36246bd22541fa585a56cccd", 99 | "sha256:daac4765328a919a805fa5e2720f3e94767abd632ae410a9062dff5412bae65a", 100 | "sha256:db4c7bf0e07fc3b7d89ac2a5880a6a8062056801b83ff56d8464b70f65482b6c", 101 | "sha256:dc7039885fa1baf9be153a0626e337aa7ec8bf96b0128605fb0d77788ddc1681", 102 | "sha256:dccab8d5fa1ef9bfba0590ecf4d46df048d18ffe3eec01eeb73a42e0d9e7a8ba", 103 | "sha256:dedb8adb91d11846ee08bec4c8236c8549ac721c245678282dcb06b221aab59f", 104 | "sha256:e45ba65510e2647721e35323d6ef54c7974959f6081b58d4ef5d87c60c84919a", 105 | "sha256:e53efc7c7cee4c1e70661e2e112ca46a575f90ed9ae3fef200f2a25e954f4b28", 106 | "sha256:e635b87f01ebc977342e2697d05b56632f5f879a4f15955dfe8cef2448b51691", 107 | 
"sha256:e70e990b2137b29dc5564715de1e12701815dacc1d056308e2b17e9095372a82", 108 | "sha256:e8082b26888e2f8b36a042a58307d5b917ef2b1cacab921ad3323ef91901c71a", 109 | "sha256:e8323a9b031aa0393768b87f04b4164a40037fb2a3c11ac06a03ffecd3618027", 110 | "sha256:e92fca20c46e9f5e1bb485887d074918b13543b1c2a1185e69bb8d17ab6236a7", 111 | "sha256:eb30abc20df9ab0814b5a2524f23d75dcf83cde762c161917a2b4b7b55b1e518", 112 | "sha256:eba9904b0f38a143592d9fc0e19e2df0fa2e41c3c3745554761c5f6447eedabf", 113 | "sha256:ef8de666d6179b009dce7bcb2ad4c4a779f113f12caf8dc77f0162c29d20490b", 114 | "sha256:efd387a49825780ff861998cd959767800d54f8308936b21025326de4b5a42b9", 115 | "sha256:f0aa37f3c979cf2546b73e8222bbfa3dc07a641585340179d768068e3455e544", 116 | "sha256:f4074c5a429281bf056ddd4c5d3b740ebca4d43ffffe2ef4bf4d2d05114299da", 117 | "sha256:f69a27e45c43520f5487f27627059b64aaf160415589230992cec34c5e18a509", 118 | "sha256:fb707f3e15060adf5b7ada797624a6c6e0138e2a26baa089df64c68ee98e040f", 119 | "sha256:fcbe676a55d7445b22c10967bceaaf0ee69407fbe0ece4d032b6eb8d4565982a", 120 | "sha256:fdb20a30fe1175ecabed17cbf7812f7b804b8a315a25f24678bcdf120a90077f" 121 | ], 122 | "markers": "python_version >= '3.7'", 123 | "version": "==3.4.2" 124 | }, 125 | "dateparser": { 126 | "git": "https://github.com/Watchful1/dateparser.git", 127 | "markers": "python_version >= '3.8'", 128 | "ref": "66ec9d6b9558530f8cd1da16c4886fd907cdd1d5" 129 | }, 130 | "discord-logging": { 131 | "git": "https://github.com/Watchful1/DiscordLogging.git", 132 | "ref": "049d47a134f7e4f87f0ae8363b1e6866d9ecba50" 133 | }, 134 | "greenlet": { 135 | "hashes": [ 136 | "sha256:00cd814b8959b95a546e47e8d589610534cfb71f19802ea8a2ad99d95d702057", 137 | "sha256:02a98600899ca1ca5d3a2590974c9e3ec259503b2d6ba6527605fcd74e08e207", 138 | "sha256:02f5972ff02c9cf615357c17ab713737cccfd0eaf69b951084a9fd43f39833d3", 139 | "sha256:055916fafad3e3388d27dd68517478933a97edc2fc54ae79d3bec827de2c64c4", 140 | 
"sha256:0a16fb934fcabfdfacf21d79e6fed81809d8cd97bc1be9d9c89f0e4567143d7b", 141 | "sha256:1592a615b598643dbfd566bac8467f06c8c8ab6e56f069e573832ed1d5d528cc", 142 | "sha256:1919cbdc1c53ef739c94cf2985056bcc0838c1f217b57647cbf4578576c63825", 143 | "sha256:1e4747712c4365ef6765708f948acc9c10350719ca0545e362c24ab973017370", 144 | "sha256:1e76106b6fc55fa3d6fe1c527f95ee65e324a13b62e243f77b48317346559708", 145 | "sha256:1f72667cc341c95184f1c68f957cb2d4fc31eef81646e8e59358a10ce6689457", 146 | "sha256:2593283bf81ca37d27d110956b79e8723f9aa50c4bcdc29d3c0543d4743d2763", 147 | "sha256:2dc5c43bb65ec3669452af0ab10729e8fdc17f87a1f2ad7ec65d4aaaefabf6bf", 148 | "sha256:3091bc45e6b0c73f225374fefa1536cd91b1e987377b12ef5b19129b07d93ebe", 149 | "sha256:354f67445f5bed6604e493a06a9a49ad65675d3d03477d38a4db4a427e9aad0e", 150 | "sha256:3885f85b61798f4192d544aac7b25a04ece5fe2704670b4ab73c2d2c14ab740d", 151 | "sha256:3ab7194ee290302ca15449f601036007873028712e92ca15fc76597a0aeb4c59", 152 | "sha256:3aeca9848d08ce5eb653cf16e15bb25beeab36e53eb71cc32569f5f3afb2a3aa", 153 | "sha256:44671c29da26539a5f142257eaba5110f71887c24d40df3ac87f1117df589e0e", 154 | "sha256:45f9f4853fb4cc46783085261c9ec4706628f3b57de3e68bae03e8f8b3c0de51", 155 | "sha256:4bd139e4943547ce3a56ef4b8b1b9479f9e40bb47e72cc906f0f66b9d0d5cab3", 156 | "sha256:4fefc7aa68b34b9224490dfda2e70ccf2131368493add64b4ef2d372955c207e", 157 | "sha256:6629311595e3fe7304039c67f00d145cd1d38cf723bb5b99cc987b23c1433d61", 158 | "sha256:6fadd183186db360b61cb34e81117a096bff91c072929cd1b529eb20dd46e6c5", 159 | "sha256:71566302219b17ca354eb274dfd29b8da3c268e41b646f330e324e3967546a74", 160 | "sha256:7409796591d879425997a518138889d8d17e63ada7c99edc0d7a1c22007d4907", 161 | "sha256:752f0e79785e11180ebd2e726c8a88109ded3e2301d40abced2543aa5d164275", 162 | "sha256:7791dcb496ec53d60c7f1c78eaa156c21f402dda38542a00afc3e20cae0f480f", 163 | "sha256:782743700ab75716650b5238a4759f840bb2dcf7bff56917e9ffdf9f1f23ec59", 164 | 
"sha256:7c9896249fbef2c615853b890ee854f22c671560226c9221cfd27c995db97e5c", 165 | "sha256:85f3e248507125bf4af607a26fd6cb8578776197bd4b66e35229cdf5acf1dfbf", 166 | "sha256:89c69e9a10670eb7a66b8cef6354c24671ba241f46152dd3eed447f79c29fb5b", 167 | "sha256:8cb8553ee954536500d88a1a2f58fcb867e45125e600e80f586ade399b3f8819", 168 | "sha256:9ae572c996ae4b5e122331e12bbb971ea49c08cc7c232d1bd43150800a2d6c65", 169 | "sha256:9c7b15fb9b88d9ee07e076f5a683027bc3befd5bb5d25954bb633c385d8b737e", 170 | "sha256:9ea5231428af34226c05f927e16fc7f6fa5e39e3ad3cd24ffa48ba53a47f4240", 171 | "sha256:a31ead8411a027c2c4759113cf2bd473690517494f3d6e4bf67064589afcd3c5", 172 | "sha256:a8fa80665b1a29faf76800173ff5325095f3e66a78e62999929809907aca5659", 173 | "sha256:ad053d34421a2debba45aa3cc39acf454acbcd025b3fc1a9f8a0dee237abd485", 174 | "sha256:b24c7844c0a0afc3ccbeb0b807adeefb7eff2b5599229ecedddcfeb0ef333bec", 175 | "sha256:b50a8c5c162469c3209e5ec92ee4f95c8231b11db6a04db09bbe338176723bb8", 176 | "sha256:ba30e88607fb6990544d84caf3c706c4b48f629e18853fc6a646f82db9629418", 177 | "sha256:bf3fc9145141250907730886b031681dfcc0de1c158f3cc51c092223c0f381ce", 178 | "sha256:c23ea227847c9dbe0b3910f5c0dd95658b607137614eb821e6cbaecd60d81cc6", 179 | "sha256:c3cc1a3ed00ecfea8932477f729a9f616ad7347a5e55d50929efa50a86cb7be7", 180 | "sha256:c49e9f7c6f625507ed83a7485366b46cbe325717c60837f7244fc99ba16ba9d6", 181 | "sha256:d0cb7d47199001de7658c213419358aa8937df767936506db0db7ce1a71f4a2f", 182 | "sha256:d8009ae46259e31bc73dc183e402f548e980c96f33a6ef58cc2e7865db012e13", 183 | "sha256:da956d534a6d1b9841f95ad0f18ace637668f680b1339ca4dcfb2c1837880a0b", 184 | "sha256:dcb9cebbf3f62cb1e5afacae90761ccce0effb3adaa32339a0670fe7805d8068", 185 | "sha256:decb0658ec19e5c1f519faa9a160c0fc85a41a7e6654b3ce1b44b939f8bf1325", 186 | "sha256:df4d1509efd4977e6a844ac96d8be0b9e5aa5d5c77aa27ca9f4d3f92d3fcf330", 187 | "sha256:eeb27bece45c0c2a5842ac4c5a1b5c2ceaefe5711078eed4e8043159fa05c834", 188 | 
"sha256:efcdfb9df109e8a3b475c016f60438fcd4be68cd13a365d42b35914cdab4bb2b", 189 | "sha256:fd9fb7c941280e2c837b603850efc93c999ae58aae2b40765ed682a6907ebbc5", 190 | "sha256:fe46d4f8e94e637634d54477b0cfabcf93c53f29eedcbdeecaf2af32029b4421" 191 | ], 192 | "markers": "python_version >= '3.9'", 193 | "version": "==3.2.2" 194 | }, 195 | "idna": { 196 | "hashes": [ 197 | "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", 198 | "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3" 199 | ], 200 | "markers": "python_version >= '3.6'", 201 | "version": "==3.10" 202 | }, 203 | "parsedatetime": { 204 | "hashes": [ 205 | "sha256:4cb368fbb18a0b7231f4d76119165451c8d2e35951455dfee97c62a87b04d455", 206 | "sha256:cb96edd7016872f58479e35879294258c71437195760746faffedb692aef000b" 207 | ], 208 | "index": "pypi", 209 | "version": "==2.6" 210 | }, 211 | "praw": { 212 | "hashes": [ 213 | "sha256:15917a81a06e20ff0aaaf1358481f4588449fa2421233040cb25e5c8202a3e2f", 214 | "sha256:3c5767909f71e48853eb6335fef7b50a43cbe3da728cdfb16d3be92904b0a4d8" 215 | ], 216 | "markers": "python_version ~= '3.8'", 217 | "version": "==7.8.1" 218 | }, 219 | "praw-wrapper": { 220 | "editable": true, 221 | "git": "https://github.com/Watchful1/PrawWrapper.git", 222 | "ref": "814bc75ea7a31fec1cb6089f2a3e86d4385f16be" 223 | }, 224 | "prawcore": { 225 | "hashes": [ 226 | "sha256:29af5da58d85704b439ad3c820873ad541f4535e00bb98c66f0fbcc8c603065a", 227 | "sha256:b7b2b5a1d04406e086ab4e79988dc794df16059862f329f4c6a43ed09986c335" 228 | ], 229 | "markers": "python_version ~= '3.8'", 230 | "version": "==2.4.0" 231 | }, 232 | "prometheus-client": { 233 | "hashes": [ 234 | "sha256:252505a722ac04b0456be05c05f75f45d760c2911ffc45f2a06bcaed9f3ae3fb", 235 | "sha256:594b45c410d6f4f8888940fe80b5cc2521b305a1fafe1c58609ef715a001f301" 236 | ], 237 | "index": "pypi", 238 | "markers": "python_version >= '3.8'", 239 | "version": "==0.21.1" 240 | }, 241 | "python-dateutil": { 242 | "hashes": [ 243 | 
"sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", 244 | "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427" 245 | ], 246 | "index": "pypi", 247 | "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2'", 248 | "version": "==2.9.0.post0" 249 | }, 250 | "pytz": { 251 | "hashes": [ 252 | "sha256:360b9e3dbb49a209c21ad61809c7fb453643e048b38924c765813546746e81c3", 253 | "sha256:5ddf76296dd8c44c26eb8f4b6f35488f3ccbf6fbbd7adee0b7262d43f0ec2f00" 254 | ], 255 | "index": "pypi", 256 | "version": "==2025.2" 257 | }, 258 | "regex": { 259 | "hashes": [ 260 | "sha256:0008650041531d0eadecc96a73d37c2dc4821cf51b0766e374cb4f1ddc4e1c14", 261 | "sha256:03299b0bcaa7824eb7c0ebd7ef1e3663302d1b533653bfe9dc7e595d453e2ae9", 262 | "sha256:06b1df01cf2aef3a9790858af524ae2588762c8a90e784ba00d003f045306204", 263 | "sha256:09b4b6ccc61d4119342b26246ddd5a04accdeebe36bdfe865ad87a0784efd77f", 264 | "sha256:0be0c34a39e5d04a62fd5342f0886d0e57592a4f4993b3f9d257c1f688b19737", 265 | "sha256:0d96eec8550fd2fd26f8e675f6d8b61b159482ad8ffa26991b894ed5ee19038b", 266 | "sha256:0eb0e2845e81bdea92b8281a3969632686502565abf4a0b9e4ab1471c863d8f3", 267 | "sha256:13bbf0c9453c6d16e5867bda7f6c0c7cff1decf96c5498318bb87f8136d2abd4", 268 | "sha256:17e51ad1e6131c496b58d317bc9abec71f44eb1957d32629d06013a21bc99cac", 269 | "sha256:1977bb64264815d3ef016625adc9df90e6d0e27e76260280c63eca993e3f455f", 270 | "sha256:1e30762ddddb22f7f14c4f59c34d3addabc789216d813b0f3e2788d7bcf0cf29", 271 | "sha256:1e73652057473ad3e6934944af090852a02590c349357b79182c1b681da2c772", 272 | "sha256:20e6a27959f162f979165e496add0d7d56d7038237092d1aba20b46de79158f1", 273 | "sha256:286ff9ec2709d56ae7517040be0d6c502642517ce9937ab6d89b1e7d0904f863", 274 | "sha256:297c42ede2c81f0cb6f34ea60b5cf6dc965d97fa6936c11fc3286019231f0d66", 275 | "sha256:320c2f4106962ecea0f33d8d31b985d3c185757c49c1fb735501515f963715ed", 276 | "sha256:35ed2f3c918a00b109157428abfc4e8d1ffabc37c8f9abc5939ebd1e95dabc47", 277 | 
"sha256:3d146e5591cb67c5e836229a04723a30af795ef9b70a0bbd913572e14b7b940f", 278 | "sha256:42bb37e2b2d25d958c25903f6125a41aaaa1ed49ca62c103331f24b8a459142f", 279 | "sha256:42d6007722d46bd2c95cce700181570b56edc0dcbadbfe7855ec26c3f2d7e008", 280 | "sha256:43eba5c46208deedec833663201752e865feddc840433285fbadee07b84b464d", 281 | "sha256:452519bc4c973e961b1620c815ea6dd8944a12d68e71002be5a7aff0a8361571", 282 | "sha256:4b9c16a807b17b17c4fa3a1d8c242467237be67ba92ad24ff51425329e7ae3d0", 283 | "sha256:5510932596a0f33399b7fff1bd61c59c977f2b8ee987b36539ba97eb3513584a", 284 | "sha256:55820bc631684172b9b56a991d217ec7c2e580d956591dc2144985113980f5a3", 285 | "sha256:57484d39447f94967e83e56db1b1108c68918c44ab519b8ecfc34b790ca52bf7", 286 | "sha256:58ba41e462653eaf68fc4a84ec4d350b26a98d030be1ab24aba1adcc78ffe447", 287 | "sha256:5bc5f921be39ccb65fdda741e04b2555917a4bced24b4df14eddc7569be3b493", 288 | "sha256:5dcc4168536c8f68654f014a3db49b6b4a26b226f735708be2054314ed4964f4", 289 | "sha256:5f92a7cdc6a0ae2abd184e8dfd6ef2279989d24c85d2c85d0423206284103ede", 290 | "sha256:67250b36edfa714ba62dc62d3f238e86db1065fccb538278804790f578253640", 291 | "sha256:6df070a986fc064d865c381aecf0aaff914178fdf6874da2f2387e82d93cc5bd", 292 | "sha256:729aa8ca624c42f309397c5fc9e21db90bf7e2fdd872461aabdbada33de9063c", 293 | "sha256:72bc3a5effa5974be6d965ed8301ac1e869bc18425c8a8fac179fbe7876e3aee", 294 | "sha256:74d86e8924835f863c34e646392ef39039405f6ce52956d8af16497af4064a30", 295 | "sha256:79e5af1ff258bc0fe0bdd6f69bc4ae33935a898e3cbefbbccf22e88a27fa053b", 296 | "sha256:7b103dffb9f6a47ed7ffdf352b78cfe058b1777617371226c1894e1be443afec", 297 | "sha256:83f03f0bd88c12e63ca2d024adeee75234d69808b341e88343b0232329e1f1a1", 298 | "sha256:86d7a68fa53688e1f612c3246044157117403c7ce19ebab7d02daf45bd63913e", 299 | "sha256:878c626cbca3b649e14e972c14539a01191d79e58934e3f3ef4a9e17f90277f8", 300 | "sha256:878f5d649ba1db9f52cc4ef491f7dba2d061cdc48dd444c54260eebc0b1729b9", 301 | 
"sha256:87bc01226cd288f0bd9a4f9f07bf6827134dc97a96c22e2d28628e824c8de231", 302 | "sha256:8babb2b5751105dc0aef2a2e539f4ba391e738c62038d8cb331c710f6b0f3da7", 303 | "sha256:91e0f7e7be77250b808a5f46d90bf0032527d3c032b2131b63dee54753a4d729", 304 | "sha256:9557545c10d52c845f270b665b52a6a972884725aa5cf12777374e18f2ea8960", 305 | "sha256:9ccb0a4ab926016867260c24c192d9df9586e834f5db83dfa2c8fffb3a6e5056", 306 | "sha256:9d828c5987d543d052b53c579a01a52d96b86f937b1777bbfe11ef2728929357", 307 | "sha256:9efa41d1527b366c88f265a227b20bcec65bda879962e3fc8a2aee11e81266d7", 308 | "sha256:aaf5317c961d93c1a200b9370fb1c6b6836cc7144fef3e5a951326912bf1f5a3", 309 | "sha256:ab69b4fe09e296261377d209068d52402fb85ef89dc78a9ac4a29a895f4e24a7", 310 | "sha256:ad397bc7d51d69cb07ef89e44243f971a04ce1dca9bf24c992c362406c0c6573", 311 | "sha256:ae17fc8103f3b63345709d3e9654a274eee1c6072592aec32b026efd401931d0", 312 | "sha256:af4d8cc28e4c7a2f6a9fed544228c567340f8258b6d7ea815b62a72817bbd178", 313 | "sha256:b22ff939a8856a44f4822da38ef4868bd3a9ade22bb6d9062b36957c850e404f", 314 | "sha256:b549d851f91a4efb3e65498bd4249b1447ab6035a9972f7fc215eb1f59328834", 315 | "sha256:be319f4eb400ee567b722e9ea63d5b2bb31464e3cf1b016502e3ee2de4f86f5c", 316 | "sha256:c0446b2871335d5a5e9fcf1462f954586b09a845832263db95059dcd01442015", 317 | "sha256:c68d2c04f7701a418ec2e5631b7f3552efc32f6bcc1739369c6eeb1af55f62e0", 318 | "sha256:c87ac58b9baaf50b6c1b81a18d20eda7e2883aa9a4fb4f1ca70f2e443bfcdc57", 319 | "sha256:caa2734ada16a44ae57b229d45091f06e30a9a52ace76d7574546ab23008c635", 320 | "sha256:cb34c2d66355fb70ae47b5595aafd7218e59bb9c00ad8cc3abd1406ca5874f07", 321 | "sha256:cb3652bbe6720786b9137862205986f3ae54a09dec8499a995ed58292bdf77c2", 322 | "sha256:cf668f26604e9f7aee9f8eaae4ca07a948168af90b96be97a4b7fa902a6d2ac1", 323 | "sha256:d326ff80ed531bf2507cba93011c30fff2dd51454c85f55df0f59f2030b1687b", 324 | "sha256:d6c2441538e4fadd4291c8420853431a229fcbefc1bf521810fbc2629d8ae8c2", 325 | 
"sha256:d6ecfd1970b3380a569d7b3ecc5dd70dba295897418ed9e31ec3c16a5ab099a5", 326 | "sha256:e5602a9b5074dcacc113bba4d2f011d2748f50e3201c8139ac5b68cf2a76bd8b", 327 | "sha256:ef806f684f17dbd6263d72a54ad4073af42b42effa3eb42b877e750c24c76f86", 328 | "sha256:f3356afbb301ec34a500b8ba8b47cba0b44ed4641c306e1dd981a08b416170b5", 329 | "sha256:f6f7ee2289176cb1d2c59a24f50900f8b9580259fa9f1a739432242e7d254f93", 330 | "sha256:f7e8f1ee28e0a05831c92dc1c0c1c94af5289963b7cf09eca5b5e3ce4f8c91b0", 331 | "sha256:f8169ec628880bdbca67082a9196e2106060a4a5cbd486ac51881a4df805a36f", 332 | "sha256:fbc88d3ba402b5d041d204ec2449c4078898f89c4a6e6f0ed1c1a510ef1e221d", 333 | "sha256:fbd3fe37353c62fd0eb19fb76f78aa693716262bcd5f9c14bb9e5aca4b3f0dc4" 334 | ], 335 | "markers": "python_version >= '3.6'", 336 | "version": "==2022.3.2" 337 | }, 338 | "requests": { 339 | "hashes": [ 340 | "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760", 341 | "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6" 342 | ], 343 | "markers": "python_version >= '3.8'", 344 | "version": "==2.32.3" 345 | }, 346 | "six": { 347 | "hashes": [ 348 | "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", 349 | "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81" 350 | ], 351 | "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2'", 352 | "version": "==1.17.0" 353 | }, 354 | "sqlalchemy": { 355 | "hashes": [ 356 | "sha256:00a494ea6f42a44c326477b5bee4e0fc75f6a80c01570a32b57e89cf0fbef85a", 357 | "sha256:0bb933a650323e476a2e4fbef8997a10d0003d4da996aad3fd7873e962fdde4d", 358 | "sha256:110179728e442dae85dd39591beb74072ae4ad55a44eda2acc6ec98ead80d5f2", 359 | "sha256:15d08d5ef1b779af6a0909b97be6c1fd4298057504eb6461be88bd1696cb438e", 360 | "sha256:16d325ea898f74b26ffcd1cf8c593b0beed8714f0317df2bed0d8d1de05a8f26", 361 | "sha256:1abb387710283fc5983d8a1209d9696a4eae9db8d7ac94b402981fe2fe2e39ad", 362 | 
"sha256:1ffdf9c91428e59744f8e6f98190516f8e1d05eec90e936eb08b257332c5e870", 363 | "sha256:2be94d75ee06548d2fc591a3513422b873490efb124048f50556369a834853b0", 364 | "sha256:2cbafc8d39ff1abdfdda96435f38fab141892dc759a2165947d1a8fffa7ef596", 365 | "sha256:2ee5f9999a5b0e9689bed96e60ee53c3384f1a05c2dd8068cc2e8361b0df5b7a", 366 | "sha256:32587e2e1e359276957e6fe5dad089758bc042a971a8a09ae8ecf7a8fe23d07a", 367 | "sha256:35904d63412db21088739510216e9349e335f142ce4a04b69e2528020ee19ed4", 368 | "sha256:37a5c21ab099a83d669ebb251fddf8f5cee4d75ea40a5a1653d9c43d60e20867", 369 | "sha256:37f7a0f506cf78c80450ed1e816978643d3969f99c4ac6b01104a6fe95c5490a", 370 | "sha256:46628ebcec4f23a1584fb52f2abe12ddb00f3bb3b7b337618b80fc1b51177aff", 371 | "sha256:4a4c5a2905a9ccdc67a8963e24abd2f7afcd4348829412483695c59e0af9a705", 372 | "sha256:4aeb939bcac234b88e2d25d5381655e8353fe06b4e50b1c55ecffe56951d18c2", 373 | "sha256:50f5885bbed261fc97e2e66c5156244f9704083a674b8d17f24c72217d29baf5", 374 | "sha256:519624685a51525ddaa7d8ba8265a1540442a2ec71476f0e75241eb8263d6f51", 375 | "sha256:5434223b795be5c5ef8244e5ac98056e290d3a99bdcc539b916e282b160dda00", 376 | "sha256:55028d7a3ebdf7ace492fab9895cbc5270153f75442a0472d8516e03159ab364", 377 | "sha256:5654d1ac34e922b6c5711631f2da497d3a7bffd6f9f87ac23b35feea56098011", 378 | "sha256:574aea2c54d8f1dd1699449f332c7d9b71c339e04ae50163a3eb5ce4c4325ee4", 379 | "sha256:5cfa124eda500ba4b0d3afc3e91ea27ed4754e727c7f025f293a22f512bcd4c9", 380 | "sha256:5ea9181284754d37db15156eb7be09c86e16e50fbe77610e9e7bee09291771a1", 381 | "sha256:641ee2e0834812d657862f3a7de95e0048bdcb6c55496f39c6fa3d435f6ac6ad", 382 | "sha256:650490653b110905c10adac69408380688cefc1f536a137d0d69aca1069dc1d1", 383 | "sha256:6959738971b4745eea16f818a2cd086fb35081383b078272c35ece2b07012716", 384 | "sha256:6cfedff6878b0e0d1d0a50666a817ecd85051d12d56b43d9d425455e608b5ba0", 385 | "sha256:7e0505719939e52a7b0c65d20e84a6044eb3712bb6f239c6b1db77ba8e173a37", 386 | 
"sha256:8b6b28d303b9d57c17a5164eb1fd2d5119bb6ff4413d5894e74873280483eeb5", 387 | "sha256:8bb131ffd2165fae48162c7bbd0d97c84ab961deea9b8bab16366543deeab625", 388 | "sha256:915866fd50dd868fdcc18d61d8258db1bf9ed7fbd6dfec960ba43365952f3b01", 389 | "sha256:9408fd453d5f8990405cc9def9af46bfbe3183e6110401b407c2d073c3388f47", 390 | "sha256:957f8d85d5e834397ef78a6109550aeb0d27a53b5032f7a57f2451e1adc37e98", 391 | "sha256:9c7a80ed86d6aaacb8160a1caef6680d4ddd03c944d985aecee940d168c411d1", 392 | "sha256:9d3b31d0a1c44b74d3ae27a3de422dfccd2b8f0b75e51ecb2faa2bf65ab1ba0d", 393 | "sha256:a669cbe5be3c63f75bcbee0b266779706f1a54bcb1000f302685b87d1b8c1500", 394 | "sha256:a8aae085ea549a1eddbc9298b113cffb75e514eadbb542133dd2b99b5fb3b6af", 395 | "sha256:ae9597cab738e7cc823f04a704fb754a9249f0b6695a6aeb63b74055cd417a96", 396 | "sha256:afe63b208153f3a7a2d1a5b9df452b0673082588933e54e7c8aac457cf35e758", 397 | "sha256:b5a5bbe29c10c5bfd63893747a1bf6f8049df607638c786252cb9243b86b6706", 398 | "sha256:baf7cee56bd552385c1ee39af360772fbfc2f43be005c78d1140204ad6148438", 399 | "sha256:bb19e30fdae77d357ce92192a3504579abe48a66877f476880238a962e5b96db", 400 | "sha256:bece9527f5a98466d67fb5d34dc560c4da964240d8b09024bb21c1246545e04e", 401 | "sha256:c0cae71e20e3c02c52f6b9e9722bca70e4a90a466d59477822739dc31ac18b4b", 402 | "sha256:c268b5100cfeaa222c40f55e169d484efa1384b44bf9ca415eae6d556f02cb08", 403 | "sha256:c7b927155112ac858357ccf9d255dd8c044fd9ad2dc6ce4c4149527c901fa4c3", 404 | "sha256:c884de19528e0fcd9dc34ee94c810581dd6e74aef75437ff17e696c2bfefae3e", 405 | "sha256:cd2f75598ae70bcfca9117d9e51a3b06fe29edd972fdd7fd57cc97b4dbf3b08a", 406 | "sha256:cf0e99cdb600eabcd1d65cdba0d3c91418fee21c4aa1d28db47d095b1064a7d8", 407 | "sha256:d827099289c64589418ebbcaead0145cd19f4e3e8a93919a0100247af245fa00", 408 | "sha256:e8040680eaacdce4d635f12c55c714f3d4c7f57da2bc47a01229d115bd319191", 409 | "sha256:f0fda83e113bb0fb27dc003685f32a5dcb99c9c4f41f4fa0838ac35265c23b5c", 410 | 
"sha256:f1ea21bef99c703f44444ad29c2c1b6bd55d202750b6de8e06a955380f4725d7", 411 | "sha256:f6bacab7514de6146a1976bc56e1545bee247242fab030b89e5f70336fc0003e", 412 | "sha256:fe147fcd85aaed53ce90645c91ed5fca0cc88a797314c70dfd9d35925bd5d106" 413 | ], 414 | "index": "pypi", 415 | "markers": "python_version >= '3.7'", 416 | "version": "==2.0.40" 417 | }, 418 | "typing-extensions": { 419 | "hashes": [ 420 | "sha256:a439e7c04b49fec3e5d3e2beaa21755cadbbdc391694e28ccdd36ca4a1408f8c", 421 | "sha256:e6c81219bd689f51865d9e372991c540bda33a0379d5573cddb9a3a23f7caaef" 422 | ], 423 | "markers": "python_version >= '3.8'", 424 | "version": "==4.13.2" 425 | }, 426 | "tzdata": { 427 | "hashes": [ 428 | "sha256:1a403fada01ff9221ca8044d701868fa132215d84beb92242d9acd2147f667a8", 429 | "sha256:b60a638fcc0daffadf82fe0f57e53d06bdec2f36c4df66280ae79bce6bd6f2b9" 430 | ], 431 | "markers": "python_version >= '2'", 432 | "version": "==2025.2" 433 | }, 434 | "tzlocal": { 435 | "hashes": [ 436 | "sha256:cceffc7edecefea1f595541dbd6e990cb1ea3d19bf01b2809f362a03dd7921fd", 437 | "sha256:eb1a66c3ef5847adf7a834f1be0800581b683b5608e74f86ecbcef8ab91bb85d" 438 | ], 439 | "markers": "python_version >= '3.9'", 440 | "version": "==5.3.1" 441 | }, 442 | "update-checker": { 443 | "hashes": [ 444 | "sha256:6a2d45bb4ac585884a6b03f9eade9161cedd9e8111545141e9aa9058932acb13", 445 | "sha256:cbba64760a36fe2640d80d85306e8fe82b6816659190993b7bdabadee4d4bbfd" 446 | ], 447 | "version": "==0.18.0" 448 | }, 449 | "urllib3": { 450 | "hashes": [ 451 | "sha256:414bc6535b787febd7567804cc015fee39daab8ad86268f1310a9250697de466", 452 | "sha256:4e16665048960a0900c702d4a66415956a584919c03361cac9f1df5c5dd7e813" 453 | ], 454 | "markers": "python_version >= '3.9'", 455 | "version": "==2.4.0" 456 | }, 457 | "websocket-client": { 458 | "hashes": [ 459 | "sha256:17b44cc997f5c498e809b22cdf2d9c7a9e71c02c8cc2b6c56e7c2d1239bfa526", 460 | "sha256:3239df9f44da632f96012472805d40a23281a991027ce11d2f45a6f24ac4c3da" 461 | ], 462 | "markers": 
"python_version >= '3.8'", 463 | "version": "==1.8.0" 464 | } 465 | }, 466 | "develop": { 467 | "colorama": { 468 | "hashes": [ 469 | "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", 470 | "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6" 471 | ], 472 | "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5, 3.6'", 473 | "version": "==0.4.6" 474 | }, 475 | "exceptiongroup": { 476 | "hashes": [ 477 | "sha256:4d111e6e0c13d0644cad6ddaa7ed0261a0b36971f6d23e7ec9b4b9097da78a10", 478 | "sha256:b241f5885f560bc56a59ee63ca4c6a8bfa46ae4ad651af316d4e81817bb9fd88" 479 | ], 480 | "markers": "python_version >= '3.7'", 481 | "version": "==1.3.0" 482 | }, 483 | "iniconfig": { 484 | "hashes": [ 485 | "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7", 486 | "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760" 487 | ], 488 | "markers": "python_version >= '3.8'", 489 | "version": "==2.1.0" 490 | }, 491 | "packaging": { 492 | "hashes": [ 493 | "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", 494 | "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f" 495 | ], 496 | "markers": "python_version >= '3.8'", 497 | "version": "==25.0" 498 | }, 499 | "pluggy": { 500 | "hashes": [ 501 | "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1", 502 | "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669" 503 | ], 504 | "markers": "python_version >= '3.8'", 505 | "version": "==1.5.0" 506 | }, 507 | "pytest": { 508 | "hashes": [ 509 | "sha256:c69214aa47deac29fad6c2a4f590b9c4a9fdb16a403176fe154b79c0b4d4d820", 510 | "sha256:f4efe70cc14e511565ac476b57c279e12a855b11f48f212af1080ef2263d3845" 511 | ], 512 | "index": "pypi", 513 | "markers": "python_version >= '3.8'", 514 | "version": "==8.3.5" 515 | }, 516 | "tomli": { 517 | "hashes": [ 518 | 
"sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6", 519 | "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd", 520 | "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c", 521 | "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b", 522 | "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8", 523 | "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6", 524 | "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77", 525 | "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff", 526 | "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea", 527 | "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192", 528 | "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249", 529 | "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee", 530 | "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4", 531 | "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98", 532 | "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8", 533 | "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4", 534 | "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281", 535 | "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744", 536 | "sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69", 537 | "sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13", 538 | "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140", 539 | "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e", 540 | "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e", 541 | "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc", 542 | 
"sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff", 543 | "sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec", 544 | "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2", 545 | "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222", 546 | "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106", 547 | "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272", 548 | "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a", 549 | "sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7" 550 | ], 551 | "markers": "python_version >= '3.8'", 552 | "version": "==2.2.1" 553 | }, 554 | "typing-extensions": { 555 | "hashes": [ 556 | "sha256:a439e7c04b49fec3e5d3e2beaa21755cadbbdc391694e28ccdd36ca4a1408f8c", 557 | "sha256:e6c81219bd689f51865d9e372991c540bda33a0379d5573cddb9a3a23f7caaef" 558 | ], 559 | "markers": "python_version >= '3.8'", 560 | "version": "==4.13.2" 561 | } 562 | } 563 | } 564 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | This is the repository for u/RemindMeBot on reddit. It's a reminder bot that you can trigger with a comment or message and it will send you a reminder message the specified time period later. You can find instructions on how to use the bot [here](https://www.reddit.com/r/RemindMeBot/comments/e1bko7/remindmebot_info_v21/). 2 | 3 | I took over running u/RemindMeBot from u/RemindMeBotWrangler in early 2019 and fully rewrote the code. 4 | 5 | I use both [dateparser](https://github.com/scrapinghub/dateparser) and [parsedatetime](https://github.com/bear/parsedatetime) to parse date strings, though I use a [custom branch of dateparser](https://github.com/Watchful1/dateparser) with a few small modifications. 
I use [sqlalchemy](https://www.sqlalchemy.org/) backed by sqlite for storing data. I use a custom python logging library [DiscordLogging](https://github.com/Watchful1/DiscordLogging) to log error messages to a discord channel. Lastly I use praw via another custom library [PrawWrapper](https://github.com/Watchful1/PrawWrapper) that allows me to set up a mock reddit instance for unit testing. -------------------------------------------------------------------------------- /pytest.ini: -------------------------------------------------------------------------------- 1 | [pytest] 2 | filterwarnings = 3 | ignore::DeprecationWarning -------------------------------------------------------------------------------- /scripts/compare.py: -------------------------------------------------------------------------------- 1 | import discord_logging 2 | import parsedatetime 3 | from datetime import datetime 4 | from datetime import timedelta 5 | import static 6 | import re 7 | import requests 8 | import pytz 9 | import dateparser 10 | from dateparser.search import search_dates 11 | 12 | log = discord_logging.init_logging() 13 | 14 | import utils 15 | 16 | cal = parsedatetime.Calendar() 17 | 18 | 19 | def parse_time_old(time_string, base_time, timezone_string): 20 | time_string = re.split("http", time_string, 2, flags=re.IGNORECASE)[0] 21 | base_time = utils.datetime_as_timezone(base_time, timezone_string) 22 | 23 | try: 24 | date_time = dateparser.parse( 25 | time_string, 26 | languages=['en'], 27 | settings={"PREFER_DATES_FROM": 'future', "RELATIVE_BASE": base_time.replace(tzinfo=None)}) 28 | except Exception: 29 | date_time = None 30 | 31 | if date_time is None: 32 | try: 33 | results = search_dates( 34 | time_string, 35 | languages=['en'], 36 | settings={"PREFER_DATES_FROM": 'future', "RELATIVE_BASE": base_time.replace(tzinfo=None)}) 37 | if results is not None: 38 | temp_time = results[0][1] 39 | if temp_time.tzinfo is None: 40 | temp_time = utils.datetime_force_utc(temp_time) 
41 | 42 | if temp_time > base_time: 43 | date_time = results[0][1] 44 | else: 45 | date_time = None 46 | except Exception: 47 | date_time = None 48 | 49 | if date_time is None: 50 | try: 51 | date_time, result_code = cal.parseDT(time_string, base_time) 52 | if result_code == 0: 53 | date_time = None 54 | except Exception: 55 | date_time = None 56 | 57 | if date_time is None: 58 | return None 59 | 60 | if date_time.tzinfo is None: 61 | if timezone_string is not None: 62 | date_time = pytz.timezone(timezone_string).localize(date_time) 63 | else: 64 | date_time = utils.datetime_force_utc(date_time) 65 | 66 | date_time = utils.datetime_as_utc(date_time) 67 | 68 | return date_time 69 | 70 | 71 | def find_reminder_time_old(body, trigger): 72 | regex_string = r'(?:{trigger}.? +)(.*?)(?:\[|\n|\"|“|$|http)'.format(trigger=trigger) 73 | times = re.findall(regex_string, body, flags=re.IGNORECASE) 74 | if len(times) > 0 and times[0] != "": 75 | return times[0][:80] 76 | 77 | regex_string = r'(?:{trigger}.? 
*)(.*?)(?:\[|\n|\"|“|$|http)'.format(trigger=trigger) 78 | times = re.findall(regex_string, body, flags=re.IGNORECASE) 79 | if len(times) > 0 and times[0] != "": 80 | return times[0][:80] 81 | else: 82 | return None 83 | 84 | 85 | url = "https://api.pushshift.io/reddit/comment/search?&limit=1000&sort=desc&q=remindme&before=" 86 | 87 | previousEpoch = int(datetime.utcnow().timestamp()) 88 | count = 0 89 | breakOut = False 90 | current = utils.datetime_now() 91 | log.info(f"Current time: {utils.get_datetime_string(current)}") 92 | while True: 93 | newUrl = url+str(previousEpoch) 94 | json = requests.get(newUrl, headers={'User-Agent': "Remindme tester by /u/Watchful1"}) 95 | objects = json.json()['data'] 96 | if len(objects) == 0: 97 | break 98 | for comment in objects: 99 | if comment['author'] == "RemindMeBot": 100 | continue 101 | previousEpoch = comment['created_utc'] - 1 102 | time_string_old = find_reminder_time_old(comment['body'].lower(), static.TRIGGER_LOWER)#find_reminder_time_old(comment['body'].lower(), False) 103 | time_string_new = utils.find_reminder_time(comment['body'].lower(), static.TRIGGER_LOWER) 104 | if time_string_old is not None: 105 | date_time_old = parse_time_old(time_string_old, current, None) 106 | date_time_new = utils.parse_time(time_string_new, current, None) 107 | 108 | if date_time_old != date_time_new: 109 | log.info( 110 | f"{utils.get_datetime_string(date_time_old, False, '%Y-%m-%d %H:%M:%S %Z').ljust(23) if date_time_old is not None else 'None'.ljust(23)} " 111 | f"| {utils.get_datetime_string(date_time_new, False, '%Y-%m-%d %H:%M:%S %Z').ljust(23) if date_time_new is not None else 'None'.ljust(23)} " 112 | f"| {time_string_old[:60].ljust(60) if time_string_old is not None else 'None'.ljust(60)} " 113 | f"| {time_string_new.ljust(60) if time_string_new is not None else 'None'.ljust(60)} " 114 | f"| https://www.reddit.com{comment['permalink']} ") 115 | count += 1 116 | if count > 10000: 117 | breakOut = True 118 | break 119 | if 
breakOut: 120 | break 121 | -------------------------------------------------------------------------------- /scripts/count_subreddits.py: -------------------------------------------------------------------------------- 1 | import discord_logging 2 | import re 3 | from collections import defaultdict 4 | from datetime import timedelta 5 | 6 | log = discord_logging.init_logging() 7 | 8 | from database import Database 9 | from classes.comment import DbComment 10 | from classes.reminder import Reminder 11 | from classes.subreddit import Subreddit 12 | from classes.user import User 13 | import utils 14 | 15 | if __name__ == "__main__": 16 | database = Database() 17 | 18 | date_after = utils.datetime_now() - timedelta(days=180) 19 | reminders = database.session.query(Reminder) \ 20 | .filter(Reminder.requested_date > date_after)\ 21 | .all() 22 | 23 | sub_counts = defaultdict(int) 24 | count_reminders = 0 25 | count_from_comment = 0 26 | for reminder in reminders: 27 | count_reminders += 1 28 | groups = re.search(r"(?:reddit.com/r/)([\w-]+)", reminder.source) 29 | if groups: 30 | count_from_comment += 1 31 | sub_counts[groups[1]] += 1 32 | 33 | print(f"Reminders: {count_reminders}, from comment: {count_from_comment}") 34 | 35 | database.close() 36 | 37 | for subreddit, count_reminder in sorted(sub_counts.items(), key=lambda item: item[1] * -1): 38 | if count_reminder > 1: 39 | log.info(f"{subreddit} {count_reminder}") 40 | -------------------------------------------------------------------------------- /scripts/database_sandbox.py: -------------------------------------------------------------------------------- 1 | import discord_logging 2 | import sqlalchemy 3 | from datetime import datetime, timedelta 4 | import os 5 | 6 | log = discord_logging.init_logging() 7 | 8 | from database import Database 9 | from classes.comment import DbComment 10 | from classes.reminder import Reminder 11 | from classes.subreddit import Subreddit 12 | from classes.user import User 13 | 
import utils 14 | 15 | if __name__ == "__main__": 16 | backup_folder = r"D:\backup\RemindMeBot" 17 | date_str = "24-05-15 00:00" 18 | backup_before = datetime.strptime(date_str, "%y-%m-%d %H:%M") 19 | 20 | found = False 21 | for subdir, dirs, files in os.walk(backup_folder): 22 | for filename in reversed(files): 23 | if filename.endswith(".db"): 24 | input_path = os.path.join(subdir, filename) 25 | try: 26 | backup_date = datetime.strptime(filename[:-3], "%Y-%m-%d_%H-%M") 27 | if backup_date > backup_before: 28 | continue 29 | 30 | database = Database(override_location=input_path, readonly=True, quiet=True) 31 | user = database.get_or_add_user("SilynJaguar") 32 | reminders = database.session.query(Reminder).filter_by(user=user).all() 33 | #log.info(f"{backup_date}: {banned_count}") 34 | database.close() 35 | found = True 36 | break 37 | except (ValueError, sqlalchemy.exc.OperationalError): 38 | continue 39 | if found: 40 | break 41 | -------------------------------------------------------------------------------- /scripts/explain_parse.py: -------------------------------------------------------------------------------- 1 | import discord_logging 2 | import parsedatetime 3 | import pytz 4 | import dateparser 5 | import re 6 | import sys 7 | from datetime import timedelta 8 | from dateparser.search import search_dates 9 | 10 | log = discord_logging.init_logging() 11 | 12 | import utils 13 | 14 | cal = parsedatetime.Calendar() 15 | 16 | input_string = '''RemindMeRepeat! 
4:35pm''' 17 | base_time_string = None#"2024-09-18 16:34:41 -0700" 18 | created_utc = 1726702499 19 | timezone_string = "America/Los_Angeles" 20 | recurring = True 21 | 22 | if base_time_string: 23 | base_time = utils.datetime_as_timezone(utils.parse_datetime_string(base_time_string, False, '%Y-%m-%d %H:%M:%S %z'), "UTC") 24 | elif created_utc: 25 | base_time = utils.datetime_from_timestamp(created_utc) 26 | else: 27 | base_time = utils.datetime_now() 28 | 29 | format_string = '%Y-%m-%d %H:%M:%S %Z' 30 | 31 | log.info(f"Input string: {input_string}") 32 | time = utils.find_reminder_time(input_string, "remindmerepeat") 33 | if time is not None: 34 | log.info(f"Result: {time}") 35 | time_string = time 36 | else: 37 | log.info(f"No string found") 38 | sys.exit(0) 39 | 40 | log.info(f"Now: {base_time.strftime(format_string)}") 41 | # 42 | # try: 43 | date_time = dateparser.parse(time_string, languages=['en'], settings={"PREFER_DATES_FROM": 'future', "RELATIVE_BASE": base_time.replace(tzinfo=None)}) 44 | if date_time is not None: 45 | log.info(f"dateparser.parse: {date_time.strftime(format_string)}") 46 | # except Exception: 47 | # date_time = None 48 | 49 | try: 50 | results = search_dates(time_string, languages=['en'], settings={"PREFER_DATES_FROM": 'future', "RELATIVE_BASE": base_time.replace(tzinfo=None)}) 51 | if results is not None: 52 | temp_time = results[0][1] 53 | if temp_time.tzinfo is None: 54 | temp_time = utils.datetime_force_utc(temp_time) 55 | 56 | if temp_time > base_time: 57 | if date_time is None: 58 | date_time = results[0][1] 59 | log.info(f"search_dates: {date_time.strftime(format_string)}") 60 | else: 61 | log.info(f" search_dates would have found: {results[0][1].strftime(format_string)}") 62 | except Exception: 63 | date_time = None 64 | 65 | try: 66 | date_time_result, result_code = cal.parseDT(time_string, base_time) 67 | if result_code != 0: 68 | if date_time is None: 69 | date_time = date_time_result 70 | log.info(f"cal.parseDT: 
{date_time.strftime(format_string)}") 71 | else: 72 | log.info(f" cal.parseDT would have found: {date_time_result.strftime(format_string)}") 73 | except Exception: 74 | date_time = None 75 | 76 | 77 | if date_time is None: 78 | log.info(f"No datetime found") 79 | sys.exit(0) 80 | 81 | if date_time.tzinfo is None: 82 | if timezone_string is not None: 83 | date_time = pytz.timezone(timezone_string).localize(date_time) 84 | log.info(f"Converting to timezone: {timezone_string} : {date_time.strftime(format_string)}") 85 | else: 86 | date_time = utils.datetime_force_utc(date_time) 87 | log.info(f"Converting to utc: {date_time.strftime(format_string)}") 88 | 89 | date_time = utils.datetime_as_utc(date_time) 90 | log.info(f"Forcing utc: {date_time.strftime(format_string)}") 91 | 92 | if recurring: 93 | second_target_date = utils.next_recurring_time(time_string, date_time, timezone_string) 94 | log.info(f"Recurring next at: {second_target_date.strftime(format_string)}") 95 | third_target_date = utils.next_recurring_time(time_string, second_target_date, timezone_string) 96 | log.info(f"Recurring next at: {third_target_date.strftime(format_string)}") 97 | 98 | -------------------------------------------------------------------------------- /scripts/find_cakedays.py: -------------------------------------------------------------------------------- 1 | import discord_logging 2 | from datetime import datetime 3 | import static 4 | import requests 5 | 6 | log = discord_logging.init_logging() 7 | 8 | import utils 9 | 10 | url = "https://api.pushshift.io/reddit/comment/search?&limit=1000&sort=desc&q=cakeday&before=" 11 | 12 | 13 | def trigger_start_of_text(body, trigger): 14 | return body.startswith(f"{trigger}!") or body.startswith(f"!{trigger}") 15 | 16 | 17 | previousEpoch = int(datetime.utcnow().timestamp()) 18 | count = 0 19 | breakOut = False 20 | while True: 21 | newUrl = url+str(previousEpoch) 22 | json = requests.get(newUrl, headers={'User-Agent': "Remindme tester by 
/u/Watchful1"}) 23 | objects = json.json()['data'] 24 | if len(objects) == 0: 25 | break 26 | for comment in objects: 27 | if comment['author'] == "RemindMeBot": 28 | continue 29 | if comment['subreddit'] == "RemindMeBot": 30 | continue 31 | previousEpoch = comment['created_utc'] - 1 32 | if trigger_start_of_text(comment['body'].lower(), "cakeday"): 33 | log.info(f"https://www.reddit.com{comment['permalink']}") 34 | count += 1 35 | if count % 1000 == 0: 36 | log.info(f"{count} | {utils.get_datetime_string(utils.datetime_from_timestamp(comment['created_utc']))}") 37 | if count > 100000: 38 | breakOut = True 39 | break 40 | if breakOut: 41 | break 42 | -------------------------------------------------------------------------------- /scripts/find_remind_me.py: -------------------------------------------------------------------------------- 1 | import discord_logging 2 | from datetime import datetime, timedelta, timezone 3 | import static 4 | import requests 5 | import re 6 | 7 | log = discord_logging.init_logging(debug=True) 8 | 9 | import utils 10 | 11 | trigger_single = "remindme" 12 | trigger_split = "remind me" 13 | endEpoch = int((datetime.utcnow().replace(tzinfo=timezone.utc) - timedelta(hours=4)).timestamp()) 14 | 15 | # url = f"https://api.pushshift.io/reddit/comment/search?&limit=1000&sort=desc&q=" \ 16 | # f"{'|'.join([trigger, trigger_split.replace(' ', '%20')])}" \ 17 | # f"&before=" 18 | 19 | base_url = "https://api.pushshift.io/reddit/comment/search?&limit=1000&sort=desc&q={}&before=" 20 | 21 | 22 | def trigger_start_of_text(body, trigger): 23 | return body.startswith(f"{trigger}!") or body.startswith(f"!{trigger}") 24 | 25 | 26 | def trigger_start_of_line(body, trigger): 27 | for line in body.splitlines(): 28 | if line.startswith(f"{trigger}!") or line.startswith(f"!{trigger}"): 29 | return True 30 | return False 31 | 32 | 33 | def trigger_in_text(body, trigger): 34 | return f"{trigger}!" 
in body or f"!{trigger}" in body 35 | 36 | 37 | def parse_comment(body, trigger, comment_created): 38 | time_string = utils.find_reminder_time(body, trigger) 39 | time_string = time_string.strip() if time_string is not None else None 40 | target_date = None 41 | if time_string is not None: 42 | target_date = utils.parse_time(time_string, comment_created, None) 43 | return time_string, target_date 44 | 45 | 46 | def process_comment(comment): 47 | body = comment['body'].lower().strip() 48 | 49 | single_trigger_found = False 50 | single_string_found = False 51 | single_date_found = False 52 | if trigger_in_text(body, trigger_single): 53 | single_trigger_found = True 54 | 55 | if single_trigger_found: 56 | time_string, target_date = parse_comment(comment['body'], trigger_single, utils.datetime_from_timestamp(comment['created_utc'])) 57 | if time_string is not None: 58 | single_string_found = True 59 | if target_date is not None: 60 | single_date_found = True 61 | 62 | split_trigger_found = False 63 | split_string_found = False 64 | split_date_found = False 65 | if trigger_start_of_line(body, trigger_split): 66 | split_trigger_found = True 67 | 68 | if split_trigger_found: 69 | time_string, target_date = parse_comment(comment['body'], trigger_split, utils.datetime_from_timestamp(comment['created_utc'])) 70 | if time_string is not None: 71 | split_string_found = True 72 | if target_date is not None: 73 | split_date_found = True 74 | 75 | return single_trigger_found, single_string_found, single_date_found, split_trigger_found, split_string_found, split_date_found 76 | 77 | 78 | def process_comments(url): 79 | previousEpoch = int(datetime.utcnow().timestamp()) 80 | breakOut = False 81 | count = 0 82 | single_trigger_count = 0 83 | single_trigger_string_count = 0 84 | single_trigger_date_count = 0 85 | split_trigger_count = 0 86 | split_trigger_string_count = 0 87 | split_trigger_date_count = 0 88 | count_none = 0 89 | while True: 90 | newUrl = url+str(previousEpoch) 91 | 
json = requests.get(newUrl, headers={'User-Agent': "Remindme tester by /u/Watchful1"}) 92 | objects = json.json()['data'] 93 | if len(objects) == 0: 94 | break 95 | for comment in objects: 96 | previousEpoch = comment['created_utc'] - 1 97 | if comment['author'] not in static.BLACKLISTED_ACCOUNTS and comment['subreddit'] != "RemindMeBot": 98 | single_trigger_found, single_string_found, single_date_found, split_trigger_found, split_string_found, split_date_found = process_comment(comment) 99 | if single_trigger_found: 100 | single_trigger_count += 1 101 | if single_string_found: 102 | single_trigger_string_count += 1 103 | if single_date_found: 104 | single_trigger_date_count += 1 105 | if split_trigger_found: 106 | split_trigger_count += 1 107 | if split_string_found: 108 | split_trigger_string_count += 1 109 | if split_date_found: 110 | split_trigger_date_count += 1 111 | if not single_trigger_found and not single_string_found and not single_date_found: 112 | count_none += 1 113 | 114 | count += 1 115 | # if count % 1000 == 0: 116 | # log.info(f"{count} | {utils.get_datetime_string(utils.datetime_from_timestamp(comment['created_utc']))}") 117 | if previousEpoch < endEpoch: 118 | breakOut = True 119 | break 120 | if breakOut: 121 | break 122 | 123 | log.info(f"{single_trigger_count}|{single_trigger_string_count}|{single_trigger_date_count} - {split_trigger_count}|{split_trigger_string_count}|{split_trigger_date_count} {count_none}") 124 | 125 | 126 | process_comments("https://api.pushshift.io/reddit/comment/search?&limit=1000&sort=desc&q=remindme&before=") 127 | process_comments("https://api.pushshift.io/reddit/comment/search?&limit=1000&sort=desc&q=remind%20me&before=") 128 | process_comments("https://api.pushshift.io/reddit/comment/search?&limit=1000&sort=desc&q=remindme|cakeday|remindmerepeat|%22remind%20me%22&before=") 129 | 130 | -------------------------------------------------------------------------------- /scripts/iterate_backups.py: 
-------------------------------------------------------------------------------- 1 | import discord_logging 2 | import sqlalchemy 3 | from datetime import datetime, timedelta 4 | import os 5 | 6 | log = discord_logging.init_logging() 7 | 8 | from database import Database 9 | from classes.comment import DbComment 10 | from classes.reminder import Reminder 11 | from classes.subreddit import Subreddit 12 | from classes.user import User 13 | import utils 14 | 15 | if __name__ == "__main__": 16 | backup_folder = r"D:\backup\RemindMeBot" 17 | 18 | for subdir, dirs, files in os.walk(backup_folder): 19 | for filename in files: 20 | if filename.endswith(".db"): 21 | input_path = os.path.join(subdir, filename) 22 | try: 23 | backup_date = datetime.strptime(filename[:-3], "%Y-%m-%d_%H-%M") 24 | 25 | database = Database(override_location=input_path, readonly=True, quiet=True) 26 | banned_count = database.session.query(Subreddit).filter_by(banned=True).count() 27 | log.info(f"{backup_date}: {banned_count}") 28 | database.close() 29 | except (ValueError, sqlalchemy.exc.OperationalError): 30 | continue 31 | -------------------------------------------------------------------------------- /scripts/iterate_backups_longest.py: -------------------------------------------------------------------------------- 1 | import discord_logging 2 | import sqlalchemy 3 | from datetime import datetime, timedelta 4 | import os 5 | 6 | log = discord_logging.init_logging() 7 | 8 | from database import Database 9 | from classes.comment import DbComment 10 | from classes.reminder import Reminder 11 | from classes.subreddit import Subreddit 12 | from classes.user import User 13 | import utils 14 | 15 | if __name__ == "__main__": 16 | backup_folder = r"D:\backup\RemindMeBot" 17 | 18 | for subdir, dirs, files in os.walk(backup_folder): 19 | for filename in files: 20 | if filename.endswith(".db"): 21 | input_path = os.path.join(subdir, filename) 22 | try: 23 | backup_date = datetime.strptime(filename[:-3], 
"%Y-%m-%d_%H-%M") 24 | 25 | database = Database(override_location=input_path, readonly=True, quiet=True) 26 | banned_count = database.session.query(Subreddit).filter_by(banned=True).count() 27 | log.info(f"{backup_date}: {banned_count}") 28 | database.close() 29 | except (ValueError, sqlalchemy.exc.OperationalError): 30 | continue 31 | -------------------------------------------------------------------------------- /scripts/migration_1.py: -------------------------------------------------------------------------------- 1 | import sqlite3 2 | import discord_logging 3 | import time 4 | import os 5 | 6 | log = discord_logging.init_logging() 7 | 8 | from classes.reminder import Reminder 9 | import utils 10 | from database_old import DatabaseOld 11 | 12 | 13 | old_database = "databaseOld.db" 14 | new_database = "database.db" 15 | 16 | log.info(f"Importing from {old_database} to {new_database}") 17 | 18 | old_db_conn = sqlite3.connect(old_database) 19 | old_c = old_db_conn.cursor() 20 | 21 | if os.path.exists(new_database): 22 | log.info("Deleting existing database") 23 | os.remove(new_database) 24 | new_db_conn = sqlite3.connect(new_database) 25 | new_c = new_db_conn.cursor() 26 | new_c.execute(Database.tables['reminders']) 27 | 28 | default_comment = "Hello, I'm here to remind you to see the parent comment!" 
29 | info_page = "http://np.reddit.com/r/RemindMeBot/comments/24duzp/remindmebot_info/" 30 | 31 | startTime = time.perf_counter() 32 | loop = 0 33 | count_default_comment = 0 34 | count_info_page = 0 35 | for row in old_c.execute(''' 36 | SELECT permalink, message, new_date, origin_date, userID 37 | FROM message_date 38 | '''): 39 | loop += 1 40 | reminder = Reminder( 41 | source=row[0], 42 | target_date=utils.parse_datetime_string(row[2]), 43 | message=row[1], 44 | user=row[4], 45 | requested_date=utils.parse_datetime_string(row[3]) 46 | ) 47 | try: 48 | if isinstance(reminder.message, (bytes, bytearray)): 49 | reminder.message = reminder.message.decode("utf-8") 50 | reminder.message = reminder.message.strip(' "') 51 | if reminder.message == default_comment: 52 | count_default_comment += 1 53 | reminder.message = None 54 | 55 | if isinstance(reminder.source, (bytes, bytearray)): 56 | reminder.source = reminder.source.decode("utf-8") 57 | if reminder.source == info_page: 58 | count_info_page += 1 59 | reminder.source = "Unfortunately I couldn't find a source for this reminder. 
" \ 60 | "This happens sometimes with really old reminders" 61 | 62 | new_c.execute(''' 63 | INSERT INTO reminders 64 | (Source, RequestedDate, TargetDate, Message, User, Defaulted) 65 | VALUES (?, ?, ?, ?, ?, 0) 66 | ''', ( 67 | reminder.source, 68 | utils.get_datetime_string(reminder.requested_date), 69 | utils.get_datetime_string(reminder.target_date), 70 | reminder.message, 71 | reminder.user)) 72 | except Exception as err: 73 | log.info(err) 74 | log.info(reminder) 75 | if loop % 10000 == 0: 76 | log.info(f"{loop}: {int(time.perf_counter() - startTime)}s : {count_default_comment} : {count_info_page}") 77 | 78 | new_db_conn.commit() 79 | new_db_conn.close() 80 | old_db_conn.close() 81 | log.info(f"{loop}: {int(time.perf_counter() - startTime)}s : {count_default_comment} : {count_info_page}") 82 | -------------------------------------------------------------------------------- /scripts/migration_2.py: -------------------------------------------------------------------------------- 1 | import discord_logging 2 | import re 3 | 4 | log = discord_logging.init_logging() 5 | 6 | import reddit_class 7 | from database_old import DatabaseOld 8 | 9 | reddit = reddit_class.Reddit("Watchful1BotTest", False) 10 | database = Database() 11 | 12 | count_no_source = 0 13 | count_comment_id = 0 14 | count_comment_missing = 0 15 | count_comment_else = 0 16 | count_reminders_updated = 0 17 | reminders = database.get_all_reminders() 18 | for i, reminder in enumerate(reminders): 19 | if i % 1000 == 0: 20 | log.info(f"{i}/{len(reminders)}: {count_no_source} : {count_comment_id} : {count_comment_missing} : {count_comment_else}") 21 | if "reddit.com" not in reminder.source: 22 | changed = False 23 | if "Unfortunately I couldn't find a source for this reminder. 
This happens sometimes with really " \ 24 | "old reminders" in reminder.source: 25 | count_no_source += 1 26 | reminder.source = "No source" 27 | changed = True 28 | 29 | else: 30 | match = re.search(r"^(\w{7})$", reminder.source) 31 | if match is not None: 32 | comment = reddit.get_comment(match.group()) 33 | try: 34 | permalink = f"https://www.reddit.com{comment.permalink}" 35 | reminder.source = permalink 36 | count_comment_id += 1 37 | changed = True 38 | 39 | except Exception: 40 | count_comment_missing += 1 41 | 42 | else: 43 | count_comment_else += 1 44 | 45 | if changed: 46 | database.add_reminder(reminder) 47 | count_reminders_updated += 1 48 | 49 | log.info(f"{len(reminders)}/{len(reminders)}: {count_no_source} : {count_comment_id} : {count_comment_missing} : {count_comment_else}") 50 | log.info(f"Reminders updated: {count_reminders_updated}") 51 | -------------------------------------------------------------------------------- /scripts/migration_4.py: -------------------------------------------------------------------------------- 1 | import discord_logging 2 | 3 | log = discord_logging.init_logging() 4 | 5 | from database import Database 6 | from classes.reminder import Reminder 7 | 8 | database = Database() 9 | 10 | comments = { 11 | "dp9zjr0": "/r/The_Donald/comments/7af5i7/life_comes_at_you_fast/dp9zjr0/", 12 | "dpavxen": "/r/The_Donald/comments/7ajz8d/this_has_not_aged_well/dpavxen/", 13 | "dpgq2cc": "/r/The_Donald/comments/7bb4se/persistance/dpgq2cc/", 14 | "dpmi1ia": "/r/CringeAnarchy/comments/7c0t7w/seen_on_some_artists_instagram_story/dpmi1ia/", 15 | "dpoxm7j": "/r/The_Donald/comments/7cb255/immigration_without_assimilation_is_invasion/dpoxm7j/", 16 | "dpqdmkm": "/r/milliondollarextreme/comments/7chna0/1968/dpqdmkm/", 17 | "dpqeg1a": "/r/milliondollarextreme/comments/7chna0/1968/dpqeg1a/", 18 | "dpuimx7": "/r/The_Donald/comments/7d1q7t/sr_16_sr_20_jr_24_jr_28_eric_32_eric_36_ivanka_40/dpuimx7/", 19 | } 20 | 21 | www_reminders = 0 22 | 
empty_reminders = 0 23 | starting_space_reminders = 0 24 | comment_id_reminders = 0 25 | test_reminders = 0 26 | for reminder in database.session.query(Reminder).all(): 27 | if not reminder.source.startswith("http") and reminder.source != "No source": 28 | if reminder.source.startswith("www"): 29 | reminder.source = "https://"+reminder.source 30 | www_reminders += 1 31 | else: 32 | if reminder.source == "": 33 | reminder.source = "No source" 34 | empty_reminders += 1 35 | elif reminder.source.startswith(" http"): 36 | reminder.source = reminder.source[1:] 37 | starting_space_reminders += 1 38 | elif reminder.source in comments: 39 | reminder.source = f"https://www.reddit.com{comments[reminder.source]}" 40 | comment_id_reminders += 1 41 | elif reminder.source == "TEST" or len(reminder.source) > 160: 42 | database.session.delete(reminder) 43 | test_reminders += 1 44 | else: 45 | log.info(reminder.source) 46 | 47 | database.close() 48 | log.info(f"www: {www_reminders}") 49 | log.info(f"empty: {empty_reminders}") 50 | log.info(f"space: {starting_space_reminders}") 51 | log.info(f"comment id: {comment_id_reminders}") 52 | log.info(f"test: {test_reminders}") 53 | -------------------------------------------------------------------------------- /scripts/pushshift.py: -------------------------------------------------------------------------------- 1 | import discord_logging 2 | from datetime import timedelta 3 | import requests 4 | import time 5 | 6 | log = discord_logging.init_logging() 7 | 8 | import utils 9 | 10 | USER_AGENT = "Pushshift tester by u/Watchful1" 11 | LAST_REQUEST = utils.datetime_now() - timedelta(seconds=1) 12 | 13 | 14 | def get_comments(date_time): 15 | global LAST_REQUEST 16 | seconds_since_last_request = (utils.datetime_now() - LAST_REQUEST).total_seconds() 17 | if seconds_since_last_request < 1: 18 | log.info(f"Sleeping: {1 - seconds_since_last_request}") 19 | time.sleep(1 - seconds_since_last_request) 20 | 21 | url = 
f"https://api.pushshift.io/reddit/comment/search?limit=500&sort=desc&before={int(date_time.timestamp()) + 1}" 22 | LAST_REQUEST = utils.datetime_now() 23 | for i in range(10): 24 | try: 25 | response = requests.get(url, headers={'User-Agent': USER_AGENT}, timeout=10) 26 | if response.status_code != 200: 27 | log.warning(f"Bad response code, trying again: {response.status_code}") 28 | time.sleep(5) 29 | continue 30 | comments = response.json()['data'] 31 | return comments 32 | except Exception as err: 33 | log.warning(f"Exception in request, trying again: {err}") 34 | time.sleep(5) 35 | continue 36 | log.warning(f"Hit 10 exceptions, giving up") 37 | return None 38 | 39 | 40 | end_time = utils.parse_datetime_string("2021-01-01 00:00:00") 41 | start_time = utils.parse_datetime_string("2021-04-01 00:00:00") 42 | log.info(f"Counting comments from {utils.get_datetime_string(start_time, False)} to {utils.get_datetime_string(end_time, False)}, {int((start_time - end_time).total_seconds())} seconds") 43 | current_time = start_time 44 | 45 | current_count = 0 46 | while current_time > end_time: 47 | current_comments = get_comments(current_time) 48 | if current_comments is None: 49 | break 50 | ingest_delay_seconds = int((utils.datetime_from_timestamp(current_comments[0]['retrieved_on']) - utils.datetime_from_timestamp(current_comments[0]['created_utc'])).total_seconds()) 51 | for comment in current_comments: 52 | comment_time = utils.datetime_from_timestamp(comment['created_utc']) 53 | if comment_time != current_time: 54 | log.info(f"{utils.get_datetime_string(current_time)} {current_count} {ingest_delay_seconds}") 55 | current_count = 0 56 | current_time = current_time - timedelta(minutes=15) 57 | break 58 | current_count += 1 59 | -------------------------------------------------------------------------------- /scripts/pushshift_beta_integrity.py: -------------------------------------------------------------------------------- 1 | import discord_logging 2 | from datetime 
def get_comments(date_time):
	"""Fetch up to 250 comments created at or before date_time from the beta endpoint.

	Throttles to at most one request per second via the module-level LAST_REQUEST
	timestamp, retries up to 10 times on bad status codes or request exceptions,
	and returns the list of comment dicts, or None if every attempt failed.
	"""
	global LAST_REQUEST
	elapsed = (utils.datetime_now() - LAST_REQUEST).total_seconds()
	if elapsed < 1:
		# Stay under one request per second against the API.
		time.sleep(1 - elapsed)

	url = f"https://beta.pushshift.io/search/reddit/comments?size=250&sort=desc&max_created_utc={int(date_time.timestamp())}"
	LAST_REQUEST = utils.datetime_now()
	for _ in range(10):
		try:
			response = requests.get(url, headers={'User-Agent': USER_AGENT}, timeout=10)
			if response.status_code == 200:
				return response.json()['data']
			log.warning(f"Bad response code, trying again: {response.status_code} : {url}")
		except Exception as err:
			log.warning(f"Exception in request, trying again: {err}")
		# Back off briefly before the next attempt, same as on an exception.
		time.sleep(5)
	log.warning(f"Hit 10 exceptions, giving up")
	return None
import discord_logging
from datetime import timedelta
import requests
import time

log = discord_logging.init_logging()

import utils

USER_AGENT = "Pushshift tester by u/Watchful1"
# The point in time the beta ingest started from; prod has to catch up to this.
BETA_START = utils.datetime_from_timestamp(1622071192)
START_TIME = utils.datetime_now()
PROD_START = None

# Poll the prod endpoint every 10 seconds and log how fast its ingest is
# advancing relative to wall-clock time, estimating when it will reach BETA_START.
while True:
	url = "https://api.pushshift.io/reddit/comment/search"
	comments = requests.get(url, headers={'User-Agent': USER_AGENT}, timeout=10).json()['data']
	comment_time = utils.datetime_from_timestamp(comments[0]['created_utc'])

	if PROD_START is None:
		# First observation: everything is measured relative to this point.
		PROD_START = comment_time

	change_from_prod_start = comment_time - PROD_START
	seconds_since_start = utils.datetime_now() - START_TIME
	# BUGFIX: use total_seconds() rather than the .seconds attribute —
	# .seconds drops the days component and is 0 on the first pass, which
	# made the division below raise ZeroDivisionError immediately.
	elapsed_seconds = seconds_since_start.total_seconds()
	if elapsed_seconds > 0:
		ratio = change_from_prod_start.total_seconds() / elapsed_seconds
	else:
		ratio = 0
	if ratio > 0:
		catchup_seconds = (BETA_START - comment_time).total_seconds() / ratio
	else:
		catchup_seconds = 1

	log.info(f"{utils.get_datetime_string(comment_time)} - {utils.get_datetime_string(BETA_START)} : "
		f"{BETA_START - comment_time} : {change_from_prod_start} : {seconds_since_start} | "
		f"{ratio:.2} | {catchup_seconds} : {timedelta(seconds=catchup_seconds)}")
	time.sleep(10)
from sqlalchemy import Column, ForeignKey, Integer, String
from database import Base


class DbComment(Base):
	"""ORM row for a confirmation comment the bot has posted in a thread.

	One row per thread the bot has replied in; used to avoid replying twice
	in the same thread and to edit the comment when the reminder count changes.
	"""
	__tablename__ = 'comments'

	id = Column(Integer, primary_key=True)
	# Base36 reddit thread id the bot's comment lives in.
	thread_id = Column(String(12), nullable=False)
	# Base36 id of the bot's own confirmation comment.
	comment_id = Column(String(12), nullable=False)
	# The reminder this comment confirms.
	reminder_id = Column(Integer, ForeignKey('reminders.id'), nullable=False)
	# Username of the requester.
	user = Column(String(80), nullable=False)
	# Permalink/source string copied from the reminder.
	source = Column(String(400), nullable=False)
	# Number of "others clicked" duplicates currently rendered in the comment.
	current_count = Column(Integer, nullable=False)

	def __init__(
		self,
		thread_id,
		comment_id,
		reminder_id,
		user,
		source,
		current_count=0
	):
		self.thread_id = thread_id
		self.comment_id = comment_id
		self.reminder_id = reminder_id
		self.user = user
		self.source = source
		self.current_count = current_count
from sqlalchemy import Column, ForeignKey, Integer, String
from database import Base


class KeyValue(Base):
	"""ORM row for a simple string key/value store (e.g. last-seen timestamps)."""
	__tablename__ = 'key_value'

	key = Column(String(32), primary_key=True)
	value = Column(String(200))

	def __init__(
		self,
		key,
		value
	):
		self.key = key
		self.value = value
	@staticmethod
	def build_reminder(
		source,
		message,
		user,
		requested_date,
		time_string,
		recurring=False,
		target_date=None,
		allow_default=True
	):
		"""Parse a time string and construct a Reminder for the given user.

		Returns a (reminder, result_message) tuple. reminder is None when the
		request is rejected (unparseable time with defaulting disallowed, a
		target in the past, or an invalid recurrence); result_message carries
		the user-facing explanation, and may be non-None even on success
		(e.g. when the time could not be parsed and we defaulted to one day).
		"""
		result_message = None
		defaulted = False
		time_string = time_string.strip() if time_string is not None else None
		if target_date is None:
			if time_string is not None:
				target_date = utils.parse_time(time_string, requested_date, user.timezone)
				log.debug(f"Target date: {utils.get_datetime_string(target_date)}")

				if target_date is None:
					# Time string present but unparseable.
					if allow_default:
						result_message = f"Could not parse date: \"{time_string}\", defaulting to one day"
						log.info(result_message)
						defaulted = True
						target_date = utils.parse_time("1 day", requested_date, None)

					else:
						result_message = f"Could not parse date: \"{time_string}\", defaulting not allowed"
						log.info(result_message)
						return None, result_message

				elif target_date < requested_date:
					# Parsed fine, but resolves to before the request itself.
					result_message = f"This time, {time_string}, was interpreted as " \
						f"{utils.get_datetime_string(target_date)}, which is in the past"
					log.info(result_message)
					return None, result_message

			else:
				# No time string found in the message at all.
				if allow_default:
					result_message = "Could not find a time in message, defaulting to one day"
					log.info(result_message)
					defaulted = True
					target_date = utils.parse_time("1 day", requested_date, None)

				else:
					result_message = f"Could not find a time in message, defaulting not allowed"
					log.info(result_message)
					return None, result_message

		if recurring:
			if defaulted:
				# A defaulted "1 day" was never what the user asked to repeat.
				second_result_message = "Can't use a default for a recurring reminder"
				log.info(second_result_message)
				return None, result_message + "\n\n" + second_result_message

			else:
				# Sanity-check the recurrence by applying it once: it must
				# produce a date strictly after the first target date.
				second_target_date = utils.next_recurring_time(time_string, target_date, user.timezone)
				log.debug(f"Second target date: {utils.get_datetime_string(second_target_date)}")
				if second_target_date == target_date:
					result_message = f"I've got {utils.get_datetime_string(target_date)} for your first date, but when" \
						f" I applied '{time_string}', I got the same date rather than one after it."
					log.info(result_message)
					return None, result_message

				elif second_target_date < target_date:
					result_message = f"I've got {utils.get_datetime_string(target_date)} for your first date, but when" \
						f" I applied '{time_string}', I got a date before that rather than one after it."
					log.info(result_message)
					return None, result_message

		reminder = Reminder(
			source=source,
			message=message,
			user=user,
			requested_date=requested_date,
			target_date=target_date,
			recurrence=time_string if recurring else None,
			defaulted=defaulted
		)

		return reminder, result_message
replying to this so late. There's a [detailed post about why I did here](") 157 | bldr.append("https://www.reddit.com/r/RemindMeBot/comments/13jostq/remindmebot_is_now_replying_to_comments_again/") 158 | bldr.append(").") 159 | bldr.append("\n\n") 160 | 161 | if result_message is not None: 162 | bldr.append(result_message) 163 | bldr.append("\n\n") 164 | 165 | if self.is_cakeday(): 166 | bldr.append("I will message you every year at ") 167 | bldr.append(utils.render_time(self.target_date, self.user, "%m-%d %H:%M:%S %Z")) 168 | bldr.append(" to remind you of your cakeday.") 169 | 170 | else: 171 | if self.target_date < utils.datetime_now(): 172 | bldr.append("I will be messaging you on ") 173 | else: 174 | bldr.append("I will be messaging you in ") 175 | bldr.append(utils.render_time_diff(utils.datetime_now(), self.target_date)) 176 | bldr.append(" on ") 177 | bldr.append(utils.render_time(self.target_date, self.user)) 178 | if self.recurrence is not None: 179 | bldr.append(" and then every `") 180 | bldr.append(self.recurrence) 181 | bldr.append("`") 182 | bldr.append(" to remind you") 183 | if self.message is None: 184 | bldr.append(" of [**this link**](") 185 | bldr.append(utils.check_append_context_to_link(self.source)) 186 | bldr.append(")") 187 | else: 188 | bldr.append(": ") 189 | bldr.append(self.message) 190 | 191 | if comment_return is not None and comment_return in ( 192 | ReturnType.FORBIDDEN, 193 | ReturnType.THREAD_LOCKED, 194 | ReturnType.DELETED_COMMENT, 195 | ReturnType.RATELIMIT, 196 | ReturnType.THREAD_REPLIED 197 | ): 198 | bldr.append("\n\n") 199 | bldr.append("I'm sending this to you as a message instead of replying to your comment because ") 200 | if comment_return == ReturnType.FORBIDDEN: 201 | bldr.append("I'm not allowed to reply in this subreddit.") 202 | elif comment_return == ReturnType.THREAD_LOCKED: 203 | bldr.append("the thread is locked.") 204 | elif comment_return == ReturnType.DELETED_COMMENT: 205 | bldr.append("it was deleted 
	def render_comment_confirmation(self, thread_id, count_duplicates=0, comment_age_seconds=0):
		"""Build the public confirmation comment as a list of string fragments.

		thread_id enables the "parent commenter can delete" footer;
		count_duplicates renders the "N OTHERS CLICKED" variant of the
		click-to-be-reminded link; comment_age_seconds > 1 hour prepends
		the late-reply apology. Returns the fragment list (joined by caller).
		"""
		bldr = utils.str_bldr()
		if comment_age_seconds > (60 * 60):
			# Apology used while catching up on an old backlog.
			bldr.append("I'm really sorry about replying to this so late. There's a [detailed post about why I did here](")
			bldr.append("https://www.reddit.com/r/RemindMeBot/comments/13jostq/remindmebot_is_now_replying_to_comments_again/")
			bldr.append(").")
			bldr.append("\n\n")

		if self.defaulted:
			bldr.append("**Defaulted to one day.**\n\n")

		if self.user.timezone is not None:
			bldr.append("Your [default time zone](")
			bldr.append(static.INFO_POST_SETTINGS)
			bldr.append(") is set to `")
			bldr.append(self.user.timezone)
			bldr.append("`. ")

		if self.is_cakeday():
			bldr.append("I will [message you every year](")
			bldr.append(static.INFO_POST_CAKEDAY)
			bldr.append(") at ")
			bldr.append(utils.render_time(self.target_date, self.user, "%m-%d %H:%M:%S %Z"))
			bldr.append(" to remind you of your cakeday.")

		else:
			# "on <date>" when the target is already past (or defaulted),
			# otherwise "in <diff> on <date>".
			if self.defaulted or self.target_date < utils.datetime_now():
				bldr.append("I will be messaging you on ")
			else:
				bldr.append("I will be messaging you in ")
				bldr.append(utils.render_time_diff(self.requested_date, self.target_date))
				bldr.append(" on ")
			bldr.append(utils.render_time(self.target_date, self.user))
			if self.recurrence is not None:
				bldr.append(" [and then every](")
				bldr.append(static.INFO_POST_REPEAT)
				bldr.append(") `")
				bldr.append(self.recurrence)
				bldr.append("`")
			bldr.append(" to remind you of [**this link**](")
			bldr.append(utils.check_append_context_to_link(self.source))
			bldr.append(")")

		bldr.append("\n\n")

		# Pre-filled PM link so other readers can subscribe to the same reminder.
		bldr.append("[**")
		if count_duplicates > 0:
			bldr.append(str(count_duplicates))
			bldr.append(" OTHERS CLICKED")
		else:
			bldr.append("CLICK")
		bldr.append(" THIS LINK**](")
		bldr.append(utils.build_message_link(
			static.ACCOUNT_NAME,
			"Reminder",
			f"[{self.source}]\n\n{static.TRIGGER}! "
			f"{utils.get_datetime_string(self.target_date, format_string='%Y-%m-%d %H:%M:%S %Z')}"
		))
		bldr.append(") to send a PM to also be reminded and to reduce spam.")

		if thread_id is not None:
			# Pre-filled PM link letting the parent commenter remove the bot's comment.
			bldr.append("\n\n")
			bldr.append("^(Parent commenter can ) [^(delete this message to hide from others.)](")
			bldr.append(utils.build_message_link(
				static.ACCOUNT_NAME,
				"Delete Comment",
				f"Delete! {thread_id}"
			))
			bldr.append(")")

		return bldr
I'll message you again in `") 326 | bldr.append(self.recurrence) 327 | bldr.append("`, which is ") 328 | bldr.append(utils.render_time(utils.next_recurring_time(self.recurrence, self.target_date, self.user.timezone), self.user)) 329 | bldr.append(".") 330 | 331 | bldr.append("\n\n") 332 | 333 | bldr.append("[Click here](") 334 | bldr.append(utils.build_message_link(static.ACCOUNT_NAME, "Remove", f"Remove! {self.id}")) 335 | bldr.append(") to delete this reminder.") 336 | 337 | else: 338 | bldr.append("[Click here](") 339 | bldr.append(utils.build_message_link( 340 | static.ACCOUNT_NAME, 341 | "Reminder", 342 | f"[{(self.message[:500] if self.message is not None else self.source)}]\n\n{static.TRIGGER}! " 343 | )) 344 | bldr.append(") and set the time after the ") 345 | bldr.append(static.TRIGGER) 346 | bldr.append(" command to be reminded of the original comment again.") 347 | 348 | return bldr 349 | -------------------------------------------------------------------------------- /src/classes/stat.py: -------------------------------------------------------------------------------- 1 | from sqlalchemy import Column, ForeignKey, Integer, String 2 | from database import Base 3 | from database.UtcDateTime import UtcDateTime 4 | 5 | import utils 6 | 7 | 8 | class DbStat(Base): 9 | __tablename__ = 'stats' 10 | 11 | id = Column(Integer, primary_key=True) 12 | subreddit = Column(String(80), nullable=False) 13 | thread_id = Column(String(12), nullable=False) 14 | comment_id = Column(String(12)) 15 | initial_date = Column(UtcDateTime) 16 | count_reminders = Column(Integer, nullable=False) 17 | #thread_title = Column(String(200)) 18 | 19 | title = None 20 | answered = False 21 | count_pending_reminders = None 22 | 23 | def __init__( 24 | self, 25 | subreddit, 26 | thread_id, 27 | comment_id, 28 | count_reminders=1 29 | ): 30 | self.subreddit = subreddit 31 | self.thread_id = thread_id 32 | self.comment_id = comment_id 33 | self.count_reminders = count_reminders 34 | 35 | def 
from sqlalchemy import Column, String, Integer
from database import Base


class User(Base):
	"""ORM row for a reddit user who has created at least one reminder."""
	__tablename__ = 'users'

	id = Column(Integer, primary_key=True)
	# Reddit username, without the u/ prefix.
	name = Column(String(80), nullable=False)
	# Preferred timezone name; None means the bot's default (UTC).
	timezone = Column(String(80))
	# Preferred datetime format string; None means the default format.
	time_format = Column(String(80))
	# Recurring reminders delivered since the user last interacted; used to
	# auto-cancel recurrences once a limit is exceeded.
	recurring_sent = Column(Integer, nullable=False)

	def __init__(
		self,
		name,
		timezone=None,
		time_format=None,
		recurring_sent=0
	):
		self.name = name
		self.timezone = timezone
		self.time_format = time_format
		self.recurring_sent = recurring_sent
def database_set_seen(database, comment_seen):
	"""Persist the timestamp of the most recently processed comment."""
	formatted = comment_seen.strftime("%Y-%m-%d %H:%M:%S")
	database.save_keystore("comment_timestamp", formatted)


def database_get_seen(database):
	"""Load the last-processed comment timestamp, seeding it with now when absent."""
	stored = database.get_keystore("comment_timestamp")
	if stored is not None:
		return utils.parse_datetime_string(stored)
	log.warning("Comment time not in database, returning now")
	current = utils.datetime_now()
	database_set_seen(database, current)
	return current


def trigger_start_of_line(body, trigger):
	"""Return True when any line of body begins with 'trigger!' or '!trigger'."""
	prefixes = (f"{trigger}!", f"!{trigger}")
	return any(line.startswith(prefixes) for line in body.splitlines())


def trigger_in_text(body, trigger):
	"""Return True when 'trigger!' or '!trigger' appears anywhere in body."""
	return any(marker in body for marker in (f"{trigger}!", f"!{trigger}"))
static.TRIGGER_CAKEDAY_LOWER 65 | elif trigger_start_of_line(body, static.TRIGGER_SPLIT_LOWER): 66 | log.debug("Regular split comment") 67 | trigger = static.TRIGGER_SPLIT_LOWER 68 | allow_default = False 69 | else: 70 | log.debug("Command not in comment") 71 | return None, None 72 | 73 | target_date = None 74 | if cakeday: 75 | if database.user_has_cakeday_reminder(comment.author): 76 | log.info("Cakeday already exists") 77 | return None, None 78 | 79 | target_date = utils.get_next_anniversary(reddit.get_user_creation_date(comment.author)) 80 | message_text = static.CAKEDAY_MESSAGE 81 | time = "1 year" 82 | 83 | else: 84 | time = utils.find_reminder_time(comment.body, trigger) 85 | message_text = utils.find_reminder_message(comment.body, trigger) 86 | 87 | reminder, result_message = Reminder.build_reminder( 88 | source=utils.reddit_link(comment.permalink), 89 | message=message_text, 90 | user=database.get_or_add_user(comment.author), 91 | requested_date=utils.datetime_from_timestamp(comment.created_utc), 92 | time_string=time, 93 | recurring=recurring, 94 | target_date=target_date, 95 | allow_default=allow_default 96 | ) 97 | if reminder is None: 98 | return None, None 99 | 100 | if cakeday: 101 | counters.replies.labels(source='comment', type='cake').inc() 102 | elif recurring: 103 | counters.replies.labels(source='comment', type='repeat').inc() 104 | elif not allow_default: 105 | counters.replies.labels(source='comment', type='split').inc() 106 | else: 107 | counters.replies.labels(source='comment', type='single').inc() 108 | 109 | database.add_reminder(reminder) 110 | 111 | reminder.user.recurring_sent = 0 112 | 113 | return reminder, result_message 114 | 115 | 116 | def process_comment(comment, reddit, database, count_string=""): 117 | reminder, result_message = parse_comment(comment, database, count_string, reddit) 118 | 119 | if reminder is None: 120 | counters.replies.labels(source='comment', type='other').inc() 121 | log.debug("Not replying") 122 | return 
123 | 124 | commented = False 125 | thread_id = utils.id_from_fullname(comment.link_id) 126 | comment_result = None 127 | if database.get_comment_by_thread(thread_id) is not None: 128 | comment_result = ReturnType.THREAD_REPLIED 129 | if comment_result is None and database.get_subreddit_banned(comment.subreddit): 130 | comment_result = ReturnType.FORBIDDEN 131 | comment_age_seconds = (utils.datetime_now() - utils.datetime_from_timestamp(comment.created_utc)).total_seconds() 132 | if comment_result is None: 133 | reminder.thread_id = thread_id 134 | reddit_comment = reddit.get_comment(comment.id) 135 | bldr = utils.get_footer(reminder.render_comment_confirmation(thread_id, comment_age_seconds=comment_age_seconds)) 136 | 137 | result_id, comment_result = reddit.reply_comment(reddit_comment, ''.join(bldr)) 138 | 139 | if comment_result in ( 140 | ReturnType.INVALID_USER, 141 | ReturnType.USER_DOESNT_EXIST, 142 | ReturnType.THREAD_LOCKED, 143 | ReturnType.DELETED_COMMENT, 144 | ReturnType.RATELIMIT, 145 | ReturnType.COMMENT_UNREPLIABLE): 146 | log.info(f"Unable to reply as comment: {comment_result.name}") 147 | 148 | elif comment_result in ( 149 | ReturnType.FORBIDDEN, 150 | ReturnType.SUBREDDIT_OUTBOUND_LINKING_DISALLOWED, 151 | ReturnType.COMMENT_GUIDANCE_VALIDATION_FAILED,): 152 | log.info(f"Banned in subreddit, saving: {comment.subreddit}") 153 | database.ban_subreddit(comment.subreddit) 154 | 155 | elif result_id is None: 156 | log.info(f"Reply failed, no returned comment id") 157 | 158 | else: 159 | if comment_result == ReturnType.NOTHING_RETURNED: 160 | result_id = "QUARANTINED" 161 | log.warning(f"Opting in to quarantined subreddit: {comment.subreddit}") 162 | reddit.quarantine_opt_in(comment.subreddit) 163 | 164 | log.info( 165 | f"Reminder created: {reminder.id} : {utils.get_datetime_string(reminder.target_date)}, " 166 | f"replied as comment: {result_id}") 167 | 168 | if comment_result != ReturnType.QUARANTINED and comment.subreddit != "RemindMeBot": 169 | 
def process_comments(reddit, database, ingest_database):
	"""Drain up to 30 queued comments from the ingest database.

	Comments are processed oldest-first; each successfully handled comment is
	deleted from the ingest queue and the last-seen timestamp is advanced.
	Returns the number of comments handled (stopping early if a failure
	should not be marked as read).
	"""
	if ingest_database is None:
		log.debug("No ingest database passed, skipping comment search")
		return 0
	comments = ingest_database.get_comments(limit=30)

	if len(comments):
		log.debug(f"Processing {len(comments)} comments")
		i = 0
		# Reverse so the oldest comment is handled first.
		for comment in comments[::-1]:
			i += 1
			mark_read = True
			try:
				process_comment(comment, reddit, database, f"{i}/{len(comments)}")
			except Exception as err:
				# process_error decides whether the failure is retryable;
				# if it is, leave the comment queued and stop this batch.
				mark_read = not utils.process_error(
					f"Error processing comment: {comment.id} : {comment.author}",
					err, traceback.format_exc()
				)

			if mark_read:
				ingest_database.delete_comment(comment)
				ingest_database.commit()
				database_set_seen(database, utils.datetime_from_timestamp(comment.created_utc))
			else:
				return i

	return len(comments)
import prometheus_client

# Prometheus metrics shared across the bot. Labels:
#   replies: source (comment/message), type (single/repeat/cake/split/other)
#   objects: type of tracked object
#   errors:  error category
replies = prometheus_client.Counter('bot_replies', "Count of objects replied to", ['source', 'type'])
notifications = prometheus_client.Counter('bot_sent', "Count of notifications sent")
queue = prometheus_client.Gauge('bot_queue', "Current queue size")
objects = prometheus_client.Gauge('bot_objects', "Total number of objects by type", ['type'])
errors = prometheus_client.Counter('bot_errors', "Count of errors", ['type'])
run_time = prometheus_client.Summary('bot_run_seconds', "How long a full loop takes")


def init(port):
	# Expose the metrics above over HTTP for the Prometheus scraper.
	prometheus_client.start_http_server(port)
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
import os
import discord_logging
from shutil import copyfile

Base = declarative_base()

import static
import utils
from ._keystore import _DatabaseKeystore
from ._reminders import _DatabaseReminders
from ._comments import _DatabaseComments
from ._subreddits import _DatabaseSubreddit
from ._users import _DatabaseUsers
from ._stats import _DatabaseStats

log = discord_logging.get_logger()


def abort_ro(*args, **kwargs):
	# Stand-in for Session.flush on readonly sessions: silently drop writes.
	return


class Database(_DatabaseReminders, _DatabaseComments, _DatabaseKeystore, _DatabaseSubreddit, _DatabaseUsers, _DatabaseStats):
	"""Aggregates the per-table mixins over a single sqlite session."""

	def __init__(self, debug=False, publish=False, override_location=None, readonly=False, quiet=False):
		if not quiet:
			log.info(f"Initializing database class: debug={debug} publish={publish}")
		self.debug = debug
		self.engine = None
		self.init(debug, publish, override_location, readonly)

		# let each mixin run its own setup against the shared session
		_DatabaseReminders.__init__(self)
		_DatabaseComments.__init__(self)
		_DatabaseKeystore.__init__(self)
		_DatabaseSubreddit.__init__(self)
		_DatabaseUsers.__init__(self)
		_DatabaseStats.__init__(self)

	def init(self, debug, publish, override_location=None, readonly=False):
		# debug runs against a throwaway in-memory db; otherwise open on disk
		if debug:
			location = ":memory:"
		elif override_location:
			location = override_location
		else:
			location = static.DATABASE_NAME
		self.engine = create_engine(f'sqlite:///{location}')

		self.session = sessionmaker(bind=self.engine)()
		if readonly:
			# neuter both public and internal flush so nothing is ever written
			self.session.flush = abort_ro
			self.session._flush = abort_ro

		if publish:
			# publish mode rebuilds the schema from scratch
			Base.metadata.drop_all(self.engine)

		Base.metadata.create_all(self.engine)

		self.commit()

	def backup(self):
		"""Copy the on-disk database into the backup folder, then reopen it."""
		log.info("Backing up database")
		self.commit()
		self.close()

		if not os.path.exists(static.BACKUP_FOLDER_NAME):
			os.makedirs(static.BACKUP_FOLDER_NAME)

		copyfile(
			static.DATABASE_NAME,
			static.BACKUP_FOLDER_NAME + "/" + utils.datetime_now().strftime("%Y-%m-%d_%H-%M") + ".db")

		self.init(self.debug, False)

	def commit(self):
		self.session.commit()

	def close(self):
		self.session.commit()
		self.engine.dispose()
func.count('*').label("new_count"))\ 34 | .join(Reminder2, Reminder1.source == Reminder2.message)\ 35 | .group_by(Reminder1.id)\ 36 | .subquery() 37 | count = self.session.query(DbComment)\ 38 | .join(subquery, DbComment.reminder_id == subquery.c.id)\ 39 | .filter(subquery.c.new_count != DbComment.current_count)\ 40 | .count() 41 | log.debug(f"Incorrect comments: {count}") 42 | return count 43 | 44 | def get_incorrect_comments(self, count): 45 | log.debug(f"Fetching incorrect comments") 46 | 47 | Reminder1 = aliased(Reminder) 48 | Reminder2 = aliased(Reminder) 49 | 50 | subquery = self.session.query(Reminder1, func.count('*').label("new_count"))\ 51 | .join(Reminder2, Reminder1.source == Reminder2.message)\ 52 | .group_by(Reminder1.id)\ 53 | .subquery() 54 | 55 | Reminder3 = aliased(Reminder, subquery) 56 | 57 | results = self.session.query(DbComment, Reminder3, subquery.c.new_count)\ 58 | .join(subquery, DbComment.reminder_id == subquery.c.id)\ 59 | .filter(subquery.c.new_count != DbComment.current_count)\ 60 | .limit(count)\ 61 | .all() 62 | 63 | log.debug(f"Found incorrect comments: {len(results)}") 64 | return results 65 | 66 | def get_count_all_comments(self): 67 | return self.session.query(DbComment).count() 68 | -------------------------------------------------------------------------------- /src/database/_keystore.py: -------------------------------------------------------------------------------- 1 | import discord_logging 2 | 3 | from classes.key_value import KeyValue 4 | 5 | log = discord_logging.get_logger() 6 | 7 | 8 | class _DatabaseKeystore: 9 | def __init__(self): 10 | self.session = self.session # for pycharm linting 11 | 12 | def save_keystore(self, key, value): 13 | log.debug(f"Saving keystore: {key} : {value}") 14 | self.session.merge(KeyValue(key, value)) 15 | 16 | def get_keystore(self, key): 17 | log.debug(f"Fetching keystore: {key}") 18 | key_value = self.session.query(KeyValue).filter_by(key=key).first() 19 | 20 | if key_value is None: 21 | 
log.debug("Key not found") 22 | return None 23 | 24 | log.debug(f"Value: {key_value.value}") 25 | return key_value.value 26 | -------------------------------------------------------------------------------- /src/database/_reminders.py: -------------------------------------------------------------------------------- 1 | import discord_logging 2 | from sqlalchemy.orm import joinedload 3 | 4 | import static 5 | from classes.reminder import Reminder 6 | from classes.user import User 7 | 8 | log = discord_logging.get_logger() 9 | 10 | 11 | class _DatabaseReminders: 12 | def __init__(self): 13 | self.session = self.session # for pycharm linting 14 | 15 | def add_reminder(self, reminder): 16 | log.debug("Saving new reminder") 17 | self.session.add(reminder) 18 | 19 | subreddit, thread_id, comment_id = reminder.get_target_ids() 20 | self.add_increment_stat(subreddit, thread_id, comment_id) 21 | 22 | def get_count_pending_reminders(self, timestamp): 23 | log.debug("Fetching count of pending reminders") 24 | 25 | count = self.session.query(Reminder).filter(Reminder.target_date < timestamp).count() 26 | 27 | log.debug(f"Count reminders: {count}") 28 | return count 29 | 30 | def get_pending_reminders(self, count, timestamp): 31 | log.debug("Fetching pending reminders") 32 | 33 | reminders = self.session.query(Reminder)\ 34 | .options(joinedload(Reminder.user))\ 35 | .filter(Reminder.target_date < timestamp)\ 36 | .order_by(Reminder.target_date.asc())\ 37 | .limit(count)\ 38 | .all() 39 | 40 | log.debug(f"Found reminders: {len(reminders)}") 41 | return reminders 42 | 43 | def get_all_user_reminders(self, user_name): 44 | log.debug(f"Fetching all reminders for u/{user_name}") 45 | 46 | reminders = self.session.query(Reminder)\ 47 | .join(User)\ 48 | .filter(User.name == user_name)\ 49 | .order_by(Reminder.target_date.asc())\ 50 | .all() 51 | 52 | log.debug(f"Found reminders: {len(reminders)}") 53 | return reminders 54 | 55 | def get_user_reminders(self, user_name): 56 | 
log.debug(f"Fetching reminders for u/{user_name}") 57 | 58 | regular_reminders = self.session.query(Reminder)\ 59 | .join(User)\ 60 | .filter(User.name == user_name)\ 61 | .filter(Reminder.recurrence == None)\ 62 | .order_by(Reminder.target_date.asc())\ 63 | .all() 64 | 65 | recurring_reminders = self.session.query(Reminder)\ 66 | .join(User)\ 67 | .filter(User.name == user_name)\ 68 | .filter(Reminder.recurrence != None)\ 69 | .order_by(Reminder.target_date.asc())\ 70 | .all() 71 | 72 | log.debug(f"Found reminders: {len(regular_reminders)} : {len(recurring_reminders)}") 73 | return regular_reminders, recurring_reminders 74 | 75 | def get_reminders_with_keyword(self, search_key, earliest_date): 76 | log.debug(f"Searching for reminders with {search_key}") 77 | 78 | count_reminders = self.session.query(Reminder)\ 79 | .filter(Reminder.target_date > earliest_date)\ 80 | .filter(Reminder.message.like(f"%{search_key}%"))\ 81 | .count() 82 | 83 | log.debug(f"Found reminders with keyword: {count_reminders}") 84 | return count_reminders 85 | 86 | def get_reminder(self, reminder_id): 87 | log.debug(f"Fetching reminder by id: {reminder_id}") 88 | 89 | reminder = self.session.query(Reminder)\ 90 | .options(joinedload(Reminder.user))\ 91 | .filter_by(id=reminder_id)\ 92 | .first() 93 | 94 | return reminder 95 | 96 | def user_has_cakeday_reminder(self, user_name): 97 | log.debug(f"Checking if user has cakeday reminder: {user_name}") 98 | 99 | reminder = self.session.query(Reminder)\ 100 | .join(User)\ 101 | .filter(User.name == user_name)\ 102 | .filter(Reminder.recurrence == "1 year")\ 103 | .filter(Reminder.message == static.CAKEDAY_MESSAGE)\ 104 | .first() 105 | 106 | return reminder is not None 107 | 108 | def delete_reminder(self, reminder): 109 | log.debug(f"Deleting reminder by id: {reminder.id}") 110 | self.session.delete(reminder) 111 | 112 | def delete_user_reminders(self, user_name): 113 | log.debug(f"Deleting all reminders for u/{user_name}") 114 | 115 | user_id = 
self.session.query(User.id).\ 116 | filter_by(name=user_name) 117 | 118 | return self.session.query(Reminder).\ 119 | filter(Reminder.user_id.in_(user_id.subquery())).\ 120 | delete(synchronize_session=False) 121 | 122 | def get_all_reminders(self): 123 | log.debug(f"Fetching all reminders") 124 | 125 | reminders = self.session.query(Reminder)\ 126 | .options(joinedload(Reminder.user))\ 127 | .all() 128 | 129 | log.debug(f"Found reminders: {len(reminders)}") 130 | return reminders 131 | 132 | def get_count_all_reminders(self): 133 | log.debug("Fetching count of all reminders") 134 | 135 | count = self.session.query(Reminder).count() 136 | 137 | log.debug(f"Count reminders: {count}") 138 | return count 139 | -------------------------------------------------------------------------------- /src/database/_stats.py: -------------------------------------------------------------------------------- 1 | import discord_logging 2 | from sqlalchemy.orm import joinedload 3 | 4 | import static 5 | from classes.stat import DbStat 6 | 7 | log = discord_logging.get_logger() 8 | 9 | 10 | class _DatabaseStats: 11 | def __init__(self): 12 | self.session = self.session # for pycharm linting 13 | 14 | def add_increment_stat(self, subreddit, thread_id, comment_id): 15 | log.debug(f"Adding or incrementing new stat") 16 | if subreddit is None or thread_id is None: 17 | log.debug(f"Empty arguments, returning") 18 | return 19 | 20 | if subreddit.lower() != "askhistorians": 21 | log.debug(f"Subreddit doesn't match filter, returning") 22 | return 23 | 24 | existing_stat = self.session.query(DbStat)\ 25 | .filter(DbStat.subreddit == subreddit)\ 26 | .filter(DbStat.thread_id == thread_id)\ 27 | .filter(DbStat.comment_id == comment_id)\ 28 | .first() 29 | 30 | if existing_stat is not None: 31 | log.debug(f"Stat exists, incrementing") 32 | existing_stat.count_reminders += 1 33 | else: 34 | log.debug(f"Stat doesn't exist, creating") 35 | new_stat = DbStat(subreddit, thread_id, comment_id) 36 | 
self.session.add(new_stat) 37 | 38 | def get_stats_for_ids(self, subreddit, thread_id, comment_id=None): 39 | log.debug("Fetching stat") 40 | 41 | stat = self.session.query(DbStat)\ 42 | .filter(DbStat.subreddit == subreddit)\ 43 | .filter(DbStat.thread_id == thread_id)\ 44 | .filter(DbStat.comment_id == comment_id)\ 45 | .first() 46 | 47 | if stat is None: 48 | log.debug("No stat found") 49 | else: 50 | log.debug(f"Stat found with: {stat.count_reminders}") 51 | 52 | return stat 53 | 54 | def get_stats_for_subreddit(self, subreddit, earliest_date, min_reminders=0, thread_only=False): 55 | log.debug("Fetching stats for subreddit") 56 | 57 | if thread_only: 58 | stats = self.session.query(DbStat)\ 59 | .filter(DbStat.subreddit == subreddit)\ 60 | .filter(DbStat.comment_id == None)\ 61 | .filter(DbStat.initial_date > earliest_date)\ 62 | .filter(DbStat.count_reminders >= min_reminders)\ 63 | .order_by(DbStat.initial_date.desc())\ 64 | .all() 65 | else: 66 | stats = self.session.query(DbStat)\ 67 | .filter(DbStat.subreddit == subreddit)\ 68 | .filter(DbStat.initial_date > earliest_date)\ 69 | .filter(DbStat.count_reminders >= min_reminders)\ 70 | .order_by(DbStat.initial_date.desc())\ 71 | .all() 72 | 73 | log.debug(f"{len(stats)} stats found") 74 | return stats 75 | 76 | def get_stats_without_date(self): 77 | log.debug("Fetching stats without a date") 78 | 79 | stats = self.session.query(DbStat)\ 80 | .filter(DbStat.initial_date == None)\ 81 | .all() 82 | 83 | log.debug(f"{len(stats)} stats found") 84 | return stats 85 | 86 | -------------------------------------------------------------------------------- /src/database/_subreddits.py: -------------------------------------------------------------------------------- 1 | import discord_logging 2 | from datetime import timedelta 3 | 4 | import utils 5 | from classes.subreddit import Subreddit 6 | 7 | log = discord_logging.get_logger() 8 | 9 | 10 | class _DatabaseSubreddit: 11 | def __init__(self): 12 | self.session = 
class _DatabaseUsers:
	"""User-table operations, mixed into Database."""

	def __init__(self):
		self.session = self.session  # for pycharm linting

	def get_or_add_user(self, user_name):
		"""Return the User row for `user_name`, creating it if missing.

		New rows are added to the session but not committed here.
		"""
		log.debug(f"Fetching user: {user_name}")
		user = self.session.query(User).filter_by(name=user_name).first()
		if user is not None:
			return user

		log.debug(f"Creating user: {user_name}")
		user = User(user_name)
		self.session.add(user)
		return user

	def get_count_all_users(self):
		"""Count every known user."""
		return self.session.query(User).count()
def signal_handler(sig, frame):
	"""SIGINT handler: close the database, flush pending logs, then exit.

	Parameter renamed from `signal` to `sig` so it no longer shadows the
	imported `signal` module; the interpreter invokes handlers positionally,
	so the rename is safe for the registration below.
	"""
	log.info("Handling interrupt")
	database.close()
	discord_logging.flush_discord()
	sys.exit(0)
Database(debug=args.debug_db) 67 | 68 | ingest_database = None 69 | if args.ingest_db: 70 | ingest_database = praw_wrapper.IngestDatabase(location=args.ingest_db) 71 | ingest_database.set_default_client("remindme") 72 | ingest_database.register_search(search_term="remindme") 73 | ingest_database.register_search(search_term="remind me") 74 | ingest_database.register_search(search_term="remindmerepeat") 75 | ingest_database.register_search(search_term="cakeday") 76 | 77 | if args.reset_comment: 78 | log.info("Resetting comment processed timestamp") 79 | database.save_keystore("comment_timestamp", utils.get_datetime_string(utils.datetime_now())) 80 | 81 | last_backup = None 82 | last_comments = None 83 | last_stats = None 84 | while True: 85 | startTime = time.perf_counter() 86 | log.debug("Starting run") 87 | 88 | actions = 0 89 | errors = 0 90 | 91 | counters.objects.labels(type="reminders").set(database.get_count_all_reminders()) 92 | counters.objects.labels(type="comments").set(database.get_count_all_comments()) 93 | counters.objects.labels(type="users").set(database.get_count_all_users()) 94 | counters.objects.labels(type="subreddits").set(database.get_count_all_subreddits()) 95 | counters.objects.labels(type="subreddits_banned").set(database.get_count_banned_subreddits()) 96 | 97 | try: 98 | actions += messages.process_messages(reddit, database) 99 | except Exception as err: 100 | utils.process_error(f"Error processing messages", err, traceback.format_exc()) 101 | errors += 1 102 | 103 | try: 104 | actions += comments.process_comments(reddit, database, ingest_database) 105 | except Exception as err: 106 | utils.process_error(f"Error processing comments", err, traceback.format_exc()) 107 | errors += 1 108 | 109 | try: 110 | actions += notifications.send_reminders(reddit, database) 111 | except Exception as err: 112 | utils.process_error(f"Error sending notifications", err, traceback.format_exc()) 113 | errors += 1 114 | 115 | if utils.time_offset(last_comments, 
def add_list_header(bldr, recurring):
	"""Append the markdown table header rows for a reminder list to `bldr`.

	Recurring reminder tables carry an extra Repeat column.
	"""
	if recurring:
		columns = "|Source|Message|Date|In|Repeat|Remove|\n"
		separators = "|-|-|-|-|-|:-:|\n"
	else:
		columns = "|Source|Message|Date|In|Remove|\n"
		separators = "|-|-|-|-|:-:|\n"
	bldr.append(columns)
	bldr.append(separators)
get_reminders_string(user_name, database, previous=False, include_all=False): 26 | result_messages = [] 27 | bldr = utils.str_bldr() 28 | 29 | regular_reminders, recurring_reminders = database.get_user_reminders(user_name) 30 | if len(regular_reminders) or len(recurring_reminders): 31 | if previous: 32 | bldr.append("Your previous reminders:") 33 | else: 34 | bldr.append("Your current reminders:") 35 | bldr.append("\n\n") 36 | 37 | if len(regular_reminders) + len(recurring_reminders) > 1: 38 | bldr.append("[Click here to delete all your reminders](") 39 | bldr.append(utils.build_message_link(static.ACCOUNT_NAME, "Remove All", "RemoveAll!")) 40 | bldr.append(")\n\n") 41 | 42 | user = database.get_or_add_user(user_name) 43 | if user.timezone is not None: 44 | bldr.append("Your timezone is currently set to: `") 45 | bldr.append(user.timezone) 46 | bldr.append("`\n\n") 47 | 48 | for reminders in [recurring_reminders, regular_reminders]: 49 | if len(reminders): 50 | log.debug(f"Building list with {len(reminders)} reminders") 51 | add_list_header(bldr, reminders[0].recurrence is not None) 52 | 53 | for reminder in reminders: 54 | bldr.append("|") 55 | if "reddit.com" in reminder.source: 56 | bldr.append("[Source](") 57 | bldr.append(utils.check_append_context_to_link(reminder.source)) 58 | bldr.append(")") 59 | else: 60 | bldr.append(reminder.source) 61 | bldr.append("|") 62 | if reminder.message is not None: 63 | bldr.append(reminder.message.replace("|", "|")) 64 | bldr.append("|") 65 | bldr.append(utils.render_time(reminder.target_date, reminder.user)) 66 | bldr.append("|") 67 | bldr.append(utils.render_time_diff(utils.datetime_now(), reminder.target_date)) 68 | if reminder.recurrence is not None: 69 | bldr.append("|") 70 | bldr.append(reminder.recurrence) 71 | bldr.append("|") 72 | bldr.append("[Remove](") 73 | bldr.append(utils.build_message_link(static.ACCOUNT_NAME, "Remove", f"Remove! 
def process_remind_me(message, reddit, database, recurring):
	"""Create a reminder from a RemindMe/RemindMeRepeat private message.

	Returns (result_messages, created): the reply bodies and whether a
	reminder was actually stored.
	"""
	log.info("Processing RemindMe message")
	if recurring:
		trigger = static.TRIGGER_RECURRING_LOWER
	else:
		trigger = static.TRIGGER_LOWER
	reminder_time = utils.find_reminder_time(message.body, trigger)
	reminder_text = utils.find_reminder_message(message.body, trigger)

	reminder, result_message = Reminder.build_reminder(
		source=utils.message_link(message.id),
		message=reminder_text,
		user=database.get_or_add_user(message.author.name),
		requested_date=utils.datetime_from_timestamp(message.created_utc),
		time_string=reminder_time,
		recurring=recurring
	)
	if reminder is None:
		# build_reminder rejected the request; relay its explanation
		log.debug("Reminder not valid, returning")
		return [result_message], False

	database.add_reminder(reminder)
	database.commit()

	log.info(f"Reminder created: {reminder.id} : {utils.get_datetime_string(reminder.target_date)}")

	confirmation_bldr = reminder.render_message_confirmation(result_message)
	return [''.join(confirmation_bldr)], True
def process_remove_all_reminders(message, database):
	"""Delete every reminder owned by the message's author and reply with the
	list of reminders they had before deletion."""
	log.info("Processing remove all reminders message")

	user_name = message.author.name
	# snapshot the list first so the reply can show what was removed
	previous_reminders = get_reminders_string(user_name, database, True)

	deleted_count = database.delete_user_reminders(user_name)
	log.debug(f"Deleted {deleted_count} reminders")

	bldr = utils.str_bldr()
	if deleted_count:
		bldr.append("Deleted **")
		bldr.append(str(deleted_count))
		bldr.append("** reminders.\n\n")

	bldr.append("\n\n")
	bldr.append("*****")
	bldr.append("\n\n")
	bldr.extend(previous_reminders)

	return [''.join(bldr)]
def process_cakeday_message(message, reddit, database):
	"""Create a yearly recurring reminder for the author's reddit cakeday.

	Returns (result_messages, created). Only one cakeday reminder per user.
	"""
	log.info("Processing cakeday")

	author_name = message.author.name
	if database.user_has_cakeday_reminder(author_name):
		log.info("Cakeday already exists")
		return ["It looks like you already have a cakeday reminder set."], False

	# first fire is the next account anniversary; recurrence handles the rest
	next_anniversary = utils.get_next_anniversary(message.author.created_utc)

	reminder = Reminder(
		source=utils.message_link(message.id),
		message=static.CAKEDAY_MESSAGE,
		user=database.get_or_add_user(author_name),
		requested_date=utils.datetime_from_timestamp(message.created_utc),
		target_date=next_anniversary,
		recurrence="1 year",
		defaulted=False
	)

	database.add_reminder(reminder)
	database.commit()

	log.info(f"Cakeday reminder created: {reminder.id} : {utils.get_datetime_string(reminder.target_date)}")

	confirmation_bldr = reminder.render_message_confirmation(None)
	return [''.join(confirmation_bldr)], True
def process_clock_message(message, database):
	"""Parse a "clock!" message and update the user's 12/24 hour preference.

	:param message: inbox message whose body contains `clock! NN`
	:param database: Database instance for user lookup
	:return: single-element list containing the reply body

	"24" resets the user to the default clock; "12" switches to 12-hour.
	Bug fix: the invalid-clock-type branch previously did `return bldr`,
	leaking the raw fragment list while every other path returns
	[''.join(bldr)] — callers send each list element as a separate message,
	so the fragments would each have gone out as their own reply.
	"""
	log.info("Processing clock")
	bldr = utils.str_bldr()

	clocks = re.findall(r'(?:clock!? +)([\d]{2})', message.body, flags=re.IGNORECASE)
	if not len(clocks):
		log.debug("Couldn't find a clock type in your message")
		bldr.append("I couldn't find a clock type in your message.")

	else:
		user = database.get_or_add_user(message.author.name)
		if clocks[0] == "24":
			user.time_format = None
			bldr.append(f"Reset your clock type to the default 24 hour clock")
		elif clocks[0] == "12":
			user.time_format = "12"
			bldr.append(f"Updated your clock type to a 12 hour clock")
		else:
			log.debug(f"Invalid clock type: {clocks[0]}")
			bldr.append(f"{clocks[0]} is not a valid clock type.")
			return [''.join(bldr)]

		log.info(f"u/{message.author.name} clock type updated to {clocks[0]}")

	return [''.join(bldr)]
290 | user.recurring_sent = 0 291 | body = message.body.lower() 292 | 293 | result_messages = None 294 | created = False 295 | if static.TRIGGER_RECURRING_LOWER in body: 296 | result_messages, created = process_remind_me(message, reddit, database, True) 297 | if created: 298 | counters.replies.labels(source='message', type='repeat').inc() 299 | elif static.TRIGGER_LOWER in body: 300 | result_messages, created = process_remind_me(message, reddit, database, False) 301 | if created: 302 | counters.replies.labels(source='message', type='single').inc() 303 | elif "myreminders!" in body: 304 | result_messages = process_get_reminders(message, database) 305 | elif "remove!" in body: 306 | result_messages = process_remove_reminder(message, database) 307 | elif "removeall!" in body: 308 | result_messages = process_remove_all_reminders(message, database) 309 | elif "delete!" in body: 310 | result_messages = process_delete_comment(message, reddit, database) 311 | elif "cakeday!" in body: 312 | result_messages, created = process_cakeday_message(message, reddit, database) 313 | if created: 314 | counters.replies.labels(source='message', type='cake').inc() 315 | elif "timezone!" in body: 316 | result_messages = process_timezone_message(message, database) 317 | elif "clock!" 
in body: 318 | result_messages = process_clock_message(message, database) 319 | 320 | if not created: 321 | counters.replies.labels(source='message', type='other').inc() 322 | 323 | if result_messages is None: 324 | result_messages = ["I couldn't find anything in your message."] 325 | 326 | result_messages[-1] = result_messages[-1] + ''.join(utils.get_footer()) 327 | for result_message in result_messages: 328 | result = reddit.reply_message(message, result_message, retry_seconds=600) 329 | if result != ReturnType.SUCCESS: 330 | if result == ReturnType.INVALID_USER: 331 | log.info("User banned before reply could be sent") 332 | break 333 | else: 334 | raise ValueError(f"Error sending message: {result.name}") 335 | 336 | database.commit() 337 | 338 | 339 | def process_messages(reddit, database): 340 | messages = reddit.get_messages() 341 | if len(messages): 342 | log.debug(f"Processing {len(messages)} messages") 343 | i = 0 344 | for message in messages[::-1]: 345 | i += 1 346 | mark_read = True 347 | if reddit.is_message(message): 348 | if message.author is None: 349 | log.info(f"Message {message.id} is a system notification") 350 | elif message.author.name == "reddit": 351 | log.info(f"Message {message.id} is from reddit, skipping") 352 | elif message.author.name in static.BLACKLISTED_ACCOUNTS: 353 | log.info(f"Message {message.id} from u/{message.author.name} is blacklisted, skipping") 354 | else: 355 | try: 356 | process_message(message, reddit, database, f"{i}/{len(messages)}") 357 | except Exception as err: 358 | mark_read = not utils.process_error( 359 | f"Error processing message: {message.id} : u/{message.author.name}", 360 | err, traceback.format_exc() 361 | ) 362 | finally: 363 | database.commit() 364 | else: 365 | log.info(f"Object not message, skipping: {message.id}") 366 | 367 | if mark_read: 368 | try: 369 | reddit.mark_read(message) 370 | except Exception as err: 371 | utils.process_error( 372 | f"Error marking message read: {message.id} : 
def send_reminders(reddit, database):
	"""Send all pending reminder notifications that are due as of now.

	Pulls up to utils.requests_available() reminders due before the current
	timestamp, PMs each user, then either reschedules (recurring) or deletes
	the reminder. Returns the number of reminders attempted.
	"""
	timestamp = utils.datetime_now()
	count_reminders = database.get_count_pending_reminders(timestamp)
	counters.queue.set(count_reminders)

	reminders_sent = 0
	if count_reminders > 0:
		reminders = database.get_pending_reminders(utils.requests_available(count_reminders), timestamp)
		for reminder in reminders:
			# Counters are updated before the send attempt, so failures are
			# still counted as notifications
			reminders_sent += 1
			counters.notifications.inc()
			counters.queue.dec()
			# Usernames with characters outside [\w_-] can't be messaged;
			# drop the reminder rather than retry forever
			if re.search(r"[^\w_-]", reminder.user.name):
				log.warning(f"Can't send reminder, invalid username: u/{reminder.user.name} : {reminder.id} : {utils.get_datetime_string(reminder.target_date)}")
				log.debug(f"{reminder.id} deleted")
				database.delete_reminder(reminder)

			else:
				log.info(
					f"{reminders_sent}/{len(reminders)}/{count_reminders}: Sending reminder to u/{reminder.user.name} : "
					f"{reminder.id} : {utils.get_datetime_string(reminder.target_date)}")
				bldr = utils.get_footer(reminder.render_notification())
				result = reddit.send_message(reminder.user.name, "RemindMeBot Here!", ''.join(bldr))
				# Send failures are logged but not retried; the reminder is
				# still rescheduled/deleted below
				if result in [ReturnType.INVALID_USER, ReturnType.USER_DOESNT_EXIST]:
					log.info(f"User doesn't exist: u/{reminder.user.name}")
				if result in [ReturnType.NOT_WHITELISTED_BY_USER_MESSAGE]:
					log.info(f"User blocked notification message: u/{reminder.user.name}")

				if reminder.recurrence is not None:
					# recurring_sent is reset when the user messages the bot
					# (see process_message), capping unattended repeats
					if reminder.user.recurring_sent > static.RECURRING_LIMIT:
						log.info(f"User u/{reminder.user.name} hit their recurring limit, deleting reminder {reminder.id}")
						database.delete_reminder(reminder)
					else:
						new_target_date = utils.next_recurring_time(reminder.recurrence, reminder.target_date, reminder.user.timezone)
						log.info(f"{reminder.id} recurring from {utils.get_datetime_string(reminder.target_date)} to "
							f"{utils.get_datetime_string(new_target_date)}")
						reminder.target_date = new_target_date
						reminder.user.recurring_sent += 1
				else:
					log.debug(f"{reminder.id} deleted")
					database.delete_reminder(reminder)

			# NOTE(review): commit appears to run once per reminder so a crash
			# mid-batch doesn't resend earlier reminders — confirm placement
			# against the original indentation
			database.commit()

	else:
		log.debug("No reminders to send")

	return reminders_sent
def update_stat_dates(reddit, database):
	"""Backfill initial_date on stat rows by fetching creation times from reddit.

	Looks up all stats missing a date in one bulk info call, keyed by reddit
	fullname (t1_ for comments, t3_ for threads).
	"""
	empty_stats = database.get_stats_without_date()
	if empty_stats:
		full_names = {}
		for stat in empty_stats:
			if stat.comment_id is not None:
				full_names[f"t1_{stat.comment_id}"] = stat
			else:
				full_names[f"t3_{stat.thread_id}"] = stat

		reddit_objects = reddit.call_info(full_names.keys())
		count_updated = 0
		for reddit_object in reddit_objects:
			stat = full_names[reddit_object.name]
			stat.initial_date = utils.datetime_from_timestamp(reddit_object.created_utc)
			count_updated += 1

		if count_updated != 0:
			log.info(f"Updated {count_updated} stats")
		# Anything reddit didn't return (deleted/removed objects) is logged
		if count_updated != len(empty_stats):
			for stat in empty_stats:
				if stat.initial_date is None:
					log.warning(f"Unable to retrieve date for stat: {stat}")


def update_ask_historians(reddit, database, min_reminders=10, days_back=7):
	"""Render the r/AskHistorians reminder-count table and publish it to the
	subreddit wiki, skipping the write when the content is unchanged.

	:param min_reminders: minimum reminder count for a thread to be listed
	:param days_back: how many days of stats to include
	"""
	start_time = time.perf_counter()
	earliest_date = utils.datetime_now() - timedelta(days=days_back)
	stats = database.get_stats_for_subreddit("AskHistorians", earliest_date, min_reminders=min_reminders, thread_only=True)

	bldr = utils.str_bldr()
	bldr.append("This page shows the number of reminders requested for each thread in r/AskHistorians in the last 7 days. Only threads with at least 10 requested reminders are included. Please contact u/Watchful1 with any feedback or suggestions.")
	bldr.append("\n\n")
	bldr.append("Thread | Thread date | Words in top answer | Total reminders | Pending reminders\n")
	# Fix: the divider row had six cells under a five column header
	bldr.append("---|---|----|----|----\n")

	for stat in stats:
		reddit_submission = reddit.get_submission(stat.thread_id)
		reddit_submission.comment_sort = "top"
		bldr.append(f"[{utils.truncate_string(reddit_submission.title, 60)}](https://www.reddit.com/{reddit_submission.permalink})|")
		bldr.append(f"{utils.get_datetime_string(utils.datetime_from_timestamp(reddit_submission.created_utc), '%Y-%m-%d %H:%M %Z')}|")

		# Top answer = first top-sorted comment that isn't AutoModerator and
		# isn't a distinguished (mod) comment
		top_comment = None
		for comment in reddit_submission.comments:
			if comment.author is not None and comment.author.name != "AutoModerator" and comment.distinguished is None:
				top_comment = comment
				break
		if top_comment is None:
			bldr.append(f"|")
		else:
			bldr.append(f"{utils.surround_int_over_threshold(len(top_comment.body.split(' ')), '**', 350)}|")

		bldr.append(f"{utils.surround_int_over_threshold(stat.count_reminders, '**', 50)}|")
		bldr.append(f"{utils.surround_int_over_threshold(database.get_reminders_with_keyword(stat.thread_id, earliest_date), '**', 50)}")
		bldr.append(f"\n")

	old_wiki_content = reddit.get_subreddit_wiki_page("AskHistorians", "remindme")
	new_wiki_content = ''.join(bldr)
	log.debug(new_wiki_content)
	if old_wiki_content == new_wiki_content:
		log.debug("Wiki content unchanged")
	else:
		log.info(f"Updated stats wiki in: {int(time.perf_counter() - start_time)}")
		reddit.update_subreddit_wiki_page("AskHistorians", "remindme", new_wiki_content)
def process_error(message, exception, traceback):
	"""Log an error and classify it as transient (API hiccup) or not.

	Returns True for transient network/API failures so callers can retry
	instead of marking the work item failed.
	"""
	transient_types = (
		prawcore.exceptions.ServerError,
		prawcore.exceptions.ResponseException,
		prawcore.exceptions.RequestException,
		requests.exceptions.Timeout,
		requests.exceptions.ReadTimeout,
		requests.exceptions.RequestException,
		urllib3.exceptions.ReadTimeoutError,
	)
	is_transient = isinstance(exception, transient_types)
	log.warning(f"{message}: {type(exception).__name__} : {exception}")
	if is_transient:
		log.info(traceback)
		counters.errors.labels(type='api').inc()
	else:
		log.warning(traceback)
		counters.errors.labels(type='other').inc()

	return is_transient


def find_reminder_message(body, trigger):
	"""Extract the reminder message from a body: [bracketed], "quoted" or
	“smart-quoted” text, preferring text that follows the trigger word.
	"""
	patterns = (
		# bracket/quote group appearing somewhere after the trigger
		r'(?:{trigger}.+)(?:(?:\[)([^\]]+?)(?:\])|(?:\")([^\"]+?)(?:\")|(?:“)([^”]*?)(?:”))(?:[^(]|\n|$)'.format(
			trigger=trigger),
		# fall back to the first bracket/quote group anywhere in the body
		r'(?:(?:\[)([^\]]+?)(?:\])|(?:\")([^\"]+?)(?:\")|(?:“)([^”]*?)(?:”))(?:[^(]|\n|$)',
	)
	for pattern in patterns:
		match = re.search(pattern, body, flags=re.IGNORECASE)
		if match:
			return match.group(1) or match.group(2) or match.group(3)
	return None


def find_reminder_time(body, trigger):
	"""Extract the time phrase following the trigger, truncated to 80 chars.

	Tries trigger-plus-space first, then allows no space, stopping the
	capture at a bracket, quote, newline, url or end of string.
	"""
	spacing_variants = (
		r'(?:{trigger}.? +)(.*?)(?:\[|\n|\"|“|$|http)'.format(trigger=trigger),
		r'(?:{trigger}.? *)(.*?)(?:\[|\n|\"|“|$|http)'.format(trigger=trigger),
	)
	for regex_string in spacing_variants:
		times = re.findall(regex_string, body, flags=re.IGNORECASE)
		if len(times) > 0 and times[0] != "":
			return times[0][:80]
	return None
def render_time(date_time, user=None, format_string=None):
	"""Render a timestamp as a bold markdown link to a wolframalpha local-time
	conversion, using the user's timezone and 12/24 hour preference if given.
	"""
	timezone = None if user is None else user.timezone
	time_format = None if user is None else user.time_format
	if format_string is None:
		format_string = "%Y-%m-%d %I:%M:%S %p %Z" if time_format == "12" else "%Y-%m-%d %H:%M:%S %Z"

	local_stamp = datetime_as_timezone(date_time, timezone).strftime(format_string)
	utc_stamp = date_time.strftime('%Y-%m-%d %H:%M:%S %Z').replace(" ", "%20")
	suffix = " To Local Time".replace(" ", "%20")
	return f"[**{local_stamp}**](http://www.wolframalpha.com/input/?i={utc_stamp}{suffix})"


def render_time_diff(start_date, end_date):
	"""Render the gap between two datetimes as the largest whole unit.

	Gaps over a minute are padded ~2% (capped at a day) so "1 day" doesn't
	display as "23 hours" when the reminder fires slightly early.
	"""
	seconds = int((end_date - start_date).total_seconds())
	if seconds > 59:
		try:
			padded_end = start_date + relativedelta(seconds=int(min(seconds * 1.02, seconds + 60 * 60 * 24)))
		except OverflowError:
			padded_end = datetime_force_utc(datetime(year=9999, month=12, day=31))
		delta = relativedelta(padded_end, start_date)
	else:
		delta = relativedelta(end_date, start_date)

	for unit in ("years", "months", "days", "hours", "minutes", "seconds"):
		count = getattr(delta, unit)
		if count > 0:
			return f"{count} {unit[:-1]}{'s' if count > 1 else ''}"
	return ""


def message_link(message_id):
	"""Permalink for a private message."""
	return f"https://www.reddit.com/message/messages/{message_id}"


def reddit_link(slug):
	"""Turn a site-relative slug into an absolute reddit URL."""
	return f"https://www.reddit.com{slug}"


def id_from_fullname(fullname):
	"""Strip the t1_/t3_ style type prefix from a reddit fullname."""
	return re.sub(r't\d_', "", fullname)


def datetime_as_timezone(date_time, timezone_string):
	"""Convert to the named timezone; pass through when no timezone is set."""
	if timezone_string is None:
		return date_time
	return date_time.astimezone(pytz.timezone(timezone_string))


def datetime_as_utc(date_time):
	"""Convert an aware datetime to UTC."""
	return date_time.astimezone(pytz.utc)


def datetime_force_utc(date_time):
	"""Attach the UTC timezone to a naive datetime."""
	return pytz.utc.localize(date_time)


def time_offset(date_time, hours=0, minutes=0, seconds=0):
	"""True when date_time is None or older than now minus the given offset."""
	if date_time is None:
		return True
	cutoff = datetime_now() - timedelta(hours=hours, minutes=minutes, seconds=seconds)
	return date_time < cutoff


def add_years(date_time, years):
	"""Add whole years to a datetime.

	Feb 29 in a target year without one falls back to offsetting by the day
	count between the two March 1sts (landing on Feb 28).
	"""
	try:
		return date_time.replace(year=date_time.year + years)
	except ValueError:
		span = datetime(date_time.year + years, 3, 1) - datetime(date_time.year, 3, 1)
		return date_time + span


def get_next_anniversary(account_created_utc):
	"""Next cakeday for an account-creation timestamp; now() when unknown."""
	if account_created_utc is None:
		log.info("Account creation date is none")
		return datetime_now()
	account_created = datetime_from_timestamp(account_created_utc)
	next_anniversary = add_years(account_created, datetime_now().year - account_created.year)
	if next_anniversary < datetime_now():
		next_anniversary = add_years(next_anniversary, 1)

	log.debug(
		f"Account created {get_datetime_string(account_created)}, anniversary {get_datetime_string(next_anniversary)}")
	return next_anniversary
def datetime_from_timestamp(timestamp):
	"""Convert a unix timestamp into a UTC-aware datetime."""
	return datetime_force_utc(datetime.utcfromtimestamp(timestamp))


def get_datetime_string(date_time, convert_utc=True, format_string="%Y-%m-%d %H:%M:%S"):
	"""Format a datetime as a string, normalizing to UTC first by default.

	Returns the empty string for None.
	"""
	if date_time is None:
		return ""
	target = datetime_as_utc(date_time) if convert_utc else date_time
	return target.strftime(format_string)


def parse_datetime_string(date_time_string, force_utc=True, format_string="%Y-%m-%d %H:%M:%S"):
	"""Parse a datetime string, tagging it as UTC by default.

	None, "" and the literal string "None" all parse to None.
	"""
	if date_time_string in (None, "None", ""):
		return None
	parsed = datetime.strptime(date_time_string, format_string)
	return datetime_force_utc(parsed) if force_utc else parsed


def html_encode(message):
	"""Percent-encode a string for use in a URL query value."""
	return urllib.parse.quote(message, safe='')


def build_message_link(recipient, subject, content=None):
	"""Build a reddit compose-message link with pre-filled fields."""
	base = "https://www.reddit.com/message/compose/?"
	params = [f"to={recipient}", f"subject={html_encode(subject)}"]
	if content is not None:
		params.append(f"message={html_encode(content)}")

	return base + '&'.join(params)
def str_bldr():
	"""Return a fresh list used as a string builder (joined later)."""
	return []


def bldr_length(bldr):
	"""Total character count across all fragments in a string builder."""
	return sum(len(item) for item in bldr)


def requests_available(requests_pending):
	"""How many reminders to send this pass, throttling large backlogs.

	0 pending sends nothing, under 200 sends a fixed batch of 30, otherwise
	a fifth of the backlog capped at 1000.
	"""
	if requests_pending == 0:
		return 0
	if requests_pending < 200:
		return 30
	return min(1000, int(requests_pending / 5))


def check_append_context_to_link(link):
	"""Append ?context=3 to comment permalinks so parent context is shown."""
	if re.search(r"reddit\.com/r/\w+/comments/(\w+/){3}", link):
		return link + "?context=3"
	return link
def surround_int_over_threshold(val, surround, threshold):
	"""Render an int, wrapping it in the surround string (e.g. ** for bold)
	when it meets the threshold; zero renders as an empty string.
	"""
	if val >= threshold:
		return f"{surround}{val}{surround}"
	if val == 0:
		return ""
	return f"{val}"


def next_recurring_time(recurrence, target_date, timezone_string):
	"""Compute the next firing time for a recurring reminder.

	Parses the recurrence phrase relative to just past the old target date.
	Probing at +1s and +4s distinguishes relative phrases ("every 1 day",
	where the offset leaks into the result and must be subtracted back out)
	from absolute ones ("every morning at 9am", where it does not).
	"""
	shifted_by_one = parse_time(recurrence, target_date + timedelta(seconds=1), timezone_string)
	shifted_by_four = parse_time(recurrence, target_date + timedelta(seconds=4), timezone_string)
	if shifted_by_one == shifted_by_four:
		return shifted_by_one
	return shifted_by_one - timedelta(seconds=1)
def test_process_comment(database, reddit):
	"""A plain trigger comment gets a confirmation reply and stores a
	one-day reminder with the comment permalink as its source."""
	created = utils.datetime_now()
	username = "Watchful1"
	comment_id = reddit_test.random_id()
	thread_id = reddit_test.random_id()
	comment = reddit_test.RedditObject(
		body=f"{static.TRIGGER}! 1 day",
		author=username,
		created=created,
		id=comment_id,
		link_id="t3_"+thread_id,
		permalink=f"/r/test/{thread_id}/_/{comment_id}/",
		subreddit="test"
	)

	reddit.add_comment(comment)

	comments.process_comment(comment.get_ingest_comment(), reddit, database)
	# The bot's reply is the comment's first child
	result = comment.get_first_child().body

	assert "CLICK THIS LINK" in result

	reminders = database.get_all_user_reminders(username)
	assert len(reminders) == 1
	assert reminders[0].user.name == username
	assert reminders[0].message is None
	assert reminders[0].source == utils.reddit_link(comment.permalink)
	assert reminders[0].requested_date == created
	assert reminders[0].target_date == created + timedelta(hours=24)
	assert reminders[0].id is not None
	assert reminders[0].recurrence is None
def test_process_comment_split_no_date(database, reddit):
	"""The split trigger ("Remind Me") without a parseable time phrase is
	ignored: no reply is posted and no reminder is stored."""
	created = utils.datetime_now()
	username = "Watchful1"
	comment_id = reddit_test.random_id()
	thread_id = reddit_test.random_id()
	comment = reddit_test.RedditObject(
		body=f"{static.TRIGGER_SPLIT}! test",
		author=username,
		created=created,
		id=comment_id,
		link_id="t3_"+thread_id,
		permalink=f"/r/test/{thread_id}/_/{comment_id}/",
		subreddit="test"
	)

	reddit.add_comment(comment)

	comments.process_comment(comment.get_ingest_comment(), reddit, database)
	# No reply posted
	assert len(comment.children) == 0

	reminders = database.get_all_user_reminders(username)
	assert len(reminders) == 0
def test_process_comment_timezone(database, reddit):
	"""A user with a stored timezone gets it mentioned in the reply, and the
	target date is still stored as an absolute 24-hour offset."""
	user = database.get_or_add_user(user_name="Watchful1")
	user.timezone = "America/Los_Angeles"

	username = "Watchful1"
	comment_id = reddit_test.random_id()
	thread_id = reddit_test.random_id()
	created = utils.datetime_now()
	comment = reddit_test.RedditObject(
		body=f"{static.TRIGGER}! 1 day",
		author=username,
		created=created,
		id=comment_id,
		link_id="t3_"+thread_id,
		permalink=f"/r/test/{thread_id}/_/{comment_id}/",
		subreddit="test"
	)
	reddit.add_comment(comment)

	comments.process_comment(comment.get_ingest_comment(), reddit, database)
	result = comment.get_first_child().body

	assert "default time zone" in result
	assert "`America/Los_Angeles`" in result

	reminders = database.get_all_user_reminders(username)
	assert reminders[0].target_date == created + timedelta(hours=24)
def test_update_incorrect_comments(database, reddit):
	"""Bot replies are edited to show the current click count: three extra
	reminders pointing at comment1 and two at comment2 should surface as
	"3 OTHERS"/"2 OTHERS", while comment3 keeps the plain link text."""
	comment_id1 = reddit_test.random_id()
	thread_id1 = reddit_test.random_id()
	comment1 = reddit_test.RedditObject(
		body=f"{static.TRIGGER}! 1 day",
		author="Watchful1",
		created=utils.datetime_now(),
		id=comment_id1,
		link_id="t3_"+thread_id1,
		permalink=f"/r/test/{thread_id1}/_/{comment_id1}/",
		subreddit="test"
	)
	reddit.add_comment(comment1)
	comments.process_comment(comment1.get_ingest_comment(), reddit, database)

	comment_id2 = reddit_test.random_id()
	thread_id2 = reddit_test.random_id()
	comment2 = reddit_test.RedditObject(
		body=f"{static.TRIGGER}! 1 day",
		author="Watchful1",
		created=utils.datetime_now(),
		id=comment_id2,
		link_id="t3_"+thread_id2,
		permalink=f"/r/test/{thread_id2}/_/{comment_id2}/",
		subreddit="test"
	)
	reddit.add_comment(comment2)
	comments.process_comment(comment2.get_ingest_comment(), reddit, database)

	comment_id3 = reddit_test.random_id()
	thread_id3 = reddit_test.random_id()
	comment3 = reddit_test.RedditObject(
		body=f"{static.TRIGGER}! 1 day",
		author="Watchful1",
		created=utils.datetime_now(),
		id=comment_id3,
		link_id="t3_"+thread_id3,
		permalink=f"/r/test/{thread_id3}/_/{comment_id3}/",
		subreddit="test"
	)
	reddit.add_comment(comment3)
	comments.process_comment(comment3.get_ingest_comment(), reddit, database)

	# Extra reminders created via message whose `message` field links the
	# comment permalink — these are what the click counts are derived from
	reminders = [
		Reminder(
			source="https://www.reddit.com/message/messages/XXXXX",
			message=utils.reddit_link(comment1.permalink),
			user=database.get_or_add_user("Watchful1"),
			requested_date=utils.parse_datetime_string("2019-01-01 04:00:00"),
			target_date=utils.parse_datetime_string("2019-01-05 05:00:00")
		),
		Reminder(
			source="https://www.reddit.com/message/messages/XXXXX",
			message=utils.reddit_link(comment1.permalink),
			user=database.get_or_add_user("Watchful1"),
			requested_date=utils.parse_datetime_string("2019-01-01 04:00:00"),
			target_date=utils.parse_datetime_string("2019-01-06 05:00:00")
		),
		Reminder(
			source="https://www.reddit.com/message/messages/XXXXX",
			message=utils.reddit_link(comment1.permalink),
			user=database.get_or_add_user("Watchful1"),
			requested_date=utils.parse_datetime_string("2019-01-01 04:00:00"),
			target_date=utils.parse_datetime_string("2019-01-07 05:00:00")
		),
		Reminder(
			source="https://www.reddit.com/message/messages/XXXXX",
			message=utils.reddit_link(comment2.permalink),
			user=database.get_or_add_user("Watchful1"),
			requested_date=utils.parse_datetime_string("2019-01-01 04:00:00"),
			target_date=utils.parse_datetime_string("2019-01-08 05:00:00")
		),
		Reminder(
			source="https://www.reddit.com/message/messages/XXXXX",
			message=utils.reddit_link(comment2.permalink),
			user=database.get_or_add_user("Watchful1"),
			requested_date=utils.parse_datetime_string("2019-01-01 04:00:00"),
			target_date=utils.parse_datetime_string("2019-01-09 05:00:00")
		)
	]
	for reminder in reminders:
		database.add_reminder(reminder)

	comments.update_comments(reddit, database)

	assert "3 OTHERS CLICKED THIS LINK" in reddit.get_comment(comment_id1).get_first_child().body
	assert "2 OTHERS CLICKED THIS LINK" in reddit.get_comment(comment_id2).get_first_child().body
	assert "CLICK THIS LINK" in reddit.get_comment(comment_id3).get_first_child().body
1 day", 337 | author="Watchful1", 338 | created=utils.datetime_now(), 339 | id=comment_id, 340 | link_id="t3_"+thread_id, 341 | permalink=f"/r/test/{thread_id}/_/{comment_id}/", 342 | subreddit=reddit.subreddits["test"] 343 | ) 344 | reddit.add_comment(comment) 345 | comments.process_comment(comment.get_ingest_comment(), reddit, database) 346 | 347 | assert len(comment.children) == 0 348 | assert len(reddit.sent_messages) == 1 349 | assert "I'm not allowed to reply in this subreddit" in reddit.sent_messages[0].body 350 | 351 | 352 | def test_commenting_locked(database, reddit): 353 | thread_id = reddit_test.random_id() 354 | 355 | reddit.lock_thread(thread_id) 356 | 357 | comment_id = reddit_test.random_id() 358 | comment = reddit_test.RedditObject( 359 | body=f"{static.TRIGGER}! 1 day", 360 | author="Watchful1", 361 | created=utils.datetime_now(), 362 | id=comment_id, 363 | link_id="t3_"+thread_id, 364 | permalink=f"/r/test/{thread_id}/_/{comment_id}/", 365 | subreddit="test" 366 | ) 367 | reddit.add_comment(comment) 368 | comments.process_comment(comment.get_ingest_comment(), reddit, database) 369 | 370 | assert len(comment.children) == 0 371 | assert len(reddit.sent_messages) == 1 372 | assert "the thread is locked" in reddit.sent_messages[0].body 373 | 374 | 375 | def test_commenting_deleted(database, reddit): 376 | comment_id = reddit_test.random_id() 377 | thread_id = reddit_test.random_id() 378 | comment = reddit_test.RedditObject( 379 | body=f"{static.TRIGGER}! 
1 day", 380 | author="Watchful1", 381 | created=utils.datetime_now(), 382 | id=comment_id, 383 | link_id="t3_"+thread_id, 384 | permalink=f"/r/test/{thread_id}/_/{comment_id}/", 385 | subreddit="test" 386 | ) 387 | comments.process_comment(comment.get_ingest_comment(), reddit, database) 388 | 389 | assert len(comment.children) == 0 390 | assert len(reddit.sent_messages) == 1 391 | assert "it was deleted before I could get to it" in reddit.sent_messages[0].body 392 | 393 | 394 | def test_process_recurring_comment_period(database, reddit): 395 | created = utils.datetime_now() 396 | username = "Watchful1" 397 | comment_id = reddit_test.random_id() 398 | thread_id = reddit_test.random_id() 399 | comment = reddit_test.RedditObject( 400 | body=f"{static.TRIGGER_RECURRING}! 1 day", 401 | author=username, 402 | created=created, 403 | id=comment_id, 404 | link_id="t3_"+thread_id, 405 | permalink=f"/r/test/{thread_id}/_/{comment_id}/", 406 | subreddit="test" 407 | ) 408 | 409 | reddit.add_comment(comment) 410 | 411 | comments.process_comment(comment.get_ingest_comment(), reddit, database) 412 | result = comment.get_first_child().body 413 | 414 | assert "CLICK THIS LINK" in result 415 | assert "and then every" in result 416 | assert "`1 day`" in result 417 | 418 | reminders = database.get_all_user_reminders(username) 419 | assert len(reminders) == 1 420 | assert reminders[0].user.name == username 421 | assert reminders[0].message is None 422 | assert reminders[0].source == utils.reddit_link(comment.permalink) 423 | assert reminders[0].requested_date == created 424 | assert reminders[0].target_date == created + timedelta(hours=24) 425 | assert reminders[0].id is not None 426 | assert reminders[0].recurrence == "1 day" 427 | 428 | 429 | def test_process_recurring_comment_time(database, reddit): 430 | created = utils.parse_datetime_string("2019-01-05 12:00:00") 431 | utils.debug_time = utils.parse_datetime_string("2019-01-05 12:00:00") 432 | username = "Watchful1" 433 | 
comment_id = reddit_test.random_id() 434 | thread_id = reddit_test.random_id() 435 | comment = reddit_test.RedditObject( 436 | body=f"{static.TRIGGER_RECURRING}! 9 pm", 437 | author=username, 438 | created=created, 439 | id=comment_id, 440 | link_id="t3_"+thread_id, 441 | permalink=f"/r/test/{thread_id}/_/{comment_id}/", 442 | subreddit="test" 443 | ) 444 | 445 | reddit.add_comment(comment) 446 | 447 | comments.process_comment(comment.get_ingest_comment(), reddit, database) 448 | result = comment.get_first_child().body 449 | 450 | assert "CLICK THIS LINK" in result 451 | assert "and then every" in result 452 | assert "9 hours" in result 453 | 454 | reminders = database.get_all_user_reminders(username) 455 | assert len(reminders) == 1 456 | assert reminders[0].user.name == username 457 | assert reminders[0].message is None 458 | assert reminders[0].source == utils.reddit_link(comment.permalink) 459 | assert reminders[0].requested_date == created 460 | assert reminders[0].target_date == created + timedelta(hours=9) 461 | assert reminders[0].id is not None 462 | assert reminders[0].recurrence == "9 pm" 463 | 464 | 465 | def test_fail_recurring_comment(database, reddit): 466 | created = utils.parse_datetime_string("2019-01-04 12:00:00") 467 | utils.debug_time = utils.parse_datetime_string("2019-01-04 12:00:00") 468 | username = "Watchful1" 469 | comment_id = reddit_test.random_id() 470 | thread_id = reddit_test.random_id() 471 | comment = reddit_test.RedditObject( 472 | body=f"{static.TRIGGER_RECURRING}! 
2019-01-05", 473 | author=username, 474 | created=created, 475 | id=comment_id, 476 | link_id="t3_"+thread_id, 477 | permalink=f"/r/test/{thread_id}/_/{comment_id}/", 478 | subreddit="test" 479 | ) 480 | 481 | reddit.add_comment(comment) 482 | 483 | comments.process_comment(comment.get_ingest_comment(), reddit, database) 484 | assert len(comment.children) == 0 485 | 486 | 487 | def test_process_cakeday_comment(database, reddit): 488 | username = "Watchful1" 489 | user = reddit_test.User(username, utils.parse_datetime_string("2015-05-05 15:25:17").timestamp()) 490 | reddit.add_user(user) 491 | created = utils.parse_datetime_string("2019-01-05 11:00:00") 492 | comment_id = reddit_test.random_id() 493 | thread_id = reddit_test.random_id() 494 | comment = reddit_test.RedditObject( 495 | body=f"{static.TRIGGER_CAKEDAY}!", 496 | author=username, 497 | created=created, 498 | id=comment_id, 499 | link_id="t3_"+thread_id, 500 | permalink=f"/r/test/{thread_id}/_/{comment_id}/", 501 | subreddit="test" 502 | ) 503 | 504 | reddit.add_comment(comment) 505 | 506 | utils.debug_time = utils.parse_datetime_string("2019-01-05 12:00:00") 507 | comments.process_comment(comment.get_ingest_comment(), reddit, database) 508 | result = comment.get_first_child().body 509 | 510 | assert "to remind you of your cakeday" in result 511 | 512 | reminders = database.get_all_user_reminders(username) 513 | assert len(reminders) == 1 514 | assert reminders[0].user.name == username 515 | assert reminders[0].source == utils.reddit_link(comment.permalink) 516 | assert reminders[0].requested_date == created 517 | assert reminders[0].target_date == utils.parse_datetime_string("2019-05-05 15:25:17") 518 | assert reminders[0].id is not None 519 | assert reminders[0].recurrence == "1 year" 520 | assert reminders[0].message == "Happy Cakeday!" 
--------------------------------------------------------------------------------
/test/conftest.py:
--------------------------------------------------------------------------------
import sys
import pytest
import discord_logging

# Initialize debug logging before the project modules are imported.
log = discord_logging.init_logging(debug=True)

# Make the project sources importable from the test directory.
sys.path.append("src")

import static
from database import Database
from praw_wrapper import reddit_test


@pytest.fixture
def database():
	# Fresh debug database for each test.
	return Database(debug=True, publish=True)


@pytest.fixture
def reddit():
	# Fake reddit instance; also pins the bot account name used in replies.
	reddit = reddit_test.Reddit("Watchful1BotTest")
	static.ACCOUNT_NAME = reddit.username
	return reddit
--------------------------------------------------------------------------------
/test/dateparsing_test.py:
--------------------------------------------------------------------------------
from datetime import datetime

import utils


def test_date_parsing():
	"""Each input string parsed relative to base_time yields the expected UTC datetime."""
	base_time = utils.datetime_force_utc(datetime.strptime("2019-01-01 01:23:45", "%Y-%m-%d %H:%M:%S"))

	pairs = [
		["1 day", "2019-01-02 01:23:45"],
		["365 days", "2020-01-01 01:23:45"],
		["2 weeks", "2019-01-15 01:23:45"],
		["3 years", "2022-01-01 01:23:45"],
		["3 months", "2019-04-01 01:23:45"],
		["24 hours", "2019-01-02 01:23:45"],
		["5 hrs", "2019-01-01 06:23:45"],
		["20 minutes", "2019-01-01 01:43:45"],
		["5 seconds", "2019-01-01 01:23:50"],
		["tomorrow", "2019-01-02 01:23:45"],
		["Next Thursday at 4pm", "2019-01-03 16:00:00"],
		["Tonight", "2019-01-01 21:00:00"],
		["2 pm", "2019-01-01 14:00:00"],
		["eoy", "2019-12-31 09:00:00"],
		["eom", "2019-01-31 09:00:00"],
		["eod", "2019-01-01 17:00:00"],
		["2022-01-01", "2022-01-01 00:00:00"],
		["10/15/19", "2019-10-15 00:00:00"],
		["April 9, 2020", "2020-04-09 00:00:00"],
		["January 13th, 2020", "2020-01-13 00:00:00"],
		["January 5th 2020", "2020-01-05 00:00:00"],
		["June 2nd", "2019-06-02 00:00:00"],
		["November 2", "2019-11-02 00:00:00"],
		["August 25, 2018, at 4pm", "2018-08-25 16:00:00"],
		["September 1, 2019 14:00:00", "2019-09-01 14:00:00"],
		["august", "2019-08-01 00:00:00"],
		["September", "2019-09-01 00:00:00"],
		["2025", "2025-01-01 00:00:00"],
		["2pm", "2019-01-01 14:00:00"],
		["7:20 pm", "2019-01-01 19:20:00"],
		["72hr", "2019-01-04 01:23:45"],
		["1d", "2019-01-02 01:23:45"],
		["1yr", "2020-01-01 01:23:45"],
		["7h", "2019-01-01 08:23:45"],
		["35m", "2019-01-01 01:58:45"],
		["2 weeks with a test string", "2019-01-15 01:23:45"],
		["3 years with a second date 2014", "2022-01-01 01:23:45"],
	]

	for time_string, expected_string in pairs:
		result_date = utils.parse_time(time_string, base_time, "UTC")
		expected_date = utils.datetime_force_utc(datetime.strptime(expected_string, "%Y-%m-%d %H:%M:%S"))
		assert result_date == expected_date, f"`{time_string}` as `{result_date}` != `{expected_date}`"


def test_date_parsing_timezone():
	"""Same inputs parsed in several user timezones; expected values are in UTC,
	one per timezone in the same order as the timezones list."""
	base_time = utils.datetime_force_utc(datetime.strptime("2019-01-01 01:23:45", "%Y-%m-%d %H:%M:%S"))

	timezones = [
		"America/Los_Angeles",
		"America/Denver",
		"America/Chicago",
		"America/New_York",
		"Australia/Sydney",
		"Europe/Brussels",
	]
	pairs = [
		["1 day", ["2019-01-02 01:23:45", "2019-01-02 01:23:45", "2019-01-02 01:23:45", "2019-01-02 01:23:45", "2019-01-02 01:23:45", "2019-01-02 01:23:45"]],
		["365 days", ["2020-01-01 01:23:45", "2020-01-01 01:23:45", "2020-01-01 01:23:45", "2020-01-01 01:23:45", "2020-01-01 01:23:45", "2020-01-01 01:23:45"]],
		["2 weeks", ["2019-01-15 01:23:45", "2019-01-15 01:23:45", "2019-01-15 01:23:45", "2019-01-15 01:23:45", "2019-01-15 01:23:45", "2019-01-15 01:23:45"]],
		["3 years", ["2022-01-01 01:23:45", "2022-01-01 01:23:45", "2022-01-01 01:23:45", "2022-01-01 01:23:45", "2022-01-01 01:23:45", 
"2022-01-01 01:23:45"]],
		["3 months", ["2019-04-01 00:23:45", "2019-04-01 00:23:45", "2019-04-01 00:23:45", "2019-04-01 00:23:45", "2019-04-01 01:23:45", "2019-04-01 00:23:45"]],
		["24 hours", ["2019-01-02 01:23:45", "2019-01-02 01:23:45", "2019-01-02 01:23:45", "2019-01-02 01:23:45", "2019-01-02 01:23:45", "2019-01-02 01:23:45"]],
		["5 hrs", ["2019-01-01 06:23:45", "2019-01-01 06:23:45", "2019-01-01 06:23:45", "2019-01-01 06:23:45", "2019-01-01 06:23:45", "2019-01-01 06:23:45"]],
		["20 minutes", ["2019-01-01 01:43:45", "2019-01-01 01:43:45", "2019-01-01 01:43:45", "2019-01-01 01:43:45", "2019-01-01 01:43:45", "2019-01-01 01:43:45"]],
		["5 seconds", ["2019-01-01 01:23:50", "2019-01-01 01:23:50", "2019-01-01 01:23:50", "2019-01-01 01:23:50", "2019-01-01 01:23:50", "2019-01-01 01:23:50"]],
		["tomorrow", ["2019-01-02 01:23:45", "2019-01-02 01:23:45", "2019-01-02 01:23:45", "2019-01-02 01:23:45", "2019-01-02 01:23:45", "2019-01-02 01:23:45"]],
		["Next Thursday at 4pm", ["2019-01-04 00:00:00", "2019-01-03 23:00:00", "2019-01-03 22:00:00", "2019-01-03 21:00:00", "2019-01-03 05:00:00", "2019-01-03 15:00:00"]],
		["Tonight", ["2019-01-01 05:00:00", "2019-01-01 04:00:00", "2019-01-01 03:00:00", "2019-01-01 02:00:00", "2019-01-01 10:00:00", "2019-01-01 20:00:00"]],
		["eoy", ["2018-12-31 17:00:00", "2018-12-31 16:00:00", "2018-12-31 15:00:00", "2018-12-31 14:00:00", "2019-12-30 22:00:00", "2019-12-31 08:00:00"]],
		["eom", ["2018-12-31 17:00:00", "2018-12-31 16:00:00", "2018-12-31 15:00:00", "2018-12-31 14:00:00", "2019-01-30 22:00:00", "2019-01-31 08:00:00"]],
		["eod", ["2019-01-01 01:00:00", "2019-01-01 00:00:00", "2018-12-31 23:00:00", "2018-12-31 22:00:00", "2019-01-01 06:00:00", "2019-01-01 16:00:00"]],
		["2022-01-01", ["2022-01-01 08:00:00", "2022-01-01 07:00:00", "2022-01-01 06:00:00", "2022-01-01 05:00:00", "2021-12-31 13:00:00", "2021-12-31 23:00:00"]],
		["10/15/19", ["2019-10-15 07:00:00", "2019-10-15 06:00:00", "2019-10-15 05:00:00", "2019-10-15 04:00:00", "2019-10-14 13:00:00", "2019-10-14 22:00:00"]],
		["April 9, 2020", ["2020-04-09 07:00:00", "2020-04-09 06:00:00", "2020-04-09 05:00:00", "2020-04-09 04:00:00", "2020-04-08 14:00:00", "2020-04-08 22:00:00"]],
		["January 13th, 2020", ["2020-01-13 08:00:00", "2020-01-13 07:00:00", "2020-01-13 06:00:00", "2020-01-13 05:00:00", "2020-01-12 13:00:00", "2020-01-12 23:00:00"]],
		["January 5th 2020", ["2020-01-05 08:00:00", "2020-01-05 07:00:00", "2020-01-05 06:00:00", "2020-01-05 05:00:00", "2020-01-04 13:00:00", "2020-01-04 23:00:00"]],
		["June 2nd", ["2019-06-02 07:00:00", "2019-06-02 06:00:00", "2019-06-02 05:00:00", "2019-06-02 04:00:00", "2019-06-01 14:00:00", "2019-06-01 22:00:00"]],
		["November 2", ["2019-11-02 07:00:00", "2019-11-02 06:00:00", "2019-11-02 05:00:00", "2019-11-02 04:00:00", "2019-11-01 13:00:00", "2019-11-01 23:00:00"]],
		["August 25, 2018, at 4pm", ["2018-08-25 23:00:00", "2018-08-25 22:00:00", "2018-08-25 21:00:00", "2018-08-25 20:00:00", "2018-08-25 06:00:00", "2018-08-25 14:00:00"]],
		["September 1, 2019 14:00:00", ["2019-09-01 21:00:00", "2019-09-01 20:00:00", "2019-09-01 19:00:00", "2019-09-01 18:00:00", "2019-09-01 04:00:00", "2019-09-01 12:00:00"]],
		["august", ["2019-08-31 07:00:00", "2019-08-31 06:00:00", "2019-08-31 05:00:00", "2019-08-31 04:00:00", "2019-07-31 14:00:00", "2019-07-31 22:00:00"]],
		["September", ["2019-09-30 07:00:00", "2019-09-30 06:00:00", "2019-09-30 05:00:00", "2019-09-30 04:00:00", "2019-08-31 14:00:00", "2019-08-31 22:00:00"]],
		["2025", ["2025-12-31 08:00:00", "2025-12-31 07:00:00", "2025-12-31 06:00:00", "2025-12-31 05:00:00", "2024-12-31 13:00:00", "2024-12-31 23:00:00"]],
		["2pm", ["2019-01-01 22:00:00", "2019-01-01 21:00:00", "2019-01-01 20:00:00", "2019-01-01 19:00:00", "2019-01-01 03:00:00", "2019-01-01 13:00:00"]],
		["7:20 pm", ["2019-01-01 03:20:00", "2019-01-01 02:20:00", "2019-01-02 01:20:00", "2019-01-02 00:20:00", "2019-01-01 08:20:00", "2019-01-01 18:20:00"]],
		["72hr", ["2019-01-04 01:23:45", "2019-01-04 01:23:45", "2019-01-04 01:23:45", "2019-01-04 01:23:45", "2019-01-04 01:23:45", "2019-01-04 01:23:45"]],
		["1d", ["2019-01-02 01:23:45", "2019-01-02 01:23:45", "2019-01-02 01:23:45", "2019-01-02 01:23:45", "2019-01-02 01:23:45", "2019-01-02 01:23:45"]],
		["1yr", ["2020-01-01 01:23:45", "2020-01-01 01:23:45", "2020-01-01 01:23:45", "2020-01-01 01:23:45", "2020-01-01 01:23:45", "2020-01-01 01:23:45"]],
		["7h", ["2019-01-01 08:23:45", "2019-01-01 08:23:45", "2019-01-01 08:23:45", "2019-01-01 08:23:45", "2019-01-01 08:23:45", "2019-01-01 08:23:45"]],
		["35m", ["2019-01-01 01:58:45", "2019-01-01 01:58:45", "2019-01-01 01:58:45", "2019-01-01 01:58:45", "2019-01-01 01:58:45", "2019-01-01 01:58:45"]],
	]

	for time_string, expected_strings in pairs:
		for i, timezone in enumerate(timezones):
			result_date = utils.parse_time(time_string, base_time, timezone)
			expected_date = utils.datetime_force_utc(datetime.strptime(expected_strings[i], "%Y-%m-%d %H:%M:%S"))
			assert result_date == expected_date, f"`{time_string}`, `{timezone}` as `{result_date}` != `{expected_date}`"
--------------------------------------------------------------------------------
/test/message_test.py:
--------------------------------------------------------------------------------
from datetime import timedelta
import pytz
import discord_logging

log = discord_logging.get_logger(init=True)

import messages
import utils
from praw_wrapper import reddit_test
import static
from classes.reminder import Reminder
from classes.comment import DbComment


def assert_date_with_tolerance(source, target, tolerance_minutes):
	# Helper: assert source lies within +/- tolerance_minutes of target.
	assert target - timedelta(minutes=tolerance_minutes) < source < target + timedelta(minutes=tolerance_minutes)


def test_add_reminder(database, reddit):
created = utils.datetime_now()
	username = "Watchful1"
	keyword = "reminderstring"
	id = reddit_test.random_id()
	message = reddit_test.RedditObject(
		body=f"[{keyword}]\n{static.TRIGGER}! 1 day",
		author=username,
		created=created,
		id=id
	)

	messages.process_message(message, reddit, database)
	result = message.get_first_child().body

	assert "reminderstring" in result

	assert "This time has already passed" not in result
	assert "Could not find a time in message" not in result
	assert "Could not parse date" not in result

	reminders = database.get_all_user_reminders(username)
	assert len(reminders) == 1
	assert reminders[0].user.name == username
	assert reminders[0].message == keyword
	assert reminders[0].source == utils.message_link(id)
	assert reminders[0].requested_date == created
	assert reminders[0].target_date == created + timedelta(hours=24)
	assert reminders[0].id is not None
	assert reminders[0].recurrence is None


def test_add_cakeday(database, reddit):
	"""Cakeday message sets a yearly reminder at the account's creation time."""
	username = "Watchful1"
	created = utils.parse_datetime_string("2015-05-05 15:25:17")
	user = reddit_test.User(username, created.timestamp())
	message = reddit_test.RedditObject(
		body="Cakeday!",
		author=user
	)

	utils.debug_time = utils.parse_datetime_string("2019-01-05 12:00:00")
	messages.process_message(message, reddit, database)
	result = message.get_first_child().body

	assert "to remind you of your cakeday" in result

	reminders = database.get_all_user_reminders(username)
	assert len(reminders) == 1
	assert reminders[0].user.name == username
	assert reminders[0].target_date == utils.parse_datetime_string("2019-05-05 15:25:17")
	assert reminders[0].id is not None
	assert reminders[0].recurrence == "1 year"
	assert reminders[0].message == "Happy Cakeday!"


def test_add_cakeday_exists(database, reddit):
	"""A second cakeday request does not create a duplicate reminder."""
	username = "Watchful1"
	created = utils.parse_datetime_string("2015-05-05 15:25:17")
	user = reddit_test.User(username, created.timestamp())
	message = reddit_test.RedditObject(
		body="Cakeday!",
		author=user
	)
	messages.process_message(message, reddit, database)

	message2 = reddit_test.RedditObject(
		body="Cakeday!",
		author=user
	)
	messages.process_message(message2, reddit, database)

	result = message2.get_first_child().body

	assert "It looks like you already have a cakeday reminder set." in result


def test_add_reminder_no_message(database, reddit):
	"""A bare trigger with no [message] still creates a reminder."""
	created = utils.datetime_now()
	username = "Watchful1"
	id = reddit_test.random_id()
	message = reddit_test.RedditObject(
		body=f"{static.TRIGGER}! 1 day",
		author=username,
		created=created,
		id=id
	)

	messages.process_message(message, reddit, database)
	result = message.get_first_child().body

	assert "This time has already passed" not in result
	assert "Could not find a time in message" not in result
	assert "Could not parse date" not in result

	reminders = database.get_all_user_reminders(username)
	assert len(reminders) == 1
	assert reminders[0].user.name == username
	assert reminders[0].message is None
	assert reminders[0].source == utils.message_link(id)
	assert reminders[0].requested_date == created
	assert reminders[0].target_date == created + timedelta(hours=24)
	assert reminders[0].id is not None


def test_add_reminder_no_date(database, reddit):
	"""With no parseable time the reminder defaults to one day out."""
	created = utils.datetime_now()
	username = "Watchful1"
	id = reddit_test.random_id()
	message = reddit_test.RedditObject(
		body=f"{static.TRIGGER}! \"error test\"",
		author=username,
		created=created,
		id=id
	)

	messages.process_message(message, reddit, database)
	result = message.get_first_child().body

	assert "This time has already passed" not in result
	assert "Could not find a time in message, defaulting to one day" in result

	reminders = database.get_all_user_reminders(username)
	assert len(reminders) == 1
	assert reminders[0].user.name == username
	assert reminders[0].message == "error test"
	assert reminders[0].source == utils.message_link(id)
	assert reminders[0].requested_date == created
	assert reminders[0].target_date == created + timedelta(hours=24)
	assert reminders[0].id is not None


def test_get_reminders(database, reddit):
	"""MyReminders lists the user's reminders, localized to their timezone."""
	utils.debug_time = utils.parse_datetime_string("2019-01-01 12:00:00")
	message = reddit_test.RedditObject(
		body="MyReminders!",
		author="Watchful1"
	)
	messages.process_message(message, reddit, database)
	result = message.get_first_child().body
	assert "You don't have any reminders." in result

	reminders = [
		Reminder(
			source="https://www.reddit.com/message/messages/XXXXX",
			message="KKKKK",
			user=database.get_or_add_user("Watchful1"),
			requested_date=utils.parse_datetime_string("2019-01-01 04:00:00"),
			target_date=utils.parse_datetime_string("2019-01-04 05:00:00")
		),
		Reminder(
			source="https://www.reddit.com/message/messages/YYYYY",
			message="FFFFF",
			user=database.get_or_add_user("Watchful1"),
			requested_date=utils.parse_datetime_string("2019-02-02 06:00:00"),
			target_date=utils.parse_datetime_string("2019-02-05 07:00:00")
		),
		Reminder(
			source="https://www.reddit.com/message/messages/ZZZZZ",
			message="GGGGG",
			user=database.get_or_add_user("Watchful1"),
			requested_date=utils.parse_datetime_string("2019-02-02 06:00:00"),
			target_date=utils.parse_datetime_string("2019-02-05 07:00:00"),
			recurrence="one day"
		)
	]
	for reminder in reminders:
		database.add_reminder(reminder)

	message = reddit_test.RedditObject(
		body="MyReminders!",
		author="Watchful1"
	)
	messages.process_message(message, reddit, database)
	result = message.get_first_child().body

	# Both the recurring and the one-shot table headers should be present.
	assert "Click here to delete all your reminders" in result
	assert "|Source|Message|Date|In|Repeat|Remove|" in result
	assert "|Source|Message|Date|In|Remove|" in result

	assert reminders[0].source in result
	assert reminders[0].message in result
	assert "01-04 05" in result

	assert reminders[1].source in result
	assert reminders[1].message in result
	assert "02-05 07" in result

	assert reminders[2].recurrence in result

	# Once a timezone is set, listed dates shift from UTC to local time.
	user = database.get_or_add_user(user_name="Watchful1")
	user.timezone = "America/Los_Angeles"
	messages.process_message(message, reddit, database)
	result = message.get_last_child().body
	assert "Your 
timezone is currently set to: `America/Los_Angeles`" in result
	assert "01-03 21" in result
	assert "02-04 23" in result


def test_get_long_reminders(database, reddit):
	"""More reminders than fit in one message are split across several replies."""
	utils.debug_time = utils.parse_datetime_string("2019-01-01 12:00:00")
	user = database.get_or_add_user("Watchful1")
	requested_date = utils.parse_datetime_string("2019-01-01 04:00:00")
	target_date = utils.parse_datetime_string("2019-01-01 04:00:00")
	for i in range(60):
		database.add_reminder(
			Reminder(
				source=f"https://www.reddit.com/message/messages/XXX{i}",
				message=f"{i}" * 50,
				user=user,
				requested_date=requested_date,
				target_date=target_date + timedelta(days=1)
			)
		)

	message = reddit_test.RedditObject(
		body="MyReminders!",
		author="Watchful1"
	)
	messages.process_message(message, reddit, database)
	# Header only on the first reply, table on every reply, footer on the last.
	assert len(message.children) == 3
	assert "Click here to delete all your reminders" in message.children[0].body
	assert "Click here to delete all your reminders" not in message.children[1].body
	assert "Click here to delete all your reminders" not in message.children[2].body
	assert "|Source|Message|Date|In|Remove|" in message.children[0].body
	assert "|Source|Message|Date|In|Remove|" in message.children[1].body
	assert "|Source|Message|Date|In|Remove|" in message.children[2].body
	assert "[^(Info)]" not in message.children[0].body
	assert "[^(Info)]" not in message.children[1].body
	assert "[^(Info)]" in message.children[2].body


def test_remove_reminder(database, reddit):
	"""Remove deletes a reminder only when the requester owns it."""
	reminder1 = Reminder(
		source="https://www.reddit.com/message/messages/XXXXX",
		message="KKKKK",
		user=database.get_or_add_user("Watchful1"),
		requested_date=utils.parse_datetime_string("2019-01-01 04:00:00"),
		target_date=utils.parse_datetime_string("2019-01-04 05:00:00")
	)
	reminder2 = Reminder(
		source="https://www.reddit.com/message/messages/YYYYY",
		message="FFFFF",
		user=database.get_or_add_user("Watchful1"),
		requested_date=utils.parse_datetime_string("2019-02-02 06:00:00"),
		target_date=utils.parse_datetime_string("2019-02-05 07:00:00")
	)
	reminder3 = Reminder(
		source="https://www.reddit.com/message/messages/ZZZZZ",
		message="JJJJJ",
		user=database.get_or_add_user("Watchful2"),
		requested_date=utils.parse_datetime_string("2019-03-02 06:00:00"),
		target_date=utils.parse_datetime_string("2019-03-05 07:00:00")
	)
	database.add_reminder(reminder1)
	database.add_reminder(reminder2)
	database.add_reminder(reminder3)

	message = reddit_test.RedditObject(
		body=f"Remove! test",
		author="Watchful2"
	)
	messages.process_message(message, reddit, database)
	assert "I couldn't find a reminder id to remove." in message.get_first_child().body

	message = reddit_test.RedditObject(
		body=f"Remove! {reminder1.id}",
		author="Watchful2"
	)
	messages.process_message(message, reddit, database)
	assert "It looks like you don't own this reminder or it doesn't exist." in message.get_first_child().body

	message = reddit_test.RedditObject(
		body=f"Remove! {reminder1.id}",
		author="Watchful1"
	)
	messages.process_message(message, reddit, database)
	assert "Reminder deleted." in message.get_first_child().body

	assert len(database.get_all_user_reminders("Watchful1")) == 1
	assert len(database.get_all_user_reminders("Watchful2")) == 1


def test_remove_all_reminders(database, reddit):
	"""RemoveAll deletes every reminder belonging to the requester only."""
	utils.debug_time = utils.parse_datetime_string("2019-01-01 12:00:00")
	message = reddit_test.RedditObject(
		body=f"RemoveAll!",
		author="Watchful1"
	)
	messages.process_message(message, reddit, database)
	assert "Deleted" not in message.get_first_child().body

	reminder1 = Reminder(
		source="https://www.reddit.com/message/messages/XXXXX",
		message="KKKKK",
		user=database.get_or_add_user("Watchful1"),
		requested_date=utils.parse_datetime_string("2019-01-01 04:00:00"),
		target_date=utils.parse_datetime_string("2019-01-04 05:00:00")
	)
	reminder2 = Reminder(
		source="https://www.reddit.com/message/messages/YYYYY",
		message="FFFFF",
		user=database.get_or_add_user("Watchful1"),
		requested_date=utils.parse_datetime_string("2019-02-02 06:00:00"),
		target_date=utils.parse_datetime_string("2019-02-05 07:00:00")
	)
	reminder3 = Reminder(
		source="https://www.reddit.com/message/messages/ZZZZZ",
		message="JJJJJ",
		user=database.get_or_add_user("Watchful2"),
		requested_date=utils.parse_datetime_string("2019-03-02 06:00:00"),
		target_date=utils.parse_datetime_string("2019-03-05 07:00:00")
	)
	database.add_reminder(reminder1)
	database.add_reminder(reminder2)
	database.add_reminder(reminder3)

	message = reddit_test.RedditObject(
		body=f"RemoveAll!",
		author="Watchful1"
	)
	messages.process_message(message, reddit, database)
	body = message.get_first_child().body
	assert "Deleted **2** reminders." 
in body
	assert len(database.get_all_user_reminders("Watchful1")) == 0
	assert len(database.get_all_user_reminders("Watchful2")) == 1


def test_delete_comment(database, reddit):
	"""Delete removes a bot comment, but only for the user it replied to."""
	db_comment = DbComment(
		thread_id="XXXXX",
		comment_id="ZZZZZ",
		reminder_id="YYYYY",
		user="Watchful1",
		source="www.reddit.com/r/test/comments/XXXXX",
		current_count=1
	)
	database.save_comment(db_comment)
	comment = reddit_test.RedditObject(
		body="Click here for a reminder!",
		author=static.ACCOUNT_NAME,
		id="ZZZZZ"
	)
	reddit.add_comment(comment, True)

	message = reddit_test.RedditObject(
		body=f"Delete! SSSSSS",
		author="Watchful1"
	)
	messages.process_message(message, reddit, database)
	assert "This comment doesn't exist or was already deleted." in message.get_first_child().body

	message = reddit_test.RedditObject(
		body=f"Delete! XXXXX",
		author="Watchful2"
	)
	messages.process_message(message, reddit, database)
	assert "It looks like the bot wasn't replying to you." in message.get_first_child().body

	message = reddit_test.RedditObject(
		body=f"Delete! XXXXX",
		author="Watchful1"
	)
	messages.process_message(message, reddit, database)
	assert "Comment deleted." in message.get_first_child().body
	assert comment.id not in reddit.all_comments


def test_set_timezone(database, reddit):
	"""Timezone command validates and stores the user's timezone preference."""
	username = "Watchful1"
	message = reddit_test.RedditObject(
		body="Timezone! ",
		author=username
	)
	messages.process_message(message, reddit, database)
	result = message.get_last_child().body
	assert "I couldn't find a timezone in your message." in result

	message.body = "Timezone! EST"
	messages.process_message(message, reddit, database)
	result = message.get_last_child().body
	assert "EST is not a valid timezone." in result

	message.body = "Timezone! America/Los_Angeles"
	messages.process_message(message, reddit, database)
	result = message.get_last_child().body
	assert "Updated your timezone to America/Los_Angeles" in result
	user = database.get_or_add_user(username)
	assert user.timezone == "America/Los_Angeles"

	# UTC resets the stored preference back to the default (None).
	message.body = "Timezone! UTC"
	messages.process_message(message, reddit, database)
	result = message.get_last_child().body
	assert "Reset your timezone to the default" in result
	user = database.get_or_add_user(username)
	assert user.timezone is None


def test_timezone_reminder_message(database, reddit):
	"""A reminder date written in the user's local timezone is stored as UTC."""
	user = database.get_or_add_user(user_name="Watchful1")
	user.timezone = "America/Los_Angeles"

	created = utils.datetime_now()
	target = created + timedelta(hours=24)
	username = "Watchful1"
	message = reddit_test.RedditObject(
		body=f"{static.TRIGGER}! {utils.get_datetime_string(utils.datetime_as_timezone(target, user.timezone))}",
		author=username,
		created=created
	)

	messages.process_message(message, reddit, database)

	reminders = database.get_all_user_reminders(username)
	assert len(reminders) == 1
	assert reminders[0].requested_date == created
	assert reminders[0].target_date == utils.datetime_as_utc(
		pytz.timezone(user.timezone).localize(target.replace(tzinfo=None))
	)


def test_add_recurring_reminder(database, reddit):
	"""RemindMeRepeat message stores a reminder with the recurrence recorded."""
	created = utils.datetime_now()
	username = "Watchful1"
	keyword = "reminderstring"
	id = reddit_test.random_id()
	message = reddit_test.RedditObject(
		body=f"[{keyword}]\n{static.TRIGGER_RECURRING}! 1 day",
		author=username,
		created=created,
		id=id
	)

	messages.process_message(message, reddit, database)
	result = message.get_first_child().body

	assert "reminderstring" in result
	assert "and then every `1 day`" in result

	assert "This time has already passed" not in result
	assert "Could not find a time in message" not in result
	assert "Could not parse date" not in result
	assert "Can't use a default for a recurring reminder" not in result
	assert "I got the same date rather than one after it" not in result
	assert "I got a date before that rather than one after it" not in result

	reminders = database.get_all_user_reminders(username)
	assert len(reminders) == 1
	assert reminders[0].user.name == username
	assert reminders[0].message == keyword
	assert reminders[0].source == utils.message_link(id)
	assert reminders[0].requested_date == created
	assert reminders[0].target_date == created + timedelta(hours=24)
	assert reminders[0].id is not None
	assert reminders[0].recurrence == "1 day"


def test_set_clock(database, reddit):
	# NOTE(review): this test is truncated in the visible source; it continues
	# past the end of this chunk.
	username = "Watchful1"
	message = reddit_test.RedditObject(
		body="Clock! ",
		author=username
	)
	messages.process_message(message, reddit, database)
	result = message.get_last_child().body
	assert "I couldn't find a clock type in your message." in result

	message.body = "Clock! 22"
	messages.process_message(message, reddit, database)
	result = message.get_last_child().body
	assert "22 is not a valid clock type." in result

	message.body = "Clock! 
12" 493 | messages.process_message(message, reddit, database) 494 | result = message.get_last_child().body 495 | assert "Updated your clock type to a 12 hour clock" in result 496 | user = database.get_or_add_user(username) 497 | assert user.time_format == "12" 498 | 499 | message.body = "Clock! 24" 500 | messages.process_message(message, reddit, database) 501 | result = message.get_last_child().body 502 | assert "Reset your clock type to the default 24 hour clock" in result 503 | user = database.get_or_add_user(username) 504 | assert user.time_format is None 505 | -------------------------------------------------------------------------------- /test/reminder_test.py: -------------------------------------------------------------------------------- 1 | import utils 2 | import notifications 3 | import static 4 | from database import Database 5 | from praw_wrapper import reddit_test 6 | import messages 7 | from datetime import timedelta 8 | from classes.reminder import Reminder 9 | 10 | 11 | def test_send_reminder(database, reddit): 12 | reminder = Reminder( 13 | source="https://www.reddit.com/message/messages/XXXXX", 14 | message="KKKKK", 15 | user=database.get_or_add_user("Watchful1"), 16 | requested_date=utils.parse_datetime_string("2019-01-01 04:00:00"), 17 | target_date=utils.parse_datetime_string("2019-01-05 05:00:00") 18 | ) 19 | database.add_reminder(reminder) 20 | 21 | utils.debug_time = utils.parse_datetime_string("2019-01-05 12:00:00") 22 | notifications.send_reminders(reddit, database) 23 | 24 | assert len(reddit.sent_messages) == 1 25 | 26 | message_body = reddit.sent_messages[0].body 27 | assert "I'm here to remind you" in message_body 28 | assert reminder.message in message_body 29 | assert "The source comment or message" in message_body 30 | assert reminder.source in message_body 31 | 32 | reminders = database.get_all_user_reminders("Watchful1") 33 | assert len(reminders) == 0 34 | 35 | 36 | def test_send_reminders(database, reddit): 37 | reminders = [ 
38 | Reminder( 39 | source="https://www.reddit.com/message/messages/XXXXX", 40 | message="KKKKK", 41 | user=database.get_or_add_user("Watchful1"), 42 | requested_date=utils.parse_datetime_string("2019-01-01 04:00:00"), 43 | target_date=utils.parse_datetime_string("2019-01-05 05:00:00") 44 | ), 45 | Reminder( 46 | source="https://www.reddit.com/message/messages/XXXXX", 47 | message="KKKKK", 48 | user=database.get_or_add_user("Watchful1"), 49 | requested_date=utils.parse_datetime_string("2019-01-01 04:00:00"), 50 | target_date=utils.parse_datetime_string("2019-01-06 05:00:00") 51 | ), 52 | Reminder( 53 | source="https://www.reddit.com/message/messages/XXXXX", 54 | message="KKKKK", 55 | user=database.get_or_add_user("Watchful1"), 56 | requested_date=utils.parse_datetime_string("2019-01-01 04:00:00"), 57 | target_date=utils.parse_datetime_string("2019-01-07 05:00:00") 58 | ), 59 | Reminder( 60 | source="https://www.reddit.com/message/messages/XXXXX", 61 | message="KKKKK", 62 | user=database.get_or_add_user("Watchful1"), 63 | requested_date=utils.parse_datetime_string("2019-01-01 04:00:00"), 64 | target_date=utils.parse_datetime_string("2019-01-08 05:00:00") 65 | ), 66 | Reminder( 67 | source="https://www.reddit.com/message/messages/XXXXX", 68 | message="KKKKK", 69 | user=database.get_or_add_user("Watchful1"), 70 | requested_date=utils.parse_datetime_string("2019-01-01 04:00:00"), 71 | target_date=utils.parse_datetime_string("2019-01-09 05:00:00") 72 | ) 73 | ] 74 | for reminder in reminders: 75 | database.add_reminder(reminder) 76 | 77 | utils.debug_time = utils.parse_datetime_string("2019-01-05 12:00:00") 78 | notifications.send_reminders(reddit, database) 79 | 80 | assert len(database.get_all_user_reminders("Watchful1")) == 4 81 | 82 | utils.debug_time = utils.parse_datetime_string("2019-01-08 12:00:00") 83 | notifications.send_reminders(reddit, database) 84 | 85 | assert len(database.get_all_user_reminders("Watchful1")) == 1 86 | 87 | 88 | def 
test_send_recurring_reminder(database, reddit): 89 | reminder = Reminder( 90 | source="https://www.reddit.com/message/messages/XXXXX", 91 | message="KKKKK", 92 | user=database.get_or_add_user("Watchful1"), 93 | requested_date=utils.parse_datetime_string("2019-01-01 04:00:00"), 94 | target_date=utils.parse_datetime_string("2019-01-05 05:00:00"), 95 | recurrence="one day" 96 | ) 97 | database.add_reminder(reminder) 98 | 99 | utils.debug_time = utils.parse_datetime_string("2019-01-05 12:00:00") 100 | notifications.send_reminders(reddit, database) 101 | 102 | assert len(reddit.sent_messages) == 1 103 | 104 | message_body = reddit.sent_messages[0].body 105 | assert "I'm here to remind you" in message_body 106 | assert reminder.message in message_body 107 | assert "The source comment or message" in message_body 108 | assert reminder.source in message_body 109 | assert "This is a repeating reminder. I'll message you again in " in message_body 110 | assert reminder.recurrence in message_body 111 | 112 | reminders = database.get_all_user_reminders("Watchful1") 113 | assert len(reminders) == 1 114 | assert reminders[0].target_date == utils.parse_datetime_string("2019-01-06 05:00:00") 115 | 116 | 117 | def test_send_recurring_reminder_limit(database, reddit): 118 | old_limit = static.RECURRING_LIMIT 119 | static.RECURRING_LIMIT = 3 120 | reminder = Reminder( 121 | source="https://www.reddit.com/message/messages/XXXXX", 122 | message="KKKKK", 123 | user=database.get_or_add_user("Watchful1"), 124 | requested_date=utils.parse_datetime_string("2019-01-01 04:00:00"), 125 | target_date=utils.parse_datetime_string("2019-01-05 05:00:00"), 126 | recurrence="one day" 127 | ) 128 | database.add_reminder(reminder) 129 | 130 | utils.debug_time = utils.parse_datetime_string("2019-01-04 12:00:00") 131 | for i in range(static.RECURRING_LIMIT + 1): 132 | utils.debug_time = utils.debug_time + timedelta(days=1) 133 | notifications.send_reminders(reddit, database) 134 | assert "I've sent you at 
least" not in reddit.sent_messages[-1].body 135 | assert i+1 == database.get_or_add_user("Watchful1").recurring_sent 136 | 137 | utils.debug_time = utils.debug_time + timedelta(days=1) 138 | notifications.send_reminders(reddit, database) 139 | assert "I've sent you at least" in reddit.sent_messages[-1].body 140 | reminders = database.get_all_user_reminders("Watchful1") 141 | assert len(reminders) == 0 142 | 143 | static.RECURRING_LIMIT = old_limit 144 | 145 | 146 | def test_reset_recurring_reminder_limit(database, reddit): 147 | reminder = Reminder( 148 | source="https://www.reddit.com/message/messages/XXXXX", 149 | message="KKKKK", 150 | user=database.get_or_add_user("Watchful1"), 151 | requested_date=utils.parse_datetime_string("2019-01-01 04:00:00"), 152 | target_date=utils.parse_datetime_string("2019-01-05 05:00:00"), 153 | recurrence="one day" 154 | ) 155 | database.add_reminder(reminder) 156 | 157 | utils.debug_time = utils.parse_datetime_string("2019-01-05 12:00:00") 158 | notifications.send_reminders(reddit, database) 159 | assert database.get_or_add_user("Watchful1").recurring_sent == 1 160 | 161 | message = reddit_test.RedditObject( 162 | body="MyReminders!", 163 | author="Watchful1" 164 | ) 165 | messages.process_message(message, reddit, database) 166 | 167 | assert database.get_or_add_user("Watchful1").recurring_sent == 0 168 | 169 | 170 | def test_confirm_recurring_reminders(database, reddit): 171 | cases = [ 172 | {"start": "2019-01-01 04:00:00", "timeString": "1 hour", "targets": ["2019-01-01 05:00:00", "2019-01-01 06:00:00", "2019-01-01 07:00:00"]}, 173 | {"start": "2019-01-01 04:00:00", "timeString": "1 day", "targets": ["2019-01-02 04:00:00", "2019-01-03 04:00:00", "2019-01-04 04:00:00"]}, 174 | {"start": "2019-01-01 04:00:00", "timeString": "4 pm", "targets": ["2019-01-01 16:00:00", "2019-01-02 16:00:00", "2019-01-03 16:00:00"]}, 175 | {"start": "2019-01-01 04:00:00", "timeString": "Monday", "targets": ["2019-01-07 00:00:00", "2019-01-14 
00:00:00", "2019-01-21 00:00:00"]}, 176 | {"start": "2019-01-01 04:00:00", "timeString": "1st of the month", "targets": ["2019-02-01 00:00:00", "2019-03-01 00:00:00", "2019-04-01 00:00:00"]}, 177 | {"start": "2019-01-01 04:00:00", "timeString": "October 7th", "targets": ["2019-10-07 00:00:00", "2020-10-07 00:00:00", "2021-10-07 00:00:00"]}, 178 | {"start": "2019-01-01 16:34:41", "timeString": "4:35pm", "targets": ["2019-01-01 16:35:00", "2019-01-02 16:35:00", "2019-01-03 16:35:00"]}, 179 | ] 180 | 181 | for case in cases: 182 | database = Database(debug=True, publish=True) 183 | reddit = reddit_test.Reddit("Watchful1BotTest") 184 | 185 | # add the reminder 186 | utils.debug_time = utils.parse_datetime_string(case["start"]) 187 | created = utils.datetime_now() 188 | username = "Watchful1" 189 | message = reddit_test.RedditObject( 190 | body=f"[XXXXX]\n{static.TRIGGER_RECURRING}! {case['timeString']}", 191 | author="Watchful1", 192 | created=created, 193 | id=reddit_test.random_id() 194 | ) 195 | messages.process_message(message, reddit, database) 196 | 197 | # verify it was added correctly 198 | reminders = database.get_all_user_reminders(username) 199 | assert len(reminders) == 1 200 | assert reminders[0].requested_date == created 201 | assert reminders[0].target_date == utils.parse_datetime_string(case["targets"][0]) 202 | assert reminders[0].recurrence == case["timeString"] 203 | 204 | # send it once and verify the new target time 205 | utils.debug_time = utils.parse_datetime_string(case["targets"][0]) + timedelta(seconds=1) 206 | notifications.send_reminders(reddit, database) 207 | assert len(reddit.sent_messages) == 2 208 | reminders = database.get_all_user_reminders(username) 209 | assert len(reminders) == 1 210 | assert reminders[0].target_date == utils.parse_datetime_string(case["targets"][1]) 211 | 212 | # send it again and verify the second target time 213 | utils.debug_time = utils.parse_datetime_string(case["targets"][1]) + timedelta(seconds=1) 214 | 
notifications.send_reminders(reddit, database) 215 | assert len(reddit.sent_messages) == 3 216 | reminders = database.get_all_user_reminders(username) 217 | assert len(reminders) == 1 218 | assert reminders[0].target_date == utils.parse_datetime_string(case["targets"][2]) 219 | -------------------------------------------------------------------------------- /test/stat_test.py: -------------------------------------------------------------------------------- 1 | import utils 2 | import static 3 | from praw_wrapper import reddit_test 4 | from datetime import timedelta 5 | from classes.reminder import Reminder 6 | import stats 7 | 8 | 9 | def add_sample_stats(database, reddit): 10 | reminders = [ 11 | Reminder( 12 | source="https://www.reddit.com/message/messages/XXXXX", 13 | message="[https://www.reddit.com/r/AskHistorians/comments/1emshj8/___/]", 14 | user=database.get_or_add_user("Watchful1"), 15 | requested_date=utils.parse_datetime_string("2019-01-01 04:00:00"), 16 | target_date=utils.parse_datetime_string("2019-01-05 05:00:00") 17 | ), 18 | Reminder( 19 | source="https://www.reddit.com/message/messages/XXXXX", 20 | message="[https://www.reddit.com/r/AskHistorians/comments/1emshk6/___/]", 21 | user=database.get_or_add_user("Watchful1"), 22 | requested_date=utils.parse_datetime_string("2019-01-01 04:00:00"), 23 | target_date=utils.parse_datetime_string("2019-01-06 05:00:00") 24 | ), 25 | Reminder( 26 | source="https://www.reddit.com/message/messages/XXXXX", 27 | message="[https://www.reddit.com/r/AskHistorians/comments/1emshk6/___/]", 28 | user=database.get_or_add_user("Watchful1"), 29 | requested_date=utils.parse_datetime_string("2019-01-02 04:00:00"), 30 | target_date=utils.parse_datetime_string("2019-01-07 05:00:00") 31 | ), 32 | Reminder( 33 | source="https://www.reddit.com/message/messages/XXXXX", 34 | message="[https://www.reddit.com/r/AskHistorians/comments/1emshk6/___/]", 35 | user=database.get_or_add_user("Watchful1"), 36 | 
requested_date=utils.parse_datetime_string("2019-01-02 04:00:00"), 37 | target_date=utils.parse_datetime_string("2019-01-07 05:00:00") 38 | ), 39 | Reminder( 40 | source="https://www.reddit.com/message/messages/XXXXX", 41 | message="[https://www.reddit.com/r/history/comments/1emshf5/___/]", 42 | user=database.get_or_add_user("Watchful1"), 43 | requested_date=utils.parse_datetime_string("2019-01-01 04:00:00"), 44 | target_date=utils.parse_datetime_string("2019-01-08 05:00:00") 45 | ), 46 | Reminder( 47 | source="https://www.reddit.com/message/messages/XXXXX", 48 | message="[https://www.reddit.com/r/AskHistorians/comments/1emshj8/___/]", 49 | user=database.get_or_add_user("Watchful1"), 50 | requested_date=utils.parse_datetime_string("2019-01-03 04:00:00"), 51 | target_date=utils.parse_datetime_string("2019-01-09 05:00:00") 52 | ), 53 | Reminder( 54 | source="https://www.reddit.com/message/messages/XXXXX", 55 | message="[https://www.reddit.com/r/AskHistorians/comments/1emshj2/___/]", 56 | user=database.get_or_add_user("Watchful1"), 57 | requested_date=utils.parse_datetime_string("2019-01-03 04:00:00"), 58 | target_date=utils.parse_datetime_string("2019-01-09 05:00:00") 59 | ) 60 | ] 61 | for reminder in reminders: 62 | database.add_reminder(reminder) 63 | 64 | submissions = [ 65 | {"created": utils.parse_datetime_string("2018-01-01 04:00:00"), "id": "1emshj2", "subreddit": "AskHistorians", "title": "Title1"}, 66 | {"created": utils.parse_datetime_string("2019-01-01 04:00:00"), "id": "1emshj8", "subreddit": "AskHistorians", "title": "Title2"}, 67 | {"created": utils.parse_datetime_string("2019-01-01 04:00:00"), "id": "1emshk6", "subreddit": "AskHistorians", "title": "Title3"}, 68 | {"created": utils.parse_datetime_string("2019-01-01 04:00:00"), "id": "1emshf5", "subreddit": "AskHistorians", "title": "Title4"}, 69 | ] 70 | for submission in submissions: 71 | submission_obj = reddit_test.RedditObject( 72 | body=f"blank", 73 | author="blank", 74 | 
title=submission["title"], 75 | created=submission["created"], 76 | id=submission["id"], 77 | permalink=f"/r/{submission['subreddit']}/comments/{submission['id']}/___/", 78 | subreddit=submission["subreddit"], 79 | prefix="t3", 80 | ) 81 | reddit.add_submission(submission_obj) 82 | 83 | 84 | def test_add_stat(database, reddit): 85 | utils.debug_time = utils.parse_datetime_string("2019-01-05 12:00:00") 86 | reminder = Reminder( 87 | source="https://www.reddit.com/message/messages/XXXXX", 88 | message="""[https://www.reddit.com/r/AskHistorians/comments/1emshj8/___/] 89 | RemindMe! 2 days""", 90 | user=database.get_or_add_user("Watchful1"), 91 | requested_date=utils.parse_datetime_string("2019-01-01 04:00:00"), 92 | target_date=utils.parse_datetime_string("2019-01-05 05:00:00") 93 | ) 94 | database.add_reminder(reminder) 95 | 96 | stat = database.get_stats_for_ids("AskHistorians", "1emshj8") 97 | assert stat.count_reminders == 1 98 | assert stat.initial_date is None 99 | 100 | reminder = Reminder( 101 | source="https://www.reddit.com/message/messages/YYYYY", 102 | message="""[https://www.reddit.com/r/AskHistorians/comments/1emshj8/___/] 103 | RemindMe! 
2 days""", 104 | user=database.get_or_add_user("Watchful1"), 105 | requested_date=utils.parse_datetime_string("2019-01-01 04:00:00"), 106 | target_date=utils.parse_datetime_string("2019-01-05 05:00:00") 107 | ) 108 | database.add_reminder(reminder) 109 | 110 | stat = database.get_stats_for_ids("AskHistorians", "1emshj8") 111 | assert stat.count_reminders == 2 112 | assert stat.initial_date is None 113 | 114 | 115 | def test_update_dates(database, reddit): 116 | utils.debug_time = utils.parse_datetime_string("2019-01-05 12:00:00") 117 | add_sample_stats(database, reddit) 118 | 119 | stats.update_stat_dates(reddit, database) 120 | 121 | count_empty_stats = len(database.get_stats_without_date()) 122 | assert count_empty_stats == 0 123 | 124 | post_stat = database.get_stats_for_ids("AskHistorians", "1emshj8") 125 | assert post_stat.initial_date == utils.parse_datetime_string("2019-01-01 04:00:00") 126 | 127 | sub_stats = database.get_stats_for_subreddit("AskHistorians", utils.debug_time - timedelta(days=7)) 128 | assert len(sub_stats) == 2 129 | assert sub_stats[0].count_reminders == 2 130 | assert sub_stats[1].count_reminders == 3 131 | 132 | 133 | def test_update_stat_wiki(database, reddit): 134 | utils.debug_time = utils.parse_datetime_string("2019-01-05 12:00:00") 135 | add_sample_stats(database, reddit) 136 | 137 | reddit.reply_submission( 138 | reddit.get_submission("1emshj2"), 139 | "1234567890" * 30 140 | ) 141 | reddit.reply_submission( 142 | reddit.get_submission("1emshk6"), 143 | "1234567890" 144 | ) 145 | 146 | stats.update_stat_dates(reddit, database) 147 | stats.update_ask_historians(reddit, database, min_reminders=0) 148 | 149 | wiki_content = reddit.get_subreddit_wiki_page("AskHistorians", "remindme") 150 | 151 | assert wiki_content == """This page shows the number of reminders requested for each thread in r/AskHistorians in the last 7 days. Only threads with at least 10 requested reminders are included. 
Please contact u/Watchful1 with any feedback or suggestions. 152 | 153 | Thread | Thread date | Words in top answer | Total reminders | Pending reminders 154 | ---|---|----|----|----|---- 155 | [Title2](https://www.reddit.com//r/AskHistorians/comments/1emshj8/___/)|2019-01-01 04:00:00||2|2 156 | [Title3](https://www.reddit.com//r/AskHistorians/comments/1emshk6/___/)|2019-01-01 04:00:00||3|3 157 | """ 158 | -------------------------------------------------------------------------------- /timezones.txt: -------------------------------------------------------------------------------- 1 | Africa/Abidjan 2 | Africa/Accra 3 | Africa/Addis_Ababa 4 | Africa/Algiers 5 | Africa/Asmara 6 | Africa/Bamako 7 | Africa/Bangui 8 | Africa/Banjul 9 | Africa/Bissau 10 | Africa/Blantyre 11 | Africa/Brazzaville 12 | Africa/Bujumbura 13 | Africa/Cairo 14 | Africa/Casablanca 15 | Africa/Ceuta 16 | Africa/Conakry 17 | Africa/Dakar 18 | Africa/Dar_es_Salaam 19 | Africa/Djibouti 20 | Africa/Douala 21 | Africa/El_Aaiun 22 | Africa/Freetown 23 | Africa/Gaborone 24 | Africa/Harare 25 | Africa/Johannesburg 26 | Africa/Juba 27 | Africa/Kampala 28 | Africa/Khartoum 29 | Africa/Kigali 30 | Africa/Kinshasa 31 | Africa/Lagos 32 | Africa/Libreville 33 | Africa/Lome 34 | Africa/Luanda 35 | Africa/Lubumbashi 36 | Africa/Lusaka 37 | Africa/Malabo 38 | Africa/Maputo 39 | Africa/Maseru 40 | Africa/Mbabane 41 | Africa/Mogadishu 42 | Africa/Monrovia 43 | Africa/Nairobi 44 | Africa/Ndjamena 45 | Africa/Niamey 46 | Africa/Nouakchott 47 | Africa/Ouagadougou 48 | Africa/Porto-Novo 49 | Africa/Sao_Tome 50 | Africa/Tripoli 51 | Africa/Tunis 52 | Africa/Windhoek 53 | America/Adak 54 | America/Anchorage 55 | America/Anguilla 56 | America/Antigua 57 | America/Araguaina 58 | America/Argentina/Buenos_Aires 59 | America/Argentina/Catamarca 60 | America/Argentina/Cordoba 61 | America/Argentina/Jujuy 62 | America/Argentina/La_Rioja 63 | America/Argentina/Mendoza 64 | America/Argentina/Rio_Gallegos 65 | 
America/Argentina/Salta 66 | America/Argentina/San_Juan 67 | America/Argentina/San_Luis 68 | America/Argentina/Tucuman 69 | America/Argentina/Ushuaia 70 | America/Aruba 71 | America/Asuncion 72 | America/Atikokan 73 | America/Bahia 74 | America/Bahia_Banderas 75 | America/Barbados 76 | America/Belem 77 | America/Belize 78 | America/Blanc-Sablon 79 | America/Boa_Vista 80 | America/Bogota 81 | America/Boise 82 | America/Cambridge_Bay 83 | America/Campo_Grande 84 | America/Cancun 85 | America/Caracas 86 | America/Cayenne 87 | America/Cayman 88 | America/Chicago 89 | America/Chihuahua 90 | America/Costa_Rica 91 | America/Creston 92 | America/Cuiaba 93 | America/Curacao 94 | America/Danmarkshavn 95 | America/Dawson 96 | America/Dawson_Creek 97 | America/Denver 98 | America/Detroit 99 | America/Dominica 100 | America/Edmonton 101 | America/Eirunepe 102 | America/El_Salvador 103 | America/Fort_Nelson 104 | America/Fortaleza 105 | America/Glace_Bay 106 | America/Godthab 107 | America/Goose_Bay 108 | America/Grand_Turk 109 | America/Grenada 110 | America/Guadeloupe 111 | America/Guatemala 112 | America/Guayaquil 113 | America/Guyana 114 | America/Halifax 115 | America/Havana 116 | America/Hermosillo 117 | America/Indiana/Indianapolis 118 | America/Indiana/Knox 119 | America/Indiana/Marengo 120 | America/Indiana/Petersburg 121 | America/Indiana/Tell_City 122 | America/Indiana/Vevay 123 | America/Indiana/Vincennes 124 | America/Indiana/Winamac 125 | America/Inuvik 126 | America/Iqaluit 127 | America/Jamaica 128 | America/Juneau 129 | America/Kentucky/Louisville 130 | America/Kentucky/Monticello 131 | America/Kralendijk 132 | America/La_Paz 133 | America/Lima 134 | America/Los_Angeles 135 | America/Lower_Princes 136 | America/Maceio 137 | America/Managua 138 | America/Manaus 139 | America/Marigot 140 | America/Martinique 141 | America/Matamoros 142 | America/Mazatlan 143 | America/Menominee 144 | America/Merida 145 | America/Metlakatla 146 | America/Mexico_City 147 | 
America/Miquelon 148 | America/Moncton 149 | America/Monterrey 150 | America/Montevideo 151 | America/Montserrat 152 | America/Nassau 153 | America/New_York 154 | America/Nipigon 155 | America/Nome 156 | America/Noronha 157 | America/North_Dakota/Beulah 158 | America/North_Dakota/Center 159 | America/North_Dakota/New_Salem 160 | America/Ojinaga 161 | America/Panama 162 | America/Pangnirtung 163 | America/Paramaribo 164 | America/Phoenix 165 | America/Port-au-Prince 166 | America/Port_of_Spain 167 | America/Porto_Velho 168 | America/Puerto_Rico 169 | America/Punta_Arenas 170 | America/Rainy_River 171 | America/Rankin_Inlet 172 | America/Recife 173 | America/Regina 174 | America/Resolute 175 | America/Rio_Branco 176 | America/Santarem 177 | America/Santiago 178 | America/Santo_Domingo 179 | America/Sao_Paulo 180 | America/Scoresbysund 181 | America/Sitka 182 | America/St_Barthelemy 183 | America/St_Johns 184 | America/St_Kitts 185 | America/St_Lucia 186 | America/St_Thomas 187 | America/St_Vincent 188 | America/Swift_Current 189 | America/Tegucigalpa 190 | America/Thule 191 | America/Thunder_Bay 192 | America/Tijuana 193 | America/Toronto 194 | America/Tortola 195 | America/Vancouver 196 | America/Whitehorse 197 | America/Winnipeg 198 | America/Yakutat 199 | America/Yellowknife 200 | Antarctica/Casey 201 | Antarctica/Davis 202 | Antarctica/DumontDUrville 203 | Antarctica/Macquarie 204 | Antarctica/Mawson 205 | Antarctica/McMurdo 206 | Antarctica/Palmer 207 | Antarctica/Rothera 208 | Antarctica/Syowa 209 | Antarctica/Troll 210 | Antarctica/Vostok 211 | Arctic/Longyearbyen 212 | Asia/Aden 213 | Asia/Almaty 214 | Asia/Amman 215 | Asia/Anadyr 216 | Asia/Aqtau 217 | Asia/Aqtobe 218 | Asia/Ashgabat 219 | Asia/Atyrau 220 | Asia/Baghdad 221 | Asia/Bahrain 222 | Asia/Baku 223 | Asia/Bangkok 224 | Asia/Barnaul 225 | Asia/Beirut 226 | Asia/Bishkek 227 | Asia/Brunei 228 | Asia/Chita 229 | Asia/Choibalsan 230 | Asia/Colombo 231 | Asia/Damascus 232 | Asia/Dhaka 233 | Asia/Dili 234 
| Asia/Dubai 235 | Asia/Dushanbe 236 | Asia/Famagusta 237 | Asia/Gaza 238 | Asia/Hebron 239 | Asia/Ho_Chi_Minh 240 | Asia/Hong_Kong 241 | Asia/Hovd 242 | Asia/Irkutsk 243 | Asia/Jakarta 244 | Asia/Jayapura 245 | Asia/Jerusalem 246 | Asia/Kabul 247 | Asia/Kamchatka 248 | Asia/Karachi 249 | Asia/Kathmandu 250 | Asia/Khandyga 251 | Asia/Kolkata 252 | Asia/Krasnoyarsk 253 | Asia/Kuala_Lumpur 254 | Asia/Kuching 255 | Asia/Kuwait 256 | Asia/Macau 257 | Asia/Magadan 258 | Asia/Makassar 259 | Asia/Manila 260 | Asia/Muscat 261 | Asia/Nicosia 262 | Asia/Novokuznetsk 263 | Asia/Novosibirsk 264 | Asia/Omsk 265 | Asia/Oral 266 | Asia/Phnom_Penh 267 | Asia/Pontianak 268 | Asia/Pyongyang 269 | Asia/Qatar 270 | Asia/Qostanay 271 | Asia/Qyzylorda 272 | Asia/Riyadh 273 | Asia/Sakhalin 274 | Asia/Samarkand 275 | Asia/Seoul 276 | Asia/Shanghai 277 | Asia/Singapore 278 | Asia/Srednekolymsk 279 | Asia/Taipei 280 | Asia/Tashkent 281 | Asia/Tbilisi 282 | Asia/Tehran 283 | Asia/Thimphu 284 | Asia/Tokyo 285 | Asia/Tomsk 286 | Asia/Ulaanbaatar 287 | Asia/Urumqi 288 | Asia/Ust-Nera 289 | Asia/Vientiane 290 | Asia/Vladivostok 291 | Asia/Yakutsk 292 | Asia/Yangon 293 | Asia/Yekaterinburg 294 | Asia/Yerevan 295 | Atlantic/Azores 296 | Atlantic/Bermuda 297 | Atlantic/Canary 298 | Atlantic/Cape_Verde 299 | Atlantic/Faroe 300 | Atlantic/Madeira 301 | Atlantic/Reykjavik 302 | Atlantic/South_Georgia 303 | Atlantic/St_Helena 304 | Atlantic/Stanley 305 | Australia/Adelaide 306 | Australia/Brisbane 307 | Australia/Broken_Hill 308 | Australia/Currie 309 | Australia/Darwin 310 | Australia/Eucla 311 | Australia/Hobart 312 | Australia/Lindeman 313 | Australia/Lord_Howe 314 | Australia/Melbourne 315 | Australia/Perth 316 | Australia/Sydney 317 | Canada/Atlantic 318 | Canada/Central 319 | Canada/Eastern 320 | Canada/Mountain 321 | Canada/Newfoundland 322 | Canada/Pacific 323 | Europe/Amsterdam 324 | Europe/Andorra 325 | Europe/Astrakhan 326 | Europe/Athens 327 | Europe/Belgrade 328 | Europe/Berlin 329 | 
Europe/Bratislava 330 | Europe/Brussels 331 | Europe/Bucharest 332 | Europe/Budapest 333 | Europe/Busingen 334 | Europe/Chisinau 335 | Europe/Copenhagen 336 | Europe/Dublin 337 | Europe/Gibraltar 338 | Europe/Guernsey 339 | Europe/Helsinki 340 | Europe/Isle_of_Man 341 | Europe/Istanbul 342 | Europe/Jersey 343 | Europe/Kaliningrad 344 | Europe/Kiev 345 | Europe/Kirov 346 | Europe/Lisbon 347 | Europe/Ljubljana 348 | Europe/London 349 | Europe/Luxembourg 350 | Europe/Madrid 351 | Europe/Malta 352 | Europe/Mariehamn 353 | Europe/Minsk 354 | Europe/Monaco 355 | Europe/Moscow 356 | Europe/Oslo 357 | Europe/Paris 358 | Europe/Podgorica 359 | Europe/Prague 360 | Europe/Riga 361 | Europe/Rome 362 | Europe/Samara 363 | Europe/San_Marino 364 | Europe/Sarajevo 365 | Europe/Saratov 366 | Europe/Simferopol 367 | Europe/Skopje 368 | Europe/Sofia 369 | Europe/Stockholm 370 | Europe/Tallinn 371 | Europe/Tirane 372 | Europe/Ulyanovsk 373 | Europe/Uzhgorod 374 | Europe/Vaduz 375 | Europe/Vatican 376 | Europe/Vienna 377 | Europe/Vilnius 378 | Europe/Volgograd 379 | Europe/Warsaw 380 | Europe/Zagreb 381 | Europe/Zaporozhye 382 | Europe/Zurich 383 | Indian/Antananarivo 384 | Indian/Chagos 385 | Indian/Christmas 386 | Indian/Cocos 387 | Indian/Comoro 388 | Indian/Kerguelen 389 | Indian/Mahe 390 | Indian/Maldives 391 | Indian/Mauritius 392 | Indian/Mayotte 393 | Indian/Reunion 394 | Pacific/Apia 395 | Pacific/Auckland 396 | Pacific/Bougainville 397 | Pacific/Chatham 398 | Pacific/Chuuk 399 | Pacific/Easter 400 | Pacific/Efate 401 | Pacific/Enderbury 402 | Pacific/Fakaofo 403 | Pacific/Fiji 404 | Pacific/Funafuti 405 | Pacific/Galapagos 406 | Pacific/Gambier 407 | Pacific/Guadalcanal 408 | Pacific/Guam 409 | Pacific/Honolulu 410 | Pacific/Kiritimati 411 | Pacific/Kosrae 412 | Pacific/Kwajalein 413 | Pacific/Majuro 414 | Pacific/Marquesas 415 | Pacific/Midway 416 | Pacific/Nauru 417 | Pacific/Niue 418 | Pacific/Norfolk 419 | Pacific/Noumea 420 | Pacific/Pago_Pago 421 | Pacific/Palau 422 | 
Pacific/Pitcairn 423 | Pacific/Pohnpei 424 | Pacific/Port_Moresby 425 | Pacific/Rarotonga 426 | Pacific/Saipan 427 | Pacific/Tahiti 428 | Pacific/Tarawa 429 | Pacific/Tongatapu 430 | Pacific/Wake 431 | Pacific/Wallis 432 | US/Alaska 433 | US/Arizona 434 | US/Central 435 | US/Eastern 436 | US/Hawaii 437 | US/Mountain 438 | US/Pacific 439 | UTC -------------------------------------------------------------------------------- /todo.txt: -------------------------------------------------------------------------------- 1 | -process messages: 2 | -add reminder 3 | -remove reminder 4 | -get reminders 5 | -delete comment 6 | -remove all reminders 7 | -handle replies and mentions 8 | -search for trigger 9 | -send reminders 10 | -update comments 11 | -database backups 12 | -close database in handler 13 | -cakeday 14 | -add cakeday 15 | -remove cakeday 16 | -send cakeday 17 | -migration script 18 | -remove quotes 19 | -handle default message 20 | -handle empty origin date 21 | -handle source of info page 22 | -discord logging 23 | -dynamic timing 24 | -dynamic looping 25 | -date parsing problems 26 | -numbers as words 27 | -search_dates 28 | -draft info post 29 | r/cakeday 30 | -new date library 31 | readme 32 | -track subreddits the bot is banned in 33 | -include reason for message instead of comment 34 | specify time zone 35 | relative dates in reminder list 36 | -message to set time zone 37 | -include current time zone in reminder list 38 | -default time zone for reminder parsing 39 | -use timezone when rendering time 40 | time zone list 41 | link to time zone list in comment replies that use it 42 | -database backup downloading 43 | -recurring reminders 44 | -message parsing 45 | -comment parsing 46 | -cakeday parsing 47 | -cakeday comment parsing 48 | -reminder list 49 | -sending reminders 50 | -user level timeouts 51 | -tests 52 | -show context on comment links 53 | -search_dates 54 | -update library 55 | -force english 56 | changes 57 | times like "tomorrow" and "next 
tuesday" return 0 utc rather than 9:00 utc 58 | edge cases like "2 hours after noon" don't work anymore. Use "2 pm" instead 59 | -12/24 hour setting 60 | -apologize when late responding to comments because pushshift is behind 61 | -migration script 62 | -say how long from now in confirmations 63 | -info pages for recurring reminders, cakedays, timezones and 12 hour clock 64 | -change test triggers back 65 | 66 | 67 | -trigger on "remind me!" "remind me !" 68 | -tests 69 | delete comment objects after either 6 months or when the associated reminder is sent 70 | delete reminders if the related user comes back as deleted 71 | -when updating comments, don't use the current time 72 | -remindmerepeat comment trigger 73 | don't delete comments from database when the reminder is sent 74 | port over better pushshift error logging 75 | don't log stack traces on known errors 76 | add prometheus tracking 77 | if the first trigger word doesn't have a valid date after it, check the second one 78 | --------------------------------------------------------------------------------