├── .babelrc ├── .env.template ├── .gitignore ├── .prettierignore ├── .prettierrc ├── .tsconfig ├── .vscode ├── launch.json └── settings.json ├── LICENSE ├── Procfile ├── README.md ├── docs ├── development.md ├── hosting.md ├── image_detection.md └── settings.md ├── eng.traineddata ├── package-lock.json ├── package.json ├── src ├── client │ ├── index.html │ ├── index.jsx │ └── style.css ├── common_words.ts ├── database.ts ├── database_manager.ts ├── dhash_gen.ts ├── first_time_init.ts ├── image_utils.ts ├── img │ └── favicon.ico ├── inbox_message_processor.ts ├── inbox_processor.ts ├── jobs │ └── keep_alive.ts ├── master_database_manager.ts ├── master_stats.ts ├── modded_subreddits.ts ├── processing_modules │ └── submission_modules │ │ └── image │ │ ├── existing_submission │ │ ├── allowRepostOnlyByUser.ts │ │ ├── removeBlacklisted.ts │ │ └── removeReposts.ts │ │ └── precheck │ │ ├── removeImagesWithText.ts │ │ ├── removeSmallImages.ts │ │ └── removeUncroppedImages.ts ├── reddit.ts ├── reddit_utils.ts ├── scripts │ ├── hamming_compare.ts │ └── ping_mongo.ts ├── server.ts ├── settings_processor.ts ├── submission_processor.ts ├── submission_queue.ts ├── subreddit_processor.ts ├── unmoderated_processor.ts └── wiki_utils.ts ├── tsconfig.json └── webpack.config.js /.babelrc: -------------------------------------------------------------------------------- 1 | { 2 | "presets": ["env", "react"], 3 | "plugins": ["syntax-async-functions","transform-regenerator","babel-polyfill"] 4 | } -------------------------------------------------------------------------------- /.env.template: -------------------------------------------------------------------------------- 1 | ACCOUNT_USERNAME= 2 | ALLOW_INVITES=allow 3 | CLIENT_ID= 4 | CLIENT_SECRET= 5 | LOG_LEVEL=info 6 | MONGODB_URI= 7 | NODE_ENV=develop 8 | PASSWORD= 9 | DAYS_EXPIRY=100 10 | EXTERNAL_DATABASES= 11 | MODQUEUE_SUBREDDITS= -------------------------------------------------------------------------------- /.gitignore: 
-------------------------------------------------------------------------------- 1 | # build 2 | build/* 3 | 4 | # Logs 5 | logs 6 | *.log 7 | npm-debug.log* 8 | yarn-debug.log* 9 | yarn-error.log* 10 | 11 | # Runtime data 12 | pids 13 | *.pid 14 | *.seed 15 | *.pid.lock 16 | 17 | # Directory for instrumented libs generated by jscoverage/JSCover 18 | lib-cov 19 | tmp 20 | 21 | # Coverage directory used by tools like istanbul 22 | coverage 23 | 24 | # nyc test coverage 25 | .nyc_output 26 | 27 | # Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files) 28 | .grunt 29 | 30 | # Bower dependency directory (https://bower.io/) 31 | bower_components 32 | 33 | # node-waf configuration 34 | .lock-wscript 35 | 36 | # Compiled binary addons (https://nodejs.org/api/addons.html) 37 | build/Release 38 | 39 | # Dependency directories 40 | node_modules/ 41 | jspm_packages/ 42 | 43 | # TypeScript v1 declaration files 44 | typings/ 45 | 46 | # Optional npm cache directory 47 | .npm 48 | 49 | # Optional eslint cache 50 | .eslintcache 51 | 52 | # Optional REPL history 53 | .node_repl_history 54 | 55 | # Output of 'npm pack' 56 | *.tgz 57 | 58 | # Yarn Integrity file 59 | .yarn-integrity 60 | 61 | # dotenv environment variables file 62 | .env 63 | 64 | # next.js build output 65 | .next -------------------------------------------------------------------------------- /.prettierignore: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/downfromthetrees/the_magic_eye/39143aa1fd8c8020096d4a376cd4f12d2721c004/.prettierignore -------------------------------------------------------------------------------- /.prettierrc: -------------------------------------------------------------------------------- 1 | { 2 | "semi": true, 3 | "singleQuote": true, 4 | "printWidth": 180, 5 | "tabWidth": 4 6 | } -------------------------------------------------------------------------------- /.tsconfig: 
-------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "outDir": "./build", 4 | "allowJs": true, 5 | "target": "es5" 6 | }, 7 | "include": [ 8 | "./src/**/*" 9 | ] 10 | } -------------------------------------------------------------------------------- /.vscode/launch.json: -------------------------------------------------------------------------------- 1 | { 2 | // Use IntelliSense to learn about possible attributes. 3 | // Hover to view descriptions of existing attributes. 4 | // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387 5 | "version": "0.2.0", 6 | "configurations": [ 7 | { 8 | "type": "node", 9 | "request": "launch", 10 | "name": "Launch Program", 11 | "program": "${workspaceFolder}\\src\\server.js", 12 | "outFiles": [ 13 | "${workspaceFolder}/**/*.js" 14 | ] 15 | } 16 | ] 17 | } -------------------------------------------------------------------------------- /.vscode/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "typescript.tsdk": "node_modules\\typescript\\lib" 3 | } -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2020 downfromthetrees 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 
14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /Procfile: -------------------------------------------------------------------------------- 1 | web: node --trace-warnings --expose-gc --optimize_for_size --max_old_space_size=460 --gc_interval=100 ./build/server.js -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Magic Eye 2 | 3 | Magic Eye is an image detection and moderation bot originally developed for r/hmmm. It is provided as a service using the account [u/MAGIC_EYE_BOT](https://www.reddit.com/user/MAGIC_EYE_BOT). 4 | 5 | Unlike other bots that purely detect image reposts, Magic Eye was developed to actively support moderators with complex and unique removal workflows. It also has several other general moderation features. 6 | 7 | Check out [r/MAGIC_EYE_BOT](https://www.reddit.com/r/MAGIC_EYE_BOT/) for support. 
8 | 9 | --- 10 | 11 | 12 | 13 | - [Magic Eye](#magic-eye) 14 | - [Setup](#setup) 15 | - [Information](#information) 16 | - [General info](#general-info) 17 | - [Repost removal workflow](#repost-removal-workflow) 18 | - [Features and Configuration](#features-and-configuration) 19 | - [How does it work?](#how-does-it-work) 20 | - [Credits](#credits) 21 | 22 | 23 | 24 | ## Setup 25 | 26 | There is just one step to adding Magic Eye to your subreddit: 27 | 28 | --- 29 | 30 | - Invite [u/MAGIC_EYE_BOT](https://www.reddit.com/u/MAGIC_EYE_BOT) as a moderator with `flair`, `posts` and `wiki` permissions. 31 | 32 | --- 33 | 34 | Once you've invited Magic Eye as a mod it will: 35 | 36 | - Accept the invite 37 | - Build a database from the `/new` and `/top` posts in your subreddit (can take up to an hour) 38 | - Create a settings page in your wiki at `r/YOUR_SUB_NAME/wiki/magic_eye` 39 | - Send you a modmail to let you know it has finished initialising and is active 40 | 41 | _Please only add one subreddit at a time, and wait for it to complete._ 42 | 43 | ## Information 44 | 45 | ### General info 46 | 47 | - By default it will work on both images and animated media (videos/gifs) 48 | 49 | - If users reply to [u/MAGIC_EYE_BOT](https://www.reddit.com/user/MAGIC_EYE_BOT), by default it will report the comment so you can check it out. 50 | 51 | - Magic Eye has sensible default repost settings (a 15-50 day repost limit depending on karma of last post) so is safe to add to your subreddit without changing it further. 52 | 53 | - You can safely demod/remod Magic Eye at any time without affecting your database of images. 54 | 55 | - Magic Eye checks for new submissions roughly every minute, so avoid moderating posts that are very new if you want Magic Eye to process them first. It will ignore posts that have already been approved. 56 | 57 | - On rare occasions Magic Eye can misdetect images and when it does the images may not look anything like each other. 
It isn't a bug, Magic Eye just doesn't see the image like our eyes and brain do. If an image is cropped in specific ways it also may no longer match. It's a trade-off, and you can tweak the tolerance in the settings to fit your subreddit. 58 | 59 | - You can reply to a removal message by [u/MAGIC_EYE_BOT](https://www.reddit.com/user/MAGIC_EYE_BOT) with `clear` and it'll remove the image from its database. There's generally no need to do this, except perhaps for rare problematic images (they tend to have [lots of grey space](https://i.imgur.com/Avp2Y57.png)). 60 | 61 | - Because of memory limits on the server, images greater than 6000 pixels in height or width will be ignored 62 | 63 | ### Repost removal workflow 64 | 65 | Magic Eye is designed so it is easy to override old decisions or fix mistakes. 66 | 67 | - When a repost is detected, Magic Eye looks at the current state of the last submission of that image to figure out what to do. 68 | - If the last submission is approved/unmoderated, Magic Eye acts based on your repost settings. 69 | - If the last submission is removed by a moderator... 70 | - ...and it is blacklisted: Magic Eye will automatically remove it for you. 71 | - ...and it is not blacklisted: Magic Eye will ignore it and treat the repost as a new submission. 72 | 73 | Because of this, most subreddits will want to blacklist images. See [the blacklisting section](./docs/settings.md#remove-blacklisted-images-enabled-by-default) for how to do it automatically with Toolbox. 
74 | 75 | ## Features and Configuration 76 | 77 | [**Find the configuration details for all Magic Eye settings here.**](./docs/settings.md) 78 | 79 | By default Magic Eye will: 80 | 81 | - Remove recent reposts (15-50 day repost period) 82 | - Remove blacklisted images (see [how to blacklist images](./docs/settings.md#remove-blacklisted-images-enabled-by-default)) 83 | - Remove broken image links 84 | 85 | If you find it has too many false positives, you can [lower the tolerance.](./docs/settings.md#set-the-tolerance) 86 | 87 | But it has several other moderation features, such as reporting images over a karma threshold or removing small images. 88 | 89 | ## How does it work? 90 | 91 | A technical explanation of the algorithm can be found [here](docs/image_detection.md). 92 | 93 | ## Credits 94 | 95 | - Magic Eye was created and is maintained by [u/CosmicKeys](https://www.reddit.com/u/CosmicKeys). 96 | - Thanks to [u/not_an_aardvark](https://www.reddit.com/u/not_an_aardvark) for his awesome [snoowrap](https://github.com/not-an-aardvark/snoowrap) project. 97 | - Thanks to [u/creesch](https://www.reddit.com/u/creesch), [u/agentlame](https://www.reddit.com/u/agentlame), and everyone who has contributed to [r/toolbox](https://www.reddit.com/r/toolbox). Alexis owes you a Lexus. 98 | -------------------------------------------------------------------------------- /docs/development.md: -------------------------------------------------------------------------------- 1 | 2 | # Local development 3 | 4 | Magic Eye is pretty simple to develop on and test locally. 5 | 6 | * Create a reddit account for your bot 7 | * While logged in as your bot go to https://www.reddit.com/prefs/apps 8 | * Create an app 9 | * Choose `script` 10 | * Enter this as the redirect uri: `https://not-an-aardvark.github.io/reddit-oauth-helper/` 11 | * Record the client id ([under the name](https://i.imgur.com/dcl8EY8.png)), and secret. You'll need them in a moment. 
12 | ---- 13 | * Create an account on mlab.com and go through the steps to create a new database. 14 | * Create a new user for your database. 15 | * Grab the url (inserting relevant parts). It'll look like `mongodb://mycoolname:mycoolpassword@ds125293.mlab.com:25293/mycooldb` 16 | ---- 17 | * Create a copy of `.env.template` and call it `.env` 18 | * Fill in the details of `.env` 19 | * `ACCOUNT_USERNAME`=username of bot you created above 20 | * `ALLOW_INVITES`=`allow` 21 | * `CLIENT_ID`=client id above 22 | * `CLIENT_SECRET`=client secret above 23 | * `DAYS_EXPIRY`=number of days before submission/user entries expire 24 | * `EXTERNAL_DATABASES`=A comma separated list of mongoDB database urls (one is fine, same one as `MONGODB_URI` is fine) 25 | * `LOG_LEVEL`=`info` (change to `debug` for more information) 26 | * `MONGODB_URI`= mongoDB url for master data 27 | * `NODE_ENV`=`develop` 28 | * `PASSWORD`= password of your bot account 29 | 30 | * Run `npm install` to install dependencies, and `npm run start` to begin. 31 | * Follow the normal setup instructions in the README to add it to a subreddit. 32 | 33 | -------------------------------------------------------------------------------- /docs/hosting.md: -------------------------------------------------------------------------------- 1 | 2 | # Hosting documentation 3 | 4 | MAGIC_EYE_BOT is a node express server backed by mongoDB. It can be hosted on Heroku for free, and mLab is a good option for free mongo solutions. 5 | 6 | This was initially intended for an audience when the bot was "if you can set it up, you can use it", but likely is not needed anymore except for re-creating the hosting now that the bot has enough databases to host everyone. 7 | 8 | ## Things to know before setting it up 9 | 10 | * Heroku requires a credit card number for validation. [No charges](https://devcenter.heroku.com/articles/free-dyno-hours#free-dyno-hour-pool) can or will ever be made by setting up Magic Eye though. 
11 | * You need a github account [create one here](http://github.com/) 12 | * If you're hosting it outside of Heroku, you must install ImageMagick 13 | 14 | ## Hosting setup 15 | 16 | * Login to github 17 | * Go to https://github.com/downfromthetrees/the_magic_eye 18 | * Hit the fork button 19 | ---- 20 | * Create a reddit account for your bot 21 | * While logged in as your bot go to https://www.reddit.com/prefs/apps 22 | * Create an app 23 | * Choose `script` 24 | * Enter this as the redirect uri: `https://not-an-aardvark.github.io/reddit-oauth-helper/` 25 | * Record the client id ([under the name](https://i.imgur.com/dcl8EY8.png)), and secret. You'll need them in a moment. 26 | ---- 27 | * Create a new account on https://www.heroku.com (pick node.js) 28 | * Create new app (give it a name like my-bot-name) 29 | * Add a credit card number: https://dashboard.heroku.com/account/billing 30 | * In **Settings** hit **Reveal Config Vars** and add the essential ones: 31 | * `ACCOUNT_USERNAME`=your bot's username (no u/), example: `MyCoolBot` 32 | * `CLIENT_ID`=generated above 33 | * `CLIENT_SECRET`=generated above 34 | * `NODE_ENV`=`production` 35 | * `PASSWORD`=your bot's password 36 | * `MONGODB_URI`=your database (will be auto-generated in next step) 37 | * `EXTERNAL_DATABASES`=your database (need to paste after next step autogenerates above. Or can be a comma separated list of them as you can create multiple in mlab - good for many subs, lots of data) 38 | * `DAYS_EXPIRY`=days until items expire (depends on how much data, but 360 is fine) 39 | * `LOG_LEVEL`=`info` 40 | ---- 41 | * Click the **Resources** tab and use the search bar to search and add these (free tier) add-ons: 42 | * mLab MongoDB 43 | * Create as many databases as needed - 500,000 submissions is = 100mb so one is plenty. MEB is hosted using 10 free tier databases. 
44 | * Papertrail 45 | * New Relic APM 46 | * Click the **Deploy** tab and select **GitHub** under **Deployment Method** 47 | * Login to github 48 | * Search for the_magic_eye and connect 49 | * Under **Automatic deploys**, choose **Enable Automatic Deploys** 50 | * Deploy it for the first time by selecting **Deploy Branch** under **Manual Deploy** 51 | * Go back to **Resources**, click on Papertrail and you should see logs coming out that the bot is successfully initializing by processing old posts before it starts running normally. You can use Papertrail any time you want to see what it's up to (filter out the keepalive calls first). 52 | 53 | Your bot is now up and running. 54 | 55 | Heroku apps need interaction to keep running, so follow the last step below: 56 | 57 | ---- 58 | * Setup a ping to keep the app alive: 59 | * Click on **New Relic APM** in the list 60 | * Click the **Synthetics** tab 61 | * Create new monitor 62 | * Choose **Ping** (should be the default) 63 | * Set url to: `https://.herokuapp.com/keepalive` (open it in a browser to test it works) 64 | * Set the validation string to: `{"status":"ok"}` 65 | * Check one American location 66 | * Set the schedule to 1 minute 67 | 68 | 69 | 70 | ## Undocumented settings 71 | 72 | If you are hosting it yourself, you can also use Magic Eye to detect text in images. Only words over 3 characters long are detected. The required setting is: 73 | 74 | ` 75 | "removeImagesWithText": { 76 | "action":"remove", 77 | "message": "You need some skin? This skin I am in!", 78 | "blacklistedWords": ["skin", "ape"] 79 | }, 80 | ` 81 | 82 | action can be `warn` or `remove`. -------------------------------------------------------------------------------- /docs/image_detection.md: -------------------------------------------------------------------------------- 1 | # Image detection 2 | 3 | For each image a [hash](https://en.wikipedia.org/wiki/Hash_function) is created, i.e. 
the image is converted into a small string of characters like `B1C1C1236369C950`. Those hashes are then compared and if they are similar enough, it counts as a match. 4 | 5 | ## Hashing algorithm 6 | 7 | Here is the basic algorithm to create the hash: 8 | 9 | * The image is shrunk down to a 9x8 pixel image and colours are removed. It now looks like [this](https://i.imgur.com/8k2LTmw.png). 10 | 11 | * Each pixel is compared to the horizontally adjacent one, and given a value of 0 for "less bright" and 1 for "more bright" 12 | 13 | * This gives 64 bits of information. That is converted into a 16 character hex number (so 2 characters per row of pixels in the shrunken image). 14 | 15 | Transparent pixels are ignored. 16 | 17 | ## Hash comparison 18 | 19 | The difference between images is calculated by finding the Hamming distance between two hashes (how many characters are different between the two ids). For example if we have `FFC1C1236369C950` and `B1C1C1236469C950`, 3 characters are different, so the hamming distance is 3. 20 | 21 | This is what the 0-16 `tolerance` value is in the Magic Eye settings. When two hashes are compared, the "tolerance" is how many hex values of difference is allowed. 22 | 23 | What this means practically is that for the two strings above, the top of the image was different but the rest of it was extremely similar. Maybe for example a watermark has been added to the top of the image. 24 | 25 | This is a simple but considerably effective algorithm, since we are measuring gradients it is not affected by things like image jpegyness or colour alterations. On the other hand, it is affected by cropping. There are lots of things that could be done to alter the algorithm: you could add vertical hashing, or compare in comparison to the mean rather than adjacent pixels etc. But it works pretty well as is. 26 | 27 | This is why the Magic Eye documentation says that the bot doesn't "see" images as we do. 
Gradient comparisons work well, but when there is a misdetection it's not obvious because we focus on colours, image quality etc. rather than gradients. 28 | 29 | -------------------------------------------------------------------------------- /docs/settings.md: -------------------------------------------------------------------------------- 1 | # Magic Eye Configuration 2 | 3 | Like AutoModerator, Magic Eye is configured using a wiki page that stores subreddit settings: 4 | 5 | http://www.reddit.com/r/YOUR_SUB_NAME/wiki/magic_eye 6 | 7 | - The settings are in JSON format. It is a different format from what AutoModerator uses, but it is still human readable. 8 | 9 | - MAGIC_EYE_BOT will let you know if your updates are successful, or give you help if there is a formatting issue. It keeps the actual settings in its own database so if you mess up your wiki page it's not a concern, the bot will just keep using the last valid settings you had. 10 | 11 | - NOTE: Because of the popularity of Magic Eye, it now takes 30 minutes to detect changes to the settings wiki page. Be patient and it will send you a notification eventually. To avoid waiting just to get a failure, please use a JSON validator on your settings before updating them: https://jsonlint.com/ 12 | 13 | - Magic Eye can't detect when you use the wiki page "revert" button. If you use it to revert to previous settings, just edit and save the wiki page (no changes needed) to get Magic Eye to pick up the change. 
14 | 15 | --- 16 | 17 | 18 | 19 | - [Magic Eye Configuration](#magic-eye-configuration) 20 | - [Media types](#media-types) 21 | - [Set the tolerance](#set-the-tolerance) 22 | - [On user reply](#on-user-reply) 23 | - [Remove reposts](#remove-reposts) 24 | - [Remove blacklisted images](#remove-blacklisted-images) 25 | - [Remove broken image links](#remove-broken-image-links) 26 | - [Remove small images](#remove-small-images) 27 | - [Remove uncropped images](#remove-uncropped-images) 28 | - [Custom footer](#custom-footer) 29 | - [Report unmoderated posts](#report-unmoderated-posts) 30 | - [Removal message type](#removal-message-type) 31 | 32 | 33 | 34 | ## Media types 35 | 36 | "processImages": true, 37 | "processAnimatedMedia": true, 38 | 39 | Individually turn on/off processing of images or animated media (i.e. gifs/videos). Both are enabled by default. 40 | 41 | Note: NSFW animated media will only be processed in NSFW communities, as it relies on the un-censored thumbnail. 42 | 43 | Videos/gifs with a pure black/single colour starting frame will be ignored. 44 | 45 | ## Set the tolerance 46 | 47 | "similarityTolerance": 5, 48 | 49 | The tolerance to image differences. 50 | 51 | - Range is 1-16, where 1 matches exact as possible images and 16 matches every image 52 | - The default is 5, but if you're a subreddit that has issues with similar memes/tweets, experiment with lower numbers. Tolerances above 5 generally aren't recommended. 53 | 54 | ## On user reply 55 | 56 | "onUserReply": "reportBot", 57 | 58 | When a user replies to one of the bots removal messages, report it happening. 59 | 60 | Notes: 61 | 62 | - `onUserReply`: This can be one of: 63 | - `"reportBot"`: reports the bots removal comment with the users message in the report. 
64 | - `"reportUser"`: reports the comment that replied to Magic Eye 65 | 66 | ## Remove reposts 67 | 68 | [Here is a handy image to help understand the threshold settings.](https://i.imgur.com/MmdfDci.png) 69 | 70 | "reposts": { 71 | "smallScore": 0, 72 | "smallScoreRepostDays": 15, 73 | "mediumScore": 400, 74 | "mediumScoreRepostDays": 25, 75 | "largeScore": 10000, 76 | "largeScoreRepostDays": 50, 77 | "topScore": 999999999, 78 | "approveIfOverRepostDays": false, 79 | "reflairApprovedReposts": false, 80 | "actionRepostsIfDeleted": false, 81 | "action": "remove" 82 | }, 83 | 84 | Optional fields: 85 | 86 | "reposts": { 87 | ... 88 | "removalMessage": "Bad luck buckaroo, this image is a repost!", 89 | "allTimeTopRemovalMessage": "Bad luck buckaroo, this image is an all time classic!", 90 | "fullRemovalMessage": "Hey {{author}}, I control this message. Here's my link: {{last_submission_link}}.\n\nAnd [here's the url]({{last_submission_url}}) posted {{time_ago}} by {{last_author}}", 91 | "sameAuthorRemovalMessage": "Hey {{author}}, Didn't you already post this? Sure you did, right here: {{last_submission_link}}.", 92 | "actionAll": false 93 | }, 94 | 95 | Notes: 96 | 97 | - `action`: This can be one of: 98 | - `"remove"`: removes the post and posts a message to the user 99 | - `"warn"`: reports the post and posts a removed comment in the thread with links 100 | - `"warnByModmail"`: sends a modmail with info about the repost 101 | - `"silent"`: remove the thread without a comment or report 102 | - You can override the first sentence with `removalMessage`/`allTimeTopRemovalMessage`, or the whole message with `fullRemovalMessage` and use the variables in the example as you like. `\n` for line break. Additionally you can use `sameAuthorRemovalMessage` to post a removal message custom for the situation when the same user has reposted the same image. 103 | - `actionRepostsIfDeleted`: Performs `action` on reposts even if the previous post was deleted. 
104 | - `approveIfOverRepostDays`: Auto-approves a repost over the time limit to save you doing it 105 | - Score thresholds: Magic Eye keeps track of the last successful post of an image and uses the score it got + how long ago it was posted to determine what to do. There are a few thresholds so that it can make smarter decisions for reposts of popular vs less popular reposts. For example in the default settings: if the last matching submission got over `mediumScore` points (in this case 400), it'll be removed if it's less than `mediumScoreRepostDays` days old (in this case 25 days). 106 | - You can set `smallScore` higher than 0 and it will let anything through that got a score under that amount of points last time 107 | - If `topScore` is set lower it will remove any post that ever got over this threshold permanently, with a unique message saying it's an all time subreddit top post. 108 | - `actionAll`: As a shortcut, if instead of thresholds you just want to remove/warn about every repost detected regardless of time, add this field with the value `true` and it will override the threshold settings. 109 | - `reflairApprovedReposts`: Reflairs reposts with the same flair as the last one had 110 | 111 | Advanced: 112 | 113 | Sometimes Magic Eye will miss reposts, and you will have to remove them manually. But since Magic Eye ignores removed threads, we need a special solution to tell it to also remove future threads like it's a repost. 114 | 115 | Like blacklisting, you can do this by adding the special `[](#repost)` tag to the removal message. 116 | 117 | [See it in action here](https://www.reddit.com/r/hmmm/comments/a2sseh/hmmm/eb0vmwv/) (note the extended message). 
118 | 119 | ## Remove blacklisted images 120 | 121 | "removeBlacklisted": {}, 122 | 123 | Images can be blacklisted by removing a thread and making a **distinguished** comment in it with this (NOTE to new reddit interface users: you must be in "Markdown mode", not "Fancy Pants Editor"): 124 | 125 | [](#start_removal) 126 | 127 | My cool removal reason. 128 | 129 | [](#end_removal) 130 | 131 | The `[](#thing)` tags are special empty links that are invisible to users when put in a comment. 132 | 133 | When Magic Eye sees the image again, it will look back at the blacklisted thread, retrieve the removal reason in between the tags and post it to the new user. [Here is an example of blacklisting in action](https://www.reddit.com/r/hmmm/comments/a2x5d0/hmmm/eb1tdf1/) in r/hmmm. 134 | 135 | Some suggested methods to add the tags: 136 | 137 | - Toolbox: 138 | - Get [Toolbox](http://www.reddit.com/r/toolbox), which is an awesome browser extension for reddit mods 139 | - In the [Toolbox configuration](https://i.imgur.com/NtNRP9t.png) add the tags around each individual removal, excluding the ones like "Please resubmit with a better title" where you want to allow reposting. 140 | - New reddit interface removal reasons (just add them to the removal reason) 141 | - RES Macros 142 | - Get [Reddit Enhancement Suite](https://redditenhancementsuite.com/) and use the macro feature 143 | - Just manually copy the tags in and write your removal in between them. 144 | - Watch out about the new reddit interface, it can mess with the formatting (just switch to markdown). 
145 | 146 | Optional fields: 147 | 148 | "removeBlacklisted": { 149 | ..., 150 | "fullRemovalMessage": "I control this message buckaroo, here's my link: {{last_submission_link}}.\n\nAnd [here's the url]({{last_submission_url}}), and here's the blacklist reason: {{blacklist_reason}}", 151 | "action": "silent" 152 | }, 153 | 154 | Notes: 155 | 156 | - Blacklisting works by looking at the _current state_ of threads/comments every time, it doesn't store anything remotely. This means it's easy to override, modify old decisions etc. See the [repost removal workflow](../README.md#repost-removal-workflow) for more information. 157 | 158 | - You can customize the removal message with the `fullRemovalMessage` parameter, and variables shown in the example above will be substituted in. 159 | 160 | - To remove posts without a comment, set `"action"` to `"silent"`. 161 | 162 | ## Remove broken image links 163 | 164 | "removeBrokenImages": {}, 165 | 166 | Optional fields: 167 | 168 | "removeBrokenImages": { 169 | "fullRemovalMessage": "Hey buckaroo, your horse looks weary and broken. Resubmit a better link." 170 | }, 171 | 172 | If the image can't be downloaded, Magic Eye will remove it as broken and ask the user to fix the link. This is commonly when the user posts a link to a reddit image that's deleted. 173 | 174 | You can [see it in action here](https://www.reddit.com/r/hmmm/comments/ah3d4t/hmmm/eeb2x85/). 175 | 176 | ## Remove small images 177 | 178 | "removeSmallImages": { 179 | "widthMinimum": 450 180 | "heightMinimum": 600 181 | }, 182 | 183 | Optional fields: 184 | 185 | "removeSmallImages": { 186 | ... 187 | "fullRemovalMessage": "Hey buckaroo, that's a tiny little horse. Post a bigger one. 
The width needs to be bigger than {{widthMinimum}}, and the height bigger than {{heightMinimum}}", 188 | "smallDimension": 330 // legacy size*size setting 189 | }, 190 | 191 | Details: 192 | 193 | - There is a default message printed out if you don't add one 194 | - Size is in pixels 195 | - Does not work on animated media 196 | 197 | ## Remove uncropped images 198 | 199 | "removeUncroppedImages": { 200 | "removeHorizontal": false, 201 | "removeVertical": true 202 | }, 203 | 204 | Optional fields: 205 | 206 | "removeUncroppedImages": { 207 | "fullRemovalMessage": "Hey buckaroo, top and bottom gotta go." 208 | }, 209 | 210 | Removes images with [black bars](https://i.imgur.com/6a4SCcw.png) at either the bottom and top, typical of cellphone screenshots (vertical), and/or the sides (horizontal). 211 | 212 | (Legacy: If no direction is mentioned, vertical only is assumed) 213 | 214 | ## Custom footer 215 | 216 | "customFooter": "[Read the damn rules](https://www.reddit.com/r/mrplow/wiki/rules) before replying to this bot or *so help me god...*", 217 | 218 | Replaces the default bot footer statement with a custom version. 219 | 220 | ## Report unmoderated posts 221 | 222 | "reportUnmoderated": { 223 | "reportUnmoderatedScore": 50 224 | }, 225 | 226 | Periodically looks at the top posts of the day and reports any post over a certain threshold that is not yet moderated. This can be helpful if you want to keep an eye on the big posts in a sub. 
227 | 228 | Details: 229 | 230 | - `reportUnmoderatedScore`: karma threshold to report unmoderated posts 231 | 232 | ## Removal message type 233 | 234 | "removalMethod": "default", 235 | 236 | - `"default"`: (or the setting is absent): Reply in the thread 237 | - `"replyAsSubreddit"`: Reply on behalf of the subreddit, causing all replies to go to modmail (**requires** `mail` **permission**) 238 | -------------------------------------------------------------------------------- /eng.traineddata: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/downfromthetrees/the_magic_eye/39143aa1fd8c8020096d4a376cd4f12d2721c004/eng.traineddata -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "the_magic_eye", 3 | "version": "1.1.0", 4 | "description": "Magic eye is a bot for detecting image reposts in subreddits, as well as managing moderator tasks around reposted images.", 5 | "scripts": { 6 | "start": "node --expose-gc ./build/server.js", 7 | "dev": "ts-node ./src/server.ts", 8 | "clean": "shx rm -rf build", 9 | "build": "npm run clean && tsc", 10 | "compare": "ts-node ./src/scripts/hamming_compare.ts", 11 | "ping": "ts-node ./src/scripts/ping_mongo.ts" 12 | }, 13 | "repository": { 14 | "type": "git", 15 | "url": "git+https://github.com/downfromthetrees/the_magic_eye.git" 16 | }, 17 | "author": "", 18 | "license": "ISC", 19 | "bugs": { 20 | "url": "https://github.com/downfromthetrees/the_magic_eye/issues" 21 | }, 22 | "homepage": "https://github.com/downfromthetrees/the_magic_eye#readme", 23 | "dependencies": { 24 | "@types/es6-promise": "^3.3.0", 25 | "@types/express": "^4.17.1", 26 | "@types/moment": "^2.13.0", 27 | "chalk": "^2.4.0", 28 | "dotenv": "^5.0.1", 29 | "express": "^4.16.3", 30 | "fetch": "^1.1.0", 31 | "fs-extra": "^5.0.0", 32 | "gm": "^1.23.1", 33 | 
"hamming": "0.0.2", 34 | "https": "^1.0.0", 35 | "image-downloader": "^3.3.0", 36 | "image-size": "^0.6.3", 37 | "imagemagick": "^0.1.3", 38 | "indent-string": "^3.2.0", 39 | "javascript-time-ago": "^2.0.1", 40 | "jsonschema": "^1.2.4", 41 | "loglevel": "^1.6.1", 42 | "moment": "^2.22.1", 43 | "mongodb": "^3.3.3", 44 | "node-fetch": "^2.2.0", 45 | "object-sizeof": "^1.5.2", 46 | "outdent": "^0.5.0", 47 | "pg": "^7.4.1", 48 | "phash-imagemagick": "^1.0.1", 49 | "png-js": "^0.1.1", 50 | "request": "^2.85.0", 51 | "shelljs": "^0.8.1", 52 | "shx": "^0.2.2", 53 | "snoowrap": "^1.15.2", 54 | "stream-to-array": "^2.3.0", 55 | "stripchar": "^1.2.1", 56 | "tesseract.js": "^1.0.19", 57 | "tslint": "^5.20.1" 58 | }, 59 | "devDependencies": { 60 | "@types/mongodb": "^3.3.3", 61 | "@types/node": "^12.7.5", 62 | "ts-node": "^8.4.1", 63 | "typescript": "^3.6.3" 64 | } 65 | } 66 | -------------------------------------------------------------------------------- /src/client/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 |
6 | 7 | -------------------------------------------------------------------------------- /src/client/index.jsx: -------------------------------------------------------------------------------- 1 | import * as React from 'react'; 2 | import * as ReactDOM from 'react-dom'; 3 | import './style.css'; 4 | 5 | const App = () => { 6 | return
The Magic Eye... watches
7 | } 8 | ReactDOM.render( 9 | , 10 | document.querySelector('#root') 11 | ); -------------------------------------------------------------------------------- /src/client/style.css: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/downfromthetrees/the_magic_eye/39143aa1fd8c8020096d4a376cd4f12d2721c004/src/client/style.css -------------------------------------------------------------------------------- /src/database.ts: -------------------------------------------------------------------------------- 1 | const fs = require('fs'); 2 | require('dotenv').config(); 3 | const chalk = require('chalk'); 4 | const MongoClient = require('mongodb').MongoClient; 5 | const hammingDistance = require('hamming'); 6 | const log = require('loglevel'); 7 | const sizeof = require('object-sizeof'); 8 | log.setLevel(process.env.LOG_LEVEL ? process.env.LOG_LEVEL : 'info'); 9 | 10 | import { getSubmissionCollection, getCacheName, MagicSubmission } from './database_manager'; 11 | 12 | export class MagicDatabase { 13 | subredditName; 14 | connection; 15 | dhash_cache; 16 | dhash_cache_updated = false; 17 | 18 | constructor(subredditName, connection, dhash_cache) { 19 | this.subredditName = subredditName; 20 | this.connection = connection; 21 | this.dhash_cache = dhash_cache; 22 | } 23 | 24 | async saveMagicSubmission(submission, addToCache) { 25 | if (submission._id == null) { 26 | throw new Error('Cannot create magic submission with null _id'); 27 | } 28 | 29 | submission.createdAt = new Date(); // reset expiry date 30 | try { 31 | const collection = await getSubmissionCollection(this); 32 | await collection.save(submission); 33 | if (addToCache) { 34 | this.dhash_cache.push(submission._id); 35 | this.dhash_cache_updated = true; 36 | } 37 | } catch (err) { 38 | log.error(chalk.red('MongoDb error saving magic submission:'), submission, err); 39 | } 40 | } 41 | 42 | async closeDatabase() { 43 | // flushes new items to disk 44 | 
if (this.dhash_cache_updated) { 45 | const startTime = new Date().getTime(); 46 | fs.writeFileSync(getCacheName(this.subredditName), JSON.stringify(this.dhash_cache), (err) => { 47 | if (err) throw err; 48 | log.error(chalk.red('Failed to write to cache disk for:'), this.subredditName, ' error: ', err); 49 | }); 50 | const endTime = new Date().getTime(); 51 | log.debug( 52 | chalk.green('[FILE_WRITE] Database cache wrote from disk, took: '), 53 | (endTime - startTime) / 1000, 54 | 's to load ', 55 | this.dhash_cache.length, 56 | 'entries for ', 57 | this.subredditName 58 | ); 59 | this.dhash_cache = null; 60 | this.connection = null; 61 | } 62 | } 63 | 64 | async getMagicSubmission(inputDHash, similarityTolerance) { 65 | let hammingThreshold = 5; 66 | if (!isNaN(similarityTolerance)) { 67 | hammingThreshold = similarityTolerance == 0 ? 1 : similarityTolerance; 68 | } 69 | 70 | function isMatch(cachedHashKey) { 71 | return hammingDistance(cachedHashKey, inputDHash) < hammingThreshold; 72 | } 73 | const canonicalHashKey = this.dhash_cache.find(isMatch); 74 | 75 | if (canonicalHashKey == undefined) { 76 | // No cache hit for hashKey 77 | return null; 78 | } 79 | 80 | log.debug(chalk.blue('Cached hamming match, hamming distance is: ', hammingDistance(canonicalHashKey, inputDHash))); 81 | 82 | try { 83 | const collection = await getSubmissionCollection(this); 84 | const magicSubmission = await collection.findOne({ _id: canonicalHashKey }); 85 | chalk.yellow('hashKey:', canonicalHashKey, 'value:', JSON.stringify(magicSubmission)); 86 | chalk.yellow(magicSubmission); 87 | 88 | if (magicSubmission.exactMatchOnly == true && magicSubmission.dhash != inputDHash) { 89 | log.debug('cache hit, but ignoring because exactMatchOnly is set for image'); 90 | return null; 91 | } 92 | 93 | return magicSubmission; 94 | } catch (err) { 95 | log.error(chalk.red('MongoDb error getting magic submission:'), canonicalHashKey, err); 96 | return null; 97 | } 98 | } 99 | 100 | async 
getMagicSubmissionById(submission_id): Promise { 101 | try { 102 | const collection = await getSubmissionCollection(this); 103 | return await collection.findOne({ reddit_id: submission_id }); 104 | } catch (err) { 105 | log.error(chalk.red('MongoDb error getting submission by id:'), submission_id, err); 106 | return undefined; 107 | } 108 | } 109 | 110 | async deleteMagicSubmission(submission) { 111 | try { 112 | const collection = await getSubmissionCollection(this); 113 | await collection.remove({ _id: submission._id }); 114 | 115 | const index = this.dhash_cache.indexOf(submission._id); 116 | if (index > -1) { 117 | this.dhash_cache.splice(index, 1); 118 | this.dhash_cache_updated = true; 119 | } 120 | } catch (err) { 121 | log.error(chalk.red('MongoDb error deleting magic submission:'), submission, err); 122 | } 123 | } 124 | } 125 | -------------------------------------------------------------------------------- /src/database_manager.ts: -------------------------------------------------------------------------------- 1 | const fs = require('fs'); 2 | require('dotenv').config(); 3 | const chalk = require('chalk'); 4 | const MongoClient = require('mongodb').MongoClient; 5 | const log = require('loglevel'); 6 | log.setLevel(process.env.LOG_LEVEL ? 
process.env.LOG_LEVEL : 'info'); 7 | 8 | import { getMasterProperty } from './master_database_manager'; 9 | import { MagicDatabase } from './database'; 10 | 11 | interface LocalCacheInfo { 12 | dhash_cache_exists: boolean; 13 | } 14 | 15 | interface LocalCacheInfoList { 16 | [name: string]: LocalCacheInfo; 17 | } 18 | 19 | const localCacheInfoList: LocalCacheInfoList = {}; 20 | 21 | interface ConnectionCache { 22 | [url: string]: any; 23 | } 24 | 25 | const connectionList: ConnectionCache = {}; 26 | 27 | export class MagicProperty { 28 | _id; 29 | value; 30 | 31 | constructor(name, value) { 32 | this._id = name; 33 | this.value = value; 34 | } 35 | } 36 | 37 | export class MagicSubmission { 38 | _id; // dhash of the original 39 | createdAt; // automatic expiry indicator 40 | reddit_id; // the last reddit id that matched the dhash (dhash within hamming distance) 41 | author; 42 | duplicates; // array of reddit ids, includes removed and approved posts 43 | exactMatchOnly; // boolean value 44 | highest_score; // number 45 | type; // 'image' or 'animated' 46 | 47 | constructor(dhash, redditSubmission, highestScore, submissionType) { 48 | this._id = dhash; 49 | this.createdAt = new Date(); 50 | this.reddit_id = redditSubmission.id; 51 | this.duplicates = [redditSubmission.id]; 52 | this.exactMatchOnly = null; 53 | this.highest_score = highestScore; 54 | this.type = submissionType; 55 | this.author = redditSubmission.author.name; 56 | } 57 | } 58 | 59 | export async function updateMagicSubmission(magicSubmission, redditSubmission) { 60 | magicSubmission.reddit_id = await redditSubmission.id; 61 | magicSubmission.author = await redditSubmission.author.name; 62 | } 63 | 64 | export function getCollectionName(collection, subredditName) { 65 | const collectionPrefix = (process.env.NODE_ENV == 'production' ? 
'' : process.env.NODE_ENV + ':') + subredditName + ':'; 66 | return collectionPrefix + collection; 67 | } 68 | 69 | export async function getSubmissionCollection(database) { 70 | return database.connection.collection(getCollectionName('submissions', database.subredditName)); 71 | } 72 | 73 | export async function getPropertyCollection(database) { 74 | return database.connection.collection(getCollectionName('properties', database.subredditName)); 75 | } 76 | 77 | function setLocalDatabaseCache(name: string, dhash_cache: any) { 78 | localCacheInfoList[name] = { dhash_cache_exists: true }; 79 | fs.writeFileSync(getCacheName(name), JSON.stringify(dhash_cache), err => { 80 | if (err) { 81 | log.error(chalk.red('Failed to write to cache disk for:'), name, ' error: ', err); 82 | localCacheInfoList[name].dhash_cache_exists = false; 83 | } 84 | }); 85 | } 86 | 87 | function getLocalDatabaseCache(name: string): string[] | undefined { 88 | if (!localCacheInfoList[name]) { 89 | return undefined; 90 | } 91 | 92 | if (!localCacheInfoList[name].dhash_cache_exists) { 93 | return undefined; 94 | } 95 | 96 | log.debug(chalk.red('Local database cache exists')); 97 | 98 | try { 99 | const startTime = new Date().getTime(); 100 | const dhash_cache = JSON.parse(fs.readFileSync(getCacheName(name))); 101 | const endTime = new Date().getTime(); 102 | log.debug(chalk.green('[FILE_LOAD] Database cache loaded from disk, took: '), (endTime - startTime) / 1000, 's to load ', dhash_cache.length, 'entries for ', name); 103 | 104 | return dhash_cache; 105 | } catch (err) { 106 | log.error(chalk.red('ERROR: Could not get local database cache for: '), name, ', error: ', err); 107 | return undefined; 108 | } 109 | } 110 | 111 | export function getCacheName(subredditName) { 112 | return `./tmp/${subredditName}-hash_cache.json`; 113 | } 114 | 115 | export async function initDatabase(name, connectionUrl, expiry?: number | undefined) { 116 | if (!connectionList[connectionUrl]) { 117 | 
log.debug(chalk.blue('Connecting to database...', name, '-', connectionUrl)); 118 | try { 119 | const client = await MongoClient.connect(connectionUrl, { useNewUrlParser: true, connectTimeoutMS: 5000 }); 120 | connectionList[connectionUrl] = await client.db(); 121 | 122 | if (!connectionList[connectionUrl]) { 123 | log.error(chalk.red('ERROR: Could not access connection for: '), name); 124 | return null; 125 | } 126 | 127 | log.debug(chalk.red('Finished connecting to: '), name); 128 | } catch (err) { 129 | log.info(chalk.red('********* Fatal MongoDb connection error for ********* : '), name, err, connectionUrl); 130 | return null; 131 | } 132 | } 133 | 134 | const expiryDays = expiry ? expiry : parseInt(process.env.DAYS_EXPIRY, 10); 135 | const finalExpirySeconds = 60 * 60 * 24 * expiryDays; 136 | log.debug(chalk.blue('EXPIRYDAYS '), expiryDays); 137 | 138 | const connection = connectionList[connectionUrl]; 139 | log.debug(chalk.blue('Loading database cache for '), name); 140 | const startTime = new Date().getTime(); 141 | 142 | let dhash_cache = getLocalDatabaseCache(name); 143 | 144 | if (!dhash_cache) { 145 | log.debug(chalk.blue('Connecting to database to get dhashes...', name, '-', connectionUrl)); 146 | try { 147 | const submissionCollection = await connection.collection(getCollectionName('submissions', name)); 148 | submissionCollection.ensureIndex({ createdAt: 1 }, { expireAfterSeconds: finalExpirySeconds }); 149 | submissionCollection.ensureIndex({ reddit_id: 1 }, { background: true }); 150 | 151 | dhash_cache = await submissionCollection 152 | .find() 153 | .project({ _id: 1 }) 154 | .map(x => x._id) 155 | .toArray(); 156 | 157 | setLocalDatabaseCache(name, dhash_cache); 158 | } catch (err) { 159 | log.info(chalk.red('Fatal MongoDb error access hashes for: '), name, err); 160 | return null; 161 | } 162 | } 163 | const endTime = new Date().getTime(); 164 | 165 | log.debug(chalk.green('[cacheload] Database cache loaded, took: '), (endTime - startTime) / 
1000, 's to load ', dhash_cache.length, 'entries for ', name); 166 | 167 | return new MagicDatabase(name, connection, dhash_cache); 168 | } 169 | 170 | export function databaseConnectionListSize() { 171 | return Object.keys(connectionList).length; 172 | } 173 | -------------------------------------------------------------------------------- /src/dhash_gen.ts: -------------------------------------------------------------------------------- 1 | const gm = require('gm').subClass({ 2 | imageMagick: true 3 | }); 4 | const PNG = require('png-js'); 5 | const toArray = require('stream-to-array'); 6 | const DEFAULT_HASH_SIZE = 8; 7 | const PIXEL_LENGTH = 4; 8 | 9 | function px(pixels: any, width: any, x: any, y: any) { 10 | return pixels[width * PIXEL_LENGTH * y + x * PIXEL_LENGTH]; 11 | } 12 | 13 | function binaryToHex(s: any) { 14 | let output = ''; 15 | for (let i = 0; i < s.length; i += 4) { 16 | const bytes = s.substr(i, 4); 17 | const decimal = parseInt(bytes, 2); 18 | const hex = decimal.toString(16); 19 | output += hex.toUpperCase(); 20 | } 21 | return output; 22 | } 23 | 24 | export function dhash_gen(path: any, callback: any, hashSize: any) { 25 | const height = hashSize || DEFAULT_HASH_SIZE; 26 | const width = height + 1; // Covert to small gray image 27 | gm(path).colorspace('GRAY').resize(width, height, '!').stream('png', (err: any, stream: any) => { 28 | if (err) { 29 | if (callback) { 30 | callback(err); 31 | } 32 | } else { // Get pixel data 33 | toArray(stream, (toArrayErr: any, arr: any) => { 34 | if (toArrayErr) { 35 | if (callback) { 36 | callback(toArrayErr); 37 | } 38 | } else { 39 | try { 40 | const png = new PNG(Buffer.concat(arr)); 41 | png.decode((pixels: any) => { // Compare adjacent pixels. 42 | let difference = ''; 43 | for (let row = 0; row < height; row++) { 44 | for (let col = 0; col < height; col++) { // height is not a mistake here... 
45 | const left = px(pixels, width, col, row); 46 | const right = px(pixels, width, col + 1, row); 47 | difference += left < right ? 1 : 0; 48 | } 49 | } // Convert difference to hex string 50 | if (callback) { 51 | callback(false, binaryToHex(difference)); 52 | } 53 | }); 54 | } catch (pngErr) { 55 | return callback && callback(pngErr); 56 | } 57 | } 58 | }); 59 | } 60 | }); 61 | }; -------------------------------------------------------------------------------- /src/first_time_init.ts: -------------------------------------------------------------------------------- 1 | const chalk = require('chalk'); 2 | const log = require('loglevel'); 3 | const outdent = require('outdent'); 4 | log.setLevel(process.env.LOG_LEVEL ? process.env.LOG_LEVEL : 'info'); 5 | 6 | import { processSubmission } from './submission_processor'; 7 | import { setSubredditSettings, getMasterProperty, setMasterProperty } from './master_database_manager'; 8 | import { printSubmission } from './reddit_utils'; 9 | 10 | let inProgress = Array(); 11 | 12 | export async function firstTimeInit(reddit, subredditName, database, masterSettings, suppressFirstTimeInitModmail = false) { 13 | const subreddit = await reddit.getSubreddit(subredditName); 14 | 15 | log.info(chalk.blue(`[${subredditName}]`, 'Beginning first time initialisation for', subredditName, '. 
Retrieving top posts...')); 16 | if (!isInitialising(subredditName)) { 17 | inProgress.push(subredditName); 18 | } 19 | 20 | const startTime = new Date().getTime(); 21 | 22 | try { 23 | const postAmount = 1000; // reddits current limit 24 | const alreadyProcessed = []; 25 | 26 | const topSubmissionsAll = await subreddit.getTop({time: 'all'}).fetchAll({amount: postAmount}); 27 | await processOldSubmissions(topSubmissionsAll, alreadyProcessed, 'all time top', subredditName, database, masterSettings); 28 | const topSubmissionsYear = await subreddit.getTop({time: 'year'}).fetchAll({amount: postAmount}); 29 | await processOldSubmissions(topSubmissionsYear, alreadyProcessed, 'year top', subredditName, database, masterSettings); 30 | const topSubmissionsMonth = await subreddit.getTop({time: 'month'}).fetchAll({amount: postAmount}); 31 | await processOldSubmissions(topSubmissionsMonth, alreadyProcessed, 'month top', subredditName, database, masterSettings); 32 | const topSubmissionsWeek = await subreddit.getTop({time: 'week'}).fetchAll({amount: postAmount}); 33 | await processOldSubmissions(topSubmissionsWeek, alreadyProcessed, 'week top', subredditName, database, masterSettings); 34 | const newSubmissions = await subreddit.getNew().fetchAll({amount: postAmount}); 35 | await processOldSubmissions(newSubmissions, alreadyProcessed, 'new', subredditName, database, masterSettings); 36 | } catch (e) { 37 | log.error(chalk.red('Error first time initialising subreddit:'), subredditName, e); 38 | inProgress = inProgress.filter(item => item !== subredditName); 39 | return; 40 | } 41 | 42 | inProgress = inProgress.filter(item => item !== subredditName); 43 | 44 | const endTime = new Date().getTime(); 45 | const totalTimeMinutes = Math.floor(((endTime - startTime) / 1000) / 60); 46 | log.info(`[${subredditName}]`, chalk.blue('Top and new posts successfully processed for', subredditName, '. 
Took: '), totalTimeMinutes, 'minutes'); 47 | 48 | masterSettings.config.firstTimeInit = true; 49 | await setSubredditSettings(subredditName, masterSettings); 50 | log.info(`[${subredditName}]`, chalk.blue('Master settings for ', subredditName, ' set. Init is complete at this point.')); 51 | if (!masterSettings.config.suppressFirstTimeInitModmail || !suppressFirstTimeInitModmail) { 52 | log.info(`[${subredditName}]`, 'Sending initialisation complete modmail message...'); 53 | await reddit.composeMessage({ 54 | to: await `/r/${subredditName}`, 55 | subject: `Initialisation complete.`, 56 | text: outdent` 57 | Hi all, I am a repost moderation bot and I'm now checking new posts made in your subreddit. 58 | 59 | These are the current settings for your subreddit: 60 | 61 | * Remove recent image/animated media reposts 62 | * Remove [images you choose to blacklist](https://github.com/downfromthetrees/the_magic_eye/blob/master/README.md#remove-blacklisted-images) 63 | * Remove broken image links 64 | 65 | Like AutoModerator I have a wiki page where you can edit settings. 
Here is a link to your settings page: r/${subredditName}/wiki/magic_eye 66 | 67 | You can learn all about me at r/MAGIC_EYE_BOT or see the full documentation below: 68 | 69 | https://github.com/downfromthetrees/the_magic_eye/blob/master/README.md` 70 | }); 71 | log.info(`[${subredditName}]`, chalk.blue('Success modmail sent and init set true for', subredditName)); 72 | } 73 | 74 | log.info(`[${subredditName}]`, 'Sending maintainer update...'); 75 | await reddit.composeMessage({ 76 | to: process.env.MAINTAINER, 77 | subject: "First time init complete", 78 | text: `First time init complete for: r/${subreddit.display_name}\n\n Took ${totalTimeMinutes} minutes.` 79 | }); 80 | 81 | await database.closeDatabase(); 82 | log.info(`[${subredditName}]`, 'First time init finalised successfully.'); 83 | } 84 | 85 | export async function processOldSubmissions(submissions, alreadyProcessed, name, subredditName, database, masterSettings) { 86 | const submissionsToProcess = submissions.filter(submission => !alreadyProcessed.includes(submission.id)); 87 | log.info(`[${subredditName}]`, 'Retrived', submissions.length, name, 'posts for', subredditName, ',', submissionsToProcess.length, ' are new posts.'); 88 | let processedCount = 0; 89 | 90 | let startTime = new Date().getTime(); 91 | for (const submission of submissionsToProcess) { 92 | let knownPoisonedIds = await getMasterProperty('known_poisoned_ids'); 93 | if (!knownPoisonedIds) { 94 | knownPoisonedIds = []; 95 | await setMasterProperty('known_poisoned_ids', knownPoisonedIds); 96 | } 97 | try { 98 | if (!knownPoisonedIds.includes(submission.id)) { 99 | knownPoisonedIds.push(submission.id); 100 | await setMasterProperty('known_poisoned_ids', knownPoisonedIds); 101 | await processSubmission(submission, masterSettings, database, null, false); 102 | 103 | var submissionIndex = knownPoisonedIds.indexOf(submission.id); 104 | if (submissionIndex > -1) { 105 | knownPoisonedIds.splice(submissionIndex, 1); 106 | } 107 | await 
setMasterProperty('known_poisoned_ids', knownPoisonedIds); 108 | } else { 109 | log.info(`[${subredditName}][first_time_init]`, 'Skipping poison submission:', await printSubmission(submission)); 110 | } 111 | } catch (e) { 112 | log.info(`[${subredditName}][first_time_init]`, 'Error thrown while processing:', await printSubmission(submission), e); 113 | } 114 | processedCount++; 115 | if (processedCount % 30 == 0) { 116 | log.info(`[${subredditName}]`, processedCount, '/', submissionsToProcess.length, name, 'posts for', subredditName, 'completed'); 117 | } 118 | alreadyProcessed.push(submission.id); 119 | } 120 | let endTime = new Date().getTime(); 121 | log.info(`[${subredditName}]`, chalk.blue('Processed', processedCount, name, ' submissions for ', subredditName),' Took: ', (endTime - startTime) / 1000, 's.'); 122 | } 123 | 124 | export function isInitialising(subredditName) { 125 | return inProgress.includes(subredditName); 126 | } 127 | 128 | export function isAnythingInitialising() { 129 | return inProgress.length > 0; 130 | } 131 | -------------------------------------------------------------------------------- /src/image_utils.ts: -------------------------------------------------------------------------------- 1 | const chalk = require('chalk'); 2 | const { promisify } = require('util'); 3 | const fs = require('fs'); 4 | const imageDownloader = require('image-downloader'); 5 | const imageMagick = require('imagemagick'); 6 | const tesseract = require('tesseract.js'); 7 | const stripchar = require('stripchar').StripChar; 8 | const fetch = require('node-fetch'); 9 | const imageSize = require('image-size'); 10 | 11 | import { dhash_gen } from './dhash_gen'; 12 | const dhashGet = promisify(dhash_gen); 13 | import { getCommonWords } from './common_words'; 14 | const commonWords = getCommonWords(); 15 | 16 | import { logDetectText } from './master_stats'; 17 | 18 | require('dotenv').config(); 19 | const log = require('loglevel'); 20 | 
log.setLevel(process.env.LOG_LEVEL ? process.env.LOG_LEVEL : 'info'); 21 | 22 | export async function generateDHash(imagePath, logUrl) { 23 | try { 24 | return await dhashGet(imagePath); 25 | } catch (e) { 26 | log.warn('Could not generate dhash for: ', logUrl, ', ', e); 27 | return null; 28 | } 29 | } 30 | 31 | export async function downloadImage(submissionUrl) { 32 | const options = { 33 | url: submissionUrl, 34 | dest: './tmp', 35 | headers: { 'User-Agent': 'MAGIC_EYE_BOT' }, 36 | }; 37 | 38 | try { 39 | const { filename, image } = await imageDownloader.image(options); 40 | return filename; 41 | } catch (err) { 42 | log.warn("Error: Couldn't download image (probably deleted): ", submissionUrl); 43 | return null; 44 | } 45 | } 46 | 47 | export function deleteImage(imagePath) { 48 | fs.unlink(imagePath, (e) => { 49 | if (e) { 50 | log.error(chalk.red('Failed to delete file: '), imagePath, e); 51 | } 52 | }); 53 | } 54 | 55 | export async function getImageUrl(submission) { 56 | let imageUrl = await submission.url; 57 | const thumbnail = await submission.thumbnail; 58 | if (imageUrl.endsWith('/')) { 59 | imageUrl = imageUrl.slice(0, imageUrl.length - 1); 60 | } 61 | 62 | if (imageUrl.includes('catbox')) return null; 63 | 64 | const suffix = imageUrl.split('.')[imageUrl.split('.').length - 1].split('?')[0]; // http://imgur.com/a/liD3a.gif?horrible=true 65 | const images = ['png', 'jpg', 'jpeg', 'bmp']; 66 | if (images.includes(suffix)) { 67 | return { imageUrl: imageUrl, submissionType: 'image' }; 68 | } 69 | 70 | const isVid = await submission.is_video; 71 | const crossPostParent = await submission.crosspost_parent_list; 72 | const isCrosspostVid = crossPostParent && crossPostParent[0] && crossPostParent[0].is_video; 73 | 74 | const isGfycat = imageUrl.includes('gfycat.com') || imageUrl.includes('redgifs.com'); 75 | const animatedMedia = ['gif', 'gifv', 'mp4', 'webm']; 76 | if (animatedMedia.includes(suffix) || isVid || isGfycat || isCrosspostVid) { 77 | return 
animatedMediaUrl(thumbnail); 78 | } 79 | 80 | const isImgur = imageUrl.includes('imgur.com'); 81 | if (isImgur) { 82 | // cases: 83 | // http://i.imgur.com/f7VXJQF - single image 84 | // http://imgur.com/mLkJuXP/ - single image, different url formatting 85 | // https://imgur.com/a/9RKPOtA - album, single image 86 | // http://imgur.com/a/liD3a - album, multiple images 87 | // http://imgur.com/gallery/HFoOCeg gallery, single image 88 | // https://imgur.com/gallery/5l71D gallery, multiple images (album) 89 | 90 | // An alternative method for imgur gifs/videos is to use "_d.jpg?maxwidth=520&shape=thumb&fidelity=high", however to keep them consistent with 91 | // giphy etc, magic eye will use the reddit thumbnail 92 | 93 | let imgurHash = imageUrl.split('/')[imageUrl.split('/').length - 1]; // http://imgur.com/S1dZBPm.weird?horrible=true 94 | imgurHash = imgurHash.split('.')[0]; 95 | imgurHash = imgurHash.split('?')[0]; 96 | const imgurClientId = '1317612995a5ccf'; 97 | const options = { 98 | headers: { 99 | Authorization: `Client-ID ${imgurClientId}`, 100 | }, 101 | }; 102 | 103 | const isAlbum = imageUrl.includes('imgur.com/a/'); 104 | const isGallery = imageUrl.includes('imgur.com/gallery/'); 105 | if (isGallery || isAlbum) { 106 | const albumFetchUrl = isGallery ? 
`https://api.imgur.com/3/gallery/album/${imgurHash}/images` : `https://api.imgur.com/3/album/${imgurHash}/images`; 107 | const albumResult = await fetch(albumFetchUrl, options); // gallery album 108 | const albumData = await albumResult.json(); 109 | if (albumData.success && albumData.data && albumData.data[0]) { 110 | // gallery with multiple images 111 | if (albumData.data[0].animated) { 112 | return animatedMediaUrl(thumbnail); 113 | } 114 | return { imageUrl: albumData.data[0].link, submissionType: 'image' }; 115 | } else if (albumData.success && albumData.data && albumData.data.images && albumData.data.images[0]) { 116 | // Not sure if case is valid - log for testing 117 | log.warn('Abnormal gallery url for processing: ', imageUrl); 118 | return null; 119 | } else { 120 | // gallery but only one image 121 | const albumImageFetchUrl = `https://api.imgur.com/3/gallery/image/${imgurHash}`; 122 | const imageResult = await fetch(albumImageFetchUrl, options); 123 | const albumImage = await imageResult.json(); 124 | if (albumImage.success && albumImage.data) { 125 | if (albumImage.data.animated) { 126 | return animatedMediaUrl(thumbnail); 127 | } 128 | 129 | return { imageUrl: albumImage.data.link, submissionType: 'image' }; 130 | } else { 131 | log.warn('Tried to parse this imgur album/gallery url but failed: ', imageUrl); 132 | return null; 133 | } 134 | } 135 | } else { 136 | // single image 137 | const result = await fetch(`https://api.imgur.com/3/image/${imgurHash}`, options); 138 | const singleImage = await result.json(); 139 | if (singleImage.success && singleImage.data) { 140 | if (singleImage.data.animated) { 141 | return animatedMediaUrl(thumbnail); 142 | } 143 | 144 | return { imageUrl: singleImage.data.link, submissionType: 'image' }; 145 | } else { 146 | log.warn('Tried to parse this imgur url but failed: ', imageUrl); 147 | return null; 148 | } 149 | } 150 | } 151 | 152 | return null; 153 | } 154 | 155 | function animatedMediaUrl(thumbnail) { 156 | 
return thumbnail === 'default' ? null : { imageUrl: thumbnail, submissionType: 'animated' }; 157 | } 158 | 159 | export async function getImageDetails(submissionUrl, includeWords, blacklistedWords?): Promise { 160 | const imagePath = await downloadImage(submissionUrl); 161 | if (imagePath == null) { 162 | return null; 163 | } 164 | 165 | if (getFilesizeInMegaBytes(imagePath) > 15) { 166 | log.error('Image was too large - ignoring. (is it a renamed gif?) ', submissionUrl); 167 | return { tooLarge: true }; 168 | } 169 | 170 | const imageDetails = { dhash: null, height: null, width: null, trimmedHeight: null, trimmedWidth: null, words: null, tooLarge: false, ignore: false }; 171 | 172 | const imageSize = await getImageSize(imagePath, submissionUrl); 173 | if (imageSize != null) { 174 | if (imageSize.height > 6000 || imageSize.width > 6000) { 175 | return { tooLarge: true }; 176 | } 177 | 178 | imageDetails.height = imageSize.height; 179 | imageDetails.width = imageSize.width; 180 | } else { 181 | log.error('Failed to generate size for ', submissionUrl); 182 | return { ignore: true, tooLarge: false }; 183 | } 184 | 185 | imageDetails.dhash = await generateDHash(imagePath, submissionUrl); 186 | 187 | if (isSolidColor(imageDetails.dhash)) { 188 | log.info('Rejecting solid colour dhash:', imageDetails.dhash); 189 | return { ignore: true, tooLarge: false }; 190 | } 191 | 192 | if (imageDetails.dhash == null) { 193 | return null; // must generate a dhash to be valid details 194 | } 195 | 196 | imageDetails.words = includeWords ? 
await getWordsInImage(imagePath, imageSize.height, blacklistedWords) : []; 197 | 198 | try { 199 | const trimmedPath = imagePath + '_trimmed'; 200 | await promisify(imageMagick.convert)([imagePath, '-trim', trimmedPath]); 201 | const trimmedImageSize = await getImageSize(trimmedPath, submissionUrl); 202 | if (trimmedImageSize != null) { 203 | imageDetails.trimmedHeight = trimmedImageSize.height; 204 | imageDetails.trimmedWidth = trimmedImageSize.width; 205 | } else { 206 | log.error('Failed to generate trimmed size for ', submissionUrl); 207 | } 208 | deleteImage(trimmedPath); 209 | } catch (e) { 210 | log.error(chalk.red('Could not trim submission:'), submissionUrl, ' - imagemagick error: ', e); 211 | } 212 | 213 | deleteImage(imagePath); 214 | return imageDetails; 215 | } 216 | 217 | function isSolidColor(dhash) { 218 | // for some reason dhash_gen will produce the second hash for white. 219 | return dhash === '0000000000000000' || dhash === '5500000000000000'; 220 | } 221 | 222 | async function getImageSize(path, submissionUrl) { 223 | try { 224 | return imageSize(path); 225 | } catch (e) { 226 | log.error(chalk.red('Could not get imageSize for submission:'), submissionUrl, e); 227 | return null; 228 | } 229 | } 230 | 231 | function getFilesizeInMegaBytes(filename) { 232 | const stats = fs.statSync(filename); 233 | const fileSizeInBytes = stats.size; 234 | return fileSizeInBytes / 1000000.0; 235 | } 236 | 237 | async function getWordsInImage(originalImagePath, height, blacklistedWords) { 238 | try { 239 | // resize it first, issues with large images 240 | let imagePath = originalImagePath; 241 | const resizeImageFirst = height > 500; 242 | if (resizeImageFirst) { 243 | imagePath = originalImagePath + '-reduced'; 244 | await promisify(imageMagick.convert)([originalImagePath, '-resize', '500', imagePath]); // maintains dimensions over exact size 245 | } 246 | 247 | const startTime = new Date().getTime(); 248 | let result; 249 | log.debug(chalk.blue('Begin text 
detection in image:', imagePath)); 250 | await tesseract.recognize(imagePath).then((data) => (result = data)); 251 | const detectedStrings = result.words.map((word) => stripchar.RSExceptUnsAlpNum(word.text.toLowerCase())); 252 | const detectedWords = detectedStrings.filter((item) => item.length > 3 && (blacklistedWords ? blacklistedWords.includes(item) : commonWords.has(item))); 253 | log.debug(chalk.blue('Text detected in image:'), detectedWords, 'blacklisted:', blacklistedWords); 254 | const endTime = new Date().getTime(); 255 | const timeTaken = (endTime - startTime) / 1000; 256 | logDetectText(timeTaken); 257 | if (timeTaken > 20) { 258 | log.info(chalk.red('End text detection, took: '), timeTaken, 's to load '); 259 | } 260 | 261 | if (resizeImageFirst) { 262 | await deleteImage(imagePath); 263 | } 264 | 265 | return detectedWords; 266 | } catch (e) { 267 | log.error(chalk.red('Text detection error:'), e); 268 | } 269 | return []; 270 | } 271 | -------------------------------------------------------------------------------- /src/img/favicon.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/downfromthetrees/the_magic_eye/39143aa1fd8c8020096d4a376cd4f12d2721c004/src/img/favicon.ico -------------------------------------------------------------------------------- /src/inbox_message_processor.ts: -------------------------------------------------------------------------------- 1 | // standard modules 2 | require('dotenv').config(); 3 | const outdent = require('outdent'); 4 | const chalk = require('chalk'); 5 | const log = require('loglevel'); 6 | log.setLevel(process.env.LOG_LEVEL ? 
process.env.LOG_LEVEL : 'info');

// magic eye modules
import { getImageDetails, getImageUrl } from './image_utils';
import { sliceSubmissionId } from './reddit_utils';
import { updateModdedSubreddits } from './modded_subreddits';

/**
 * Entry point for one inbox item. Comments are routed by author: moderators get the
 * mod-command handler, regular users the assistance handler; anything that is not a
 * comment goes to the private message handler (mod invites etc).
 */
export async function processInboxMessage(inboxMessage, reddit, database, messageSubreddit, masterSettings) {
    const subredditName = messageSubreddit ? messageSubreddit.display_name : null;
    const subreddit = messageSubreddit ? await reddit.getSubreddit(subredditName) : null;

    if (inboxMessage.author && inboxMessage.author.name === process.env.ACCOUNT_USERNAME) {
        log.warn('Ignoring message from self...', inboxMessage.id);
        return;
    }

    if (inboxMessage.author && inboxMessage.author.name === 'AutoModerator') {
        return;
    }

    if (inboxMessage.was_comment) {
        const moderators = await subreddit.getModerators();
        // NOTE(review): the 'CosmicKeys' clause treats any comment by that user as a mod
        // command whenever the sub has at least one moderator (maintainer backdoor) - preserved.
        const isMod = moderators.find((moderator) => moderator.name === inboxMessage.author.name || inboxMessage.author.name === 'CosmicKeys');

        if (isMod) {
            await processModComment(subredditName, inboxMessage, reddit, database, masterSettings);
        } else {
            await processUserComment(subredditName, inboxMessage, reddit, masterSettings);
        }
    } else {
        await processUserPrivateMessage(inboxMessage, subreddit, reddit);
    }
}

// Dispatches moderator reply commands in a thread: help / clear / wrong / avoid.
async function processModComment(subredditName, inboxMessage, reddit, database, masterSettings) {
    // (duplicate of this guard removed)
    if (inboxMessage.subject === 'username mention') {
        log.info(`[${subredditName}]`, 'Username mention:', inboxMessage.id);
        return;
    }

    // moderator commands
    switch (inboxMessage.body.toLowerCase().trim()) {
        case 'help':
            await printHelp(inboxMessage);
            break;
        case 'clear':
            await runCommand(inboxMessage, reddit, database, masterSettings, command_clearSubmission);
            break;
        case 'wrong':
            await runCommand(inboxMessage, reddit, database, masterSettings, command_removeDuplicate);
            break;
        case 'avoid':
            await runCommand(inboxMessage, reddit, database, masterSettings, command_setExactMatchOnly);
            break;
        default:
            await inboxMessage.reply('Not sure what that command is. Try `help` to see the commands I support.').distinguish();
            break;
    }
}

// A non-moderator replied to the bot: surface the thread to the mods via a report.
async function processUserComment(subredditName, inboxMessage, reddit, masterSettings) {
    if (inboxMessage.subject === 'username mention') {
        log.info(`[${subredditName}]`, 'Username mention:', inboxMessage.id);
        return;
    }

    if (masterSettings.settings.onUserReply && masterSettings.settings.onUserReply === 'reportBot') {
        // report on the bot's own comment so mods land in the right thread via "context"
        const botComment = await getBotComment(reddit, inboxMessage);
        await botComment.report({ reason: 'Moderator requested - click context for details' });
    } else {
        await inboxMessage.report({ reason: 'Moderator requested' });
    }

    log.info(`[${subredditName}]`, 'User requesting assistance:', inboxMessage.id);
}

// Finds the bot's distinguished (and not removed) comment in the submission the user replied under.
async function getBotComment(reddit, inboxMessage) {
    const comment = reddit.getComment(inboxMessage.id);
    const submission = reddit.getSubmission(sliceSubmissionId(await comment.link_id));
    const comments = await submission.comments;
    return comments.find((comment) => comment.distinguished === 'moderator' && comment.removed != true && comment.author.name === process.env.ACCOUNT_USERNAME);
}

// Handles private messages: accepts mod invites (when ALLOW_INVITES is set) and
// auto-replies to non-modmail messages.
async function processUserPrivateMessage(inboxMessage, subreddit, reddit) {
    updateModdedSubreddits();
    if (inboxMessage.subject.includes('invitation to moderate')) {
        try {
            if (process.env.ALLOW_INVITES) {
                log.info(`[${await subreddit.display_name}]`, 'Accepting mod invite for: ', await subreddit.display_name);
                await subreddit.acceptModeratorInvite();
                if (process.env.MAINTAINER) {
                    await reddit.composeMessage({
                        to: process.env.MAINTAINER,
                        subject: 'New subreddit added',
                        text: `I have been modded to: r/${subreddit.display_name}`,
                    });
                }
            } else {
                log.warn('User attempted mod invite for: ', await subreddit.display_name, ', but ALLOW_INVITES is not set.');
            }
        } catch (e) {
            log.error(`[${await subreddit.display_name}]`, 'Error accepting mod invite: ', inboxMessage.id, e);
        }
        return;
    } else if (inboxMessage.subject.includes('Has Been Removed As A Moderator')) {
        // likely never hit - bad case
        log.info('Removed as moderator from subreddit: ', inboxMessage.subject);
        return;
    }

    if ((await inboxMessage.distinguished) !== 'moderator') {
        // don't spam modmail
        await inboxMessage.reply('I am a robot so I cannot answer your message. Contact the moderators of the subreddit for information.');
        log.info('Processed inbox private message with standard reply:', inboxMessage.id);
    } else {
        log.info('Processed inbox private message - ignored mod thread:', inboxMessage.id);
    }
}

async function printHelp(inboxMessage) {
    const helpMessage = outdent`
    Here are the commands I support as replies in a thread (root submission is the one linked, current submission is from this thread):

    * \`wrong\`: Removes the current submission as a duplicate of the root. (future feature wanted here so that the two submissions won't match again.)
    * \`avoid\`: Only match identical images with the root the future. Helps with root images that keep matching wrong (commonly because they are dark).
    * \`clear\`: Removes all the information I have about the root submission that it the current submission was matched with. For when it doesn't really matter and you want the root to go away.
    `;
    await inboxMessage.reply(helpMessage).distinguish();
}

/**
 * Shared harness for mod commands: resolves the comment/submission, regenerates the image
 * dhash, looks up the matching magic submission, then delegates to commandFunction.
 * Returns true/false for success where determinable.
 */
async function runCommand(inboxMessage, reddit, database, masterSettings, commandFunction) {
    const comment = await reddit.getComment(inboxMessage.id);
    await comment.fetch();
    const submission = await reddit.getSubmission(sliceSubmissionId(await comment.link_id));
    await submission.fetch();

    const imageUrlInfo = await getImageUrl(submission);
    if (!imageUrlInfo) {
        log.warn('Could not download submission to run inbox mod command in submission:', submission.id);
        await inboxMessage.reply("I couldn't do that that... image is deleted, has default thumbnail, or something has gone wrong.").distinguish();
        return false;
    }

    const { imageUrl, submissionType } = imageUrlInfo;

    const imageDetails = await getImageDetails(imageUrl, false);
    if (imageDetails == null || imageDetails.ignore) {
        // fixed: a stray unary `+` previously coerced the permalink to NaN in this log line
        log.warn('Could not download image for clear (probably deleted), imageDetails: ', imageDetails, ', link: ', await submission.permalink);
        await inboxMessage.reply("I couldn't do that that... image is deleted, has default thumbnail, or something has gone wrong.").distinguish();
        return false;
    }

    if (!masterSettings) {
        log.warn('Master settings not provided when attempting to run inbox mod command, https://www.reddit.com', await submission.permalink);
        return false;
    }

    const existingMagicSubmission = await database.getMagicSubmission(imageDetails.dhash, masterSettings.settings.similarityTolerance);
    if (existingMagicSubmission == null) {
        log.info('No magic submission found for clear, ignoring. dhash: ', await submission._id);
        await inboxMessage.reply('No info for this found, so consider it already gone.').distinguish();
        return true; // already cleared
    }

    const success = await commandFunction(submission, existingMagicSubmission, database);
    await inboxMessage.reply(success ? 'Thanks, all done.' : "I couldn't do that that... image deleted or something?").distinguish();
    return success;
}

// `clear`: forget the matched magic submission entirely.
async function command_clearSubmission(submission, existingMagicSubmission, database) {
    log.info(chalk.yellow('Clearing magic submission by: '), await submission.author.name, ', submitted: ', new Date((await submission.created_utc) * 1000));
    await database.deleteMagicSubmission(existingMagicSubmission);
    return true;
}

// `wrong`: detach the current submission from the root's duplicate list.
async function command_removeDuplicate(submission, existingMagicSubmission, database) {
    log.info(chalk.yellow('Starting process for remove duplicate by: '), await submission.author.name, ', submitted: ', new Date((await submission.created_utc) * 1000));
    const duplicateIndex = existingMagicSubmission.duplicates.indexOf(await submission.id);
    existingMagicSubmission.duplicates.splice(duplicateIndex, 1);
    await database.saveMagicSubmission(existingMagicSubmission);
    return true;
}

// `avoid`: only exact dhash matches will hit this root in future.
async function command_setExactMatchOnly(submission, existingMagicSubmission, database) {
    log.info(chalk.yellow('Setting exact match only for submission by: '), await submission.author.name, ', submitted: ', new Date((await submission.created_utc) * 1000));
    existingMagicSubmission.exactMatchOnly = true;
    await database.saveMagicSubmission(existingMagicSubmission);
    return true;
}
require('loglevel');
log.setLevel(process.env.LOG_LEVEL ? process.env.LOG_LEVEL : 'info');
import { reddit } from './reddit';
import { getSubredditSettings } from './master_database_manager';
import { initDatabase } from './database_manager';
import { processInboxMessage } from './inbox_message_processor';

// Self-rescheduling loop: run one inbox cycle, swallow/log any failure, queue the next run.
export async function mainInboxProcessor() {
    const timeoutTimeSeconds = 60;
    try {
        log.debug(chalk.blue("Starting inbox processing cycle"));
        await doInboxProcessing();
    } catch (err) {
        log.error(chalk.red("Inbox loop error: ", err));
    }
    setTimeout(mainInboxProcessor, timeoutTimeSeconds * 1000); // run again in timeoutTimeSeconds
}


// One inbox pass: mark everything unread as read up front, then process each message with
// a per-subreddit database connection (opened and closed per message).
export async function doInboxProcessing() {
    // inbox
    const cycleStart = new Date().getTime();
    try {
        const unreadMessages = await reddit.getUnreadMessages();
        if (!unreadMessages) {
            log.error(chalk.red('Cannot get new inbox items to process - api is probably down for maintenance.'));
            return;
        }

        if (unreadMessages.length > 0) {
            await reddit.markMessagesAsRead(unreadMessages);
        }

        for (const message of unreadMessages) {
            const messageSubreddit = await message.subreddit;
            let database = null;
            let masterSettings = null;
            if (messageSubreddit) {
                const messageSubredditName = await messageSubreddit.display_name;
                masterSettings = await getSubredditSettings(messageSubredditName);
                if (masterSettings) {
                    database = await initDatabase(messageSubredditName, masterSettings.config.databaseUrl, masterSettings.config.expiryDays);
                }
            }

            await processInboxMessage(message, reddit, database, messageSubreddit, masterSettings);

            if (database) {
                await database.closeDatabase();
            }
        }

        const secondsTaken = (new Date().getTime() - cycleStart) / 1000;
        if (unreadMessages.length > 0) {
            log.info(chalk.blue('========= Processed', unreadMessages.length, ' new inbox messages, took: ', secondsTaken));
        }
    } catch (err) {
        log.error(chalk.red("Failed to process inbox: ", err));
    }
}

// ---- src/jobs/keep_alive.ts ----
import {get} from 'https';

// Pings the heroku dyno so the free-tier instance is not put to sleep.
get('https://the-magic-eye.herokuapp.com/keepalive', (resp: any) => {
    let data = '';
    // accumulate the response body chunk by chunk
    resp.on('data', (chunk: any) => {
        data += chunk;
    });
    // whole response received - report the service status
    resp.on('end', () => {
        console.log('keep alive status:', JSON.parse(data).status);
    });
}).on('error', (err: any) => {
    console.log('Error: ' + err.message);
});

// ---- src/master_database_manager.ts ----
require('dotenv').config();
const chalk = require('chalk');
const MongoClient = require('mongodb').MongoClient;
const log = require('loglevel');
log.setLevel(process.env.LOG_LEVEL ?
process.env.LOG_LEVEL : 'info');

// Connection to the master (settings/stats) database, set by initMasterDatabase().
let masterConnection = null;

// In-memory cache of per-subreddit settings documents, keyed by subreddit name.
let subredditSettingsCache: CachedSubredditSettings = {};

type CachedSubredditSettings = {
    // fixed: values are full settings documents (objects with config/settings/version), not strings
    [name: string]: any;
};

// Simple _id/value document for the properties collection.
export class MasterProperty {
    _id;
    value;

    constructor(name, value) {
        this._id = name;
        this.value = value;
    }
}

// One statistics record: what action happened, for which subreddit, how long it took, and when.
export class Stats {
    subredditName;
    action;
    timeTaken;
    date;

    constructor(subredditName, action, timeTaken, date) {
        this.subredditName = subredditName;
        this.action = action;
        this.timeTaken = timeTaken;
        this.date = date;
    }
}

const currentVersion = '2';

// default mod editable settings
export class SubredditSettings {
    _id; // subreddit name
    config; // private config settings
    settings; // default settings
    version;

    constructor(subredditName) {
        this.version = currentVersion;
        this._id = subredditName;

        this.config = {
            firstTimeInit: false,
            databaseUrl: null,
            reportUnmoderatedTime: 0,
        };

        this.settings = {
            processImages: true,
            processAnimatedMedia: true,
            similarityTolerance: 5,
            onUserReply: 'reportBot',
            reposts: {
                smallScore: 0,
                smallScoreRepostDays: 15,
                mediumScore: 400,
                mediumScoreRepostDays: 25,
                largeScore: 10000,
                largeScoreRepostDays: 50,
                topScore: 999999999,
                approveIfOverRepostDays: false,
                reflairApprovedReposts: false,
                actionRepostsIfDeleted: false,
                action: 'remove',
            },
            removeBlacklisted: {},
            removeBrokenImages: {},
        };
    }
}

// True when a settings document was written by an older schema version.
export function needsUpgrade(masterSettings) {
    return masterSettings.version != currentVersion;
}

// Prefixes collection names by environment so dev and prod can share one database.
export function getCollectionName(collection) {
    const collectionPrefix = process.env.NODE_ENV == 'production' ? '' : process.env.NODE_ENV + ':';
    return collectionPrefix + collection;
}

export async function getSubredditSettingsCollection() {
    return masterConnection.collection(getCollectionName('subreddit-settings'));
}

export async function getPropertyCollection() {
    return masterConnection.collection(getCollectionName('properties'));
}

export async function getStatsCollection() {
    return masterConnection.collection(getCollectionName('stats'));
}

// Persists one Stats record; failures are logged and swallowed (stats are best-effort).
export async function addSubredditStat(statistic) {
    try {
        const collection = await getStatsCollection();
        // NOTE(review): collection.save() is deprecated in modern mongodb drivers - migrate to
        // insertOne/replaceOne when the driver is upgraded.
        await collection.save(statistic);
    } catch (err) {
        log.error(chalk.red('MongoDb error adding subreddit statistic (full database?):'), err);
        return null;
    }
}

// Returns all stats records for one action name, or null on database error.
export async function getSubredditStat(actionName) {
    try {
        const collection = await getStatsCollection();
        return await collection.find({ action: actionName }).toArray();
    } catch (err) {
        log.error(chalk.red('MongoDb error getting subreddit statistic:'), err);
        return null;
    }
}

// Writes a subreddit's settings document and refreshes the in-memory cache.
export async function setSubredditSettings(subredditName, settings) {
    try {
        const collection = await getSubredditSettingsCollection();
        await collection.save(settings);
        subredditSettingsCache[subredditName] = settings;
    } catch (err) {
        log.error(chalk.red('MongoDb error setting sub settings:'), subredditName, err);
        return null;
    }
}

// Reads a subreddit's settings document, preferring the in-memory cache; null if absent or on error.
export async function getSubredditSettings(subredditName) {
    try {
        if (subredditSettingsCache[subredditName]) {
            return subredditSettingsCache[subredditName];
        }
        const collection = await getSubredditSettingsCollection();
        const property = await collection.findOne({ _id: subredditName });
        if (property != null) {
            subredditSettingsCache[subredditName] = property;
            return property;
        }
    } catch (err) {
        log.error(chalk.red('MongoDb error getting sub settings:'), subredditName, err);
    }
    return null;
}

export async function setMasterProperty(key, value) {
    try {
        const collection = await getPropertyCollection();
        const newMasterProp = new MasterProperty(key, value);
        await collection.save(newMasterProp);
    } catch (err) {
        log.error(chalk.red('MongoDb error set property:'), key, err);
        return null;
    }
}

export async function getMasterProperty(key) {
    try {
        const collection = await getPropertyCollection();
        const property = await collection.findOne({ _id: key });
        if (property != null) {
            return property.value;
        }
    } catch (err) {
        log.error(chalk.red('MongoDb error getting master property:'), err);
    }
    return null;
}

// Opens the master database connection (MONGODB_URI); returns true on success, null on failure.
export async function initMasterDatabase() {
    log.info(chalk.blue('Connecting to master database...'));
    try {
        const client = await MongoClient.connect(process.env.MONGODB_URI, { useNewUrlParser: true });
        masterConnection = await client.db();
    } catch (err) {
        log.error(chalk.red('Fatal MongoDb connection error for master database:'), err);
        return null;
    }
    return true;
}

// Registers any urls from EXTERNAL_DATABASES that are not yet in the 'databases' master property.
export async function refreshAvailableDatabases() {
    try {
        if (!process.env.EXTERNAL_DATABASES) return;

        const masterDatabaseUrls = process.env.EXTERNAL_DATABASES.split(',');
        let databaseList = await getMasterProperty('databases');
        if (!databaseList) {
            log.info('First time external database config...');
            databaseList = {};
        }

        for (const masterDatabaseUrl of masterDatabaseUrls) {
            if (!databaseList[masterDatabaseUrl]) {
                log.info('Adding new database url: ', masterDatabaseUrl);
                databaseList[masterDatabaseUrl] = {
                    url: masterDatabaseUrl,
                    count: 0,
                };
                await setMasterProperty('databases', databaseList);
            }
        }
    } catch (err) {
        log.error(chalk.red('Error: could not refresh database list'), err);
        return null;
    }
}

// Bumps every out-of-date settings document to the current schema version. Each subreddit is
// upgraded after a random delay (up to ~500s) to stagger the writes.
export async function upgradeMasterDatabase() {
    try {
        log.info(`[UPGRADE]`, 'START UPGRADING MASTER DATABASE');
        const collection = await getSubredditSettingsCollection();
        const subredditSettings = await collection.find().toArray();
        for (const masterSettings of subredditSettings) {
            if (needsUpgrade(masterSettings)) {
                const secondsRun = Math.floor(Math.random() * Math.floor(500));
                const doForSub = async () => {
                    masterSettings.version = currentVersion; // bump the version
                    // do upgrade here
                    log.info(`[UPGRADE]`, 'Upgrading', masterSettings._id, ' - updated:');
                    await setSubredditSettings(masterSettings._id, masterSettings);
                };
                setTimeout(doForSub, secondsRun * 1000);
            } else {
                log.info(`[UPGRADE]`, 'NO UPGRADE REQUIRED', masterSettings._id, ' - :');
            }
        }
    } catch (err) {
        log.info(`[UPGRADE]: ERROR: `, err);
    }
}

// ---- src/master_stats.ts ----
const chalk = require('chalk');
const log = require('loglevel');
const outdent = require('outdent');
const moment = require('moment');
log.setLevel(process.env.LOG_LEVEL ?
process.env.LOG_LEVEL : 'info');

import { Stats, addSubredditStat, getSubredditStat } from './master_database_manager';

// Shared implementation for every logX helper below: no-op unless LOG_STATS is set,
// otherwise persist one dated Stats record.
async function logStat(subredditName, action, timeTaken) {
    if (!process.env.LOG_STATS) return;

    const statistic = new Stats(subredditName, action, timeTaken, getDateString());
    await addSubredditStat(statistic);
}

export async function logActionRepost(subredditName, timeTaken) {
    await logStat(subredditName, 'action-repost', timeTaken);
}

export async function logActionBlacklisted(subredditName, timeTaken) {
    await logStat(subredditName, 'action-blacklisted', timeTaken);
}

export async function logApproval(subredditName, timeTaken) {
    await logStat(subredditName, 'approve', timeTaken);
}

export async function logRepostDetected(subredditName) {
    await logStat(subredditName, 'repost-detected', null);
}

export async function logDetectText(timeTaken) {
    await logStat('global', 'detect-text', timeTaken);
}

export async function logRemoveBroken(subredditName, timeTaken) {
    await logStat(subredditName, 'action-broken', timeTaken);
}

export async function logRemoveUncropped(subredditName, timeTaken) {
    await logStat(subredditName, 'action-uncropped', timeTaken);
}

export async function logRemoveText(subredditName, timeTaken) {
    await logStat(subredditName, 'action-text', timeTaken);
}

export async function logRemoveSmall(subredditName, timeTaken) {
    await logStat(subredditName, 'action-small', timeTaken);
}

export async function logProcessPost(subredditName, timeTaken) {
    await logStat(subredditName, 'process-post', timeTaken);
}

export async function logProcessCycle(timeTaken) {
    await logStat('global', 'process-cycle', timeTaken);
}

// Returns today's date as 'YYYY-M-D' (month/day are not zero padded).
export function getDateString() {
    const date = new Date();
    const day = date.getDate();
    const month = date.getMonth() + 1;
    const year = date.getFullYear();
    return year + '-' + month + '-' + day;
}

// Dumps an aggregate stats report to the log.
export async function printStats() {
    log.info('Retrieving stats...');

    const startDate = moment('25/04/2019', 'DD/MM/YYYY');
    const endDate = moment();
    const daysSince = endDate.diff(startDate, 'days');

    // fixed: reduce() with no initial value throws a TypeError on an empty stats array
    const sumTimeTaken = (stats) => stats.reduce((total, stat) => total + stat.timeTaken, 0);

    try {
        const repostsDetected = await getSubredditStat('repost-detected');
        const reposts = await getSubredditStat('action-repost');
        const small = await getSubredditStat('action-small');
        const text = await getSubredditStat('action-text');
        const uncropped = await getSubredditStat('action-uncropped');
        const broken = await getSubredditStat('action-broken');
        const approve = await getSubredditStat('approve');
        const blacklisted = await getSubredditStat('action-blacklisted');
        const detectText = await getSubredditStat('detect-text');
        const processPost = await getSubredditStat('process-post');
        const processCycle = await getSubredditStat('process-cycle');
        log.info('===========================');
        log.info('          STATS');
        log.info('===========================');
        log.info('Reposts: ');
        log.info(`* Detected: ${repostsDetected.length} (${repostsDetected.length/daysSince} per day)`);
        log.info(' ');
        log.info('Removals for: ');
        log.info(`* Reposts: ${reposts.length} (${Math.ceil(reposts.length/daysSince)} per day)`);
        log.info(`* Small: ${small.length} (${Math.ceil(small.length/daysSince)} per day)`);
        log.info(`* Text: ${text.length} (${Math.ceil(text.length/daysSince)} per day)`);
        log.info(`* Uncropped: ${uncropped.length} (${Math.ceil(uncropped.length/daysSince)} per day)`);
        log.info(`* Broken: ${broken.length} (${Math.ceil(broken.length/daysSince)} per day)`);
        log.info(`* Approved: ${approve.length} (${Math.ceil(approve.length/daysSince)} per day)`);
        log.info(`* Blacklisted: ${blacklisted.length} (${Math.ceil(blacklisted.length/daysSince)} per day)`);
        log.info(' ');
        log.info('Average time to:');
        if (detectText.length) {
            log.info('* Detect text: ', (sumTimeTaken(detectText) / detectText.length).toFixed(1));
        }
        if (processPost.length) {
            log.info('* Process post: ', (sumTimeTaken(processPost) / processPost.length).toFixed(1));
        }
        if (processCycle.length) {
            log.info('* Process cycle: ', (sumTimeTaken(processCycle) / processCycle.length).toFixed(1));
        }
        log.info('===========================');
    } catch (e) {
        log.error("Error printing stats", e);
    }

}


// ---- src/modded_subreddits.ts ----
import { reddit } from './reddit';

// standard server modules
const chalk = require('chalk');
require('dotenv').config();
const log = require('loglevel');
log.setLevel(process.env.LOG_LEVEL ? process.env.LOG_LEVEL : 'info');

let moddedSubsCache = null;

// returns concat string for multi, "meow_irl+hmmm+aww"
export async function getModdedSubredditsMulti() {
    if (moddedSubsCache) {
        return moddedSubsCache;
    }

    console.log('Refreshing modded subreddits');
    moddedSubsCache = await getModdedSubredditsRecursive(reddit, null);
    console.log('Modded subreddits loaded successfully, subreddits:', moddedSubsCache.length);
    return moddedSubsCache;
}

// Pages through /subreddits/mine/moderator, 25 subreddits at a time.
async function getModdedSubredditsRecursive(reddit, after) {
    try {
        const moddedSubsUrl = '/subreddits/mine/moderator.json' + (after ?
`?after=${after}` : ''); 26 | const moddedSubsData = await reddit.oauthRequest({ uri: moddedSubsUrl, method: 'get' }); 27 | 28 | if (!moddedSubsData) { 29 | log.error(chalk.red('Could not request modded subreddits from reddit')); 30 | return []; 31 | } 32 | 33 | if (moddedSubsData.length == 0) { 34 | return []; 35 | } 36 | 37 | let moddedSubs = moddedSubsData.map(moddedSub => moddedSub.display_name); 38 | if (moddedSubs.length == 25) { 39 | // pagination, get more 40 | const newAfter = moddedSubsData[moddedSubsData.length - 1].name; 41 | return moddedSubs.concat(await getModdedSubredditsRecursive(reddit, newAfter)); 42 | } else { 43 | return moddedSubs; 44 | } 45 | } catch (e) { 46 | log.error(chalk.red('Error accessing modded subreddits'), e); 47 | return []; 48 | } 49 | } 50 | 51 | export function updateModdedSubreddits() { 52 | moddedSubsCache = null; 53 | } 54 | -------------------------------------------------------------------------------- /src/processing_modules/submission_modules/image/existing_submission/allowRepostOnlyByUser.ts: -------------------------------------------------------------------------------- 1 | // standard modules 2 | require('dotenv').config(); 3 | const chalk = require('chalk'); 4 | const log = require('loglevel'); 5 | log.setLevel(process.env.LOG_LEVEL ? 
process.env.LOG_LEVEL : 'info');

// magic eye modules
import { printSubmission, isRepostOnlyByUserRemoval } from '../../../../reddit_utils';
import { MagicSubmission, updateMagicSubmission } from '../../../../database_manager';

//=====================================

/**
 * Existing-submission module: lets the original author (and only them) repost an image that a
 * mod removed with a "repost only by user" message (e.g. told to resubmit a cropped version).
 *
 * Returns true to let the remaining modules keep processing, false when the repost is
 * accepted here (the magic submission is updated to point at the new post).
 */
export async function allowRepostsOnlyByUser(reddit, modComment, submission, lastSubmission, existingMagicSubmission: MagicSubmission, subSettings, subredditName, submissionType) {
    if (!subSettings.removeBlacklisted) { // rely on blacklisted instead
        return true;
    }

    const lastIsRepostOnlyByUser = await isRepostOnlyByUserRemoval(modComment); // mod has told them to resubmit an altered/cropped version
    // strict equality + explicit parens around the awaits (previously loose ==)
    const lastSubmissionDeleted = (await lastSubmission.author.name) === '[deleted]';
    const sameUserForBothSubmissions = lastSubmissionDeleted || (await lastSubmission.author.name) === (await submission.author.name);

    if (lastIsRepostOnlyByUser && sameUserForBothSubmissions) {
        log.info(`[${subredditName}]`, 'Found matching hash for submission', await printSubmission(submission, submissionType), ', but ignoring as special user only repost of submission: http://redd.it/', existingMagicSubmission.reddit_id);
        await updateMagicSubmission(existingMagicSubmission, submission);
        return false;
    }

    return true;
}

// ---- src/processing_modules/submission_modules/image/existing_submission/removeBlacklisted.ts ----
// standard modules
require('dotenv').config();
const outdent = require('outdent');
const chalk = require('chalk');
const log = require('loglevel');
log.setLevel(process.env.LOG_LEVEL ?
process.env.LOG_LEVEL : 'info'); 7 | 8 | // magic eye modules 9 | import { isRepostRemoval, removePost, printSubmission } from '../../../../reddit_utils'; 10 | import { logActionBlacklisted } from '../../../../master_stats'; 11 | import { updateMagicSubmission } from '../../../../database_manager'; 12 | 13 | //===================================== 14 | 15 | export async function removeBlacklisted(reddit, modComment, submission, lastSubmission, existingMagicSubmission, subSettings, subredditName, submissionType) { 16 | if (!subSettings.removeBlacklisted) { 17 | return true; 18 | } 19 | 20 | // We missed detecting a valid repost so a mod manually removed it. That submission is reposted but we don't know the approved submission. 21 | const lastIsRemovedAsRepost = await isRepostRemoval(modComment); 22 | 23 | const imageIsBlacklisted = ((await lastSubmission.removed) || (await lastSubmission.spam)) && !lastIsRemovedAsRepost; 24 | if (imageIsBlacklisted) { 25 | const removalReason = await getRemovalReason(modComment, subredditName); 26 | if (removalReason == null) { 27 | log.info( 28 | `[${subredditName}]`, 29 | chalk.red( 30 | "Ignoring submission because couldn't read the last removal message. 
Submission: ", 31 | await printSubmission(submission, submissionType), 32 | ', removal message thread: http://redd.it/' + existingMagicSubmission.reddit_id 33 | ) 34 | ); 35 | await updateMagicSubmission(existingMagicSubmission, submission); 36 | await logModcomment(reddit, await lastSubmission.id, subredditName); 37 | } else { 38 | removeAsBlacklisted(reddit, submission, lastSubmission, removalReason, subSettings, subredditName, submissionType); 39 | } 40 | 41 | return false; 42 | } 43 | 44 | return true; 45 | } 46 | 47 | async function removeAsBlacklisted(reddit, submission, lastSubmission, blacklistReason, subSettings, subredditName, submissionType) { 48 | log.info( 49 | `[${subredditName}]`, 50 | 'Removing as blacklisted:', 51 | await printSubmission(submission, submissionType), 52 | '. Origin: ', 53 | await printSubmission(lastSubmission, submissionType) 54 | ); 55 | 56 | // get removal text 57 | let removalReason = ''; 58 | if (subSettings.removeBlacklisted.fullRemovalMessage) { 59 | removalReason = await createFullCustomRemovalMessage(subSettings, lastSubmission, blacklistReason); 60 | } else { 61 | removalReason = await createRemovalMessage(lastSubmission, blacklistReason); 62 | } 63 | 64 | const silentRemoval = subSettings.removeBlacklisted.action && subSettings.removeBlacklisted.action.includes('silent'); 65 | 66 | removePost(submission, removalReason, subSettings, reddit, silentRemoval); 67 | logActionBlacklisted(subredditName, null); 68 | } 69 | 70 | async function createRemovalMessage(lastSubmission, blacklistReason) { 71 | const permalink = 'https://www.reddit.com' + (await lastSubmission.permalink); 72 | const removalReason = outdent`This post has been automatically removed because it is a repost of [this image](${await lastSubmission.url}) posted [here](${permalink}), and that post was removed because: 73 | 74 | ${blacklistReason}`; 75 | return removalReason; 76 | } 77 | 78 | async function createFullCustomRemovalMessage(subSettings, lastSubmission, 
blacklistReason) { 79 | const permalink = 'https://www.reddit.com' + (await lastSubmission.permalink); 80 | let removalText = subSettings.removeBlacklisted.fullRemovalMessage; 81 | removalText = removalText.split('{{last_submission_link}}').join(permalink); 82 | removalText = removalText.split('{{last_submission_url}}').join(await lastSubmission.url); 83 | removalText = removalText.split('{{blacklist_reason}}').join(blacklistReason); 84 | return removalText; 85 | } 86 | 87 | async function getRemovalReason(modComment, subredditName) { 88 | const body = await modComment.body; 89 | const startRemoval = '[](#start_removal)'; 90 | const endRemoval = '[](#end_removal)'; 91 | 92 | if (!body.includes(startRemoval) || !body.includes(endRemoval)) { 93 | log.info(chalk.magenta("Moderator comment doesn't include correct bookend tags", `[${subredditName}]`)); 94 | return null; 95 | } 96 | 97 | return body.substring(body.indexOf(startRemoval) + startRemoval.length, body.lastIndexOf(endRemoval)); 98 | } 99 | 100 | async function logModcomment(reddit, submissionId, subredditName) { 101 | log.info(`[${subredditName}]`, chalk.red('TEMP LOGGING TO DEBUG AUTOMOD AUTHOR: ', submissionId)); 102 | const submission = reddit.getSubmission(submissionId); 103 | const comments = await submission.comments; 104 | log.info(`[${subredditName}]`, JSON.stringify(comments)); 105 | } 106 | -------------------------------------------------------------------------------- /src/processing_modules/submission_modules/image/existing_submission/removeReposts.ts: -------------------------------------------------------------------------------- 1 | // standard modules 2 | require('dotenv').config(); 3 | const moment = require('moment'); 4 | const chalk = require('chalk'); 5 | const log = require('loglevel'); 6 | const outdent = require('outdent'); 7 | log.setLevel(process.env.LOG_LEVEL ? 
process.env.LOG_LEVEL : 'info'); 8 | const TimeAgo = require('javascript-time-ago'); 9 | const en_locale = require('javascript-time-ago/locale/en'); 10 | 11 | TimeAgo.addLocale(en_locale); 12 | const timeAgo = new TimeAgo('en'); 13 | 14 | // magic eye modules 15 | import { isRepostRemoval, removePost, printSubmission } from '../../../../reddit_utils'; 16 | import { logActionRepost } from '../../../../master_stats'; 17 | import { updateMagicSubmission } from '../../../../database_manager'; 18 | 19 | //===================================== 20 | 21 | export async function removeReposts(reddit, modComment, submission, lastSubmission, existingMagicSubmission, subSettings, subredditName, submissionType) { 22 | if (!subSettings.reposts) { 23 | return true; 24 | } 25 | 26 | if (existingMagicSubmission.reddit_id == (await submission.id)) { 27 | log.error(`[${subredditName}]`, 'Asked to remove repost of itself - ignoring:', await printSubmission(submission)); 28 | return true; 29 | } 30 | 31 | const lastAuthor = existingMagicSubmission.author ? 
existingMagicSubmission.author : await lastSubmission.author.name; 32 | const processorSettings = subSettings.reposts; 33 | const lastSubmissionDeleted = (await lastSubmission.author.name) === '[deleted]'; 34 | 35 | // ignore deleted 36 | if (lastSubmissionDeleted && !processorSettings.actionRepostsIfDeleted) { 37 | log.info( 38 | `[${subredditName}]`, 39 | 'Found matching hash for submission', 40 | await printSubmission(submission), 41 | ', but approving as the last submission was deleted: http://redd.it/' + existingMagicSubmission.reddit_id 42 | ); 43 | existingMagicSubmission.approve = true; 44 | await updateMagicSubmission(existingMagicSubmission, submission); 45 | if (processorSettings.approveIfRepostDeleted === true) { 46 | submission.approve(); 47 | } 48 | return false; 49 | } 50 | 51 | // Last submission was removed by AutoModerator and we somehow saw it - ignore 52 | const modWhoRemoved = await lastSubmission.banned_by; 53 | if (!!modWhoRemoved && (modWhoRemoved === 'AutoModerator' || modWhoRemoved.name === 'AutoModerator')) { 54 | log.info( 55 | `[${subredditName}]`, 56 | 'Found last submission removed by AutoModerator, ignoring ', 57 | await printSubmission(submission), 58 | ', matched,', 59 | existingMagicSubmission.reddit_id 60 | ); 61 | await updateMagicSubmission(existingMagicSubmission, submission); 62 | return false; 63 | } 64 | 65 | // all time top posts 66 | const topRepost = existingMagicSubmission.highest_score > +processorSettings.topScore; 67 | if (topRepost) { 68 | actionAsRepost(submission, lastSubmission, false, false, subSettings, subredditName, submissionType, true, reddit, lastAuthor); 69 | return false; 70 | } 71 | 72 | // recent reposts 73 | const lastIsRemovedAsRepost = await isRepostRemoval(modComment); 74 | const recentRepost = await isRecentRepost(submission, lastSubmission, processorSettings); 75 | if (recentRepost) { 76 | actionAsRepost( 77 | submission, 78 | lastSubmission, 79 | lastIsRemovedAsRepost, 80 | lastSubmissionDeleted 
&& processorSettings.actionRepostsIfDeleted, 81 | subSettings, 82 | subredditName, 83 | submissionType, 84 | false, 85 | reddit, 86 | lastAuthor 87 | ); 88 | return false; 89 | } 90 | 91 | // over the repost limit 92 | const lastSubmissionRemoved = (await lastSubmission.removed) || (await lastSubmission.spam); 93 | if (!lastSubmissionRemoved || lastIsRemovedAsRepost) { 94 | if (processorSettings.approveIfOverRepostDays === true) { 95 | submission.approve(); 96 | } 97 | if (processorSettings.reflairApprovedReposts === true) { 98 | submission.assignFlair({ text: await lastSubmission.link_flair_text }); // reflair with same flair 99 | } 100 | } 101 | 102 | log.info( 103 | `[${subredditName}]`, 104 | 'Found matching hash for ', 105 | await printSubmission(submission), 106 | ', matched,', 107 | existingMagicSubmission.reddit_id, 108 | ' - valid as over the repost limit.' 109 | ); 110 | await updateMagicSubmission(existingMagicSubmission, submission); 111 | return true; 112 | } 113 | 114 | async function isRecentRepost(currentSubmission, lastSubmission, processorSettings) { 115 | if (processorSettings.actionAll === true) { 116 | return true; 117 | } 118 | 119 | const currentDate = moment((await currentSubmission.created_utc) * 1000); 120 | const lastPosted = moment((await lastSubmission.created_utc) * 1000); 121 | 122 | const lastScore = await lastSubmission.score; 123 | let daysLimit = +processorSettings.smallScoreRepostDays; 124 | 125 | if (lastScore > +processorSettings.largeScore) { 126 | daysLimit = processorSettings.largeScoreRepostDays; 127 | } else if (lastScore > +processorSettings.mediumScore) { 128 | daysLimit = processorSettings.mediumScoreRepostDays; 129 | } else if (lastScore < +processorSettings.smallScore) { 130 | return false; 131 | } 132 | 133 | const daysSincePosted = currentDate.diff(lastPosted, 'days'); 134 | return daysSincePosted < daysLimit; 135 | } 136 | 137 | async function actionAsRepost( 138 | submission, 139 | lastSubmission, 140 | 
noOriginalSubmission, 141 | warnAboutDeletedReposts, 142 | subSettings, 143 | subredditName, 144 | submissionType, 145 | allTimeTopRemoval, 146 | reddit, 147 | lastAuthor 148 | ) { 149 | log.info( 150 | `[${subredditName}]`, 151 | 'Found matching hash for submission: ', 152 | await printSubmission(submission), 153 | `, actioning [${subSettings.reposts.action}] as ${allTimeTopRemoval ? 'all time top' : 'recent'} repost of:`, 154 | await lastSubmission.id, 155 | `[${submissionType}]` 156 | ); 157 | 158 | if (!subSettings.reposts.action) { 159 | log.error(`[${subredditName}]`, 'Missing repost action - taking no action'); 160 | return; 161 | } 162 | 163 | if (subSettings.reposts.action.includes('remove')) { 164 | await removeAsRepost( 165 | submission, 166 | lastSubmission, 167 | noOriginalSubmission, 168 | warnAboutDeletedReposts, 169 | subSettings, 170 | subredditName, 171 | submissionType, 172 | allTimeTopRemoval, 173 | reddit, 174 | lastAuthor 175 | ); 176 | } else if (subSettings.reposts.action.includes('warnByModmail')) { 177 | await warnByModmailAsRepost(submission, lastSubmission, subredditName, reddit); 178 | } else if (subSettings.reposts.action.includes('warn')) { 179 | await warnAsRepost(submission, lastSubmission); 180 | } else if (subSettings.reposts.action.includes('silent')) { 181 | await removePost(submission, '', subSettings, reddit, true); 182 | } else { 183 | log.error(`[${subredditName}]`, 'Unknown action', subSettings.reposts.action); 184 | } 185 | 186 | logActionRepost(subredditName, null); 187 | } 188 | 189 | async function warnAsRepost(submission, lastSubmission) { 190 | const permalink = 'https://www.reddit.com' + (await lastSubmission.permalink); 191 | let message = outdent` 192 | Detected repost of: 193 | 194 | * [Click here to see the submission](${permalink}) 195 | * [Direct image link](${await lastSubmission.url})`; 196 | 197 | try { 198 | const replyable = await submission.reply(message); 199 | await replyable.remove(); 200 | await 
replyable.distinguish(); 201 | await submission.report({ reason: 'Repost detected: ' + 'http://redd.it/' + (await lastSubmission.id) }); 202 | } catch (e) { 203 | log.error('Tried to warn as repost but failed: ', printSubmission(submission), e); 204 | } 205 | } 206 | 207 | async function warnByModmailAsRepost(submission, lastSubmission, subredditName: string, reddit) { 208 | const submissionPermalink = 'https://www.reddit.com' + (await submission.permalink); 209 | const originalPermalink = 'https://www.reddit.com' + (await lastSubmission.permalink); 210 | let message = outdent` 211 | Detected repost: 212 | 213 | * [Repost thread](${submissionPermalink}) 214 | * [Direct image link](${await submission.url}) 215 | * ---------------- 216 | * [Original thread](${originalPermalink}) 217 | * [Direct image link](${await lastSubmission.url})`; 218 | 219 | try { 220 | await reddit.composeMessage({ 221 | to: await `/r/${subredditName}`, 222 | subject: `Repost detected`, 223 | text: message, 224 | }); 225 | } catch (e) { 226 | log.error('Tried to warn by modmail as repost but failed: ', printSubmission(submission), e); 227 | } 228 | } 229 | 230 | async function removeAsRepost( 231 | submission, 232 | lastSubmission, 233 | noOriginalSubmission, 234 | warnAboutDeletedReposts, 235 | subSettings, 236 | subredditName, 237 | submissionType, 238 | allTimeTopRemoval, 239 | reddit, 240 | lastAuthor 241 | ) { 242 | if (submission.id == (await lastSubmission.id)) { 243 | log.error(`[${subredditName}]`, chalk.red('Duplicate detection error, ignoring but this indicates a real issue.', `[${submissionType}]`)); 244 | return; 245 | } 246 | 247 | const author = await submission.author.name; 248 | 249 | // get removal text 250 | let removalReason = ''; 251 | if (subSettings.reposts.sameAuthorRemovalMessage && author === lastAuthor) { 252 | removalReason = await replacePlaceholders(subSettings, lastSubmission, lastAuthor, submission, subSettings.reposts.sameAuthorRemovalMessage); 253 | } else if 
(subSettings.reposts.fullRemovalMessage) { 254 | removalReason = await replacePlaceholders(subSettings, lastSubmission, lastAuthor, submission, subSettings.reposts.fullRemovalMessage); 255 | } else { 256 | removalReason = await createStandardRemovalMessage(lastSubmission, noOriginalSubmission, warnAboutDeletedReposts, subSettings, allTimeTopRemoval, lastAuthor, submission); 257 | } 258 | 259 | await removePost(submission, removalReason, subSettings, reddit); 260 | } 261 | 262 | async function replacePlaceholders(subSettings, lastSubmission, lastAuthor, submission, inputRemovalText) { 263 | const permalink = 'https://www.reddit.com' + (await lastSubmission.permalink); 264 | let removalText = inputRemovalText; 265 | removalText = removalText.split('{{last_submission_link}}').join(permalink); 266 | removalText = removalText.split('{{last_submission_url}}').join(await lastSubmission.url); 267 | removalText = removalText.split('{{time_ago}}').join(await getTimeAgoString(lastSubmission)); 268 | removalText = removalText.split('{{last_author}}').join(lastAuthor); 269 | removalText = removalText.split('{{author}}').join(await submission.author.name); 270 | 271 | return removalText; 272 | } 273 | 274 | async function createStandardRemovalMessage(lastSubmission, noOriginalSubmission, warnAboutDeletedReposts, subSettings, allTimeTopRemoval, lastAuthor, submission) { 275 | let headerText; 276 | if (allTimeTopRemoval) { 277 | headerText = subSettings.reposts.allTimeTopRemovalMessage 278 | ? subSettings.reposts.allTimeTopRemovalMessage 279 | : 'Good post but unfortunately it has been removed because it is one of this subreddits all time top posts:'; 280 | } else { 281 | headerText = subSettings.reposts.removalMessage 282 | ? 
subSettings.reposts.removalMessage 283 | : 'Good post but unfortunately it has been removed because it has already been posted recently:'; 284 | } 285 | 286 | const permalink = 'https://www.reddit.com' + (await lastSubmission.permalink); 287 | const manualRepostWarning = noOriginalSubmission ? 'That submission was also removed by a moderator as a repost, so it has been posted by another user recently' : ''; 288 | const noDeletedRepostsWarning = warnAboutDeletedReposts ? '**Note:** Users may not delete and resubmit images without a good reason' : ''; 289 | 290 | let removalText = outdent` 291 | ${headerText} 292 | 293 | * [Submission link (posted ${await getTimeAgoString(lastSubmission)})](${permalink}) 294 | * [Direct image link](${await lastSubmission.url}) 295 | 296 | ${manualRepostWarning} 297 | ${noDeletedRepostsWarning} 298 | `; 299 | 300 | return await replacePlaceholders(subSettings, lastSubmission, lastAuthor, submission, removalText); 301 | } 302 | 303 | async function getTimeAgoString(submission) { 304 | const postedDate = new Date((await submission.created_utc) * 1000); 305 | return timeAgo.format(postedDate); 306 | } 307 | -------------------------------------------------------------------------------- /src/processing_modules/submission_modules/image/precheck/removeImagesWithText.ts: -------------------------------------------------------------------------------- 1 | // standard modules 2 | require('dotenv').config(); 3 | const outdent = require('outdent'); 4 | const chalk = require('chalk'); 5 | const log = require('loglevel'); 6 | log.setLevel(process.env.LOG_LEVEL ? 
process.env.LOG_LEVEL : 'info'); 7 | 8 | // magic eye modules 9 | import { removePost, printSubmission } from '../../../../reddit_utils'; 10 | import { logRemoveText } from '../../../../master_stats'; 11 | 12 | //===================================== 13 | 14 | export async function removeImagesWithText(reddit, submission, imageDetails, subSettings, subredditName, submissionType) { 15 | if (!subSettings.removeImagesWithText_hidden || submissionType !== 'image') { 16 | return true; 17 | } 18 | 19 | const blacklistedWords = subSettings.removeImagesWithText_hidden.blacklistedWords; 20 | if (blacklistedWords) { 21 | const containsBlacklistedWord = imageDetails.words.some(word => blacklistedWords.includes(word)); 22 | if (containsBlacklistedWord) { 23 | const removalReason = subSettings.removeImagesWithText_hidden.message ? subSettings.removeImagesWithText_hidden.message : `This image has been removed because it contains banned text. Detected words:` + imageDetails.words; 24 | await action(submission, removalReason, subSettings, reddit, subredditName); 25 | return false; 26 | } 27 | } else { 28 | // remove all text, above 2 words since 2 can be yeild false positives 29 | if (imageDetails.words.length > 2) { 30 | log.info(`[${subredditName}]`, "Text detected, removing - actioning submission: ", await printSubmission(submission)); 31 | const removalReasonMessage = subSettings.removeImagesWithText_hidden.message ? 
subSettings.removeImagesWithText_hidden.message : ''; 32 | const removalReason = `This image has been removed because text was automatically detected in it: \n\n>` + imageDetails.words + `\n\n` + removalReasonMessage; 33 | await action(submission, removalReason, subSettings, reddit, subredditName); 34 | return false; 35 | } 36 | } 37 | 38 | return true; // continue 39 | } 40 | 41 | 42 | async function action(submission, removalReason, subSettings, reddit, subredditName){ 43 | if (subSettings.removeImagesWithText_hidden.action === 'warn') { 44 | await submission.report({'reason': 'Blacklisted text detected'}); 45 | } else { 46 | removePost(submission, removalReason, subSettings, reddit); 47 | } 48 | 49 | logRemoveText(subredditName, null); 50 | } 51 | -------------------------------------------------------------------------------- /src/processing_modules/submission_modules/image/precheck/removeSmallImages.ts: -------------------------------------------------------------------------------- 1 | // standard modules 2 | require('dotenv').config(); 3 | const outdent = require('outdent'); 4 | const chalk = require('chalk'); 5 | const log = require('loglevel'); 6 | log.setLevel(process.env.LOG_LEVEL ? 
process.env.LOG_LEVEL : 'info'); 7 | 8 | // magic eye modules 9 | import { removePost, printSubmission } from '../../../../reddit_utils'; 10 | import { logRemoveSmall } from '../../../../master_stats'; 11 | //===================================== 12 | 13 | // 330px https://i.imgur.com/7jTFozp.png 14 | 15 | export async function removeSmallImages(reddit, submission, imageDetails, subSettings, subredditName, submissionType) { 16 | if (!subSettings.removeSmallImages || submissionType !== 'image') { 17 | return true; 18 | } 19 | 20 | const smallDimension = subSettings.removeSmallImages.smallDimension; 21 | const widthMinimum = subSettings.removeSmallImages.widthMinimum; 22 | const heightMinimum = subSettings.removeSmallImages.heightMinimum; 23 | 24 | if (isImageTooSmall(imageDetails, smallDimension, widthMinimum, heightMinimum)) { 25 | log.info(`[${subredditName}]`, 'Image is too small, removing - removing submission: ', await printSubmission(submission)); 26 | 27 | let removalReason = ''; 28 | if (subSettings.removeSmallImages.fullRemovalMessage) { 29 | removalReason = subSettings.removeSmallImages.fullRemovalMessage; 30 | if (!!smallDimension) removalReason = removalReason.split('{{dimension}}').join(smallDimension).split('{{smallDimension}}').join(smallDimension); 31 | if (!!widthMinimum) removalReason = removalReason.split('{{widthMinimum}}').join(widthMinimum); 32 | if (!!heightMinimum) removalReason = removalReason.split('{{heightMinimum}}').join(heightMinimum); 33 | } else { 34 | const messageBase = `Your image has been removed because it is too small. 
Image submissions to this subreddit must be larger than`; 35 | if (!!smallDimension) removalReason = `${messageBase} ${smallDimension}px*${smallDimension}px.`; 36 | if (!!widthMinimum) removalReason = `${messageBase} ${widthMinimum}px wide.`; 37 | if (!!heightMinimum) removalReason = `${messageBase} ${heightMinimum}px tall.`; // fix: previously interpolated widthMinimum and said "wide" for the height-only rule 38 | if (!!widthMinimum && !!heightMinimum) removalReason = `${messageBase} ${widthMinimum}px*${heightMinimum}px.`; 39 | } 40 | 41 | removePost(submission, removalReason, subSettings, reddit); 42 | logRemoveSmall(subredditName, null); 43 | return false; 44 | } 45 | 46 | return true; 47 | } 48 | 49 | function isImageTooSmall(imageDetails, smallDimension, widthMinimum, heightMinimum) { 50 | if (imageDetails.height == null || imageDetails.width == null) { 51 | return false; 52 | } 53 | 54 | if (!!widthMinimum && imageDetails.width < widthMinimum) return true; 55 | 56 | if (!!heightMinimum && imageDetails.height < heightMinimum) return true; 57 | 58 | if (!!smallDimension && imageDetails.height * imageDetails.width < smallDimension * smallDimension) return true; 59 | 60 | return false; 61 | } 62 | -------------------------------------------------------------------------------- /src/processing_modules/submission_modules/image/precheck/removeUncroppedImages.ts: -------------------------------------------------------------------------------- 1 | // standard modules 2 | require('dotenv').config(); 3 | const outdent = require('outdent'); 4 | const chalk = require('chalk'); 5 | const log = require('loglevel'); 6 | log.setLevel(process.env.LOG_LEVEL ?
process.env.LOG_LEVEL : 'info'); 7 | 8 | // magic eye modules 9 | import { removePost, printSubmission } from '../../../../reddit_utils'; 10 | import { logRemoveUncropped } from '../../../../master_stats'; 11 | 12 | //===================================== 13 | 14 | export async function removeUncroppedImages(reddit, submission, imageDetails, subSettings, subredditName, submissionType) { 15 | if (!subSettings.removeUncroppedImages || submissionType !== 'image') { 16 | return true; 17 | } 18 | 19 | if (imageHasVerticalBars(imageDetails) && subSettings.removeUncroppedImages.removeVertical !== false) { 20 | log.info(`[${subredditName}]`, 'Image is uncropped vertically - border bars. Removing - removing submission: ', await printSubmission(submission)); 21 | 22 | let removalReason = ''; 23 | if (subSettings.removeUncroppedImages.fullRemovalMessage) { 24 | removalReason = subSettings.removeUncroppedImages.fullRemovalMessage; 25 | } else { 26 | removalReason = `This image appears to be uncropped (i.e. black/white bars at the top and bottom). Images must be cropped before posting to this subreddit.`; 27 | } 28 | 29 | removePost(submission, removalReason, subSettings, reddit); 30 | logRemoveUncropped(subredditName, null); 31 | return false; 32 | } 33 | 34 | if (subSettings.removeUncroppedImages.removeHorizontal && imageHasHorizontalBars(imageDetails)) { 35 | log.info(`[${subredditName}]`, 'Image is uncropped horizontally - border bars. Removing - removing submission: ', await printSubmission(submission)); 36 | 37 | let removalReason = ''; 38 | if (subSettings.removeUncroppedImages.fullRemovalMessage) { 39 | removalReason = subSettings.removeUncroppedImages.fullRemovalMessage; 40 | } else { 41 | removalReason = `This image appears to be uncropped (i.e. black/white bars at the sides). 
Images must be cropped before posting to this subreddit.`; 42 | } 43 | 44 | removePost(submission, removalReason, subSettings, reddit); 45 | logRemoveUncropped(subredditName, null); 46 | return false; 47 | } 48 | 49 | if (subSettings.removeUncroppedImages.removeAllVertical && imageIsVertical(imageDetails)) { 50 | const removalReason = `This image appears to be uncropped because it's a long image (typically a vertical cellphone pic). Images posted to this subreddit should generally first be cropped to a square (rule 7), i.e.: 51 | 52 | * [Example of an uncropped image](https://i.imgur.com/XAjzOF0.png) 53 | * [Example image properly cropped](https://i.imgur.com/qND6Vb1.png) 54 | `; 55 | removePost(submission, removalReason, subSettings, reddit); logRemoveUncropped(subredditName, null); // consistency: the other two removal branches record stats, this one previously did not 56 | return false; 57 | } 58 | 59 | return true; 60 | } 61 | 62 | function imageHasVerticalBars(imageDetails) { 63 | const isSquarish = imageDetails.height < imageDetails.width * 1.2; 64 | 65 | if (isSquarish || imageDetails.trimmedHeight == null) { 66 | // Image is already squarish, not checking for crop 67 | return false; 68 | } 69 | 70 | return imageDetails.trimmedHeight / imageDetails.height < 0.81; // https://i.imgur.com/tfDO06G.png 71 | } 72 | 73 | function imageIsVertical(imageDetails) { 74 | return imageDetails.height > imageDetails.width * 1.8; 75 | } 76 | 77 | function imageHasHorizontalBars(imageDetails) { 78 | const isSquarish = imageDetails.width < imageDetails.height * 1.2; 79 | 80 | if (isSquarish || imageDetails.trimmedWidth == null) { 81 | // Image is already squarish, not checking for crop 82 | return false; 83 | } 84 | 85 | return imageDetails.trimmedWidth / imageDetails.width < 0.81; // https://i.imgur.com/VrL2mGp.png 86 | } 87 | -------------------------------------------------------------------------------- /src/reddit.ts: -------------------------------------------------------------------------------- 1 | const chalk = require('chalk'); 2 | require('dotenv').config(); 3 | 4 | // Create a new snoowrap
requester with OAuth credentials 5 | // See here: https://github.com/not-an-aardvark/reddit-oauth-helper 6 | const snoowrap = require('snoowrap'); 7 | export const reddit = new snoowrap({ 8 | userAgent: 'THE_MAGIC_EYE:v1.0.1', 9 | clientId: process.env.CLIENT_ID, 10 | clientSecret: process.env.CLIENT_SECRET, 11 | username: process.env.ACCOUNT_USERNAME, 12 | password: process.env.PASSWORD 13 | }); 14 | 15 | reddit.config({requestDelay: 1000, continueAfterRatelimitError: true}); 16 | 17 | if (process.env.LOG_LEVEL == 'debug') { 18 | reddit.config({debug: true}) 19 | } 20 | -------------------------------------------------------------------------------- /src/reddit_utils.ts: -------------------------------------------------------------------------------- 1 | // standard modules 2 | const outdent = require('outdent'); 3 | require('dotenv').config(); 4 | const log = require('loglevel'); 5 | log.setLevel(process.env.LOG_LEVEL ? process.env.LOG_LEVEL : 'info'); 6 | 7 | // reddit modules 8 | const chalk = require('chalk'); 9 | 10 | export async function getModComment(reddit, submissionId) { 11 | const submission = reddit.getSubmission(submissionId); 12 | const comments = await submission.comments; 13 | return comments.find(comment => { 14 | return comment.distinguished === 'moderator' && comment.removed != true && comment.author.name !== 'AutoModerator'; 15 | }); 16 | } 17 | 18 | export async function isMagicIgnore(modComment) { 19 | if (modComment == null) { 20 | return false; 21 | } 22 | const commentBody = await modComment.body; 23 | return commentBody.includes('[](#magic_ignore)') || commentBody.includes('[](#ignore_removal)'); // mod wants removal ignored 24 | } 25 | 26 | export async function isRepostRemoval(modComment) { 27 | return modComment != null && (await modComment.body).includes('[](#repost)'); // mod has told them to resubmit an altered/cropped version 28 | } 29 | 30 | export async function isBlacklistRemoval(modComment) { 31 | return modComment != null && 
(await modComment.body).includes('[](#start_removal)') && (await modComment.body).includes('[](#end_removal)'); 32 | } 33 | 34 | export async function isRepostOnlyByUserRemoval(modComment) { 35 | return modComment != null && (await modComment.body).includes('[](#repost_only_by_user)'); // mod has told them to resubmit an altered/cropped version 36 | } 37 | 38 | export async function isAnyTagRemoval(modComment) { 39 | const isRepostOnlyByUser = await isRepostOnlyByUserRemoval(modComment); 40 | const isBlacklisted = await isBlacklistRemoval(modComment); 41 | const isRepost = await isRepostRemoval(modComment); 42 | return isRepostOnlyByUser || isBlacklisted || isRepost; 43 | } 44 | 45 | export function sliceSubmissionId(submissionId) { 46 | return submissionId.slice(3, submissionId.length); // id is prefixed with "id_" 47 | } 48 | 49 | export async function removePost(submission, removalReason, subSettings, reddit, silent = false) { 50 | try { 51 | await submission.remove(); 52 | 53 | if (silent || subSettings.removalMethod === 'silent') { 54 | return; 55 | } else if (subSettings.removalMethod === 'replyAsSubreddit') { 56 | await removePostWithPrivateMessage(submission, removalReason, subSettings, reddit); 57 | } else { 58 | await removePostWithReply(submission, removalReason, subSettings); 59 | } 60 | } catch (e) { 61 | log.error('Tried to remove post but failed: ', await printSubmission(submission, 'unknown'), e); 62 | } 63 | } 64 | 65 | export async function removePostWithPrivateMessage(submission, removalReason, subSettings, reddit) { 66 | const footerText = subSettings.customFooter ? 
subSettings.customFooter : '';
    const removalFooter = outdent`
    

    -----------------------

    ([link to your submission](${await submission.permalink}))

    ${footerText}`;

    // Notify the author via modmail-style PM rather than a public comment.
    // NOTE(review): this promise is not awaited — failures are silently dropped; confirm intentional.
    reddit.composeMessage({
        to: await submission.author.name,
        subject: 'Your post has been automatically removed',
        text: removalReason + removalFooter,
        fromSubreddit: await submission.subreddit
    });
}

// Removes a post by replying publicly with the removal reason and distinguishing the
// reply as a mod comment. Uses the subreddit's customFooter when configured.
export async function removePostWithReply(submission, removalReason, subSettings) {
    const footerText = subSettings.customFooter ? subSettings.customFooter : "*I'm a bot so if I was wrong, reply to me and a moderator will check it.*";
    const removalFooter = outdent`
    

    -----------------------

    ${footerText}`;

    const replyable = await submission.reply(removalReason + removalFooter);
    // NOTE(review): distinguish() is not awaited — best-effort; confirm intentional.
    replyable.distinguish();
}

// Builds a short log-friendly description of a submission:
// "http://redd.it/<id> by <username>[ [type]]". Username is null for deleted authors.
export async function printSubmission(submission, submissionType?: string) {
    const username = (await submission.author) ? await submission.author.name : null;
    const idForLog = await submission.id;
    const type = submissionType ? ` [${submissionType}]` : '';
    return `http://redd.it/${idForLog} by ${username}${type}`;
}
--------------------------------------------------------------------------------
/src/scripts/hamming_compare.ts:
--------------------------------------------------------------------------------
var hammingDistance = require("hamming");
var phashLibrary = require("phash-imagemagick");
const chalk = require('chalk');
const { promisify } = require('util');
const phashGet = promisify(phashLibrary.get);
const fs = require('fs');
const imageDownloader = require('image-downloader');
const imageMagick = require('imagemagick');

import { dhash_gen } from "../dhash_gen";
const dhashGet = promisify(dhash_gen);

// Generates a difference hash for the image at imagePath; returns null on failure.
// logUrl is only used for error reporting.
async function generateDHash(imagePath, logUrl) {
    try {
        return await dhashGet(imagePath);
    } catch (e) {
        console.error('Could not generate dhash for: ', logUrl, ', ', e);
        return null;
    }
}

// Generates a perceptual hash record for the image at imagePath; returns null on failure.
async function generatePHash(imagePath, logUrl) {
    try {
        return await phashGet(imagePath);
    } catch (e) {
        console.error('Could not generate phash for: ', logUrl, ', ', e);
        return null;
    }
}

// Dev utility: compares two images from ./tmp (names from argv[2]/argv[3], defaulting
// to 1.jpg/2.jpg) and prints their dhash/phash hamming distances.
async function runHammingCompare() {

    console.log(__dirname);
    let image1 = __dirname + '/../../tmp/' + ( process.argv[2] ? process.argv[2] : "1.jpg");
    if (!image1.endsWith('.jpg') && !image1.endsWith('.png')) {
        image1 = image1 + '.jpg'; // can be lazy
    }

    let image2 = __dirname + '/../../tmp/' + ( process.argv[3] ? process.argv[3] : "2.jpg");
    if (!image2.endsWith('.jpg') && !image2.endsWith('.png')) {
        image2 = image2 + '.jpg'; // can be lazy
    }

    const dhash1 = await generateDHash(image1, image1);
    const dhash2 = await generateDHash(image2, image2);
    const distance = await hammingDistance(dhash1, dhash2); // hamming threshold
    console.log(chalk.blue(process.argv[2] + 'dhash:'), dhash1);
    console.log(chalk.blue(process.argv[3] + 'dhash:'), dhash2);
    console.log(chalk.green('dhash hamming distance:'), distance);


    const phash1 = await generatePHash(image1, image1);
    const phash2 = await generatePHash(image2, image2);
    const phash_distance = await hammingDistance(phash1.pHash, phash2.pHash); // hamming threshold
    console.log(chalk.blue(process.argv[2] + 'phash:'), phash1);
    console.log(chalk.blue(process.argv[3] + 'phash:'), phash2);
    console.log(chalk.green('hamming distance:'), phash_distance);
    // 165 / 168 max
}

runHammingCompare();

--------------------------------------------------------------------------------
/src/scripts/ping_mongo.ts:
--------------------------------------------------------------------------------
const chalk = require('chalk');
const { promisify } = require('util');
const fs = require('fs');
const MongoClient = require('mongodb').MongoClient;
const log = require('loglevel');
log.setLevel(process.env.LOG_LEVEL ?
process.env.LOG_LEVEL : 'info'); 7 | 8 | async function runDBPing() { 9 | const name = 'database'; 10 | const connectionUrl = 'mongodb+srv://STUFF'; 11 | 12 | log.info(chalk.blue('Connecting to database...', name, '-', connectionUrl)); 13 | try { 14 | const client = await MongoClient.connect(connectionUrl, { useNewUrlParser: true, connectTimeoutMS: 5000 }); 15 | const connection = await client.db(); 16 | log.info(chalk.green('Finished connecting to: '), name); 17 | 18 | connection.listCollections().toArray(function (err, collInfos) { 19 | if (err) { 20 | log.err(chalk.green('Collection err: '), err); 21 | } else { 22 | log.info(chalk.green('Num collections: '), collInfos.length); 23 | } 24 | }); 25 | } catch (err) { 26 | log.info(chalk.red('********* Fatal MongoDb connection error for ********* : '), name, err, connectionUrl); 27 | return null; 28 | } 29 | } 30 | 31 | runDBPing(); 32 | -------------------------------------------------------------------------------- /src/server.ts: -------------------------------------------------------------------------------- 1 | // standard server modules 2 | import express = require('express'); 3 | const app = express(); 4 | const chalk = require('chalk'); 5 | const fs = require('fs'); 6 | require('dotenv').config(); 7 | const log = require('loglevel'); 8 | log.setLevel(process.env.LOG_LEVEL ? process.env.LOG_LEVEL : 'info'); 9 | 10 | if ( 11 | !process.env.ACCOUNT_USERNAME || 12 | !process.env.PASSWORD || 13 | !process.env.CLIENT_ID || 14 | !process.env.CLIENT_SECRET || 15 | !process.env.NODE_ENV || 16 | !process.env.MONGODB_URI || 17 | !process.env.NODE_ENV || 18 | !process.env.DAYS_EXPIRY 19 | ) { 20 | log.error( 21 | process.env.ACCOUNT_USERNAME, 22 | process.env.PASSWORD, 23 | process.env.CLIENT_ID, 24 | process.env.CLIENT_SECRET, 25 | process.env.NODE_ENV, 26 | process.env.MONGODB_URI, 27 | process.env.NODE_ENV, 28 | process.env.DAYS_EXPIRY 29 | ); 30 | throw 'Missing essential config. 
Fatal error.'; 31 | } 32 | 33 | // magic eye imports 34 | import { initMasterDatabase, refreshAvailableDatabases } from './master_database_manager'; 35 | import { mainQueue, haltQueue } from './submission_queue'; 36 | import { mainInboxProcessor } from './inbox_processor'; 37 | import { mainProcessor } from './subreddit_processor'; 38 | import { mainSettingsProcessor } from './settings_processor'; 39 | import { getModdedSubredditsMulti } from './modded_subreddits'; 40 | import { mainUnmoderated } from './unmoderated_processor'; 41 | import { reddit } from './reddit'; 42 | 43 | const garbageCollectSeconds = 60 * 10; 44 | async function manualGarbageCollect() { 45 | if (!global.gc) { 46 | log.warn(chalk.red('WARN: Garbage collection is not exposed')); 47 | return; 48 | } 49 | global.gc(); 50 | log.info('[GARBAGE] Ran GC'); 51 | setTimeout(manualGarbageCollect, garbageCollectSeconds * 1000); // run again in timeoutTimeSeconds 52 | } 53 | 54 | async function startServer() { 55 | try { 56 | log.info('The magic eye is booting...'); 57 | app.listen(process.env.PORT || 3000, () => log.info(chalk.bgGreenBright('Magic Eye listening on port 3000'))); 58 | 59 | const tempDir = './tmp'; 60 | if (!fs.existsSync(tempDir)) { 61 | fs.mkdirSync(tempDir); 62 | } 63 | 64 | await initMasterDatabase(); 65 | await refreshAvailableDatabases(); 66 | await getModdedSubredditsMulti(); // init cache 67 | setTimeout(manualGarbageCollect, garbageCollectSeconds * 1000); 68 | 69 | log.info('The magic eye is ONLINE.'); 70 | mainQueue(); // start queue to get submissions 71 | mainProcessor(1); // start main loop 72 | mainInboxProcessor(); // start checking inbox 73 | setTimeout(mainSettingsProcessor, 300 * 1000); // check for wiki updates 74 | mainUnmoderated(); 75 | } catch (e) { 76 | log.error(chalk.red(e)); 77 | } 78 | } 79 | 80 | startServer(); 81 | 82 | app.get('/keepalive', async function (req, res) { 83 | res.setHeader('Content-Type', 'application/json'); 84 | res.send(JSON.stringify({ 
status: 'ok' })); 85 | }); 86 | 87 | app.get('/shutdown', async function (req, res) { 88 | res.setHeader('Content-Type', 'application/json'); 89 | let password = req.query ? req.query.password : null; 90 | if (password === process.env.SHUTDOWN_PASSWORD) { 91 | haltQueue(); 92 | res.send(JSON.stringify({ status: 'ok' })); 93 | } else { 94 | res.send(JSON.stringify({ status: 'failed' })); 95 | } 96 | }); 97 | 98 | app.get('/demod', async function (req, res) { 99 | res.setHeader('Content-Type', 'application/json'); 100 | let password = req.query ? req.query.password : null; 101 | let demodSub = req.query ? req.query.sub : null; 102 | if (password === process.env.SHUTDOWN_PASSWORD && !!demodSub) { 103 | if (demodSub) { 104 | log.info('[DEMOD] Demodding from: ', demodSub); 105 | await reddit.getSubreddit(demodSub).leaveModerator(); 106 | res.send(JSON.stringify({ status: 'ok' })); 107 | } 108 | } else { 109 | res.send(JSON.stringify({ status: 'failed' })); 110 | } 111 | }); 112 | 113 | process.on('unhandledRejection', (reason: any, p: any) => { 114 | log.warn('ERROR: Unhandled promise Rejection at: Promise', p.message, 'reason:', reason.message); 115 | }); 116 | 117 | process.on('uncaughtException', function (err) { 118 | log.warn('UNCAUGHT EXCEPTION - keeping process alive:', err); 119 | }); 120 | -------------------------------------------------------------------------------- /src/settings_processor.ts: -------------------------------------------------------------------------------- 1 | require('dotenv').config(); 2 | const chalk = require('chalk'); 3 | const log = require('loglevel'); 4 | log.setLevel(process.env.LOG_LEVEL ? 
process.env.LOG_LEVEL : 'info'); 5 | import { getMasterProperty, setMasterProperty } from './master_database_manager'; 6 | import { doUpdateSettings } from './wiki_utils'; 7 | import { getModdedSubredditsMulti } from './modded_subreddits'; 8 | import { reddit } from './reddit'; 9 | 10 | export async function mainSettingsProcessor() { 11 | try { 12 | log.info(chalk.blue('[UPDATE_SETTINGS] === Starting update settings')); 13 | const startCycleTime = new Date().getTime(); 14 | 15 | const moddedSubs = await getModdedSubredditsMulti(); 16 | if (!moddedSubs || moddedSubs.length == 0) { 17 | log.warn('No subreddits found. Sleeping.'); 18 | setTimeout(mainSettingsProcessor, 30 * 1000); // run again in 30 seconds 19 | } 20 | 21 | const chunkSize = 10; // chunk the requests because it can strain reddit asking for 200+ subs mod actions 22 | let remainingSubreddits = moddedSubs.slice(); 23 | while (remainingSubreddits.length > 0) { 24 | let subredditsToProcess = remainingSubreddits.slice(0, chunkSize); 25 | remainingSubreddits = remainingSubreddits.slice(chunkSize); 26 | const subredditsMultiString = subredditsToProcess 27 | .map((sub) => sub + '+') 28 | .join('') 29 | .slice(0, -1); // rarepuppers+pics+MEOW_IRL 30 | const subredditMulti = await reddit.getSubreddit(subredditsMultiString); 31 | const wikiChanges = await subredditMulti.getModerationLog({ type: 'wikirevise' }); 32 | const newChanges = wikiChanges.filter((change) => change.details.includes('Page magic_eye edited') && change.mod != process.env.ACCOUNT_USERNAME); 33 | const unprocessedChanges = await consumeUnprocessedWikiChanges(newChanges); 34 | for (const change of unprocessedChanges) { 35 | const subredditName = await change.subreddit.display_name; 36 | await doUpdateSettings(subredditName, change, reddit); 37 | } 38 | } 39 | 40 | const endCycleTime = new Date().getTime(); 41 | const cycleTimeTaken = (endCycleTime - startCycleTime) / 1000; 42 | log.info(chalk.blue('[UPDATE_SETTINGS] === Update settings finished, 
time was ', cycleTimeTaken, 'seconds')); 43 | } catch (e) { 44 | log.error(chalk.red('Failed to update settings: ', e)); 45 | } 46 | 47 | setTimeout(mainSettingsProcessor, 30 * 60 * 1000); 48 | } 49 | 50 | // overkill, but well tested 51 | async function consumeUnprocessedWikiChanges(latestItems) { 52 | latestItems.sort((a, b) => { 53 | return a.created_utc - b.created_utc; 54 | }); // oldest first 55 | 56 | const maxCheck = 500; 57 | if (latestItems.length > maxCheck) { 58 | log.info('Passed more than maxCheck items:', latestItems.length); 59 | latestItems = latestItems.slice(latestItems.length - maxCheck, latestItems.length); 60 | } 61 | 62 | // don't process anything over 3 hours old for safeguard. created_utc is in seconds/getTime is in millis. 63 | const threeHoursAgo = new Date().getTime() - 1000 * 60 * 60 * 3; 64 | latestItems = latestItems.filter((item) => item.created_utc * 1000 > threeHoursAgo); 65 | 66 | const processedIds = await getMasterProperty('processed_wiki_changes'); 67 | if (!processedIds) { 68 | log.warn(chalk.magenta('Could not find the last processed id list when retrieving unprocessed wiki changes. 
Regenerating...')); 69 | const intialProcessedIds = latestItems.map((submission) => submission.id); 70 | await setMasterProperty('processed_wiki_changes', intialProcessedIds); 71 | return []; 72 | } 73 | 74 | // update the processed list before processing so we don't retry any submissions that cause exceptions 75 | const newItems = latestItems.filter((item) => !processedIds.includes(item.id)); 76 | let updatedProcessedIds = processedIds.concat(newItems.map((submission) => submission.id)); // [3,2,1] + [new] = [3,2,1,new] 77 | const processedCacheSize = maxCheck * 5; // larger size for any weird/future edge-cases where a mod removes a lot of submissions 78 | if (updatedProcessedIds.length > processedCacheSize) { 79 | updatedProcessedIds = updatedProcessedIds.slice(updatedProcessedIds.length - processedCacheSize); // [3,2,1,new] => [2,1,new] 80 | } 81 | await setMasterProperty('processed_wiki_changes', updatedProcessedIds); 82 | 83 | return newItems; 84 | } 85 | -------------------------------------------------------------------------------- /src/submission_processor.ts: -------------------------------------------------------------------------------- 1 | // standard modules 2 | require('dotenv').config(); 3 | const outdent = require('outdent'); 4 | const chalk = require('chalk'); 5 | const log = require('loglevel'); 6 | log.setLevel(process.env.LOG_LEVEL ? 
 process.env.LOG_LEVEL : 'info');

// magic eye general
import { getImageDetails, getImageUrl } from './image_utils';
import { MagicSubmission, updateMagicSubmission } from './database_manager';
import { getModComment, isMagicIgnore, isAnyTagRemoval, removePost, printSubmission } from './reddit_utils';
import { logRemoveBroken } from './master_stats';

// precheck modules
import { removeImagesWithText } from './processing_modules/submission_modules/image/precheck/removeImagesWithText';
import { removeSmallImages } from './processing_modules/submission_modules/image/precheck/removeSmallImages';
import { removeUncroppedImages } from './processing_modules/submission_modules/image/precheck/removeUncroppedImages';
// modules
import { allowRepostsOnlyByUser } from './processing_modules/submission_modules/image/existing_submission/allowRepostOnlyByUser';
import { removeBlacklisted } from './processing_modules/submission_modules/image/existing_submission/removeBlacklisted';
import { removeReposts } from './processing_modules/submission_modules/image/existing_submission/removeReposts';

// Safety valves shared across calls: counts consecutive broken-image removals, and
// once the count reaches 10, permanently disables broken-image removal for this
// process (guards against mass-removal if image downloading itself is broken).
let brokenImageRemovalGuard = 0;
let permanentGuard = false;

// Main per-submission pipeline: dedupes by id, extracts the image, hashes it, runs the
// precheck processors, then handles the submission as new or as a repost of an
// existing hash match. activeMode=false is first-time-init (index only, no actions).
export async function processSubmission(submission, masterSettings, database, reddit, activeMode) {
    const subredditName = masterSettings._id;

    // check if we have already processed submission
    const existingMagicSubmissionById = await database.getMagicSubmissionById(submission.id);

    if (existingMagicSubmissionById === undefined) return; // error occurred

    if (existingMagicSubmissionById) {
        log.info(`[${subredditName}]`, 'Submission is already in database, - ignoring submission:', await printSubmission(submission));
        return;
    }

    // ignore approved submissions
    if ((await submission.approved) && activeMode) {
        log.info(`[${subredditName}]`, 'Submission is already approved, - ignoring submission:', await printSubmission(submission));
        return;
    }

    // first time init logging
    if (!activeMode) {
        log.info(chalk.yellow(`[${subredditName}][first_time_init]`, 'Starting process for submission: '), await printSubmission(submission));
    }

    // get image info
    const imageUrlInfo = await getImageUrl(submission);
    if (!imageUrlInfo) {
        if (activeMode) {
            log.info(`[${subredditName}]`, 'Submission was not a supported format - ignoring submission:', await printSubmission(submission));
        } else {
            log.info(`[${subredditName}][first_time_init]`, 'Submission was not a supported format - ignoring submission:', await printSubmission(submission));
        }
        return;
    }

    const { imageUrl, submissionType } = imageUrlInfo;
    const isRemoveImagesWithText = masterSettings.settings.removeImagesWithText_hidden;
    // OCR (text detection) only runs in active mode and only when the hidden setting is on.
    const imageDetails = await getImageDetails(
        imageUrl,
        activeMode && isRemoveImagesWithText,
        isRemoveImagesWithText ? masterSettings.settings.removeImagesWithText_hidden.blacklistedWords : null
    );
    if (imageDetails == null) {
        // Download/hash failed. For image posts this usually means a broken link;
        // for animated posts it can just be an unsupported gif, so only log.
        if (activeMode && submissionType == 'image' && masterSettings.settings.removeBrokenImages) {
            // todo: put this code in its own processor
            const removalMessage = masterSettings.settings.removeBrokenImages.fullRemovalMessage
                ? masterSettings.settings.removeBrokenImages.fullRemovalMessage
                : 'This post has been automatically removed because the link is broken or deleted. You will need to fix it and resubmit.';
            if (brokenImageRemovalGuard < 10 && !permanentGuard) {
                await removePost(submission, removalMessage, masterSettings.settings, reddit);
                log.info(`[${subredditName}]`, 'Could not download image - removing as broken: ', await printSubmission(submission));
            } else {
                // NOTE(review): once triggered, permanentGuard is never reset even after a
                // successful download resets brokenImageRemovalGuard — appears intentional.
                permanentGuard = true;
                log.info(`[${subredditName}]`, 'Broken image guard triggered - not removing: ', await printSubmission(submission));
            }

            logRemoveBroken(subredditName, null);
            brokenImageRemovalGuard++;
        } else if (activeMode && masterSettings.settings.removeBrokenImages) {
            log.info(`[${subredditName}]`, 'Could not download image - ignoring as appears to be gif: ', await printSubmission(submission));
        }
        return;
    } else if (imageDetails.tooLarge || imageDetails.ignore) {
        log.info(`[${subredditName}]`, 'Image is too large/ignore problem image: ', await printSubmission(submission));
        return;
    }

    brokenImageRemovalGuard = 0;

    // only run on approved media
    const processImages = masterSettings.settings.processImages === true || masterSettings.settings.processImages === undefined;
    const processAnimatedMedia = masterSettings.settings.processAnimatedMedia === true;
    const isImageToProcess = processImages && submissionType == 'image';
    const isAnimatedMediaToProcess = processAnimatedMedia && submissionType == 'animated';
    if (!isImageToProcess && !isAnimatedMediaToProcess) {
        log.info(chalk.yellow(`[${subredditName}]`, 'Ignoring: ', await printSubmission(submission, submissionType), ' - media type not active'));
        return;
    }

    // run the precheck processors
    if (activeMode) {
        const precheckProcessors = [removeImagesWithText, removeSmallImages, removeUncroppedImages];

        // Each processor returns false when it has taken action (stop the pipeline).
        for (const processor of precheckProcessors) {
            const shouldContinue = await processor(reddit, submission, imageDetails, masterSettings.settings, subredditName, submissionType);
            if (!shouldContinue) {
                return;
            }
        }
    }

    // process submission as new or existing
    const existingMagicSubmission = await database.getMagicSubmission(imageDetails.dhash, masterSettings.settings.similarityTolerance);
    if (existingMagicSubmission == null) {
        await processNewSubmission(submission, imageDetails, database, activeMode, subredditName, submissionType);
    } else if (activeMode) {
        await processExistingSubmission(submission, existingMagicSubmission, masterSettings, reddit, subredditName, submissionType);
        await database.saveMagicSubmission(existingMagicSubmission); // save here to cover all updates
    } else {
        log.info(chalk.yellow(`[${subredditName}][first_time_init]`, 'Ignoring existing submission for dhash, matched: ' + existingMagicSubmission._id));
    }
}

// Handles a submission whose hash matched an existing record: updates stats on the
// record, bails out on type mismatch / automod removals / magic-ignore markers, then
// runs the existing-submission processors (user-allowed reposts, blacklist, reposts).
async function processExistingSubmission(submission, existingMagicSubmission, masterSettings, reddit, subredditName, submissionType) {
    const existingMagicSubmissionType = existingMagicSubmission.type ? existingMagicSubmission.type : 'image'; // legacy data
    const originalExistingSubmissionRedditId = existingMagicSubmission.reddit_id;

    if (existingMagicSubmissionType !== submissionType) {
        log.warn(
            chalk.yellow(
                `[${subredditName}]`,
                'Incompatable types found for existing submission ',
                await printSubmission(submission, submissionType),
                ', matched:',
                existingMagicSubmission.reddit_id,
                ' - ignoring'
            )
        );
        return;
    }

    const lastSubmission = await reddit.getSubmission(existingMagicSubmission.reddit_id);
    const lastSubmissionRemoved = (await lastSubmission.removed) || (await lastSubmission.spam);

    existingMagicSubmission.highest_score = Math.max(existingMagicSubmission.highest_score, await lastSubmission.score);
    existingMagicSubmission.duplicates.push(submission.id);

    const modWhoRemoved = await lastSubmission.banned_by;
    if (!!modWhoRemoved && (modWhoRemoved === 'AutoModerator' || modWhoRemoved.name === 'AutoModerator')) {
        // can happen in cases where automod is slow for some reason
        log.info(`[${subredditName}]`, 'Ignoring automoderator removal for: ', await printSubmission(submission, submissionType));
        return;
    }

    let modComment;
    if (lastSubmissionRemoved) {
        modComment = await getModComment(reddit, existingMagicSubmission.reddit_id);
        const magicIgnore = await isMagicIgnore(modComment);
        if (magicIgnore) {
            log.info(
                `[${subredditName}]`,
                'Found repost of removed submission (http://redd.it/' + existingMagicSubmission.reddit_id,
                '), but magicIgnore/ignoreRemoval exists. Ignoring submission: ',
                await printSubmission(submission, submissionType)
            );
            await updateMagicSubmission(existingMagicSubmission, submission);
            return;
        }

        const hasRemovalTags = await isAnyTagRemoval(modComment);
        if (modComment == null || !hasRemovalTags) {
            log.info(
                `[${subredditName}]`,
                'Found repost of removed submission (http://redd.it/' + existingMagicSubmission.reddit_id,
                '), but no relevant removal message exists. Ignoring submission: ',
                await printSubmission(submission, submissionType)
            );
            await updateMagicSubmission(existingMagicSubmission, submission);
            return;
        }
    }

    // run the submission processors
    const imageProcessors = [allowRepostsOnlyByUser, removeBlacklisted, removeReposts];

    // Each processor returns false when it has actioned the submission.
    let tookAction = false;
    for (const processor of imageProcessors) {
        const shouldContinue = await processor(reddit, modComment, submission, lastSubmission, existingMagicSubmission, masterSettings.settings, subredditName, submissionType);
        if (!shouldContinue) {
            tookAction = true;
            break;
        }
    }
    if (!tookAction) {
        log.info(
            `[${subredditName}]`,
            'Found repost of removed submission (http://redd.it/' + originalExistingSubmissionRedditId,
            '), but no processor was configured to action repost, or post is allowed through. Ignoring submission: ',
            await printSubmission(submission, submissionType)
        );
    }
}

// Records a previously-unseen hash in the subreddit's database.
async function processNewSubmission(submission, imageDetails, database, activeMode, subredditName, submissionType) {
    if (activeMode) {
        log.info(`[${subredditName}]`, chalk.green('Processing new submission: ', await printSubmission(submission, submissionType)));
    } else {
        log.info(`[${subredditName}][first_time_init]`, chalk.green('Processing new submission: ', await printSubmission(submission, submissionType)));
    }

    const newMagicSubmission = new MagicSubmission(imageDetails.dhash, submission, await submission.score, submissionType);
    await database.saveMagicSubmission(newMagicSubmission, true);
}
--------------------------------------------------------------------------------
/src/submission_queue.ts:
--------------------------------------------------------------------------------
// standard server modules
import express = require('express');
const app = express();
const chalk = require('chalk');
const fs = require('fs');
require('dotenv').config();
const log = require('loglevel');
log.setLevel(process.env.LOG_LEVEL ?
process.env.LOG_LEVEL : 'info'); 9 | import { getModdedSubredditsMulti } from './modded_subreddits'; 10 | 11 | const util = require('util'); 12 | const sleep = util.promisify(setTimeout); 13 | 14 | // magic eye modules 15 | import { getMasterProperty, setMasterProperty } from './master_database_manager'; 16 | import { reddit } from './reddit'; 17 | 18 | let submissionQueue = []; 19 | 20 | let submissionRequests = 1000; // request max on restart 21 | 22 | let haltProcessing = false; 23 | 24 | let loopCount = 0; 25 | 26 | export async function mainQueue() { 27 | const minimumTimeoutSeconds = 5; // default time between ingest requests 28 | let timeoutTimeSeconds = minimumTimeoutSeconds; 29 | 30 | if (haltProcessing) { 31 | setTimeout(mainQueue, 300 * 1000); // recover if sigterm doesn't kill process 32 | return; 33 | } 34 | 35 | try { 36 | log.debug(chalk.blue('[QUEUE] Starting queue cycle')); 37 | const startCycleTime = new Date().getTime(); 38 | 39 | const moddedSubs = await getModdedSubredditsMulti(); 40 | if (moddedSubs.length == 0) { 41 | log.warn('[QUEUE] No subreddits found. 
Sleeping.'); 42 | setTimeout(mainQueue, 30 * 1000); // run again in 30 seconds 43 | } 44 | 45 | let submissions = []; 46 | 47 | const moddedSubredditsMultiString = getNextSubList(moddedSubs); 48 | log.info(`Requesting for ${moddedSubredditsMultiString}`); 49 | const subredditMulti = await reddit.getSubreddit(moddedSubredditsMultiString); 50 | const newSubmissions = await subredditMulti.getNew({ limit: 100 }); 51 | submissions = submissions.concat(newSubmissions); 52 | await sleep(1000); 53 | 54 | if (process.env.MODQUEUE_SUBREDDITS && loopCount % 10 === 0) { 55 | log.info(`[QUEUE] Requesting modqueue subreddits: `, process.env.MODQUEUE_SUBREDDITS); 56 | const modqueueSubredditMulti = await reddit.getSubreddit(process.env.MODQUEUE_SUBREDDITS); 57 | const modqueueSubmissions = await modqueueSubredditMulti.getModqueue({ limit: 100, only: 'links' }); 58 | submissions = submissions.concat(modqueueSubmissions); 59 | log.info(`[QUEUE] Modque subreddits request complete`); 60 | } 61 | 62 | if (!submissions) { 63 | log.error(chalk.red('[QUEUE] Cannot get new submissions to process - api is probably down for maintenance.')); 64 | setTimeout(mainQueue, 60 * 1000); // run again in 60 seconds 65 | return; 66 | } 67 | 68 | const unprocessedSubmissions = await consumeUnprocessedSubmissions(submissions); 69 | 70 | submissionQueue = submissionQueue.concat(unprocessedSubmissions); 71 | 72 | // end cycle 73 | const endCycleTime = new Date().getTime(); 74 | const cycleTimeTaken = (endCycleTime - startCycleTime) / 1000; 75 | timeoutTimeSeconds = Math.max(minimumTimeoutSeconds - cycleTimeTaken, 0); 76 | 77 | if (unprocessedSubmissions.length > submissionRequests) { 78 | log.warn('[QUEUE] HEAVY LOAD: unprocessedSubmissions length was ', unprocessedSubmissions.length, ', submissions may have been missed'); 79 | submissionRequests = 1000; 80 | } else { 81 | submissionRequests = unprocessedSubmissions.length < 50 ? 
100 : unprocessedSubmissions.length + 100; 82 | } 83 | 84 | log.info(chalk.red(`[QUEUE] Ingested ${unprocessedSubmissions.length} new submissions, next request: ${submissionRequests} in ${timeoutTimeSeconds} seconds`)); 85 | } catch (err) { 86 | log.error(chalk.red('[QUEUE] Queue loop error: ', err)); 87 | } 88 | 89 | loopCount++; 90 | setTimeout(mainQueue, timeoutTimeSeconds * 1000); // run again in timeoutTimeSeconds 91 | } 92 | 93 | function getNextSubList(moddedSubs: string[]) { 94 | const numSubsToRequest = 50; 95 | const numBrackets = Math.ceil(moddedSubs.length / numSubsToRequest); 96 | const currentBracket = loopCount % numBrackets; 97 | return moddedSubs 98 | .slice(currentBracket * numSubsToRequest, (currentBracket + 1) * numSubsToRequest) 99 | .map((sub) => sub + '+') 100 | .join('') 101 | .slice(0, -1); // rarepuppers+pics+MEOW_IRL 102 | } 103 | 104 | export async function consumeQueue() { 105 | const queue = submissionQueue; 106 | submissionQueue = []; 107 | return queue; 108 | } 109 | 110 | async function consumeUnprocessedSubmissions(latestItems) { 111 | latestItems.sort((a, b) => { 112 | return a.created_utc - b.created_utc; 113 | }); // oldest first 114 | 115 | const maxCheck = 1500; 116 | if (latestItems.length > maxCheck) { 117 | log.info('[QUEUE] Passed more than maxCheck items:', latestItems.length); 118 | latestItems = latestItems.slice(latestItems.length - maxCheck, latestItems.length); 119 | } 120 | 121 | // don't process anything over 3 hours old for safeguard. created_utc is in seconds/getTime is in millis. 122 | const threeHoursAgo = new Date().getTime() - 1000 * 60 * 60 * 3; 123 | latestItems = latestItems.filter((item) => item.created_utc * 1000 > threeHoursAgo); 124 | 125 | const processedIds = await getMasterProperty('new_processed_ids'); 126 | if (!processedIds) { 127 | log.warn(chalk.magenta('[QUEUE] Could not find the last processed id list when retrieving unprocessed submissions. 
Regenerating...')); 128 | const intialProcessedIds = latestItems.map((submission) => submission.id); 129 | await setMasterProperty('new_processed_ids', intialProcessedIds); 130 | return []; 131 | } 132 | 133 | // update the processed list before processing so we don't retry any submissions that cause exceptions 134 | const newItems = latestItems.filter((item) => !processedIds.includes(item.id)); 135 | let updatedProcessedIds = processedIds.concat(newItems.map((submission) => submission.id)); // [3,2,1] + [new] = [3,2,1,new] 136 | const processedCacheSize = 10000; // larger size for any weird/future edge-cases where a mod removes a lot of submissions 137 | if (updatedProcessedIds.length > processedCacheSize) { 138 | updatedProcessedIds = updatedProcessedIds.slice(updatedProcessedIds.length - processedCacheSize); // [3,2,1,new] => [2,1,new] 139 | } 140 | await setMasterProperty('new_processed_ids', updatedProcessedIds); 141 | 142 | return newItems; 143 | } 144 | 145 | export function haltQueue() { 146 | log.info('[SHUTDOWN] Halting queue ingest'); 147 | haltProcessing = true; 148 | setTimeout(() => { 149 | haltProcessing = false; 150 | }, 120 * 1000); // recover if not shutdown 151 | } 152 | -------------------------------------------------------------------------------- /src/subreddit_processor.ts: -------------------------------------------------------------------------------- 1 | // standard server modules 2 | import express = require('express'); 3 | const app = express(); 4 | const chalk = require('chalk'); 5 | const fs = require('fs'); 6 | require('dotenv').config(); 7 | const log = require('loglevel'); 8 | log.setLevel(process.env.LOG_LEVEL ? 
process.env.LOG_LEVEL : 'info');

// magic eye modules
import { initDatabase, databaseConnectionListSize } from './database_manager';
import { processSubmission } from './submission_processor';
import { processUnmoderated } from './unmoderated_processor';
import { firstTimeInit, isAnythingInitialising } from './first_time_init';
import { SubredditSettings, getSubredditSettings, setSubredditSettings, getMasterProperty, setMasterProperty } from './master_database_manager';
import { createDefaultSettings, writeSettings } from './wiki_utils';
import { logProcessPost } from './master_stats';
import { reddit } from './reddit';
import { consumeQueue } from './submission_queue';
import { getModdedSubredditsMulti } from './modded_subreddits';
import moment = require('moment');

// Watchdog state: when the active processing thread last made progress, and
// the id of the thread currently allowed to run. Stale threads (lower id)
// exit on their next cycle.
let threadMonitor = new Date();
let currentThread = 0;

/**
 * Main processing loop. Drains the submission queue, processes each modded
 * subreddit, then reschedules itself via setTimeout.
 *
 * @param threadCount identifies this logical "thread"; if the watchdog below
 *        starts a replacement with a higher id, older threads stop themselves.
 */
export async function mainProcessor(threadCount: number) {
    // guard against thread dying: only the newest thread keeps running
    if (threadCount > currentThread) {
        currentThread = threadCount;
    } else if (threadCount < currentThread) {
        log.info('***** Finishing thread: ', threadCount);
        return;
    }

    const minimumTimeoutTimeSeconds = 15;

    let timeoutTimeSeconds = minimumTimeoutTimeSeconds;
    threadMonitor = new Date();
    log.info(chalk.blue('Starting submission processing cycle for thread: ', threadCount));
    try {
        const startCycleTime = new Date().getTime();

        const moddedSubs = await getModdedSubredditsMulti();
        if (!moddedSubs || moddedSubs.length == 0) {
            log.warn('No subreddits found. Sleeping.');
            setTimeout(() => {
                mainProcessor(threadCount);
            }, 30 * 1000); // run again in 30 seconds
            // BUGFIX: without this return the cycle fell through, iterated a
            // possibly-undefined moddedSubs, and scheduled a SECOND timer at
            // the bottom of the function (runaway thread multiplication).
            return;
        }

        const unprocessedSubmissions = await consumeQueue();
        for (const subredditName of moddedSubs) {
            const unprocessedForSub = unprocessedSubmissions.filter((submission) => submission.subreddit.display_name == subredditName);
            try {
                await processSubreddit(subredditName, unprocessedForSub, reddit);
                threadMonitor = new Date(); // record progress for the watchdog
            } catch (e) {
                const possibleErrorIds = unprocessedForSub.map((item) => item.id);
                log.error('Error processing subreddit: ', subredditName, ',', e.message, ', possible error threads:', possibleErrorIds);
            }
        }

        // end cycle: sleep only for whatever remains of the minimum cycle time
        const endCycleTime = new Date().getTime();
        const cycleTimeTaken = (endCycleTime - startCycleTime) / 1000;
        timeoutTimeSeconds = Math.max(minimumTimeoutTimeSeconds - cycleTimeTaken, 0);

        threadMonitor = new Date();
        const used = process.memoryUsage().heapUsed / 1024 / 1024;
        if (unprocessedSubmissions.length > 0) {
            log.info(
                chalk.blue(
                    `========= Processed ${
                        unprocessedSubmissions.length
                    } new submissions, took ${cycleTimeTaken} seconds. databaseConnectionListSize: ${databaseConnectionListSize()}, memory usage is: ${
                        Math.round(used * 100) / 100
                    } MB`
                )
            );
        }
    } catch (err) {
        log.error(chalk.red('Main loop error: ', err));
    }

    threadMonitor = new Date();
    setTimeout(() => {
        mainProcessor(threadCount);
    }, timeoutTimeSeconds * 1000); // run again in timeoutTimeSeconds
}

// Watchdog: if the active thread has made no progress for 5 minutes, assume
// it died and start a replacement with a higher thread id.
setInterval(() => {
    // BUGFIX: moment's add() takes (amount, unit); the ('minutes', 5) order is
    // deprecated (it still works, but emits a deprecation warning).
    const restart = moment().isAfter(moment(threadMonitor).add(5, 'minutes'));
    if (restart) {
        log.info('RESTARTING MAIN PROCESSOR');
        mainProcessor(currentThread + 1);
    }
}, 10000);

/**
 * Processes a single subreddit: creates master settings on first sight, kicks
 * off first-time initialisation if needed, otherwise processes each queued
 * submission against the subreddit's database.
 *
 * @param subredditName display name of the subreddit (no /r/ prefix)
 * @param unprocessedSubmissions queued submissions belonging to this subreddit
 * @param reddit snoowrap client instance
 */
async function processSubreddit(subredditName: string, unprocessedSubmissions, reddit) {
    // "u_" prefixed names are user profiles, not real subreddits — skip them
    if (subredditName.startsWith('u_')) {
        return;
    }
    let masterSettings = await getSubredditSettings(subredditName);
    if (!masterSettings) {
        masterSettings = await initialiseNewSubreddit(subredditName);
    }

    // safety check
    // BUGFIX: initialiseNewSubreddit returns undefined when no database is
    // available; the old check dereferenced masterSettings.settings and threw.
    if (!masterSettings || !masterSettings.settings || !masterSettings.config) {
        log.warn(`[${subredditName}]`, chalk.yellow('Missing settings for '), subredditName, ' - ignoring subreddit');
        return;
    }

    // first time init — runs in the background; only one sub initialises at a time
    if (!masterSettings.config.firstTimeInit) {
        if (!isAnythingInitialising()) {
            const database = await initDatabase(subredditName, masterSettings.config.databaseUrl, masterSettings.config.expiryDays);
            firstTimeInit(reddit, subredditName, database, masterSettings).then(
                () => {
                    log.info(`[${subredditName}]`, chalk.green('Initialisation processing exited for ', subredditName));
                },
                (e) => {
                    log.error(`[${subredditName}]`, chalk.red('First time init failed for:', subredditName, e));
                }
            );
        }
        return;
    }

    // submissions
    if (unprocessedSubmissions.length > 0) {
        const database = await initDatabase(subredditName, masterSettings.config.databaseUrl, masterSettings.config.expiryDays);
        if (database) {
            for (let submission of unprocessedSubmissions) {
                const startTime = new Date().getTime();
                try {
                    await processSubmission(submission, masterSettings, database, reddit, true);
                } catch (err) {
                    log.error(`[${subredditName}]`, chalk.red(`Failed to process submission: ${submission.id}.`), ' error message: ', err.message);
                }
                const endTime = new Date().getTime();
                const timeTaken = (endTime - startTime) / 1000;
                logProcessPost(subredditName, timeTaken); // per-post timing stats
            }
            await database.closeDatabase();
        } else {
            log.error(`[${subredditName}]`, chalk.red(`Failed to init database, ignoring ${unprocessedSubmissions.length} posts for subreddit.`));
        }
    }
}

/**
 * Creates master settings for a never-before-seen subreddit, assigning it to
 * the least-used database in the master database list.
 *
 * @returns the new SubredditSettings, or undefined if no database is available
 */
export async function initialiseNewSubreddit(subredditName: string) {
    // find the database with least use
    log.info(`[${subredditName}]`, chalk.yellow('No master settings for'), subredditName, ' - searching for least used database');
    const databaseList = await getMasterProperty('databases');
    let selectedDatabase = null;
    let databaseCount = Infinity; // was a magic 99999 — any real count beats Infinity
    for (const databaseKey of Object.keys(databaseList)) {
        const database = databaseList[databaseKey];
        if (database.count < databaseCount) {
            selectedDatabase = database;
            databaseCount = database.count;
        }
    }
    if (!selectedDatabase) {
        log.warn(`[${subredditName}]`, 'No databases available to house: ', subredditName);
        return;
    }
    const masterSettings = new SubredditSettings(subredditName);
    await createDefaultSettings(subredditName, masterSettings, reddit);

    masterSettings.config.databaseUrl = selectedDatabase.url;
    await setSubredditSettings(subredditName, masterSettings);
    selectedDatabase.count++; // persist the updated usage count below
    await setMasterProperty('databases', databaseList);
    return masterSettings;
}

--------------------------------------------------------------------------------
/src/unmoderated_processor.ts:
--------------------------------------------------------------------------------
import { getModdedSubredditsMulti } from "./modded_subreddits";
import { getSubredditSettings } from "./master_database_manager";
import { reddit } from "./reddit";
import { printSubmission } from "./reddit_utils";

// standard modules
require('dotenv').config();
const outdent = require('outdent');
const chalk = require('chalk');
const log = require('loglevel');
log.setLevel(process.env.LOG_LEVEL ? process.env.LOG_LEVEL : 'info');


/**
 * Periodic loop (every 10 minutes): for each modded subreddit with the
 * reportUnmoderated setting enabled, pulls the day's top posts and reports
 * any high-score unmoderated ones.
 */
export async function mainUnmoderated() {
    let timeoutTimeSeconds = 600;
    try {
        log.debug(chalk.blue("Starting submission processing cycle"));

        const moddedSubreddits = await getModdedSubredditsMulti();

        for (const subredditName of moddedSubreddits) {
            let masterSettings = await getSubredditSettings(subredditName);
            if (masterSettings) {
                if (masterSettings.settings.reportUnmoderated) {
                    const subForUnmoderated = await reddit.getSubreddit(subredditName);
                    const topSubmissionsDay = await subForUnmoderated.getTop({time: 'day'}).fetchAll({amount: 100});
                    await processUnmoderated(topSubmissionsDay, masterSettings.settings);
                }
            }
        }
    } catch (err) {
        log.error(chalk.red("Main loop error: ", err));
    }

    setTimeout(mainUnmoderated, timeoutTimeSeconds * 1000); // run again in timeoutTimeSeconds
}

/**
 * Reports submissions that are unapproved, not already mod-reported, and
 * above the configured reportUnmoderatedScore threshold.
 *
 * @param submissions snoowrap Submission objects to check
 * @param settings subreddit settings containing reportUnmoderated config
 */
export async function processUnmoderated(submissions, settings) {
    for (const submission of submissions) {
        let alreadyReported = submission.mod_reports && submission.mod_reports.length > 0;
        if (!submission.approved && !alreadyReported && submission.score > settings.reportUnmoderated.reportUnmoderatedScore) {
            // BUGFIX: report() was a floating promise — a rejection would have
            // been unhandled; await so failures surface in the caller's catch.
            await submission.report({'reason': 'Unmoderated post - check for rules'});
            // BUGFIX: log message typo "Reporing" -> "Reporting"
            log.info("+ Reporting unmoderated post:", await printSubmission(submission));
        }
    }
}

--------------------------------------------------------------------------------
/src/wiki_utils.ts:
--------------------------------------------------------------------------------
// standard modules
require('dotenv').config();
const outdent = require('outdent');
const chalk = require('chalk');
const log = require('loglevel');
const indentString = require('indent-string');
const Validator = require('jsonschema').Validator;
log.setLevel(process.env.LOG_LEVEL ? process.env.LOG_LEVEL : 'info');
import { getSubredditSettings, setSubredditSettings } from './master_database_manager';

/**
 * Creates the default magic_eye wiki settings page for a subreddit. If a
 * parsable settings page already exists, its contents are adopted into
 * masterSettings instead. On wiki-page creation failure (other than
 * WIKI_DISABLED) the bot demods itself and messages the subreddit.
 *
 * @throws on WIKI_DISABLED or any other wiki edit failure
 */
export async function createDefaultSettings(subredditName, masterSettings, reddit) {
    log.info(`[${subredditName}]`, 'Creating default settings for', subredditName, '...');
    const wikiPage = await reddit.getSubreddit(subredditName).getWikiPage('magic_eye');

    try {
        const settings = JSON.parse(await wikiPage.content_md);
        if (settings) {
            log.info(chalk.magenta(`[${subredditName}]`, 'Wiki settings already exist when trying to create defaults. Ignoring and using existing settings for '), subredditName);
            masterSettings.settings = settings;
            return;
        }
    } catch (e) {
        // page missing or unparsable — fall through and create defaults
        log.info(`[${subredditName}]`, 'Creating new settings mode.');
    }

    const stringSettings = JSON.stringify(masterSettings.settings, null, 4);
    const indentedSettings = indentString(stringSettings, 4); // 4-space indent so reddit renders a code block
    try {
        await wikiPage.edit({ text: indentedSettings, reason: 'Create default Magic Eye settings.' });
        await wikiPage.editSettings({ listed: false, permission_level: 2 }); // mod only, not listed
        log.info(`[${subredditName}]`, 'Finished creating default settings for', subredditName, '...');
    } catch (e) {
        if (e.message && e.message.includes('WIKI_DISABLED')) {
            // NOTE(review): throwing a bare string means callers see no .message/
            // stack; consider `throw new Error(...)` after checking catch sites.
            throw `[${subredditName}] Cannot create settings because WIKI_DISABLED`;
        } else {
            log.info('[DEMOD] Failed to create wiki page, Demodding from: ', subredditName);
            await reddit.getSubreddit(subredditName).leaveModerator();

            await reddit.composeMessage({
                // BUGFIX: `await` on a plain template literal was a no-op
                to: `/r/${subredditName}`,
                subject: `Initialisation failed.`,
                text: outdent`
                    Hello. It looks like you have failed to add me with the correct permissions. I have demodded, so to fix this you will need to remod me with the correct permissions listed in the documentation:

                    https://github.com/downfromthetrees/the_magic_eye/blob/master/README.md#setup
                `,
            });
            throw e;
        }
    }
}

/**
 * Writes masterSettings.settings back to the subreddit's magic_eye wiki page
 * (used when upgrading to a new settings version).
 *
 * @throws on WIKI_DISABLED or any other wiki edit failure
 */
export async function writeSettings(subredditName, masterSettings, reddit) {
    log.info(`[${subredditName}]`, 'Upgrading settings for', subredditName, '...');
    const wikiPage = await reddit.getSubreddit(subredditName).getWikiPage('magic_eye');

    const stringSettings = JSON.stringify(masterSettings.settings, null, 4);
    const indentedSettings = indentString(stringSettings, 4);
    try {
        await wikiPage.edit({ text: indentedSettings, reason: 'Updating Magic Eye settings (new settings version)' });
        log.info(`[${subredditName}]`, 'Settings upgrade complete for', subredditName);
    } catch (e) {
        if (e.message && e.message.includes('WIKI_DISABLED')) {
            throw `[${subredditName}] Cannot update settings because WIKI_DISABLED`;
        } else {
            throw e;
        }
    }
}

/**
 * Handles a mod edit to the magic_eye wiki page: parses the new settings and
 * stores them, messaging the editing mod with success or failure.
 *
 * @param change wiki change record; change.mod is the editing moderator
 * @returns the parsed settings, or undefined if they failed to parse
 */
export async function doUpdateSettings(subredditName, change, reddit) {
    log.info('Updating settings for', subredditName);
    const subreddit = await reddit.getSubreddit(subredditName);
    const wikiPage = await subreddit.getWikiPage('magic_eye');
    let settings;
    try {
        settings = JSON.parse(await wikiPage.content_md);
    } catch (e) {
        // BUGFIX: was a floating promise — failures to notify the mod were unhandled
        await sendFailureReply(change.mod, reddit, subredditName);
        log.warn('Failed to update new settings for sub');
        return;
    }

    const masterSettings = await getSubredditSettings(subredditName);
    masterSettings.settings = settings;
    await setSubredditSettings(subredditName, masterSettings);
    await sendSuccessReply(change.mod, reddit, subredditName);
    log.info('Update settings for successful for ', subredditName);
    return settings;
}

// Messages the editing mod that their settings update was applied.
async function sendSuccessReply(username, reddit, subredditName: string) {
    await reddit.composeMessage({
        // NOTE(review): `await username` is a no-op for a plain string; kept in
        // case change.mod is a lazy snoowrap object — confirm before removing.
        to: await username,
        subject: 'Settings update successful',
        text: `Settings update successful for r/${subredditName}. Let's nuke some posts!`,
    });
}

// Messages the editing mod that their settings update failed to parse.
async function sendFailureReply(username, reddit, subredditName: string) {
    await reddit.composeMessage({
        // NOTE(review): see sendSuccessReply about `await username`.
        to: await username,
        subject: 'Settings update failed',
        text: `The changes you made to the settings for r/${subredditName} aren't formatted right so I haven't updated them.

Use https://jsonlint.com/ to find the issue (typically a trailing comma, missing comma, or missing quotation marks). Either that or restore the last settings using the wiki page history.`,
    });
}

--------------------------------------------------------------------------------
/tsconfig.json:
--------------------------------------------------------------------------------
{
    "compilerOptions": {
        "outDir": "./build",
        "target": "es5",
        "moduleResolution": "node",
        "allowJs": true,
        // Enable strictest settings like strictNullChecks & noImplicitAny.
        //"strict": true,
        // Disallow features that require cross-file information for emit.
10 | // "isolatedModules": true, 11 | // Import non-ES modules as default imports. 12 | "esModuleInterop": true 13 | }, 14 | "lib": [ 15 | "es2015" 16 | ], 17 | "module": "commonjs", 18 | "rootDir": "./", 19 | "include": [ 20 | "./src/**/*" 21 | ], 22 | "exclude": [ 23 | "node_modules" 24 | ] 25 | } 26 | -------------------------------------------------------------------------------- /webpack.config.js: -------------------------------------------------------------------------------- 1 | var path = require('path'); 2 | var webpack = require('webpack'); 3 | var HtmlWebpackPlugin = require('html-webpack-plugin'); 4 | var LiveReloadPlugin = require('webpack-livereload-plugin'); 5 | 6 | module.exports = { 7 | entry: path.resolve(__dirname, './src/client/index.jsx'), 8 | output: { 9 | path: '/', 10 | filename: 'bundle.js' 11 | }, 12 | module: { 13 | rules: [ 14 | { 15 | test: /\.jsx?$/, 16 | 17 | use: [ 18 | { 19 | loader: 'babel-loader', 20 | query: { 21 | presets: ['es2015'] 22 | } 23 | }, 24 | ], 25 | exclude: /node_modules/, 26 | } 27 | ] 28 | }, 29 | plugins: [ 30 | new HtmlWebpackPlugin( 31 | { 32 | template: path.resolve(__dirname, './src/client/index.html'), 33 | title: 'The Magic Eye' 34 | } 35 | ), 36 | new LiveReloadPlugin() 37 | ], 38 | stats: { 39 | chunks: false 40 | }, 41 | devtool: 'source-map' 42 | }; 43 | --------------------------------------------------------------------------------