├── .flake8 ├── .github └── workflows │ └── ci.yml ├── .gitignore ├── LICENSE.txt ├── MANIFEST.in ├── README.md ├── prawtools ├── __init__.py ├── alert.py ├── helpers.py ├── mod.py └── stats.py ├── setup.py └── tests ├── __init__.py ├── cassettes ├── StatsTest.recent.json └── StatsTest.top.json ├── conftest.py └── test_stats.py /.flake8: -------------------------------------------------------------------------------- 1 | [flake8] 2 | ignore = E203 E501 W503 W504 3 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | jobs: 2 | lint: 3 | runs-on: ubuntu-latest 4 | steps: 5 | - uses: actions/checkout@v1 6 | - uses: actions/setup-python@v1 7 | with: 8 | python-version: 3.x 9 | - uses: actions/cache@v1 10 | with: 11 | key: v0-${{ runner.os }}-pip-lint-${{ hashFiles('setup.py') }} 12 | path: ~/.cache/pip 13 | restore-keys: | 14 | v0-${{ runner.os }}-pip-lint- 15 | v0-${{ runner.os }}-pip- 16 | - name: Install dependencies 17 | run: | 18 | python -m pip install --upgrade pip 19 | pip install .[lint] 20 | - name: Run black 21 | run: black --check --verbose . 22 | - name: Run flake8 23 | run: flake8 . 
--statistics 24 | - name: Run pydocstyle 25 | run: pydocstyle 26 | test: 27 | runs-on: ubuntu-latest 28 | steps: 29 | - uses: actions/checkout@v1 30 | - uses: actions/setup-python@v1 31 | with: 32 | python-version: ${{ matrix.python-version }} 33 | - uses: actions/cache@v1 34 | with: 35 | key: v0-${{ runner.os }}-pip-test-${{ hashFiles('setup.py') }} 36 | path: ~/.cache/pip 37 | restore-keys: | 38 | v0-${{ runner.os }}-pip-test- 39 | v0-${{ runner.os }}-pip- 40 | - name: Install dependencies 41 | run: | 42 | python -m pip install --upgrade pip 43 | pip install .[test] 44 | pip install https://github.com/bboe/coveralls-python/archive/github_actions.zip 45 | - name: Test with pytest 46 | run: coverage run --source prawtools --module pytest 47 | - env: 48 | COVERALLS_PARALLEL: true 49 | COVERALLS_REPO_TOKEN: ${{ secrets.GITHUB_TOKEN }} 50 | name: Submit to coveralls 51 | run: coveralls 52 | strategy: 53 | matrix: 54 | python-version: [3.5, 3.6, 3.7, 3.8] 55 | complete_coverals: 56 | needs: test 57 | runs-on: ubuntu-latest 58 | steps: 59 | - name: Coveralls Finished 60 | uses: coverallsapp/github-action@master 61 | with: 62 | github-token: ${{ secrets.GITHUB_TOKEN }} 63 | parallel-finished: true 64 | 65 | name: CI 66 | on: [pull_request, push] -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.egg 2 | *.egg-info/ 3 | *.eggs/ 4 | *.pyc 5 | *~ 6 | .coverage 7 | _build/ 8 | build/ 9 | dist/ 10 | -------------------------------------------------------------------------------- /LICENSE.txt: -------------------------------------------------------------------------------- 1 | Copyright (c) 2012, Bryce Boe 2 | All rights reserved. 3 | 4 | Redistribution and use in source and binary forms, with or without 5 | modification, are permitted provided that the following conditions are met: 6 | 7 | 1. 
Redistributions of source code must retain the above copyright notice, this 8 | list of conditions and the following disclaimer. 9 | 2. Redistributions in binary form must reproduce the above copyright notice, 10 | this list of conditions and the following disclaimer in the documentation 11 | and/or other materials provided with the distribution. 12 | 13 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND 14 | ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED 15 | WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 16 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE 17 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 18 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 19 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 20 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, 21 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 22 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 23 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include *.md *.txt 2 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # BBOE's PRAWtools 2 | 3 | PRAWtools is a collection of tools that utilize reddit's API through 4 | [PRAW](https://praw.readthedocs.io/). 
PRAWtools is currently made up of three 5 | utillities: 6 | 7 | * modutils 8 | * reddit_alert 9 | * subreddit_stats 10 | 11 | ## PRAWtools Installation 12 | 13 | ### Ubuntu/debian installation 14 | 15 | sudo apt-get install python-setuptools 16 | sudo easy_install pip 17 | sudo pip install prawtools 18 | 19 | ### Arch Linux installation 20 | sudo pacman -S python-pip 21 | sudo easy_install pip 22 | sudo pip install prawtools 23 | 24 | ### Mac OS X installation 25 | 26 | sudo easy_install pip 27 | sudo pip install prawtools 28 | 29 | 30 | ## modutils 31 | 32 | modutils is a tool to assist reddit community moderators in moderating 33 | their community. At present, it is mostly useful for automatically building 34 | flair templates from existing user flair, however, it can also be used to 35 | quickly list banned users, contributors, and moderators. 36 | 37 | ### modutils examples 38 | 39 | Note: all examples require you to be a moderator for the subreddit 40 | 41 | 0. List banned users for subreddit __foo__ 42 | 43 | modutils -l banned foo 44 | 45 | 0. Get current flair for subreddit __bar__ 46 | 47 | modutils -f bar 48 | 49 | 0. Synchronize flair templates with existing flair for subreddit __baz__, 50 | building non-editable templates for any flair whose flair-text is common among 51 | at least 2 users. 52 | 53 | modutils --sync --ignore-css --limit=2 baz 54 | 55 | 0. Send a message to approved submitters of subreddit __blah__. You will be 56 | prompted for the message, and asked to verify prior to sending the messages. 57 | 58 | modutils --message contributors --subject "The message subject" blah 59 | 60 | 61 | ## reddit_alert 62 | 63 | reddit_alert will notify you when certain keywords are used in comments. For 64 | instance, to be notified whenever your username is mentioned you might run it 65 | as: 66 | 67 | reddit_alert bboe 68 | 69 | You can receive multiple alerts by specifying multiple keywords separated by 70 | spaces. 
If you want to be alerted for keyphrases (those containing spaces) you 71 | must put quotes around the term: 72 | 73 | reddit_alert bboe praw "reddit api" 74 | 75 | By default reddit_alert will only provide links to the same terminal screen (or 76 | command prompt) it's running in. To be notified via a reddit message specify 77 | the `-m USER` option: 78 | 79 | reddit_alert -m bboe bboe praw "reddit_api" 80 | 81 | When using the `-m USER` you will be prompted to login. 82 | 83 | By default comments from __all__ subreddits are considered. If you want to 84 | restrict the notifications to only a few subreddits use one or more `-s 85 | SUBREDDIT` options: 86 | 87 | reddit_alert -m bboe -s redditdev -s learnpython bboe praw "reddit_api" 88 | 89 | Finally, you may want to ignore notifications from certain users. You can use 90 | the `-I USER` option to ignore comments from a certain user: 91 | 92 | reddit_alert -m bboe -I bizarrobboe bboe 93 | 94 | To see a complete set of available options run: 95 | 96 | reddit_alert --help 97 | 98 | 99 | ## subreddit_stats 100 | 101 | subreddit_stats is a tool to provide basic statistics on a subreddit. 102 | To see the what sort of output subreddit stats generates check out 103 | [/r/subreddit_stats](http://www.reddit.com/r/subreddit_stats). 104 | 105 | The tool will only analyze up to 1,000 submissions. 106 | 107 | ### Preparation 108 | 109 | In order to run subreddit_stats you will need to create a `praw.ini` file in 110 | the same directory that you run the scripts in. This file should look like: 111 | 112 | ``` 113 | [DEFAULT] 114 | client_id: XXX 115 | client_secret: XXX 116 | password: XXX 117 | username: XXX 118 | ``` 119 | 120 | ### subreddit_stats examples 121 | 122 | 0. Generate stats for subreddit __foo__ for the last 30 days. 
def quick_url(comment):
    """Return the URL for the comment without fetching its submission.

    Only ``comment.subreddit.display_name``, ``comment.link_id``, and
    ``comment.id`` are accessed, so no extra network request is made.

    """

    def to_id(fullname):
        # Strip the fullname type prefix (e.g. ``t3_``).
        return fullname.split("_", 1)[1]

    return "http://www.reddit.com/r/{}/comments/{}/_/{}?context=3".format(
        comment.subreddit.display_name, to_id(comment.link_id), comment.id
    )


def main():
    """Provide the entry point into the reddit_alert program."""
    usage = "Usage: %prog [options] KEYWORD..."
    parser = arg_parser(usage=usage)
    parser.add_option(
        "-s",
        "--subreddit",
        action="append",
        help=(
            "When at least one `-s` option is provided "
            "(multiple can be) only alert for comments in the "
            "indicated subreddit(s)."
        ),
    )
    parser.add_option(
        "-I",
        "--ignore-user",
        action="append",
        metavar="USER",
        help=(
            "Ignore comments from the provided user. Can be " "supplied multiple times."
        ),
    )
    parser.add_option(
        "-m",
        "--message",
        metavar="USER",
        help=("When set, send a reddit message to USER with the " "alert."),
    )
    options, args = parser.parse_args()
    if not args:
        parser.error("At least one KEYWORD must be provided.")

    session = praw.Reddit(options.site, check_for_updates=False, user_agent=AGENT)

    if options.message:
        msg_to = session.redditor(options.message)

    check_for_updates(options)

    # Build regex. BUG FIX: pass each keyword through ``re.escape`` so that
    # regex metacharacters inside a keyword (e.g. ``c++``) cannot break or
    # silently alter the compiled pattern.
    args = [x.lower() for x in args]
    reg_prefix = r"(?:^|[^a-z])"  # Any character (or start) can precede
    reg_suffix = r"(?:$|[^a-z])"  # Any character (or end) can follow
    regex = re.compile(
        r"{}({}){}".format(
            reg_prefix, "|".join(re.escape(x) for x in args), reg_suffix
        ),
        re.IGNORECASE,
    )

    # Determine subreddit or multireddit
    if options.subreddit:
        subreddit = "+".join(sorted(options.subreddit))
    else:
        subreddit = "all"

    print("Alerting on:")
    for item in sorted(args):
        print(" * {}".format(item))
    print(
        "using the comment stream: https://www.reddit.com/r/{}/comments".format(
            subreddit
        )
    )

    # Build ignore set
    if options.ignore_user:
        ignore_users = set(x.lower() for x in options.ignore_user)
    else:
        ignore_users = set()

    try:
        for comment in session.subreddit(subreddit).stream.comments():
            if comment.author and comment.author.name.lower() in ignore_users:
                continue
            match = regex.search(comment.body)
            if match:
                keyword = match.group(1).lower()
                url = quick_url(comment)
                print("{}: {}".format(keyword, url))
                if options.message:
                    msg_to.message(
                        "Reddit Alert: {}".format(keyword),
                        "{}\n\nby /u/{}\n\n---\n\n{}".format(
                            url, comment.author, comment.body
                        ),
                    )
    except KeyboardInterrupt:
        sys.stderr.write("\n")
        print("Goodbye!\n")
def clear_empty(self):
    """Remove flair that is not visible or has been set to empty.

    Iterates the subreddit's flair list and deletes the flair entry for any
    user whose flair text and css class are both empty.

    """
    for flair in self.current_flair():
        if not flair["flair_text"] and not flair["flair_css_class"]:
            # BUG FIX: the original called ``self.reddit.flair.update`` but a
            # ``praw.Reddit`` instance has no ``flair`` attribute. Flair is
            # managed through the subreddit, consistent with the rest of this
            # class (``self.sub.flair``); ``delete`` removes the user's flair.
            print(self.sub.flair.delete(flair["user"]))
            print("Removed flair for {0}".format(flair["user"]))
def flair_template_sync(
    self, editable, limit, static, sort, use_css, use_text  # pylint: disable=R0912
):
    """Synchronize templates with flair that already exists on the site.

    :param editable: Indicates that all the options should be editable.
    :param limit: The minimum number of users that must share the flair
        before it is added as a template.
    :param static: A list of flair templates that will always be added.
    :param sort: The order to sort the flair templates.
    :param use_css: Include css in the templates.
    :param use_text: Include text in the templates.

    """
    # Parameter verification
    if not use_text and not use_css:
        raise Exception("At least one of use_text or use_css must be True")
    sorts = ("alpha", "size")
    if sort not in sorts:
        raise Exception("Sort must be one of: {}".format(", ".join(sorts)))

    # Build current flair list along with static values. Static entries are
    # seeded at ``limit`` so they always survive the threshold check below.
    counter = {}
    if static:
        for key in static:
            if use_css and use_text:
                parts = tuple(x.strip() for x in key.split(","))
                if len(parts) != 2:
                    raise Exception(
                        "--static argument {!r} must have two "
                        "parts (comma separated) when using "
                        "both text and css.".format(parts)
                    )
                key = parts
            counter[key] = limit
    if self.verbose:
        sys.stdout.write("Retrieving current flair\n")
        sys.stdout.flush()
    for flair in self.current_flair():
        if self.verbose:
            sys.stdout.write(".")
            sys.stdout.flush()
        if use_text and use_css:
            key = (flair["flair_text"], flair["flair_css_class"])
        elif use_text:
            key = flair["flair_text"]
        else:
            key = flair["flair_css_class"]
        if key in counter:
            counter[key] += 1
        else:
            counter[key] = 1
    if self.verbose:
        print()

    # Sort flair list items according to the specified sort
    if sort == "alpha":
        items = sorted(counter.items())
    else:
        items = sorted(counter.items(), key=lambda x: x[1], reverse=True)

    # Clear current templates and store flair according to the sort
    if self.verbose:
        print("Clearing current flair templates")
    self.sub.flair.templates.clear()
    for key, count in items:
        if not key or count < limit:
            # Skip empty flair and flair shared by fewer than ``limit``
            # users. (BUG FIX: removed a stray debug ``print("a")`` that
            # used to fire on every skipped entry.)
            continue
        if use_text and use_css:
            text, css = key
        elif use_text:
            text, css = key, ""
        else:
            text, css = "", key
        if self.verbose:
            print("Adding template: text: {!r} css: {!r}".format(text, css))
        self.sub.flair.templates.add(text, css, editable)
def main():
    """Provide the entry point into the modutils command."""
    mod_choices = ("banned", "contributor", "moderator")
    mod_choices_dsp = ", ".join(["`{}`".format(x) for x in mod_choices])
    # Help strings for every option, keyed by short name.
    msg = {
        "add": (
            "Add users to one of the following categories: {}".format(mod_choices_dsp)
        ),
        "clear": "Remove users who have no flair set.",
        "css": "Ignore the CSS field when synchronizing flair.",
        "edit": "When adding flair templates, mark them as editable.",
        "file": "The file containing contents for --message",
        "flair": "List flair for the subreddit.",
        "flair_stats": "Display the number of users with each flair.",
        "json": "Output the results as json. Applies to --flair",
        "limit": (
            "The minimum number of users that must have the specified "
            "flair in order to add as a template. default: %default"
        ),
        "list": (
            "List the users in one of the following categories: "
            "{}. May be specified more than once.".format(mod_choices_dsp)
        ),
        "msg": (
            "Send message to users of one of the following categories: "
            "{}. Message subject provided via --subject, content provided "
            "via --file or STDIN."
        ).format(mod_choices_dsp),
        "sort": (
            "The order to add flair templates. Available options are "
            "`alpha` to add alphabetically, and `size` to first add "
            "flair that is shared by the most number of users. "
            "default: %default"
        ),
        "static": (
            "Add this template when syncing flair templates. When "
            "syncing text and css use a comma to separate the two."
        ),
        "subject": "The subject of the message to send for --message.",
        "sync": "Synchronize flair templates with current user flair.",
        "text": "Ignore the text field when synchronizing flair.",
    }

    usage = "Usage: %prog [options] SUBREDDIT"
    parser = arg_parser(usage=usage)
    parser.add_option("-a", "--add", help=msg["add"])
    parser.add_option(
        "-l",
        "--list",
        action="append",
        help=msg["list"],
        choices=mod_choices,
        metavar="CATEGORY",
        default=[],
    )
    parser.add_option("-c", "--clear-empty", action="store_true", help=msg["clear"])
    parser.add_option("-F", "--file", help=msg["file"])
    parser.add_option("-f", "--flair", action="store_true", help=msg["flair"])
    parser.add_option("", "--flair-stats", action="store_true", help=msg["flair_stats"])
    parser.add_option("-m", "--message", choices=mod_choices, help=msg["msg"])
    parser.add_option("", "--subject", help=msg["subject"])

    group = OptionGroup(parser, "Format options")
    group.add_option("-j", "--json", action="store_true", help=msg["json"])
    parser.add_option_group(group)

    group = OptionGroup(parser, "Sync options")
    group.add_option("", "--sync", action="store_true", help=msg["sync"])
    group.add_option("-s", "--static", action="append", help=msg["static"])
    group.add_option("", "--editable", action="store_true", help=msg["edit"])
    group.add_option(
        "", "--ignore-css", action="store_true", default=False, help=msg["css"]
    )
    group.add_option(
        "", "--ignore-text", action="store_true", default=False, help=msg["text"]
    )
    group.add_option("", "--limit", type="int", help=msg["limit"], default=2)
    group.add_option(
        "",
        "--sort",
        action="store",
        choices=("alpha", "size"),
        default="alpha",
        help=msg["sort"],
    )
    parser.add_option_group(group)

    options, args = parser.parse_args()
    # The single positional argument is the subreddit to operate on.
    if not args:
        parser.error("Must provide subreddit name.")
    if options.message and not options.subject:
        parser.error("Must provide --subject when providing --message.")
    subreddit = args[0]

    check_for_updates(options)

    modutils = ModUtils(subreddit, options.site, options.verbose)

    # Dispatch each requested action in a fixed order; multiple actions may
    # be combined on one invocation.
    if options.add:
        modutils.add_users(options.add)
    if options.clear_empty:
        modutils.clear_empty()
    for category in options.list:
        modutils.output_list(category)
    if options.flair:
        modutils.output_current_flair(as_json=options.json)
    if options.flair_stats:
        modutils.output_flair_stats()
    if options.sync:
        modutils.flair_template_sync(
            editable=options.editable,
            limit=options.limit,
            static=options.static,
            sort=options.sort,
            use_css=not options.ignore_css,
            use_text=not options.ignore_text,
        )
    if options.message:
        modutils.message(options.message, options.subject, options.file)
class MiniComment(object):
    """Provides a memory optimized version of a Comment.

    Only the attributes the stats code needs are retained, and
    ``__slots__`` avoids a per-instance ``__dict__``.

    """

    __slots__ = ("author", "created_utc", "id", "score", "submission")

    def __init__(self, comment, submission):
        """Initialize an instance of MiniComment.

        :param comment: The source comment whose attributes are copied.
        :param submission: The submission this comment belongs to.

        """
        self.created_utc = comment.created_utc
        self.id = comment.id
        self.score = comment.score
        # Keep the author as a plain string (None when deleted) so no
        # heavyweight Redditor object stays alive.
        self.author = str(comment.author) if comment.author else None
        self.submission = submission
@staticmethod 88 | def _rate(items, duration): 89 | return 86400.0 * items / duration if duration else items 90 | 91 | @staticmethod 92 | def _safe_title(submission): 93 | """Return titles with whitespace replaced by spaces and stripped.""" 94 | return RE_WHITESPACE.sub(" ", submission.title).strip() 95 | 96 | @staticmethod 97 | def _save_report(title, body): 98 | descriptor, filename = mkstemp(".md", dir=".") 99 | os.close(descriptor) 100 | with codecs.open(filename, "w", "utf-8") as fp: 101 | fp.write("{}\n\n{}".format(title, body)) 102 | logger.info("Report saved to {}".format(filename)) 103 | 104 | @staticmethod 105 | def _user(user): 106 | return "_deleted_" if user is None else tt("/u/{}").format(user) 107 | 108 | def __init__(self, subreddit, site, distinguished, output_subreddit, reddit=None): 109 | """Initialize the SubredditStats instance with config options.""" 110 | self.commenters = defaultdict(list) 111 | self.comments = [] 112 | self.distinguished = distinguished 113 | self.min_date = 0 114 | self.max_date = time.time() - SECONDS_IN_A_DAY 115 | self.reddit = reddit or Reddit(site, check_for_updates=False, user_agent=AGENT) 116 | self.submissions = {} 117 | self.submitters = defaultdict(list) 118 | self.submit_subreddit = self.reddit.subreddit(output_subreddit) 119 | self.subreddit = self.reddit.subreddit(subreddit) 120 | 121 | def basic_stats(self): 122 | """Return a markdown representation of simple statistics.""" 123 | comment_score = sum(comment.score for comment in self.comments) 124 | if self.comments: 125 | comment_duration = ( 126 | self.comments[-1].created_utc - self.comments[0].created_utc 127 | ) 128 | comment_rate = self._rate(len(self.comments), comment_duration) 129 | else: 130 | comment_rate = 0 131 | 132 | submission_duration = self.max_date - self.min_date 133 | submission_rate = self._rate(len(self.submissions), submission_duration) 134 | submission_score = sum(sub.score for sub in self.submissions.values()) 135 | 136 | values = [ 
137 | ("Total", len(self.submissions), len(self.comments)), 138 | ( 139 | "Rate (per day)", 140 | "{:.2f}".format(submission_rate), 141 | "{:.2f}".format(comment_rate), 142 | ), 143 | ("Unique Redditors", len(self.submitters), len(self.commenters)), 144 | ("Combined Score", submission_score, comment_score), 145 | ] 146 | 147 | retval = "Period: {:.2f} days\n\n".format(submission_duration / 86400.0) 148 | retval += "||Submissions|Comments|\n:-:|--:|--:\n" 149 | for quad in values: 150 | retval += "__{}__|{}|{}\n".format(*quad) 151 | return retval + "\n" 152 | 153 | def fetch_recent_submissions(self, max_duration): 154 | """Fetch recent submissions in subreddit with boundaries. 155 | 156 | Does not include posts within the last day as their scores may not be 157 | representative. 158 | 159 | :param max_duration: When set, specifies the number of days to include 160 | 161 | """ 162 | if max_duration: 163 | self.min_date = self.max_date - SECONDS_IN_A_DAY * max_duration 164 | for submission in self.subreddit.new(limit=None): 165 | if submission.created_utc <= self.min_date: 166 | break 167 | if submission.created_utc > self.max_date: 168 | continue 169 | self.submissions[submission.id] = MiniSubmission(submission) 170 | 171 | def fetch_submissions(self, submissions_callback, *args): 172 | """Wrap the submissions_callback function.""" 173 | logger.debug("Fetching submissions") 174 | 175 | submissions_callback(*args) 176 | 177 | logger.info("Found {} submissions".format(len(self.submissions))) 178 | if not self.submissions: 179 | return 180 | 181 | self.min_date = min(x.created_utc for x in self.submissions.values()) 182 | self.max_date = max(x.created_utc for x in self.submissions.values()) 183 | 184 | self.process_submitters() 185 | self.process_commenters() 186 | 187 | def fetch_top_submissions(self, top): 188 | """Fetch top submissions by some top value. 189 | 190 | :param top: One of week, month, year, all 191 | :returns: True if any submissions were found. 
192 | 193 | """ 194 | for submission in self.subreddit.top(limit=None, time_filter=top): 195 | self.submissions[submission.id] = MiniSubmission(submission) 196 | 197 | def process_commenters(self): 198 | """Group comments by author.""" 199 | for index, submission in enumerate(self.submissions.values()): 200 | if submission.num_comments == 0: 201 | continue 202 | real_submission = self.reddit.submission(id=submission.id) 203 | real_submission.comment_sort = "top" 204 | 205 | for i in range(3): 206 | try: 207 | real_submission.comments.replace_more(limit=0) 208 | break 209 | except RequestException: 210 | if i >= 2: 211 | raise 212 | logger.debug( 213 | "Failed to fetch submission {}, retrying".format(submission.id) 214 | ) 215 | 216 | self.comments.extend( 217 | MiniComment(comment, submission) 218 | for comment in real_submission.comments.list() 219 | if self.distinguished or comment.distinguished is None 220 | ) 221 | 222 | if index % 50 == 49: 223 | logger.debug( 224 | "Completed: {:4d}/{} submissions".format( 225 | index + 1, len(self.submissions) 226 | ) 227 | ) 228 | 229 | # Clean up to reduce memory usage 230 | submission = None 231 | gc.collect() 232 | 233 | self.comments.sort(key=lambda x: x.created_utc) 234 | for comment in self.comments: 235 | if comment.author: 236 | self.commenters[comment.author].append(comment) 237 | 238 | def process_submitters(self): 239 | """Group submissions by author.""" 240 | for submission in self.submissions.values(): 241 | if submission.author and ( 242 | self.distinguished or submission.distinguished is None 243 | ): 244 | self.submitters[submission.author].append(submission) 245 | 246 | def publish_results(self, view, submitters, commenters): 247 | """Submit the results to the subreddit. 
        Returns the created Submission on success; returns None when the
        submission fails, in which case the report is saved to a local
        markdown file instead.

        """

        def timef(timestamp, date_only=False):
            """Return a suitable string representation of the timestamp."""
            dtime = datetime.fromtimestamp(timestamp)
            if date_only:
                retval = dtime.strftime("%Y-%m-%d")
            else:
                retval = dtime.strftime("%Y-%m-%d %H:%M PDT")
            return retval

        basic = self.basic_stats()
        top_commenters = self.top_commenters(commenters)
        top_comments = self.top_comments()
        top_submissions = self.top_submissions()

        # Decrease number of top submitters if body is too large.
        # Note the precedence: the loop always runs once (body is None),
        # then repeats while the body exceeds 40000 characters AND there
        # are still submitters left to drop.
        body = None
        while body is None or len(body) > 40000 and submitters > 0:
            body = (
                basic
                + self.top_submitters(submitters)
                + top_commenters
                + top_submissions
                + top_comments
                + self.post_footer
            )
            submitters -= 1

        title = "{} {} {}posts from {} to {}".format(
            self.post_prefix,
            str(self.subreddit),
            "top " if view in TOP_VALUES else "",
            timef(self.min_date, True),
            timef(self.max_date),
        )

        try:  # Attempt to make the submission
            return self.submit_subreddit.submit(title, selftext=body)
        except Exception:
            logger.exception("Failed to submit to {}".format(self.submit_subreddit))
            self._save_report(title, body)

    def run(self, view, submitters, commenters):
        """Run stats and return the created Submission."""
        logger.info("Analyzing subreddit: {}".format(self.subreddit))

        if view in TOP_VALUES:
            callback = self.fetch_top_submissions
        else:
            # A non-top view is a day count for the recent-submissions path.
            callback = self.fetch_recent_submissions
            view = int(view)
        self.fetch_submissions(callback, view)

        if not self.submissions:
            logger.warning("No submissions were found.")
            return

        return self.publish_results(view, submitters, commenters)

    def top_commenters(self, num):
        """Return a markdown representation of the top commenters."""
        num = min(num, len(self.commenters))
        if num <= 0:
            return ""

        # Sort by total score desc, then comment count desc, then name.
        top_commenters = sorted(
            iteritems(self.commenters),
            key=lambda x: (-sum(y.score for y in x[1]), -len(x[1]), str(x[0])),
        )[:num]

        retval = self.post_header.format("Top Commenters")
        for author, comments in top_commenters:
            retval += "1. {} ({}, {} comment{})\n".format(
                self._user(author),
                self._points(sum(x.score for x in comments)),
                len(comments),
                "s" if len(comments) != 1 else "",
            )
        return "{}\n".format(retval)

    def top_submitters(self, num):
        """Return a markdown representation of the top submitters."""
        num = min(num, len(self.submitters))
        if num <= 0:
            return ""

        # Sort by total score desc, then submission count desc, then name.
        top_submitters = sorted(
            iteritems(self.submitters),
            key=lambda x: (-sum(y.score for y in x[1]), -len(x[1]), str(x[0])),
        )[:num]

        retval = self.post_header.format("Top Submitters' Top Submissions")
        for (author, submissions) in top_submitters:
            retval += "1. {}, {} submission{}: {}\n".format(
                self._points(sum(x.score for x in submissions)),
                len(submissions),
                "s" if len(submissions) != 1 else "",
                self._user(author),
            )
            for sub in sorted(submissions, key=lambda x: (-x.score, x.title))[:10]:
                title = self._safe_title(sub)
                # Self posts have their permalink embedded in url; link
                # posts get the external url as the markdown target.
                if sub.permalink in sub.url:
                    retval += tt(" 1. {}").format(title)
                else:
                    retval += tt(" 1. [{}]({})").format(title, sub.url)
                retval += " ({}, [{} comment{}]({}))\n".format(
                    self._points(sub.score),
                    sub.num_comments,
                    "s" if sub.num_comments != 1 else "",
                    self._permalink(sub),
                )
            retval += "\n"
        return retval

    def top_submissions(self):
        """Return a markdown representation of the top submissions."""
        num = min(10, len(self.submissions))
        if num <= 0:
            return ""

        top_submissions = sorted(
            [
                x
                for x in self.submissions.values()
                if self.distinguished or x.distinguished is None
            ],
            key=lambda x: (-x.score, -x.num_comments, x.title),
        )[:num]

        # Filtering distinguished posts may have emptied the list.
        if not top_submissions:
            return ""

        retval = self.post_header.format("Top Submissions")
        for sub in top_submissions:
            title = self._safe_title(sub)
            if sub.permalink in sub.url:
                retval += tt("1. {}").format(title)
            else:
                retval += tt("1. [{}]({})").format(title, sub.url)

            retval += " by {} ({}, [{} comment{}]({}))\n".format(
                self._user(sub.author),
                self._points(sub.score),
                sub.num_comments,
                "s" if sub.num_comments != 1 else "",
                self._permalink(sub),
            )
        return tt("{}\n").format(retval)

    def top_comments(self):
        """Return a markdown representation of the top comments."""
        num = min(10, len(self.comments))
        if num <= 0:
            return ""

        top_comments = sorted(self.comments, key=lambda x: (-x.score, str(x.author)))[
            :num
        ]
        retval = self.post_header.format("Top Comments")
        for comment in top_comments:
            title = self._safe_title(comment.submission)
            retval += tt("1. 
{}: {}'s [comment]({}) in {}\n").format( 410 | self._points(comment.score), 411 | self._user(comment.author), 412 | self._permalink(comment), 413 | title, 414 | ) 415 | return tt("{}\n").format(retval) 416 | 417 | 418 | def main(): 419 | """Provide the entry point to the subreddit_stats command.""" 420 | parser = arg_parser(usage="usage: %prog [options] SUBREDDIT VIEW") 421 | parser.add_option( 422 | "-c", 423 | "--commenters", 424 | type="int", 425 | default=10, 426 | help="Number of top commenters to display " "[default %default]", 427 | ) 428 | parser.add_option( 429 | "-d", 430 | "--distinguished", 431 | action="store_true", 432 | help=( 433 | "Include distinguished subissions and " 434 | "comments (default: False). Note that regular " 435 | "comments of distinguished submissions will still " 436 | "be included." 437 | ), 438 | ) 439 | parser.add_option( 440 | "-s", 441 | "--submitters", 442 | type="int", 443 | default=10, 444 | help="Number of top submitters to display " "[default %default]", 445 | ) 446 | parser.add_option( 447 | "-o", 448 | "--output", 449 | type="string", 450 | default="subreddit_stats", 451 | help="Subreddit to publish results to " "[default %default]", 452 | ) 453 | 454 | options, args = parser.parse_args() 455 | 456 | if options.verbose == 1: 457 | logger.setLevel(logging.INFO) 458 | elif options.verbose > 1: 459 | logger.setLevel(logging.DEBUG) 460 | else: 461 | logger.setLevel(logging.NOTSET) 462 | logger.addHandler(logging.StreamHandler()) 463 | 464 | if len(args) != 2: 465 | parser.error("SUBREDDIT and VIEW must be provided") 466 | subreddit, view = args 467 | check_for_updates(options) 468 | srs = SubredditStats(subreddit, options.site, options.distinguished, options.output) 469 | result = srs.run(view, options.submitters, options.commenters) 470 | if result: 471 | print(result.permalink) 472 | return 0 473 | -------------------------------------------------------------------------------- /setup.py: 
"""prawtools setup.py."""

import re
from codecs import open
from os import path
from setuptools import setup


PACKAGE_NAME = "prawtools"
HERE = path.abspath(path.dirname(__file__))
# The long description is taken verbatim from the README.
with open(path.join(HERE, "README.md"), encoding="utf-8") as fp:
    README = fp.read()
# Extract __version__ from the package source without importing it.
with open(path.join(HERE, PACKAGE_NAME, "__init__.py"), encoding="utf-8") as fp:
    VERSION = re.search('__version__ = "([^"]+)', fp.read()).group(1)


# Optional dependency groups, installable via e.g. ``pip install .[test]``.
extras = {
    "ci": ["coveralls"],
    "lint": ["black", "flake8", "pydocstyle"],
    "test": [
        "betamax >=0.7.1, <0.8",
        "betamax-serializers >=0.2.0, <0.3",
        "mock ==1.0.1",
        "pytest",
    ],
}
required = ["praw >=4.0.0, <7", "six >=1, <2"]


setup(
    name=PACKAGE_NAME,
    author="Bryce Boe",
    author_email="bbzbryce@gmail.com",
    classifiers=[
        "Environment :: Console",
        "Intended Audience :: Developers",
        "License :: OSI Approved :: BSD License",
        "Natural Language :: English",
        "Operating System :: OS Independent",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.5",
        "Programming Language :: Python :: 3.6",
        "Programming Language :: Python :: 3.7",
        "Programming Language :: Python :: 3.8",
        "Topic :: Utilities",
    ],
    description="A collection of utilities that utilize the reddit API.",
    # Console-script entry points exposed on install.
    entry_points={
        "console_scripts": [
            "modutils = prawtools.mod:main",
            "reddit_alert = prawtools.alert:main",
            "subreddit_stats = prawtools.stats:main",
        ]
    },
    extras_require=extras,
    install_requires=required,
    keywords="reddit mod moderator subreddit statistics tools",
    license="Simplified BSD License",
    long_description=README,
    packages=[PACKAGE_NAME],
    url="https://github.com/praw-dev/prawtools",
    version=VERSION,
) 64 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- 1 | """Test prawtools.""" 2 | import unittest 3 | 4 | from betamax import Betamax 5 | 6 | 7 | class IntegrationTest(unittest.TestCase): 8 | """Base class for prawtools integration tests.""" 9 | 10 | def setUp(self, http): 11 | """Run before all test cases.""" 12 | self.recorder = Betamax(http) 13 | 14 | # Disable response compression in order to see the response bodies in 15 | # the betamax cassettes. 16 | http.headers["Accept-Encoding"] = "identity" 17 | -------------------------------------------------------------------------------- /tests/cassettes/StatsTest.top.json: -------------------------------------------------------------------------------- 1 | { 2 | "http_interactions": [ 3 | { 4 | "recorded_at": "2016-07-27T15:44:47", 5 | "request": { 6 | "body": { 7 | "encoding": "utf-8", 8 | "string": "grant_type=password&password=&username=" 9 | }, 10 | "headers": { 11 | "Accept": "*/*", 12 | "Accept-Encoding": "identity", 13 | "Authorization": "Basic ", 14 | "Connection": "keep-alive", 15 | "Content-Length": "57", 16 | "Content-Type": "application/x-www-form-urlencoded", 17 | "User-Agent": "prawtools/0.19.2 PRAW/4.0.0b11 prawcore/0.0.14" 18 | }, 19 | "method": "POST", 20 | "uri": "https://www.reddit.com/api/v1/access_token" 21 | }, 22 | "response": { 23 | "body": { 24 | "encoding": "UTF-8", 25 | "string": "{\"access_token\": \"SEemGYK4e4WSBpjE3K0XZW1fSkA\", \"token_type\": \"bearer\", \"expires_in\": 3600, \"scope\": \"*\"}" 26 | }, 27 | "headers": { 28 | "CF-RAY": "2c9136f70f60226a-LAX", 29 | "Connection": "keep-alive", 30 | "Content-Length": "105", 31 | "Content-Type": "application/json; charset=UTF-8", 32 | "Date": "Wed, 27 Jul 2016 15:44:47 GMT", 33 | "Server": "cloudflare-nginx", 34 | "Set-Cookie": "__cfduid=d6b8ad51e17ce9f8aab973dc63fc236631469634287; expires=Thu, 27-Jul-17 
15:44:47 GMT; path=/; domain=.reddit.com; HttpOnly", 35 | "Strict-Transport-Security": "max-age=15552000; includeSubDomains; preload", 36 | "X-Moose": "majestic", 37 | "cache-control": "max-age=0, must-revalidate", 38 | "x-content-type-options": "nosniff", 39 | "x-frame-options": "SAMEORIGIN", 40 | "x-xss-protection": "1; mode=block" 41 | }, 42 | "status": { 43 | "code": 200, 44 | "message": "OK" 45 | }, 46 | "url": "https://www.reddit.com/api/v1/access_token" 47 | } 48 | }, 49 | { 50 | "recorded_at": "2016-07-27T15:44:48", 51 | "request": { 52 | "body": { 53 | "encoding": "utf-8", 54 | "string": "" 55 | }, 56 | "headers": { 57 | "Accept": "*/*", 58 | "Accept-Encoding": "identity", 59 | "Authorization": "bearer SEemGYK4e4WSBpjE3K0XZW1fSkA", 60 | "Connection": "keep-alive", 61 | "Cookie": "__cfduid=d6b8ad51e17ce9f8aab973dc63fc236631469634287", 62 | "User-Agent": "prawtools/0.19.2 PRAW/4.0.0b11 prawcore/0.0.14" 63 | }, 64 | "method": "GET", 65 | "uri": "https://oauth.reddit.com/r/redditdev/top?raw_json=1&limit=1024&t=week" 66 | }, 67 | "response": { 68 | "body": { 69 | "encoding": "UTF-8", 70 | "string": "{\"kind\": \"Listing\", \"data\": {\"modhash\": null, \"children\": [{\"kind\": \"t3\", \"data\": {\"domain\": \"self.redditdev\", \"banned_by\": null, \"media_embed\": {}, \"subreddit\": \"redditdev\", \"selftext_html\": \"\\u003C!-- SC_OFF --\\u003E\\u003Cdiv class=\\\"md\\\"\\u003E\\u003Cp\\u003EThis is post is what it says in the title, is there any w ay to create a new reddit user via Praw or via the Reddit API in general?\\u003C/p\\u003E\\n\\u003C/div\\u003E\\u003C!-- SC_ON --\\u003E\", \"selftext\": \"This is post is what it says in the title, is there any w ay to create a new reddit user via Praw or via the Reddit API in general?\", \"likes\": null, \"suggested_sort\": null, \"user_reports\": [], \"secure_media\": null, \"link_flair_text\": null, \"id\": \"4u9qr0\", \"from_kind\": null, \"gilded\": 0, \"archived\": false, \"clicked\": false, 
\"report_reasons\": null, \"author\": \"FlammableMarshmallow\", \"media\": null, \"score\": 3, \"approved_by\": null, \"over_18\": false, \"hidden\": false, \"num_comments\": 5, \"thumbnail\": \"\", \"subreddit_id\": \"t5_2qizd\", \"hide_score\": false, \"edited\": false, \"link_flair_css_class\": null, \"author_flair_css_class\": null, \"downs\": 0, \"secure_media_embed\": {}, \"saved\": false, \"removal_reason\": null, \"stickied\": false, \"from\": null, \"is_self\": true, \"from_id\": null, \"permalink\": \"/r/redditdev/comments/4u9qr0/praw3_create_a_new_reddit_user/\", \"locked\": false, \"name\": \"t3_4u9qr0\", \"created\": 1469331882.0, \"url\": \"https://www.reddit.com/r/redditdev/comments/4u9qr0/praw3_create_a_new_reddit_user/\", \"author_flair_text\": null, \"quarantine\": false, \"title\": \"[PRAW3] Create a new reddit user?\", \"created_utc\": 1469303082.0, \"distinguished\": null, \"mod_reports\": [], \"visited\": false, \"num_reports\": null, \"ups\": 3}}, {\"kind\": \"t3\", \"data\": {\"domain\": \"self.redditdev\", \"banned_by\": null, \"media_embed\": {}, \"subreddit\": \"redditdev\", \"selftext_html\": \"\\u003C!-- SC_OFF --\\u003E\\u003Cdiv class=\\\"md\\\"\\u003E\\u003Cp\\u003EIs it okay to use Snoo in Reddit App? Does it violate copyright? \\u003C/p\\u003E\\n\\n\\u003Cp\\u003EI couldn\\u0026#39;t find any relevant doc for the same. If you could point it out, that would be great. \\u003C/p\\u003E\\n\\n\\u003Cp\\u003EMany Thanks :)\\u003C/p\\u003E\\n\\u003C/div\\u003E\\u003C!-- SC_ON --\\u003E\", \"selftext\": \"Is it okay to use Snoo in Reddit App? Does it violate copyright? \\n\\nI couldn't find any relevant doc for the same. If you could point it out, that would be great. 
\\n\\nMany Thanks :)\", \"likes\": null, \"suggested_sort\": null, \"user_reports\": [], \"secure_media\": null, \"link_flair_text\": null, \"id\": \"4ut39s\", \"from_kind\": null, \"gilded\": 0, \"archived\": false, \"clicked\": false, \"report_reasons\": null, \"author\": \"anonuser404\", \"media\": null, \"score\": 3, \"approved_by\": null, \"over_18\": false, \"hidden\": false, \"num_comments\": 6, \"thumbnail\": \"\", \"subreddit_id\": \"t5_2qizd\", \"hide_score\": false, \"edited\": false, \"link_flair_css_class\": null, \"author_flair_css_class\": null, \"downs\": 0, \"secure_media_embed\": {}, \"saved\": false, \"removal_reason\": null, \"stickied\": false, \"from\": null, \"is_self\": true, \"from_id\": null, \"permalink\": \"/r/redditdev/comments/4ut39s/can_we_use_photo_of_snoo_in_app/\", \"locked\": false, \"name\": \"t3_4ut39s\", \"created\": 1469626637.0, \"url\": \"https://www.reddit.com/r/redditdev/comments/4ut39s/can_we_use_photo_of_snoo_in_app/\", \"author_flair_text\": null, \"quarantine\": false, \"title\": \"Can we use photo of Snoo in App?\", \"created_utc\": 1469597837.0, \"distinguished\": null, \"mod_reports\": [], \"visited\": false, \"num_reports\": null, \"ups\": 3}}, {\"kind\": \"t3\", \"data\": {\"domain\": \"self.redditdev\", \"banned_by\": null, \"media_embed\": {}, \"subreddit\": \"redditdev\", \"selftext_html\": \"\\u003C!-- SC_OFF --\\u003E\\u003Cdiv class=\\\"md\\\"\\u003E\\u003Cp\\u003EHey there.\\u003C/p\\u003E\\n\\n\\u003Cp\\u003EI\\u0026#39;m trying to build a bot (on this account), and I intend to make it send PMs to users (which I specify). However, when trying to run a script to do this, the program breaks when authenticating the user. 
This is the part that doesn\\u0026#39;t work:\\u003C/p\\u003E\\n\\n\\u003Cpre\\u003E\\u003Ccode\\u003Er.set_oauth_app_info(client_id, client_secret, redirect_uri)\\n\\n#url = r.get_authorize_url(\\u0026#39;uniqueKey\\u0026#39;, \\u0026#39;identity\\u0026#39;, True)\\n#webbrowser.open(url)\\naccess_information = r.get_access_information(\\u0026#39;key I got from the url\\u0026#39;)\\n\\nauthenticated_user = r.get_me()\\nprint authenticated_user.name, authenticated_user.link_karma \\n\\u003C/code\\u003E\\u003C/pre\\u003E\\n\\n\\u003Cp\\u003EBasically, I run the part where the webrowser opens once, to get the code and use it in the \\u0026quot;access_information\\u0026quot;. The script runs once, and it works! But it doesn\\u0026#39;t work the second time.\\u003C/p\\u003E\\n\\n\\u003Cp\\u003EAny ideias? Thanks. \\u003C/p\\u003E\\n\\u003C/div\\u003E\\u003C!-- SC_ON --\\u003E\", \"selftext\": \"Hey there.\\n\\nI'm trying to build a bot (on this account), and I intend to make it send PMs to users (which I specify). However, when trying to run a script to do this, the program breaks when authenticating the user. This is the part that doesn't work:\\n\\n r.set_oauth_app_info(client_id, client_secret, redirect_uri)\\n\\n #url = r.get_authorize_url('uniqueKey', 'identity', True)\\n #webbrowser.open(url)\\n access_information = r.get_access_information('key I got from the url')\\n\\n authenticated_user = r.get_me()\\n print authenticated_user.name, authenticated_user.link_karma \\n\\nBasically, I run the part where the webrowser opens once, to get the code and use it in the \\\"access_information\\\". The script runs once, and it works! But it doesn't work the second time.\\n\\nAny ideias? Thanks. 
\", \"likes\": null, \"suggested_sort\": null, \"user_reports\": [], \"secure_media\": null, \"link_flair_text\": null, \"id\": \"4uk6vk\", \"from_kind\": null, \"gilded\": 0, \"archived\": false, \"clicked\": false, \"report_reasons\": null, \"author\": \"RecentNewsBot\", \"media\": null, \"score\": 3, \"approved_by\": null, \"over_18\": false, \"hidden\": false, \"num_comments\": 11, \"thumbnail\": \"\", \"subreddit_id\": \"t5_2qizd\", \"hide_score\": false, \"edited\": false, \"link_flair_css_class\": null, \"author_flair_css_class\": null, \"downs\": 0, \"secure_media_embed\": {}, \"saved\": false, \"removal_reason\": null, \"stickied\": false, \"from\": null, \"is_self\": true, \"from_id\": null, \"permalink\": \"/r/redditdev/comments/4uk6vk/having_some_issues_with_oauth2_rgetmename_only/\", \"locked\": false, \"name\": \"t3_4uk6vk\", \"created\": 1469500654.0, \"url\": \"https://www.reddit.com/r/redditdev/comments/4uk6vk/having_some_issues_with_oauth2_rgetmename_only/\", \"author_flair_text\": null, \"quarantine\": false, \"title\": \"Having some issues with OAuth2: (r.getme()).name only works once.\", \"created_utc\": 1469471854.0, \"distinguished\": null, \"mod_reports\": [], \"visited\": false, \"num_reports\": null, \"ups\": 3}}, {\"kind\": \"t3\", \"data\": {\"domain\": \"self.redditdev\", \"banned_by\": null, \"media_embed\": {}, \"subreddit\": \"redditdev\", \"selftext_html\": \"\\u003C!-- SC_OFF --\\u003E\\u003Cdiv class=\\\"md\\\"\\u003E\\u003Cp\\u003EHere is link \\u003Ca href=\\\"http://pythonforengineers.com/build-a-reddit-bot-part-1/\\\"\\u003Ehttp://pythonforengineers.com/build-a-reddit-bot-part-1/\\u003C/a\\u003E\\u003C/p\\u003E\\n\\n\\u003Cp\\u003ESpecific point: \\u003Ca href=\\\"http://puu.sh/qcZJJ/83d56b9131.png\\\"\\u003Ehttp://puu.sh/qcZJJ/83d56b9131.png\\u003C/a\\u003E\\u003C/p\\u003E\\n\\n\\u003Cp\\u003EBasically I have a bot that copies and stores comments from /all, it has already collected 200,000 comments and it\\u0026#39;s been 
running for about an hour. My question is am I allowed to do this this fast? Or is this bot making too many calls?\\u003C/p\\u003E\\n\\n\\u003Cp\\u003EI can supply the code if needs be.\\u003C/p\\u003E\\n\\u003C/div\\u003E\\u003C!-- SC_ON --\\u003E\", \"selftext\": \"Here is link http://pythonforengineers.com/build-a-reddit-bot-part-1/\\n\\nSpecific point: http://puu.sh/qcZJJ/83d56b9131.png\\n\\nBasically I have a bot that copies and stores comments from /all, it has already collected 200,000 comments and it's been running for about an hour. My question is am I allowed to do this this fast? Or is this bot making too many calls?\\n\\nI can supply the code if needs be.\", \"likes\": null, \"suggested_sort\": null, \"user_reports\": [], \"secure_media\": null, \"link_flair_text\": null, \"id\": \"4ueovt\", \"from_kind\": null, \"gilded\": 0, \"archived\": false, \"clicked\": false, \"report_reasons\": null, \"author\": \"MatthewBetts\", \"media\": null, \"score\": 3, \"approved_by\": null, \"over_18\": false, \"hidden\": false, \"num_comments\": 7, \"thumbnail\": \"\", \"subreddit_id\": \"t5_2qizd\", \"hide_score\": false, \"edited\": false, \"link_flair_css_class\": null, \"author_flair_css_class\": null, \"downs\": 0, \"secure_media_embed\": {}, \"saved\": false, \"removal_reason\": null, \"stickied\": false, \"from\": null, \"is_self\": true, \"from_id\": null, \"permalink\": \"/r/redditdev/comments/4ueovt/does_the_rate_limit_mentioned_this_bot_tutorial/\", \"locked\": false, \"name\": \"t3_4ueovt\", \"created\": 1469418459.0, \"url\": \"https://www.reddit.com/r/redditdev/comments/4ueovt/does_the_rate_limit_mentioned_this_bot_tutorial/\", \"author_flair_text\": null, \"quarantine\": false, \"title\": \"Does the rate limit mentioned this bot tutorial (link in comments) mean just comments or is it any sort of call?\", \"created_utc\": 1469389659.0, \"distinguished\": null, \"mod_reports\": [], \"visited\": false, \"num_reports\": null, \"ups\": 3}}, {\"kind\": 
\"t3\", \"data\": {\"domain\": \"self.redditdev\", \"banned_by\": null, \"media_embed\": {}, \"subreddit\": \"redditdev\", \"selftext_html\": \"\\u003C!-- SC_OFF --\\u003E\\u003Cdiv class=\\\"md\\\"\\u003E\\u003Cp\\u003EHelp, instead of getting more comments thread.replace_more_comments(limit=None, threshold=0) just removes the more comments section altogether. So \\u003Ca href=\\\"https://www.reddit.com/r/Thread_crawler/comments/4u2jlt/comment_test/\\\"\\u003Ehere\\u003C/a\\u003E it only shows up to \\u0026quot;This is my reply. There are many like it, but this one is mine.9\\u0026quot;\\u003C/p\\u003E\\n\\u003C/div\\u003E\\u003C!-- SC_ON --\\u003E\", \"selftext\": \"Help, instead of getting more comments thread.replace_more_comments(limit=None, threshold=0) just removes the more comments section altogether. So [here](https://www.reddit.com/r/Thread_crawler/comments/4u2jlt/comment_test/) it only shows up to \\\"This is my reply. There are many like it, but this one is mine.9\\\"\", \"likes\": null, \"suggested_sort\": null, \"user_reports\": [], \"secure_media\": null, \"link_flair_text\": null, \"id\": \"4u3zf0\", \"from_kind\": null, \"gilded\": 0, \"archived\": false, \"clicked\": false, \"report_reasons\": null, \"author\": \"Anatoly_Korenchkin\", \"media\": null, \"score\": 3, \"approved_by\": null, \"over_18\": false, \"hidden\": false, \"num_comments\": 13, \"thumbnail\": \"\", \"subreddit_id\": \"t5_2qizd\", \"hide_score\": false, \"edited\": false, \"link_flair_css_class\": null, \"author_flair_css_class\": null, \"downs\": 0, \"secure_media_embed\": {}, \"saved\": false, \"removal_reason\": null, \"stickied\": false, \"from\": null, \"is_self\": true, \"from_id\": null, \"permalink\": \"/r/redditdev/comments/4u3zf0/replace_more_comments_just_removes_more_comments/\", \"locked\": false, \"name\": \"t3_4u3zf0\", \"created\": 1469238108.0, \"url\": \"https://www.reddit.com/r/redditdev/comments/4u3zf0/replace_more_comments_just_removes_more_comments/\", 
\"author_flair_text\": null, \"quarantine\": false, \"title\": \"replace_more_comments() just removes more comments.\", \"created_utc\": 1469209308.0, \"distinguished\": null, \"mod_reports\": [], \"visited\": false, \"num_reports\": null, \"ups\": 3}}, {\"kind\": \"t3\", \"data\": {\"domain\": \"self.redditdev\", \"banned_by\": null, \"media_embed\": {}, \"subreddit\": \"redditdev\", \"selftext_html\": \"\\u003C!-- SC_OFF --\\u003E\\u003Cdiv class=\\\"md\\\"\\u003E\\u003Cp\\u003EI am trying to copy the list of multireddits from one account to another.\\nI imagine that the way to achieve this would be :\\u003C/p\\u003E\\n\\n\\u003Cpre\\u003E\\u003Ccode\\u003E storage = {}\\n multis = reddit_client.get_my_multireddits( )\\n for multi in multis:\\n multireddit = multi.display_name\\n subs = multi.get_subreddits() # I know this does not exist today\\n storage[multireddit] = subs\\n\\n # Here comes the code to create a multireddit in new account with the list of subs\\n\\u003C/code\\u003E\\u003C/pre\\u003E\\n\\n\\u003Cp\\u003EI am looking for ideas/pointers on how to implement that function get_subreddits() to get list of subreddits in my multireddit or to achieve equivalent functionality otherwise. \\nThank you in advance.\\u003C/p\\u003E\\n\\u003C/div\\u003E\\u003C!-- SC_ON --\\u003E\", \"selftext\": \"I am trying to copy the list of multireddits from one account to another.\\nI imagine that the way to achieve this would be :\\n\\n storage = {}\\n multis = reddit_client.get_my_multireddits( )\\n for multi in multis:\\n multireddit = multi.display_name\\n subs = multi.get_subreddits() # I know this does not exist today\\n storage[multireddit] = subs\\n\\n # Here comes the code to create a multireddit in new account with the list of subs\\n\\nI am looking for ideas/pointers on how to implement that function get_subreddits() to get list of subreddits in my multireddit or to achieve equivalent functionality otherwise. 
\\nThank you in advance.\\n\", \"likes\": null, \"suggested_sort\": null, \"user_reports\": [], \"secure_media\": null, \"link_flair_text\": null, \"id\": \"4ufsav\", \"from_kind\": null, \"gilded\": 0, \"archived\": false, \"clicked\": false, \"report_reasons\": null, \"author\": \"Have_No_Name\", \"media\": null, \"score\": 1, \"approved_by\": null, \"over_18\": false, \"hidden\": false, \"num_comments\": 4, \"thumbnail\": \"\", \"subreddit_id\": \"t5_2qizd\", \"hide_score\": false, \"edited\": false, \"link_flair_css_class\": null, \"author_flair_css_class\": null, \"downs\": 0, \"secure_media_embed\": {}, \"saved\": false, \"removal_reason\": null, \"stickied\": false, \"from\": null, \"is_self\": true, \"from_id\": null, \"permalink\": \"/r/redditdev/comments/4ufsav/praw_getting_a_list_of_subreddits_in_a_multireddit/\", \"locked\": false, \"name\": \"t3_4ufsav\", \"created\": 1469433575.0, \"url\": \"https://www.reddit.com/r/redditdev/comments/4ufsav/praw_getting_a_list_of_subreddits_in_a_multireddit/\", \"author_flair_text\": null, \"quarantine\": false, \"title\": \"[PRAW] Getting a list of subreddits in a multireddit\", \"created_utc\": 1469404775.0, \"distinguished\": null, \"mod_reports\": [], \"visited\": false, \"num_reports\": null, \"ups\": 1}}, {\"kind\": \"t3\", \"data\": {\"domain\": \"self.redditdev\", \"banned_by\": null, \"media_embed\": {}, \"subreddit\": \"redditdev\", \"selftext_html\": \"\\u003C!-- SC_OFF --\\u003E\\u003Cdiv class=\\\"md\\\"\\u003E\\u003Cp\\u003EBeyond the previous year that is listed under /about/traffic\\u003C/p\\u003E\\n\\u003C/div\\u003E\\u003C!-- SC_ON --\\u003E\", \"selftext\": \"Beyond the previous year that is listed under /about/traffic\", \"likes\": null, \"suggested_sort\": null, \"user_reports\": [], \"secure_media\": null, \"link_flair_text\": null, \"id\": \"4uesuh\", \"from_kind\": null, \"gilded\": 0, \"archived\": false, \"clicked\": false, \"report_reasons\": null, \"author\": \"theZcuber\", \"media\": 
null, \"score\": 1, \"approved_by\": null, \"over_18\": false, \"hidden\": false, \"num_comments\": 3, \"thumbnail\": \"\", \"subreddit_id\": \"t5_2qizd\", \"hide_score\": false, \"edited\": false, \"link_flair_css_class\": null, \"author_flair_css_class\": null, \"downs\": 0, \"secure_media_embed\": {}, \"saved\": false, \"removal_reason\": null, \"stickied\": false, \"from\": null, \"is_self\": true, \"from_id\": null, \"permalink\": \"/r/redditdev/comments/4uesuh/is_it_possible_to_get_old_traffic_data/\", \"locked\": false, \"name\": \"t3_4uesuh\", \"created\": 1469419886.0, \"url\": \"https://www.reddit.com/r/redditdev/comments/4uesuh/is_it_possible_to_get_old_traffic_data/\", \"author_flair_text\": null, \"quarantine\": false, \"title\": \"Is it possible to get old traffic data?\", \"created_utc\": 1469391086.0, \"distinguished\": null, \"mod_reports\": [], \"visited\": false, \"num_reports\": null, \"ups\": 1}}, {\"kind\": \"t3\", \"data\": {\"domain\": \"self.redditdev\", \"banned_by\": null, \"media_embed\": {}, \"subreddit\": \"redditdev\", \"selftext_html\": \"\\u003C!-- SC_OFF --\\u003E\\u003Cdiv class=\\\"md\\\"\\u003E\\u003Cp\\u003EI am using the replace_more_comments function to get comments.\\u003C/p\\u003E\\n\\n\\u003Cpre\\u003E\\u003Ccode\\u003Esubmission = r.get_submission(\\u0026#39;https://www.reddit.com/r/redditdev/comments/4tdnj7/responsive_auth_page_for_mobile_apps/?sort=qa\\u0026#39;)\\n#submission.replace_more_comments(limit=16,threshold=10)\\nsubmission.replace_more_comments(limit=None,threshold=10) \\n\\u003C/code\\u003E\\u003C/pre\\u003E\\n\\n\\u003Cp\\u003EHowever all of the comments dont seem to be ordered by QnA. 
\\u003C/p\\u003E\\n\\n\\u003Cpre\\u003E\\u003Ccode\\u003Efor comment in flat_comments:\\n #print \\u0026quot;comment\\u0026quot; + str(x)\\n #print comment\\n if comment.body != \\u0026#39;\\u0026#39;:\\n newfile.write(\\u0026quot;author is \\u0026quot; +str(len(str(comment.author))))\\n f.write(str(comment.body.encode(\\u0026quot;utf-8\\u0026quot;)))\\n for author in str(comment.author):\\n multis = r.get_multireddits(str(comment.author))\\n\\u003C/code\\u003E\\u003C/pre\\u003E\\n\\n\\u003Cp\\u003EAm I missing something here ?\\u003C/p\\u003E\\n\\u003C/div\\u003E\\u003C!-- SC_ON --\\u003E\", \"selftext\": \"I am using the replace_more_comments function to get comments.\\n\\n submission = r.get_submission('https://www.reddit.com/r/redditdev/comments/4tdnj7/responsive_auth_page_for_mobile_apps/?sort=qa')\\n #submission.replace_more_comments(limit=16,threshold=10)\\n submission.replace_more_comments(limit=None,threshold=10) \\n\\nHowever all of the comments dont seem to be ordered by QnA. 
\\n\\n for comment in flat_comments:\\n \\t\\t\\t#print \\\"comment\\\" + str(x)\\n \\t\\t\\t#print comment\\n \\tif comment.body != '':\\n \\t\\t\\tnewfile.write(\\\"author is \\\" +str(len(str(comment.author))))\\n \\t\\t\\tf.write(str(comment.body.encode(\\\"utf-8\\\")))\\n \\t\\t\\tfor author in str(comment.author):\\n \\t\\t\\t\\tmultis = r.get_multireddits(str(comment.author))\\n\\nAm I missing something here ?\", \"likes\": null, \"suggested_sort\": null, \"user_reports\": [], \"secure_media\": null, \"link_flair_text\": null, \"id\": \"4ue8ie\", \"from_kind\": null, \"gilded\": 0, \"archived\": false, \"clicked\": false, \"report_reasons\": null, \"author\": \"adionahigh\", \"media\": null, \"score\": 1, \"approved_by\": null, \"over_18\": false, \"hidden\": false, \"num_comments\": 1, \"thumbnail\": \"\", \"subreddit_id\": \"t5_2qizd\", \"hide_score\": false, \"edited\": false, \"link_flair_css_class\": null, \"author_flair_css_class\": null, \"downs\": 0, \"secure_media_embed\": {}, \"saved\": false, \"removal_reason\": null, \"stickied\": false, \"from\": null, \"is_self\": true, \"from_id\": null, \"permalink\": \"/r/redditdev/comments/4ue8ie/how_do_you_get_the_comments_in_a_qna_format/\", \"locked\": false, \"name\": \"t3_4ue8ie\", \"created\": 1469412350.0, \"url\": \"https://www.reddit.com/r/redditdev/comments/4ue8ie/how_do_you_get_the_comments_in_a_qna_format/\", \"author_flair_text\": null, \"quarantine\": false, \"title\": \"How do you get the comments in a QnA format ?\", \"created_utc\": 1469383550.0, \"distinguished\": null, \"mod_reports\": [], \"visited\": false, \"num_reports\": null, \"ups\": 1}}, {\"kind\": \"t3\", \"data\": {\"domain\": \"self.redditdev\", \"banned_by\": null, \"media_embed\": {}, \"subreddit\": \"redditdev\", \"selftext_html\": \"\\u003C!-- SC_OFF --\\u003E\\u003Cdiv class=\\\"md\\\"\\u003E\\u003Cpre\\u003E\\u003Ccode\\u003Epublic function createStory($kind = null,$title = null, $subreddit = null, $captcha = null, $iden 
= null, $link = null, $text=null){\\n $urlSubmit = \\u0026quot;{$this-\\u0026gt;apiHost}/api/submit\\u0026quot;;\\n\\n //data checks and pre-setup\\n if ($title == null || $subreddit == null){ return null; }\\n $kind = ($link == null) ? \\u0026quot;self\\u0026quot; : \\u0026quot;link\\u0026quot;;\\n\\n $postData = sprintf(\\u0026quot;kind=%s\\u0026amp;sr=%s\\u0026amp;title=%s\\u0026amp;captcha=%s\\u0026amp;iden=%s\\u0026quot;,\\n $kind,\\n $subreddit,\\n urlencode($title),\\n $captcha,\\n $iden\\n );\\n\\n //if link was present, add to POST data\\n if ($link != null){ $postData .= \\u0026quot;\\u0026amp;url=\\u0026quot; . urlencode($link); }\\n if ($text != null){ $postData .= \\u0026quot;\\u0026amp;text=\\u0026quot; . urlencode($text); }\\n\\n $response = $this-\\u0026gt;runCurl($urlSubmit, $postData);\\n//Example of my request\\n//$urlSubmit -\\u0026gt; string(35) \\u0026quot;https://oauth.reddit.com/api/submit\\u0026quot;\\n//$postData -\\u0026gt; string(81) \\u0026quot;kind=self\\u0026amp;sr=test\\u0026amp;title=text\\u0026amp;captcha=TPVENT\\u0026amp;iden=rrYfz6Wmc870kTzaycc4BWgTmiINY1HZ\\u0026quot;\\n\\nprivate function runCurl($url, $postVals = null, $headers = null, $auth = false){\\n $ch = curl_init($url);\\n\\n $options = array(\\n CURLOPT_RETURNTRANSFER =\\u0026gt; true,\\n CURLOPT_CONNECTTIMEOUT =\\u0026gt; 5,\\n CURLOPT_TIMEOUT =\\u0026gt; 10\\n );\\n\\n if (!empty($_SERVER[\\u0026#39;HTTP_USER_AGENT\\u0026#39;])){\\n $options[CURLOPT_USERAGENT] = $_SERVER[\\u0026#39;HTTP_USER_AGENT\\u0026#39;];\\n }\\n\\n if ($postVals != null){\\n $options[CURLOPT_POSTFIELDS] = $postVals;\\n $options[CURLOPT_CUSTOMREQUEST] = \\u0026quot;POST\\u0026quot;;\\n }\\n\\n if ($this-\\u0026gt;auth_mode == \\u0026#39;oauth\\u0026#39;){\\n $headers = array(\\u0026quot;Authorization: {$this-\\u0026gt;token_type} {$this-\\u0026gt;access_token}\\u0026quot;);\\n $options[CURLOPT_HEADER] = false;\\n $options[CURLINFO_HEADER_OUT] = false;\\n $options[CURLOPT_HTTPHEADER] = 
$headers;\\n }\\n\\n if ($auth){\\n $options[CURLOPT_HTTPAUTH] = CURLAUTH_BASIC;\\n $options[CURLOPT_USERPWD] = redditConfig::$CLIENT_ID . \\u0026quot;:\\u0026quot; . redditConfig::$CLIENT_SECRET;\\n $options[CURLOPT_SSLVERSION] = 4;\\n $options[CURLOPT_SSL_VERIFYPEER] = false;\\n $options[CURLOPT_SSL_VERIFYHOST] = 2;\\n }\\n\\n curl_setopt_array($ch, $options);\\n $apiResponse = curl_exec($ch);\\n $response = json_decode($apiResponse);\\n\\n //check if non-valid JSON is returned\\n if ($error = json_last_error()){\\n $response = $apiResponse;\\n }\\n curl_close($ch);\\n\\n return $response;\\n}\\n\\u003C/code\\u003E\\u003C/pre\\u003E\\n\\n\\u003Cp\\u003EAlways return json with .error.BAD_CAPTCHA.field-captcha\\u003C/p\\u003E\\n\\n\\u003Cp\\u003EFor displaying captcha i use link - \\u003Ca href=\\\"https://www.reddit.com/captcha/(iden).png\\\"\\u003Ehttps://www.reddit.com/captcha/(iden).png\\u003C/a\\u003E\\u003C/p\\u003E\\n\\u003C/div\\u003E\\u003C!-- SC_ON --\\u003E\", \"selftext\": \" public function createStory($kind = null,$title = null, $subreddit = null, $captcha = null, $iden = null, $link = null, $text=null){\\n $urlSubmit = \\\"{$this-\\u003EapiHost}/api/submit\\\";\\n\\n //data checks and pre-setup\\n if ($title == null || $subreddit == null){ return null; }\\n $kind = ($link == null) ? \\\"self\\\" : \\\"link\\\";\\n\\n $postData = sprintf(\\\"kind=%s\\u0026sr=%s\\u0026title=%s\\u0026captcha=%s\\u0026iden=%s\\\",\\n $kind,\\n $subreddit,\\n urlencode($title),\\n $captcha,\\n $iden\\n );\\n\\n //if link was present, add to POST data\\n if ($link != null){ $postData .= \\\"\\u0026url=\\\" . urlencode($link); }\\n if ($text != null){ $postData .= \\\"\\u0026text=\\\" . 
urlencode($text); }\\n\\n $response = $this-\\u003ErunCurl($urlSubmit, $postData);\\n //Example of my request\\n //$urlSubmit -\\u003E string(35) \\\"https://oauth.reddit.com/api/submit\\\"\\n //$postData -\\u003E string(81) \\\"kind=self\\u0026sr=test\\u0026title=text\\u0026captcha=TPVENT\\u0026iden=rrYfz6Wmc870kTzaycc4BWgTmiINY1HZ\\\"\\n\\n private function runCurl($url, $postVals = null, $headers = null, $auth = false){\\n $ch = curl_init($url);\\n\\n $options = array(\\n CURLOPT_RETURNTRANSFER =\\u003E true,\\n CURLOPT_CONNECTTIMEOUT =\\u003E 5,\\n CURLOPT_TIMEOUT =\\u003E 10\\n );\\n\\n if (!empty($_SERVER['HTTP_USER_AGENT'])){\\n $options[CURLOPT_USERAGENT] = $_SERVER['HTTP_USER_AGENT'];\\n }\\n\\n if ($postVals != null){\\n $options[CURLOPT_POSTFIELDS] = $postVals;\\n $options[CURLOPT_CUSTOMREQUEST] = \\\"POST\\\";\\n }\\n\\n if ($this-\\u003Eauth_mode == 'oauth'){\\n $headers = array(\\\"Authorization: {$this-\\u003Etoken_type} {$this-\\u003Eaccess_token}\\\");\\n $options[CURLOPT_HEADER] = false;\\n $options[CURLINFO_HEADER_OUT] = false;\\n $options[CURLOPT_HTTPHEADER] = $headers;\\n }\\n\\n if ($auth){\\n $options[CURLOPT_HTTPAUTH] = CURLAUTH_BASIC;\\n $options[CURLOPT_USERPWD] = redditConfig::$CLIENT_ID . \\\":\\\" . 
redditConfig::$CLIENT_SECRET;\\n $options[CURLOPT_SSLVERSION] = 4;\\n $options[CURLOPT_SSL_VERIFYPEER] = false;\\n $options[CURLOPT_SSL_VERIFYHOST] = 2;\\n }\\n\\n curl_setopt_array($ch, $options);\\n $apiResponse = curl_exec($ch);\\n $response = json_decode($apiResponse);\\n\\n //check if non-valid JSON is returned\\n if ($error = json_last_error()){\\n $response = $apiResponse;\\n }\\n curl_close($ch);\\n\\n return $response;\\n }\\n\\n\\nAlways return json with .error.BAD_CAPTCHA.field-captcha\\n\\nFor displaying captcha i use link - https://www.reddit.com/captcha/(iden).png\\n\\n\", \"likes\": null, \"suggested_sort\": null, \"user_reports\": [], \"secure_media\": null, \"link_flair_text\": null, \"id\": \"4ud47x\", \"from_kind\": null, \"gilded\": 0, \"archived\": false, \"clicked\": false, \"report_reasons\": null, \"author\": \"Vitaliy_Test\", \"media\": null, \"score\": 1, \"approved_by\": null, \"over_18\": false, \"hidden\": false, \"num_comments\": 2, \"thumbnail\": \"\", \"subreddit_id\": \"t5_2qizd\", \"hide_score\": false, \"edited\": 1469367482.0, \"link_flair_css_class\": null, \"author_flair_css_class\": null, \"downs\": 0, \"secure_media_embed\": {}, \"saved\": false, \"removal_reason\": null, \"stickied\": false, \"from\": null, \"is_self\": true, \"from_id\": null, \"permalink\": \"/r/redditdev/comments/4ud47x/api_captcha_problems/\", \"locked\": false, \"name\": \"t3_4ud47x\", \"created\": 1469395857.0, \"url\": \"https://www.reddit.com/r/redditdev/comments/4ud47x/api_captcha_problems/\", \"author_flair_text\": null, \"quarantine\": false, \"title\": \"Api captcha problems\", \"created_utc\": 1469367057.0, \"distinguished\": null, \"mod_reports\": [], \"visited\": false, \"num_reports\": null, \"ups\": 1}}, {\"kind\": \"t3\", \"data\": {\"domain\": \"self.redditdev\", \"banned_by\": null, \"media_embed\": {}, \"subreddit\": \"redditdev\", \"selftext_html\": \"\\u003C!-- SC_OFF --\\u003E\\u003Cdiv class=\\\"md\\\"\\u003E\\u003Cp\\u003EI am 
trying the \\u003Ca href=\\\"http://praw.readthedocs.io/en/stable/pages/oauth.html#an-example-webserver\\\"\\u003Ewebserver example in praw docs\\u003C/a\\u003E and it does not work for me.\\u003C/p\\u003E\\n\\n\\u003Cp\\u003EThe only change from that code is the following ( apart from using my keys, of course):\\u003C/p\\u003E\\n\\n\\u003Cpre\\u003E\\u003Ccode\\u003Er = praw.Reddit(user_agent=\\u0026#39;prawtest\\u0026#39;, site_name=\\u0026#39;prawtest\\u0026#39;)\\n\\u003C/code\\u003E\\u003C/pre\\u003E\\n\\n\\u003Cp\\u003EOutput:\\u003C/p\\u003E\\n\\n\\u003Cpre\\u003E\\u003Ccode\\u003EC:\\\\\\u0026gt;python exampleWebserver.py\\nTraceback (most recent call last):\\n File \\u0026quot;exampleWebserver.py\\u0026quot;, line 40, in \\u0026lt;module\\u0026gt;\\n r.set_oauth_app_info(CLIENT_ID, CLIENT_SECRET, REDIRECT_URI)\\nAttributeError: \\u0026#39;Reddit\\u0026#39; object has no attribute \\u0026#39;set_oauth_app_info\\u0026#39;\\n\\u003C/code\\u003E\\u003C/pre\\u003E\\n\\n\\u003Cp\\u003EWhat am I missing?\\nSurely set_oauth_app_info would be available, isn\\u0026#39;t it?\\u003C/p\\u003E\\n\\n\\u003Cp\\u003EFWIW, here is how installed it:\\u003C/p\\u003E\\n\\n\\u003Cpre\\u003E\\u003Ccode\\u003EC:\\\\\\u0026gt;python --version\\nPython 3.5.2\\n\\nC:\\\\\\u0026gt;python -m pip install --upgrade https://github.com/praw-dev/praw/archive/praw4.zip\\nCollecting https://github.com/praw-dev/praw/archive/praw4.zip\\n Downloading https://github.com/praw-dev/praw/archive/praw4.zip (6.6MB)\\n 100% |################################| 6.6MB 172kB/s\\nRequirement already up-to-date: decorator\\u0026lt;4.1,\\u0026gt;=4.0.9 in c:\\\\programs\\\\python35\\\\lib\\\\site-packages (from praw==4.0.0b10)\\nRequirement already up-to-date: prawcore==0.0.13 in c:\\\\programs\\\\python35\\\\lib\\\\site-packages (from praw==4.0.0b10)\\nRequirement already up-to-date: requests\\u0026gt;=2.3.0 in c:\\\\programs\\\\python35\\\\lib\\\\site-packages (from praw==4.0.0b10)\\nRequirement already 
up-to-date: six==1.10 in c:\\\\programs\\\\python35\\\\lib\\\\site-packages (from praw==4.0.0b10)\\nRequirement already up-to-date: update_checker==0.11 in c:\\\\programs\\\\python35\\\\lib\\\\site-packages (from praw==4.0.0b10)\\nInstalling collected packages: praw\\n Found existing installation: praw 4.0.0b10\\n Uninstalling praw-4.0.0b10:\\n Successfully uninstalled praw-4.0.0b10\\n Running setup.py install for praw ... done\\nSuccessfully installed praw-4.0.0b10\\n\\u003C/code\\u003E\\u003C/pre\\u003E\\n\\n\\u003Cp\\u003Eedit: I also tried changing to the default branch method which is the default anyway.\\u003C/p\\u003E\\n\\n\\u003Cpre\\u003E\\u003Ccode\\u003EC:\\\\\\u0026gt;python -m pip install --pre praw\\nCollecting praw\\n Using cached praw-4.0.0b10-py2.py3-none-any.whl\\nRequirement already satisfied (use --upgrade to upgrade): decorator\\u0026lt;4.1,\\u0026gt;=4.0.9 in c:\\\\programs\\\\python35\\\\lib\\\\site-packages (from praw)\\nRequirement already satisfied (use --upgrade to upgrade): update-checker==0.11 in c:\\\\programs\\\\python35\\\\lib\\\\site-packages (from praw)\\nRequirement already satisfied (use --upgrade to upgrade): requests\\u0026gt;=2.3.0 in c:\\\\programs\\\\python35\\\\lib\\\\site-packages (from praw)\\nCollecting prawcore==0.0.12 (from praw)\\n Using cached prawcore-0.0.12-py2.py3-none-any.whl\\nRequirement already satisfied (use --upgrade to upgrade): six==1.10 in c:\\\\programs\\\\python35\\\\lib\\\\site-packages (from praw)\\nInstalling collected packages: prawcore, praw\\n Found existing installation: prawcore 0.0.13\\n Uninstalling prawcore-0.0.13:\\n Successfully uninstalled prawcore-0.0.13\\nSuccessfully installed praw-4.0.0b10 prawcore-0.0.12\\n\\nC:\\\\\\u0026gt;python exampleWebserver.py\\nTraceback (most recent call last):\\n File \\u0026quot;exampleWebserver.py\\u0026quot;, line 40, in \\u0026lt;module\\u0026gt;\\n r.set_oauth_app_info(CLIENT_ID, CLIENT_SECRET, REDIRECT_URI)\\nAttributeError: 
\\u0026#39;Reddit\\u0026#39; object has no attribute \\u0026#39;set_oauth_app_info\\u0026#39;\\n\\u003C/code\\u003E\\u003C/pre\\u003E\\n\\u003C/div\\u003E\\u003C!-- SC_ON --\\u003E\", \"selftext\": \"I am trying the [webserver example in praw docs](http://praw.readthedocs.io/en/stable/pages/oauth.html#an-example-webserver) and it does not work for me.\\n\\nThe only change from that code is the following ( apart from using my keys, of course):\\n\\n r = praw.Reddit(user_agent='prawtest', site_name='prawtest')\\n\\nOutput:\\n\\n C:\\\\\\u003Epython exampleWebserver.py\\n Traceback (most recent call last):\\n File \\\"exampleWebserver.py\\\", line 40, in \\u003Cmodule\\u003E\\n r.set_oauth_app_info(CLIENT_ID, CLIENT_SECRET, REDIRECT_URI)\\n AttributeError: 'Reddit' object has no attribute 'set_oauth_app_info'\\n \\nWhat am I missing?\\nSurely set_oauth_app_info would be available, isn't it?\\n\\nFWIW, here is how installed it:\\n\\n\\n C:\\\\\\u003Epython --version\\n Python 3.5.2\\n\\n C:\\\\\\u003Epython -m pip install --upgrade https://github.com/praw-dev/praw/archive/praw4.zip\\n Collecting https://github.com/praw-dev/praw/archive/praw4.zip\\n Downloading https://github.com/praw-dev/praw/archive/praw4.zip (6.6MB)\\n 100% |################################| 6.6MB 172kB/s\\n Requirement already up-to-date: decorator\\u003C4.1,\\u003E=4.0.9 in c:\\\\programs\\\\python35\\\\lib\\\\site-packages (from praw==4.0.0b10)\\n Requirement already up-to-date: prawcore==0.0.13 in c:\\\\programs\\\\python35\\\\lib\\\\site-packages (from praw==4.0.0b10)\\n Requirement already up-to-date: requests\\u003E=2.3.0 in c:\\\\programs\\\\python35\\\\lib\\\\site-packages (from praw==4.0.0b10)\\n Requirement already up-to-date: six==1.10 in c:\\\\programs\\\\python35\\\\lib\\\\site-packages (from praw==4.0.0b10)\\n Requirement already up-to-date: update_checker==0.11 in c:\\\\programs\\\\python35\\\\lib\\\\site-packages (from praw==4.0.0b10)\\n Installing collected packages: praw\\n Found 
existing installation: praw 4.0.0b10\\n Uninstalling praw-4.0.0b10:\\n Successfully uninstalled praw-4.0.0b10\\n Running setup.py install for praw ... done\\n Successfully installed praw-4.0.0b10\\n\\nedit: I also tried changing to the default branch method which is the default anyway.\\n\\n C:\\\\\\u003Epython -m pip install --pre praw\\n Collecting praw\\n Using cached praw-4.0.0b10-py2.py3-none-any.whl\\n Requirement already satisfied (use --upgrade to upgrade): decorator\\u003C4.1,\\u003E=4.0.9 in c:\\\\programs\\\\python35\\\\lib\\\\site-packages (from praw)\\n Requirement already satisfied (use --upgrade to upgrade): update-checker==0.11 in c:\\\\programs\\\\python35\\\\lib\\\\site-packages (from praw)\\n Requirement already satisfied (use --upgrade to upgrade): requests\\u003E=2.3.0 in c:\\\\programs\\\\python35\\\\lib\\\\site-packages (from praw)\\n Collecting prawcore==0.0.12 (from praw)\\n Using cached prawcore-0.0.12-py2.py3-none-any.whl\\n Requirement already satisfied (use --upgrade to upgrade): six==1.10 in c:\\\\programs\\\\python35\\\\lib\\\\site-packages (from praw)\\n Installing collected packages: prawcore, praw\\n Found existing installation: prawcore 0.0.13\\n Uninstalling prawcore-0.0.13:\\n Successfully uninstalled prawcore-0.0.13\\n Successfully installed praw-4.0.0b10 prawcore-0.0.12\\n \\n C:\\\\\\u003Epython exampleWebserver.py\\n Traceback (most recent call last):\\n File \\\"exampleWebserver.py\\\", line 40, in \\u003Cmodule\\u003E\\n r.set_oauth_app_info(CLIENT_ID, CLIENT_SECRET, REDIRECT_URI)\\n AttributeError: 'Reddit' object has no attribute 'set_oauth_app_info'\\n\\n\", \"likes\": null, \"suggested_sort\": null, \"user_reports\": [], \"secure_media\": null, \"link_flair_text\": null, \"id\": \"4ucxhx\", \"from_kind\": null, \"gilded\": 0, \"archived\": false, \"clicked\": false, \"report_reasons\": null, \"author\": \"Have_No_Name\", \"media\": null, \"score\": 1, \"approved_by\": null, \"over_18\": false, \"hidden\": false, 
\"num_comments\": 4, \"thumbnail\": \"\", \"subreddit_id\": \"t5_2qizd\", \"hide_score\": false, \"edited\": 1469363714.0, \"link_flair_css_class\": null, \"author_flair_css_class\": null, \"downs\": 0, \"secure_media_embed\": {}, \"saved\": false, \"removal_reason\": null, \"stickied\": false, \"from\": null, \"is_self\": true, \"from_id\": null, \"permalink\": \"/r/redditdev/comments/4ucxhx/praw4_reddit_object_has_no_attribute_set_oauth/\", \"locked\": false, \"name\": \"t3_4ucxhx\", \"created\": 1469392223.0, \"url\": \"https://www.reddit.com/r/redditdev/comments/4ucxhx/praw4_reddit_object_has_no_attribute_set_oauth/\", \"author_flair_text\": null, \"quarantine\": false, \"title\": \"[PRAW4] 'Reddit' object has no attribute 'set_oauth_app_info'\", \"created_utc\": 1469363423.0, \"distinguished\": null, \"mod_reports\": [], \"visited\": false, \"num_reports\": null, \"ups\": 1}}, {\"kind\": \"t3\", \"data\": {\"domain\": \"self.redditdev\", \"banned_by\": null, \"media_embed\": {}, \"subreddit\": \"redditdev\", \"selftext_html\": \"\\u003C!-- SC_OFF --\\u003E\\u003Cdiv class=\\\"md\\\"\\u003E\\u003Cp\\u003EI want the toolbar that \\u003Ca href=\\\"https://github.com/reddit/reddit/blob/master/r2/r2/templates/redditheader.html#L66-L68\\\"\\u003Eredditheader.html\\u003C/a\\u003E has but on \\u003Ca href=\\\"https://github.com/reddit/reddit/blob/master/r2/r2/templates/multiinfobar.html\\\"\\u003Emultiinfobar.html\\u003C/a\\u003E\\u003C/p\\u003E\\n\\u003C/div\\u003E\\u003C!-- SC_ON --\\u003E\", \"selftext\": \"I want the toolbar that [redditheader.html](https://github.com/reddit/reddit/blob/master/r2/r2/templates/redditheader.html#L66-L68) has but on [multiinfobar.html](https://github.com/reddit/reddit/blob/master/r2/r2/templates/multiinfobar.html)\", \"likes\": null, \"suggested_sort\": null, \"user_reports\": [], \"secure_media\": null, \"link_flair_text\": null, \"id\": \"4ubi13\", \"from_kind\": null, \"gilded\": 0, \"archived\": false, \"clicked\": false, 
\"report_reasons\": null, \"author\": \"Thalinan\", \"media\": null, \"score\": 1, \"approved_by\": null, \"over_18\": false, \"hidden\": false, \"num_comments\": 0, \"thumbnail\": \"\", \"subreddit_id\": \"t5_2qizd\", \"hide_score\": false, \"edited\": false, \"link_flair_css_class\": null, \"author_flair_css_class\": null, \"downs\": 0, \"secure_media_embed\": {}, \"saved\": false, \"removal_reason\": null, \"stickied\": false, \"from\": null, \"is_self\": true, \"from_id\": null, \"permalink\": \"/r/redditdev/comments/4ubi13/how_do_i_create_a_toolbar_with_the_hot_new_rising/\", \"locked\": false, \"name\": \"t3_4ubi13\", \"created\": 1469359189.0, \"url\": \"https://www.reddit.com/r/redditdev/comments/4ubi13/how_do_i_create_a_toolbar_with_the_hot_new_rising/\", \"author_flair_text\": null, \"quarantine\": false, \"title\": \"How do I create a toolbar with the \\\"hot, new, rising, etc\\\" tabs on multiinfobar.html?\", \"created_utc\": 1469330389.0, \"distinguished\": null, \"mod_reports\": [], \"visited\": false, \"num_reports\": null, \"ups\": 1}}, {\"kind\": \"t3\", \"data\": {\"domain\": \"self.redditdev\", \"banned_by\": null, \"media_embed\": {}, \"subreddit\": \"redditdev\", \"selftext_html\": \"\\u003C!-- SC_OFF --\\u003E\\u003Cdiv class=\\\"md\\\"\\u003E\\u003Cp\\u003EHi! I\\u0026#39;m new to reddit dev and I looked around for if this was asked before but couldn\\u0026#39;t find anything on it, sorry if I\\u0026#39;m just being dumb. 
I\\u0026#39;m curious about how I would have to go about registering the following app:\\u003C/p\\u003E\\n\\n\\u003Cul\\u003E\\n\\u003Cli\\u003EIt would exist as just a simple web page\\u003C/li\\u003E\\n\\u003Cli\\u003EA user could go to this site and submit a link to a reddit comment thread that they\\u0026#39;re curious to have analyzed\\u003C/li\\u003E\\n\\u003Cli\\u003EThe page would gather information on that thread such as how many comments have been made, the average age of each account on the thread, the average amount of comments made by some of the commenters, etc. and spit it out onto a nice chart or table\\u003C/li\\u003E\\n\\u003C/ul\\u003E\\n\\n\\u003Cp\\u003EI would even like to go so far as just having the API calls run straight from the browser js and held in local storage.\\u003C/p\\u003E\\n\\n\\u003Cp\\u003EWith other RESTful APIs I wouldn\\u0026#39;t need to go so far as getting OAUTH configured (which I\\u0026#39;m not super familiar with) because this app wouldn\\u0026#39;t involve actually accessing any user\\u0026#39;s accounts and making changes etc. It\\u0026#39;s not like anyone would have to login. So while the \\u0026quot;script app\\u0026quot; seems to make the most sense, I see that that would be publishing my information if I wanted to make this thing public.\\u003C/p\\u003E\\n\\n\\u003Cp\\u003EAnyways, am I too confused? How would I go about doing this? THANKS.\\u003C/p\\u003E\\n\\n\\u003Cp\\u003EAgain, sorry if this has been asked before.\\u003C/p\\u003E\\n\\u003C/div\\u003E\\u003C!-- SC_ON --\\u003E\", \"selftext\": \"Hi! I'm new to reddit dev and I looked around for if this was asked before but couldn't find anything on it, sorry if I'm just being dumb. 
I'm curious about how I would have to go about registering the following app:\\n\\n- It would exist as just a simple web page\\n- A user could go to this site and submit a link to a reddit comment thread that they're curious to have analyzed\\n- The page would gather information on that thread such as how many comments have been made, the average age of each account on the thread, the average amount of comments made by some of the commenters, etc. and spit it out onto a nice chart or table\\n\\nI would even like to go so far as just having the API calls run straight from the browser js and held in local storage.\\n\\nWith other RESTful APIs I wouldn't need to go so far as getting OAUTH configured (which I'm not super familiar with) because this app wouldn't involve actually accessing any user's accounts and making changes etc. It's not like anyone would have to login. So while the \\\"script app\\\" seems to make the most sense, I see that that would be publishing my information if I wanted to make this thing public.\\n\\nAnyways, am I too confused? How would I go about doing this? 
THANKS.\\n\\nAgain, sorry if this has been asked before.\", \"likes\": null, \"suggested_sort\": null, \"user_reports\": [], \"secure_media\": null, \"link_flair_text\": null, \"id\": \"4u6ag5\", \"from_kind\": null, \"gilded\": 0, \"archived\": false, \"clicked\": false, \"report_reasons\": null, \"author\": \"Yancy_Fry_Sr\", \"media\": null, \"score\": 1, \"approved_by\": null, \"over_18\": false, \"hidden\": false, \"num_comments\": 1, \"thumbnail\": \"\", \"subreddit_id\": \"t5_2qizd\", \"hide_score\": false, \"edited\": false, \"link_flair_css_class\": null, \"author_flair_css_class\": null, \"downs\": 0, \"secure_media_embed\": {}, \"saved\": false, \"removal_reason\": null, \"stickied\": false, \"from\": null, \"is_self\": true, \"from_id\": null, \"permalink\": \"/r/redditdev/comments/4u6ag5/some_help_getting_started_do_i_need_to_register/\", \"locked\": false, \"name\": \"t3_4u6ag5\", \"created\": 1469269166.0, \"url\": \"https://www.reddit.com/r/redditdev/comments/4u6ag5/some_help_getting_started_do_i_need_to_register/\", \"author_flair_text\": null, \"quarantine\": false, \"title\": \"Some help getting started. Do I need to register as a web app?\", \"created_utc\": 1469240366.0, \"distinguished\": null, \"mod_reports\": [], \"visited\": false, \"num_reports\": null, \"ups\": 1}}, {\"kind\": \"t3\", \"data\": {\"domain\": \"self.redditdev\", \"banned_by\": null, \"media_embed\": {}, \"subreddit\": \"redditdev\", \"selftext_html\": \"\\u003C!-- SC_OFF --\\u003E\\u003Cdiv class=\\\"md\\\"\\u003E\\u003Cp\\u003EThe bot im trying to run is simple. I first read from a text file and save the lines in a list. I want to loop through comments on a sub and if I have not grabbed this comment before I want to append them back to the text file. 
It cycles through and writes 4-5 lines(each time it throws the error after a different number), and gets hung up and throws an error telling me that \\u0026quot;class \\u0026#39;praw.objects.submission\\u0026#39; has no attribute body on the line i have starred below. I assume that means im hitting an empty comment since I can flow through and the bot writes to my file after a random amount of cycles each time. Or is it something else?\\u003C/p\\u003E\\n\\n\\u003Cpre\\u003E\\u003Ccode\\u003Eimport praw\\n\\nr = praw.Reddit(user_agent = \\u0026#39;Testing out a Theory.\\u0026#39;)\\nr.login(username, password, disable_warning = True)\\n\\nsubreddit = r.get_subreddit(\\u0026#39;all\\u0026#39;)\\ncache = []\\n\\n#read from file\\nwith open(\\u0026#39;log_files/commentscraper.txt\\u0026#39;, \\u0026#39;r\\u0026#39;) as read:\\n for line in read:\\n cache.append(line.strip())\\n\\n#append unique comment\\nwith open(\\u0026#39;log_files/commentscraper.txt\\u0026#39;, \\u0026#39;a\\u0026#39;) as a:\\n comments = subreddit.get_comments(\\u0026#39;all\\u0026#39;)\\n for comment in comments:\\n if comment.body not in cache:*******************\\n a.write(comment.body + \\u0026#39;\\\\n\\u0026#39;)\\n\\u003C/code\\u003E\\u003C/pre\\u003E\\n\\u003C/div\\u003E\\u003C!-- SC_ON --\\u003E\", \"selftext\": \"The bot im trying to run is simple. I first read from a text file and save the lines in a list. I want to loop through comments on a sub and if I have not grabbed this comment before I want to append them back to the text file. It cycles through and writes 4-5 lines(each time it throws the error after a different number), and gets hung up and throws an error telling me that \\\"class 'praw.objects.submission' has no attribute body on the line i have starred below. I assume that means im hitting an empty comment since I can flow through and the bot writes to my file after a random amount of cycles each time. 
Or is it something else?\\n\\n import praw\\n \\n r = praw.Reddit(user_agent = 'Testing out a Theory.')\\n r.login(username, password, disable_warning = True)\\n \\n subreddit = r.get_subreddit('all')\\n cache = []\\n \\n #read from file\\n with open('log_files/commentscraper.txt', 'r') as read:\\n \\tfor line in read:\\n \\t\\tcache.append(line.strip())\\n \\n #append unique comment\\n with open('log_files/commentscraper.txt', 'a') as a:\\n \\tcomments = subreddit.get_comments('all')\\n \\tfor comment in comments:\\n \\t\\tif comment.body not in cache:*******************\\n \\t\\t\\ta.write(comment.body + '\\\\n')\", \"likes\": null, \"suggested_sort\": null, \"user_reports\": [], \"secure_media\": null, \"link_flair_text\": null, \"id\": \"4u110j\", \"from_kind\": null, \"gilded\": 0, \"archived\": false, \"clicked\": false, \"report_reasons\": null, \"author\": \"imnewhere24\", \"media\": null, \"score\": 1, \"approved_by\": null, \"over_18\": false, \"hidden\": false, \"num_comments\": 9, \"thumbnail\": \"\", \"subreddit_id\": \"t5_2qizd\", \"hide_score\": false, \"edited\": false, \"link_flair_css_class\": null, \"author_flair_css_class\": null, \"downs\": 0, \"secure_media_embed\": {}, \"saved\": false, \"removal_reason\": null, \"stickied\": false, \"from\": null, \"is_self\": true, \"from_id\": null, \"permalink\": \"/r/redditdev/comments/4u110j/file_loops_then_throws_an_error/\", \"locked\": false, \"name\": \"t3_4u110j\", \"created\": 1469189627.0, \"url\": \"https://www.reddit.com/r/redditdev/comments/4u110j/file_loops_then_throws_an_error/\", \"author_flair_text\": null, \"quarantine\": false, \"title\": \"file loops then throws an error\", \"created_utc\": 1469160827.0, \"distinguished\": null, \"mod_reports\": [], \"visited\": false, \"num_reports\": null, \"ups\": 1}}, {\"kind\": \"t3\", \"data\": {\"domain\": \"self.redditdev\", \"banned_by\": null, \"media_embed\": {}, \"subreddit\": \"redditdev\", \"selftext_html\": \"\\u003C!-- SC_OFF 
--\\u003E\\u003Cdiv class=\\\"md\\\"\\u003E\\u003Cp\\u003EI\\u0026#39;ve bumped into a few, but i\\u0026#39;m at a lose for which ones work and which ones are the good ones.\\u003C/p\\u003E\\n\\u003C/div\\u003E\\u003C!-- SC_ON --\\u003E\", \"selftext\": \"I've bumped into a few, but i'm at a lose for which ones work and which ones are the good ones.\", \"likes\": null, \"suggested_sort\": null, \"user_reports\": [], \"secure_media\": null, \"link_flair_text\": null, \"id\": \"4u1uf1\", \"from_kind\": null, \"gilded\": 0, \"archived\": false, \"clicked\": false, \"report_reasons\": null, \"author\": \"SirTulip\", \"media\": null, \"score\": 0, \"approved_by\": null, \"over_18\": false, \"hidden\": false, \"num_comments\": 2, \"thumbnail\": \"\", \"subreddit_id\": \"t5_2qizd\", \"hide_score\": false, \"edited\": false, \"link_flair_css_class\": null, \"author_flair_css_class\": null, \"downs\": 0, \"secure_media_embed\": {}, \"saved\": false, \"removal_reason\": null, \"stickied\": false, \"from\": null, \"is_self\": true, \"from_id\": null, \"permalink\": \"/r/redditdev/comments/4u1uf1/best_reddit_post_scheduler_app_as_of_july_2016/\", \"locked\": false, \"name\": \"t3_4u1uf1\", \"created\": 1469205991.0, \"url\": \"https://www.reddit.com/r/redditdev/comments/4u1uf1/best_reddit_post_scheduler_app_as_of_july_2016/\", \"author_flair_text\": null, \"quarantine\": false, \"title\": \"Best reddit post scheduler app as of July, 2016?\", \"created_utc\": 1469177191.0, \"distinguished\": null, \"mod_reports\": [], \"visited\": false, \"num_reports\": null, \"ups\": 0}}], \"after\": null, \"before\": null}}" 71 | }, 72 | "headers": { 73 | "CF-RAY": "2c9136fb9c7113f5-LAX", 74 | "Connection": "keep-alive", 75 | "Content-Type": "application/json; charset=UTF-8", 76 | "Date": "Wed, 27 Jul 2016 15:44:48 GMT", 77 | "Server": "cloudflare-nginx", 78 | "Strict-Transport-Security": "max-age=15552000; includeSubDomains; preload", 79 | "Transfer-Encoding": "chunked", 80 | "Vary": 
"accept-encoding", 81 | "X-Moose": "majestic", 82 | "cache-control": "private, s-maxage=0, max-age=0, must-revalidate", 83 | "expires": "-1", 84 | "set-cookie": "loid=uq6RC9OpudEcGYXbQk; Domain=reddit.com; Max-Age=63071999; Path=/; expires=Fri, 27-Jul-2018 15:44:48 GMT; secure", 85 | "x-content-type-options": "nosniff", 86 | "x-frame-options": "SAMEORIGIN", 87 | "x-ratelimit-remaining": "599.0", 88 | "x-ratelimit-reset": "312", 89 | "x-ratelimit-used": "1", 90 | "x-reddit-tracking": "https://pixel.redditmedia.com/pixel/of_destiny.png?v=WccwFA2I5sY%2BSlvE%2FIq%2FRf3RevSe6%2FElwHcteZSIKpxssTFTTtXMpbp7wxlKzkKSB5I3Dsx4RzaDsghG0CeLnkQW4crRzUX3", 91 | "x-ua-compatible": "IE=edge", 92 | "x-xss-protection": "1; mode=block" 93 | }, 94 | "status": { 95 | "code": 200, 96 | "message": "OK" 97 | }, 98 | "url": "https://oauth.reddit.com/r/redditdev/top?raw_json=1&limit=1024&t=week" 99 | } 100 | } 101 | ], 102 | "recorded_with": "betamax/0.7.1" 103 | } -------------------------------------------------------------------------------- /tests/conftest.py: -------------------------------------------------------------------------------- 1 | """Constants for the prawtools test suite.""" 2 | 3 | import os 4 | from base64 import b64encode 5 | 6 | from betamax import Betamax 7 | from betamax_serializers import pretty_json 8 | 9 | 10 | def b64_string(input_string): 11 | """Return a base64 encoded string (not bytes) from input_string.""" 12 | return b64encode(input_string.encode("utf-8")).decode("utf-8") 13 | 14 | 15 | def env_default(key): 16 | """Return environment variable or placeholder string. 17 | 18 | Set environment variable to placeholder if it doesn't exist. 
19 | """ 20 | test_environ = "prawtest_{}".format(key) 21 | test_value = os.environ.get(test_environ, "placeholder_{}".format(key)) 22 | return os.environ.setdefault("praw_{}".format(key), test_value) 23 | 24 | 25 | os.environ["praw_check_for_updates"] = "False" 26 | 27 | 28 | placeholders = { 29 | x: env_default(x) for x in "client_id client_secret password username".split() 30 | } 31 | placeholders["basic_auth"] = b64_string( 32 | "{}:{}".format(placeholders["client_id"], placeholders["client_secret"]) 33 | ) 34 | 35 | 36 | Betamax.register_serializer(pretty_json.PrettyJSONSerializer) 37 | with Betamax.configure() as config: 38 | if os.getenv("TRAVIS"): 39 | config.default_cassette_options["record_mode"] = "none" 40 | config.cassette_library_dir = "tests/cassettes" 41 | config.default_cassette_options["serialize_with"] = "prettyjson" 42 | config.default_cassette_options["match_requests_on"].append("body") 43 | for key, value in placeholders.items(): 44 | config.define_cassette_placeholder("<{}>".format(key.upper()), value) 45 | -------------------------------------------------------------------------------- /tests/test_stats.py: -------------------------------------------------------------------------------- 1 | """Test subreddit_stats.""" 2 | import mock 3 | from prawtools.stats import SubredditStats 4 | 5 | from . 
class StatsTest(IntegrationTest):
    """Exercise SubredditStats against pre-recorded betamax cassettes."""

    def setUp(self):
        """Build a SubredditStats for r/redditdev and attach its HTTP session.

        Runs before each test case; the recorder replays the cassette
        through the praw requestor's underlying session.
        """
        self.stats = SubredditStats("redditdev", None, None, None)
        super().setUp(self.stats.reddit._core._requestor._http)

    def test_recent(self):
        """fetch_recent_submissions should collect more than one submission."""
        with self.recorder.use_cassette("StatsTest.recent"):
            # Pin max_date so the replayed responses fall inside the
            # seven-day collection window of the recorded cassette.
            self.stats.max_date = 1466000000
            self.stats.fetch_recent_submissions(7)
            self.assertGreater(len(self.stats.submissions), 1)

    @mock.patch("time.sleep", return_value=None)
    def test_top(self, _sleep):
        """fetch_top_submissions('week') should collect multiple submissions."""
        # time.sleep is patched out so rate-limit pauses don't slow replay.
        with self.recorder.use_cassette("StatsTest.top"):
            self.stats.fetch_top_submissions("week")
            self.assertGreater(len(self.stats.submissions), 1)