├── .travis.yml
├── socialshares
│   ├── platforms
│   │   ├── facebook.py
│   │   ├── linkedin.py
│   │   ├── __init__.py
│   │   ├── reddit.py
│   │   ├── google.py
│   │   └── pinterest.py
│   ├── utils.py
│   ├── __init__.py
│   ├── tests.py
│   └── command.py
├── Makefile
├── setup.py
├── .gitignore
├── README.md
└── README.rst

/.travis.yml:
--------------------------------------------------------------------------------
1 | language: python
2 | python:
3 |   - "2.7"
4 |   - "3.4"
5 | install:
6 |   - python setup.py install
7 |   - pip install futures requests_futures
8 | script: python setup.py test
9 | sudo: false
--------------------------------------------------------------------------------
/socialshares/platforms/facebook.py:
--------------------------------------------------------------------------------
1 | def fetch(session, url):
2 |     return session.get('https://graph.facebook.com/', params={'id': url})
3 | 
4 | def parse(response):
5 |     if response.status_code != 200:
6 |         raise IOError()
7 | 
8 |     data = response.json()
9 |     return data['share']
10 | 
--------------------------------------------------------------------------------
/socialshares/platforms/linkedin.py:
--------------------------------------------------------------------------------
1 | def fetch(session, url):
2 |     return session.get('http://www.linkedin.com/countserv/count/share',
3 |                        params={'url': url, 'format': 'json'})
4 | 
5 | def parse(response):
6 |     if response.status_code != 200:
7 |         raise IOError()
8 | 
9 |     result = response.json()
10 |     return result['count']
--------------------------------------------------------------------------------
/Makefile:
--------------------------------------------------------------------------------
1 | all:
2 | 	python -c "import inspect; import socialshares; print(inspect.getdoc(socialshares.command))" \
3 | 	> README.md
4 | 	pandoc -o README.rst README.md
5 | 
6 | package:
7 | 	python setup.py sdist upload
8 | 
9 | URL := http://www.theguardian.com/politics/2014/sep/08/pound-slumps-scottish-yes-campaign-poll-lead
10 | 
11 | test:
12 | 	python setup.py test
--------------------------------------------------------------------------------
/socialshares/platforms/__init__.py:
--------------------------------------------------------------------------------
1 | from . import facebook, google, linkedin, pinterest, reddit
2 | 
3 | default = [
4 |     'facebook',
5 | ]
6 | 
7 | supported = default + [
8 |     'google',
9 |     'linkedin',
10 |     'pinterest',
11 |     'reddit',
12 | ]
13 | 
14 | _platforms = globals()
15 | 
16 | def get(name):
17 |     if name in supported:
18 |         platform = _platforms[name]
19 |         platform.name = name
20 |         return platform
21 |     else:
22 |         raise ValueError("Could not find a platform matching " + name)
23 | 
--------------------------------------------------------------------------------
/socialshares/platforms/reddit.py:
--------------------------------------------------------------------------------
1 | def fetch(session, url):
2 |     return session.get('http://buttons.reddit.com/button_info.json',
3 |                        params={'format': 'json', 'url': url})
4 | 
5 | def parse(response):
6 |     if response.status_code != 200:
7 |         raise IOError()
8 | 
9 |     data = response.json()
10 |     ups = 0
11 |     downs = 0
12 |     for child in data['data']['children']:
13 |         ups = ups + child['data']['ups']
14 |         downs = downs + child['data']['downs']
15 | 
16 |     return {
17 |         'ups': ups,
18 |         'downs': downs,
19 |     }
--------------------------------------------------------------------------------
/socialshares/platforms/google.py:
--------------------------------------------------------------------------------
1 | import json
2 | 
3 | def fetch(session, url):
4 |     body = json.dumps({
5 |         'method': 'pos.plusones.get',
6 |         'id': 'p',
7 |         'key': 'p',
8 |         'params': {
9 |             'nolog': True,
10 |             'id': url,
11 |             'source': 'widget',
12 |         },
13 |         'jsonrpc': '2.0',
14 |         'apiVersion': 'v1'
15 |     })
16 |     return session.post('https://clients6.google.com/rpc', data=body)
17 | 
18 | def parse(response):
19 |     if response.status_code != 200:
20 |         raise IOError()
21 | 
22 |     data = response.json()
23 |     return int(data['result']['metadata']['globalCounts']['count'])
--------------------------------------------------------------------------------
/socialshares/platforms/pinterest.py:
--------------------------------------------------------------------------------
1 | import re
2 | import json
3 | 
4 | def fetch(session, url):
5 |     # pinterest doesn't like it when we urlencode the url
6 |     return session.get('http://api.pinterest.com/v1/urls/count.json?url=' + url)
7 | 
8 | def parse_jsonp(response):
9 |     text = response.text
10 |     if not re.match(r'[_a-zA-Z]', text):
11 |         raise ValueError("Cannot unwrap incorrect JSONP.")
12 | 
13 |     start = text.index('(') + 1
14 |     stop = text.rindex(')')
15 |     data = text[start:stop]
16 |     return json.loads(data)
17 | 
18 | def parse(response):
19 |     if response.status_code != 200:
20 |         raise IOError()
21 | 
22 |     data = parse_jsonp(response)
23 |     return data['count']
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | from setuptools import setup, find_packages
2 | 
3 | 
4 | setup(name='socialshares',
5 |       description='A command-line and programmatic interface to various social sharecount endpoints.',
6 |       long_description=open('README.rst').read(),
7 |       author='Stijn Debrouwere',
8 |       author_email='stijn@debrouwere.org',
9 |       url='https://github.com/debrouwere/social-shares',
10 |       download_url='https://github.com/debrouwere/social-shares/tarball/master',
11 |       version='1.1.0',
12 |       license='ISC',
13 |       packages=find_packages(),
14 |       keywords='data analytics facebook twitter googleplus pinterest',
15 |       entry_points = {
16 |           'console_scripts': [
17 |               'socialshares = socialshares.command:main',
18 |           ],
19 |       },
20 |       test_suite='socialshares.tests',
21 |       install_requires=[
22 |           'docopt',
23 |           'requests',
24 |       ],
25 |       classifiers=[
26 |           'Development Status :: 5 - Production/Stable',
27 |           'Intended Audience :: Developers',
28 |           'License :: OSI Approved :: ISC License (ISCL)',
29 |           'Operating System :: OS Independent',
30 |           'Programming Language :: Python',
31 |           'Topic :: Scientific/Engineering :: Information Analysis',
32 |       ],
33 |       )
34 | 
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | .DS_Store
2 | .AppleDouble
3 | .LSOverride
4 | 
5 | # Icon must end with two \r
6 | Icon
7 | 
8 | 
9 | # Thumbnails
10 | ._*
11 | 
12 | # Files that might appear on external disk
13 | .Spotlight-V100
14 | .Trashes
15 | 
16 | # Directories potentially created on remote AFP share
17 | .AppleDB
18 | .AppleDesktop
19 | Network Trash Folder
20 | Temporary Items
21 | .apdisk
22 | # Byte-compiled / optimized / DLL files
23 | __pycache__/
24 | *.py[cod]
25 | 
26 | # C extensions
27 | *.so
28 | 
29 | # Distribution / packaging
30 | .Python
31 | env/
32 | build/
33 | develop-eggs/
34 | dist/
35 | downloads/
36 | eggs/
37 | lib/
38 | lib64/
39 | parts/
40 | sdist/
41 | var/
42 | *.egg-info/
43 | .installed.cfg
44 | *.egg
45 | 
46 | # PyInstaller
47 | # Usually these files are written by a python script from a template
48 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
49 | *.manifest
50 | *.spec
51 | 
52 | # Installer logs
53 | pip-log.txt
54 | pip-delete-this-directory.txt
55 | 
56 | # Unit test / coverage reports
57 | htmlcov/
58 | .tox/
59 | .coverage
60 | .cache
61 | nosetests.xml
62 | coverage.xml
63 | 
64 | # Translations
65 | *.mo
66 | *.pot
67 | 
68 | # Django stuff:
69 | *.log
70 | 
71 | # Sphinx documentation
72 | docs/_build/
73 | 
74 | # PyBuilder
75 | target/
--------------------------------------------------------------------------------
/socialshares/utils.py:
--------------------------------------------------------------------------------
1 | from requests import Session
2 | from textwrap import dedent
3 | 
4 | 
5 | def check_concurrency():
6 |     try:
7 |         import requests_futures
8 |         return True
9 |     except ImportError:
10 |         return False
11 | 
12 | def create_session(concurrent=None, **options):
13 |     """
14 |     Concurrency can be required (True), desired (None) or disabled (False);
15 |     we only throw an import error if concurrency is required yet cannot be
16 |     satisfied.
17 |     """
18 | 
19 |     if concurrent is False:
20 |         session = Session()
21 |     else:
22 |         try:
23 |             from requests_futures.sessions import FuturesSession
24 |             session = FuturesSession()
25 |         except ImportError:
26 |             if concurrent is True:
27 |                 raise ImportError(dedent("""
28 |                     Could not find requests_futures.
29 |                     Please disable concurrency or install this package.
30 |                     If using Python 2.x, additionally install the futures package.
31 |                     """))
32 |             else:
33 |                 session = Session()
34 | 
35 |     for key, value in options.items():
36 |         setattr(session, key, value)
37 | 
38 |     return session
39 | 
40 | 
41 | # provide a single interface for futures and synchronous requests
42 | # (which we pass through untouched)
43 | def get_response(response):
44 |     if hasattr(response, 'result'):
45 |         return response.result()
46 |     else:
47 |         return response
48 | 
49 | def get_responses(futures):
50 |     return [get_response(future) for future in futures]
51 | 
--------------------------------------------------------------------------------
/socialshares/__init__.py:
--------------------------------------------------------------------------------
1 | import pkg_resources
2 | 
3 | from . import utils
4 | from . import command
5 | from . import platforms
6 | 
7 | 
8 | __version__ = pkg_resources.get_distribution("socialshares").version
9 | 
10 | 
11 | headers = {
12 |     'User-Agent': 'Social Shares ',
13 | }
14 | 
15 | fetchers = platforms
16 | 
17 | def fetch_once(session, url, platforms):
18 |     handlers = []
19 |     requests = []
20 | 
21 |     for platform in platforms:
22 |         if platform in fetchers.supported:
23 |             handler = fetchers.get(platform)
24 |             handlers.append(handler)
25 |             requests.append(handler.fetch(session, url))
26 |         else:
27 |             raise ValueError()
28 | 
29 |     responses = utils.get_responses(requests)
30 | 
31 |     counts = {}
32 |     for handler, response in zip(handlers, responses):
33 |         # * ValueErrors indicate no JSON could be decoded
34 |         # * KeyErrors and IndexErrors indicate the JSON didn't
35 |         #   contain the data we were looking for
36 |         # * IOErrors are raised on purpose for all other
37 |         #   error conditions
38 |         try:
39 |             counts[handler.name] = handler.parse(response)
40 |         except (IOError, ValueError, KeyError, IndexError):
41 |             pass
42 | 
43 |     return counts
44 | 
45 | 
46 | def fetch(url, platforms=platforms.default, attempts=2, strict=False, concurrent=None):
47 |     session = utils.create_session(concurrent=concurrent, headers=headers)
48 |     counts = {}
49 |     attempt = 0
50 |     todo = set(platforms)
51 |     while len(todo) and attempt < attempts:
52 |         attempt = attempt + 1
53 |         partial = fetch_once(session, url, todo)
54 |         todo = todo.difference(partial)
55 |         counts.update(partial)
56 | 
57 |     if strict and len(counts) < len(platforms):
58 |         failures = ", ".join(todo)
59 |         raise IOError("Could not fetch all requested sharecounts. Failed: " + failures)
60 | 
61 |     session.close()
62 |     return counts
63 | 
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Social shares
2 | 
3 | [![Build Status](https://travis-ci.org/debrouwere/social-shares.svg?branch=master)](https://travis-ci.org/debrouwere/social-shares)
4 | 
5 | A command-line utility and Python library to access the social share counts for a particular URL.
6 | 
7 | **Note:** unfortunately this utility can no longer be used to fetch tweet and retweet counts,
8 | as Twitter has removed the API this functionality relied on. Facebook and other platforms
9 | still work.
10 | 
11 | ### Usage
12 | 
13 | ```
14 | Usage:
15 |   socialshares <url> [<platform>...] [options]
16 | 
17 | Options:
18 |   -h, --help            Show this screen.
19 |   -p, --plain           Plain output.
20 |   -r <n>, --retry <n>   Retry fetching up to <n> times [default: 1]
21 |   -e, --exit            Exit with an error code when not all counts could be fetched.
22 | ```
23 | 
24 | Some examples:
25 | 
26 | ```sh
27 | # fetch count for all supported platforms,
28 | # try again once (the default) for platforms that fail
29 | $ socialshares http://www.kalzumeus.com/2010/06/17/falsehoods-programmers-believe-about-names/
30 | 
31 | # fetch only facebook
32 | $ socialshares http://www.theguardian.com/politics facebook --retry 2
33 | ```
34 | 
35 | ### Supported platforms
36 | 
37 | Platform    | Description
38 | ----------- | -----------
39 | facebook    | facebook shares and comments
40 | linkedin    | linkedin shares
41 | google      | google +1's
42 | pinterest   | pinterest pins
43 | reddit      | reddit ups and downs (summed across posts)
44 | 
45 | Platforms are fetched in parallel and retried (once by default).
46 | If no platforms are specified, just facebook will be returned.
47 | 
48 | ### Unsupported platforms
49 | 
50 | The following APIs unfortunately no longer exist, and have been removed from the interface.
51 | 
52 | Platform    | Description
53 | ----------- | -----------
54 | twitter     | twitter tweets and retweets containing the URL
55 | facebookfql | facebook likes, shares and comments
56 | 
57 | ### Output
58 | 
59 | By default, `socialshares` outputs JSON:
60 | 
61 | ```json
62 | {
63 |   "reddit": {
64 |     "downs": 0,
65 |     "ups": 6
66 |   },
67 |   "google": 20,
68 |   "facebook": 1498,
69 |   "linkedin": 300,
70 |   "pinterest": 1
71 | }
72 | ```
73 | 
74 | Use the `--plain` flag if instead you'd like space-separated output.
75 | 
76 | ```sh
77 | $ socialshares http://www.theguardian.com/politics pinterest --plain
78 | 57
79 | ```
80 | 
81 | ### Usage from Python
82 | 
83 | ```python
84 | import socialshares
85 | counts = socialshares.fetch(url, ['facebook', 'pinterest'])
86 | ```
87 | 
88 | ### Installation
89 | 
90 | ```sh
91 | pip install socialshares
92 | # optionally, for asynchronous fetching
93 | pip install requests_futures
94 | ```
95 | 
96 | If [requests_futures][requests_futures] and (for Python 2.x) [futures][futures]
97 | are installed, `social-shares` will use these packages to speed up share count
98 | fetching, by accessing the various social media APIs in parallel.
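
The command-line switches have Python equivalents: `--retry` corresponds to the `attempts` keyword argument (attempts = retries + 1) and `--exit` to `strict=True`, while concurrency can be required or disabled outright with `concurrent`. A quick sketch, using the keyword arguments `socialshares.fetch` defines in `socialshares/__init__.py`:

```python
import socialshares

# Require parallel fetching (raises ImportError when requests_futures is missing),
# allow up to three fetch rounds for platforms that fail, and raise IOError
# if any requested count still could not be fetched.
counts = socialshares.fetch(
    'http://www.theguardian.com/politics',
    ['facebook', 'pinterest', 'reddit'],
    attempts=3,
    strict=True,
    concurrent=True,
)
print(counts)
```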
99 | 
100 | [requests_futures]: https://github.com/ross/requests-futures
101 | [futures]: https://code.google.com/p/pythonfutures/
102 | 
--------------------------------------------------------------------------------
/socialshares/tests.py:
--------------------------------------------------------------------------------
1 | import os
2 | import unittest
3 | import subprocess
4 | import json
5 | import socialshares
6 | 
7 | 
8 | url = 'http://www.theguardian.com/politics/2014/sep/08/pound-slumps-scottish-yes-campaign-poll-lead'
9 | 
10 | 
11 | # share counts can differ on repeated fetches, but not by much
12 | def is_close(a, b):
13 |     return 0 <= abs(b - a) <= 5
14 | 
15 | 
16 | class VerboseTestCase(unittest.TestCase):
17 |     @classmethod
18 |     def setUpClass(cls):
19 |         if cls.CONCURRENT:
20 |             print('Running tests with concurrency.')
21 |         else:
22 |             print('Running tests without concurrency.')
23 | 
24 | 
25 | class PythonTestCase(object):
26 |     @property
27 |     def defaults(self):
28 |         return dict(
29 |             attempts=3,
30 |             concurrent=self.CONCURRENT,
31 |         )
32 | 
33 |     @property
34 |     def lax_defaults(self):
35 |         return dict(
36 |             attempts=9,
37 |             concurrent=self.CONCURRENT,
38 |         )
39 | 
40 |     def test_facebook(self):
41 |         counts = socialshares.fetch(url, ['facebook'], **self.defaults)
42 |         self.assertIn('facebook', counts)
43 |         self.assertIsInstance(counts['facebook']['share_count'], int)
44 | 
45 |     def test_google(self):
46 |         counts = socialshares.fetch(url, ['google'], **self.defaults)
47 |         self.assertIn('google', counts)
48 |         self.assertIsInstance(counts['google'], int)
49 | 
50 |     def test_linkedin(self):
51 |         counts = socialshares.fetch(url, ['linkedin'], **self.defaults)
52 |         self.assertIn('linkedin', counts)
53 |         self.assertIsInstance(counts['linkedin'], int)
54 | 
55 |     def test_pinterest(self):
56 |         counts = socialshares.fetch(url, ['pinterest'], **self.defaults)
57 |         self.assertIn('pinterest', counts)
58 |         self.assertIsInstance(counts['pinterest'], int)
59 | 
60 |     def test_reddit(self):
61 |         counts = socialshares.fetch(url, ['reddit'], **self.defaults)
62 |         self.assertIn('reddit', counts)
63 |         self.assertIsInstance(counts['reddit'], dict)
64 | 
65 |     def test_default(self):
66 |         counts = socialshares.fetch(url, **self.lax_defaults)
67 |         self.assertEqual(set(counts.keys()), set(socialshares.platforms.default))
68 | 
69 |     def test_all(self):
70 |         counts = socialshares.fetch(url, socialshares.platforms.supported, **self.lax_defaults)
71 |         self.assertTrue(len(counts.keys()))
72 | 
73 |     # requires stubs / spies
74 |     def test_attempts(self):
75 |         pass
76 | 
77 |     def test_strict(self):
78 |         pass
79 | 
80 | 
81 | class PythonSynchronousTestCase(VerboseTestCase, PythonTestCase):
82 |     CONCURRENT = False
83 | 
84 | 
85 | class PythonAsynchronousTestCase(VerboseTestCase, PythonTestCase):
86 |     CONCURRENT = True
87 | 
88 | 
89 | class CLITestCase(unittest.TestCase):
90 |     def test_cli_json(self):
91 |         py = socialshares.fetch(url, ['facebook'])['facebook']
92 |         cli_raw = subprocess.check_output('socialshares {url}'.format(url=url), shell=True)
93 |         cli = json.loads(cli_raw.decode('utf-8'))['facebook']
94 | 
95 |         for k, v in py.items():
96 |             self.assertIn(k, cli)
97 |             self.assertTrue(is_close(py[k], cli[k]))
98 | 
99 |     def test_cli_plain(self):
100 |         py = socialshares.fetch(url, ['pinterest'])
101 |         cli_raw = subprocess.check_output('socialshares {url} pinterest --plain'.format(url=url), shell=True)
102 |         cli = int(cli_raw)
103 |         self.assertEqual(py['pinterest'], cli)
104 | 
105 | 
106 | # some platforms are banned or otherwise don't work reliably in our CI environment
107 | SKIP_PLATFORMS = os.environ.get('SKIP_PLATFORMS', '').split(' ')
108 | for platform in filter(None, SKIP_PLATFORMS):
109 |     delattr(PythonTestCase, 'test_' + platform)
110 | 
--------------------------------------------------------------------------------
/socialshares/command.py:
--------------------------------------------------------------------------------
1 | """# Social shares
2 | 
3 | [![Build Status](https://travis-ci.org/debrouwere/social-shares.svg?branch=master)](https://travis-ci.org/debrouwere/social-shares)
4 | 
5 | A command-line utility and Python library to access the social share counts for a particular URL.
6 | 
7 | **Note:** unfortunately this utility can no longer be used to fetch tweet and retweet counts,
8 | as Twitter has removed the API this functionality relied on. Facebook and other platforms
9 | still work.
10 | 
11 | ### Usage
12 | 
13 | ```
14 | Usage:
15 |   socialshares <url> [<platform>...] [options]
16 | 
17 | Options:
18 |   -h, --help            Show this screen.
19 |   -p, --plain           Plain output.
20 |   -r <n>, --retry <n>   Retry fetching up to <n> times [default: 1]
21 |   -e, --exit            Exit with an error code when not all counts could be fetched.
22 | ```
23 | 
24 | Some examples:
25 | 
26 | ```sh
27 | # fetch count for all supported platforms,
28 | # try again once (the default) for platforms that fail
29 | $ socialshares http://www.kalzumeus.com/2010/06/17/falsehoods-programmers-believe-about-names/
30 | 
31 | # fetch only facebook
32 | $ socialshares http://www.theguardian.com/politics facebook \
33 |     --retry 2
34 | ```
35 | 
36 | ### Supported platforms
37 | 
38 | Platform    | Description
39 | ----------- | -----------
40 | facebook    | facebook shares and comments
41 | linkedin    | linkedin shares
42 | google      | google +1's
43 | pinterest   | pinterest pins
44 | reddit      | reddit ups and downs (summed across posts)
45 | 
46 | Platforms are fetched in parallel and retried (once by default).
47 | If no platforms are specified, just facebook will be returned.
48 | 
49 | ### Unsupported platforms
50 | 
51 | The following APIs unfortunately no longer exist, and have been removed from the interface.
52 | 
53 | Platform    | Description
54 | ----------- | -----------
55 | twitter     | twitter tweets and retweets containing the URL
56 | facebookfql | facebook likes, shares and comments
57 | 
58 | ### Output
59 | 
60 | By default, `socialshares` outputs JSON:
61 | 
62 | ```json
63 | {
64 |   "reddit": {
65 |     "downs": 0,
66 |     "ups": 6
67 |   },
68 |   "google": 20,
69 |   "facebook": 1498,
70 |   "linkedin": 300,
71 |   "pinterest": 1
72 | }
73 | ```
74 | 
75 | Use the `--plain` flag if instead you'd like space-separated output.
76 | 
77 | ```sh
78 | $ socialshares http://www.theguardian.com/politics pinterest --plain
79 | 57
80 | ```
81 | 
82 | ### Usage from Python
83 | 
84 | ```python
85 | import socialshares
86 | counts = socialshares.fetch(url, ['facebook', 'pinterest'])
87 | ```
88 | 
89 | ### Installation
90 | 
91 | ```sh
92 | pip install socialshares
93 | # optionally, for asynchronous fetching
94 | pip install requests_futures
95 | ```
96 | 
97 | If [requests_futures][requests_futures] and (for Python 2.x) [futures][futures]
98 | are installed, `social-shares` will use these packages to speed up share count
99 | fetching, by accessing the various social media APIs in parallel.
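
The command-line switches have Python equivalents: `--retry` corresponds to the `attempts` keyword argument (attempts = retries + 1) and `--exit` to `strict=True`, while concurrency can be required or disabled outright with `concurrent`. A quick sketch, using the keyword arguments `socialshares.fetch` defines in `socialshares/__init__.py`:

```python
import socialshares

# Require parallel fetching (raises ImportError when requests_futures is missing),
# allow up to three fetch rounds for platforms that fail, and raise IOError
# if any requested count still could not be fetched.
counts = socialshares.fetch(
    'http://www.theguardian.com/politics',
    ['facebook', 'pinterest', 'reddit'],
    attempts=3,
    strict=True,
    concurrent=True,
)
print(counts)
```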
100 | 
101 | [requests_futures]: https://github.com/ross/requests-futures
102 | [futures]: https://code.google.com/p/pythonfutures/
103 | """
104 | 
105 | import sys
106 | from docopt import docopt
107 | import json
108 | import socialshares
109 | 
110 | 
111 | def main():
112 |     arguments = docopt(__doc__, version='Social shares ' + socialshares.__version__)
113 |     url = arguments['<url>']
114 |     attempts = int(arguments['--retry']) + 1
115 |     plain = arguments['--plain']
116 |     strict = arguments['--exit']
117 |     platforms = arguments['<platform>'] or socialshares.platforms.default
118 | 
119 |     try:
120 |         counts = socialshares.fetch(url, platforms, attempts=attempts, strict=strict)
121 |     except IOError:
122 |         sys.exit(1)
123 | 
124 |     if plain:
125 |         l = []
126 |         for platform in platforms:
127 |             count = counts[platform]
128 |             if isinstance(count, dict):
129 |                 l.extend(count.values())
130 |             else:
131 |                 l.append(count)
132 |         print(" ".join(map(str, l)))
133 |     else:
134 |         print(json.dumps(counts, indent=2))
135 | 
--------------------------------------------------------------------------------
/README.rst:
--------------------------------------------------------------------------------
1 | Social shares
2 | =============
3 | 
4 | |Build Status|
5 | 
6 | A command-line utility and Python library to access the social share
7 | counts for a particular URL.
8 | 
9 | **Note:** unfortunately this utility can no longer be used to fetch
10 | tweet and retweet counts, as Twitter has removed the API this
11 | functionality relied on. Facebook and other platforms still work.
12 | 
13 | Usage
14 | -----
15 | 
16 | ::
17 | 
18 |     Usage:
19 |       socialshares <url> [<platform>...] [options]
20 | 
21 |     Options:
22 |       -h, --help            Show this screen.
23 |       -p, --plain           Plain output.
24 |       -r <n>, --retry <n>   Retry fetching up to <n> times [default: 1]
25 |       -e, --exit            Exit with an error code when not all counts could be fetched.
26 | 
27 | Some examples:
28 | 
29 | .. code:: sh
30 | 
31 |     # fetch count for all supported platforms,
32 |     # try again once (the default) for platforms that fail
33 |     $ socialshares http://www.kalzumeus.com/2010/06/17/falsehoods-programmers-believe-about-names/
34 | 
35 |     # fetch only facebook
36 |     $ socialshares http://www.theguardian.com/politics facebook --retry 2
37 | 
38 | Supported platforms
39 | -------------------
40 | 
41 | +-------------+----------------------------------------------+
42 | | Platform    | Description                                  |
43 | +=============+==============================================+
44 | | facebook    | facebook shares and comments                 |
45 | +-------------+----------------------------------------------+
46 | | linkedin    | linkedin shares                              |
47 | +-------------+----------------------------------------------+
48 | | google      | google +1's                                  |
49 | +-------------+----------------------------------------------+
50 | | pinterest   | pinterest pins                               |
51 | +-------------+----------------------------------------------+
52 | | reddit      | reddit ups and downs (summed across posts)   |
53 | +-------------+----------------------------------------------+
54 | 
55 | Platforms are fetched in parallel and retried (once by default). If no
56 | platforms are specified, just facebook will be returned.
57 | 
58 | Unsupported platforms
59 | ---------------------
60 | 
61 | The following APIs unfortunately no longer exist, and have been removed
62 | from the interface.
63 | 
64 | +---------------+--------------------------------------------------+
65 | | Platform      | Description                                      |
66 | +===============+==================================================+
67 | | twitter       | twitter tweets and retweets containing the URL   |
68 | +---------------+--------------------------------------------------+
69 | | facebookfql   | facebook likes, shares and comments              |
70 | +---------------+--------------------------------------------------+
71 | 
72 | Output
73 | ------
74 | 
75 | By default, ``socialshares`` outputs JSON:
76 | 
77 | .. code:: json
78 | 
79 |     {
80 |       "reddit": {
81 |         "downs": 0,
82 |         "ups": 6
83 |       },
84 |       "google": 20,
85 |       "facebook": 1498,
86 |       "linkedin": 300,
87 |       "pinterest": 1
88 |     }
89 | 
90 | Use the ``--plain`` flag if instead you'd like space-separated output.
91 | 
92 | .. code:: sh
93 | 
94 |     $ socialshares http://www.theguardian.com/politics pinterest --plain
95 |     57
96 | 
97 | Usage from Python
98 | -----------------
99 | 
100 | .. code:: python
101 | 
102 |     import socialshares
103 |     counts = socialshares.fetch(url, ['facebook', 'pinterest'])
104 | 
105 | Installation
106 | ------------
107 | 
108 | .. code:: sh
109 | 
110 |     pip install socialshares
111 |     # optionally, for asynchronous fetching
112 |     pip install requests_futures
113 | 
114 | If `requests\_futures <https://github.com/ross/requests-futures>`__ and
115 | (for Python 2.x) `futures <https://code.google.com/p/pythonfutures/>`__
116 | are installed, ``social-shares`` will use these packages to speed up
117 | share count fetching, by accessing the various social media APIs in
118 | parallel.
119 | 
120 | .. |Build Status| image:: https://travis-ci.org/debrouwere/social-shares.svg?branch=master
121 |    :target: https://travis-ci.org/debrouwere/social-shares
122 | 
--------------------------------------------------------------------------------
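
Each supported platform is a small module in `socialshares/platforms/` that exposes `fetch(session, url)` and `parse(response)` and is listed by name in `socialshares/platforms/__init__.py`. A minimal sketch of that contract, against a hypothetical endpoint rather than any file in the repository:

```python
# Hypothetical platform module -- not part of the repository, only an
# illustration of the fetch/parse contract the existing modules follow.

def fetch(session, url):
    # session may be a plain requests.Session or a requests_futures FuturesSession;
    # 'https://example.com/count.json' is a placeholder endpoint.
    return session.get('https://example.com/count.json', params={'url': url})

def parse(response):
    # signal any non-200 response with IOError, like the other platform modules
    if response.status_code != 200:
        raise IOError()
    data = response.json()
    return data['count']
```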