├── Twitter_Profiles ├── 0brainlab │ └── followers.csv └── 1paulowe │ └── followers.csv ├── __pycache__ └── twitterclient.cpython-38.pyc ├── .gitignore ├── README.md ├── twitter_map_sample.py ├── twitter_map_clustered.py ├── twitterclient.py ├── twitter_mention_frequency.py ├── twitter_hashtag_frequency.py ├── twitter_time_series.py ├── twitter_term_frequency.py ├── twitter_geojson.py ├── twitter_followers_stats_set.py ├── twitter_conversations.py ├── twitter_hashtag_stats.py ├── twitter_get_user.py ├── twitter_streaming.py ├── twitter_cluster_users.py ├── twitter_get_ff_csv.py ├── twitter_influence.py └── data ├── network-edges-demo.csv └── network-edges.csv /Twitter_Profiles/0brainlab/followers.csv: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /__pycache__/twitterclient.cpython-38.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/paulowe/mining-twitter/HEAD/__pycache__/twitterclient.cpython-38.pyc -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | 2 | .env 3 | Twitter_Profiles/1paulowe/user_profile.json 4 | Twitter_Profiles/1paulowe/friends.jsonl 5 | Twitter_Profiles/1paulowe/followers.jsonl 6 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Data Mining from Twitter 2 | 3 | ## Instructions 4 | To follow along and have a better understanding of what this project repository is about please visit this [medium article](https://towardsdatascience.com/mining-twitter-data-ba4e44e6aecc) I wrote 5 | 6 | ## Developer Account 7 | You will need to create a twitter developer account [here](https://developer.twitter.com/en) 8 | 9 | ## Twitter Client 10 | Use tweepy - This is a Python library for accessing twitter's API. Documentation can be found here: https://www.tweepy.org 11 | 12 | ## Bots 13 | Each file is a Bot that pulls data using REST or Streaming APIs and performs some preprocessing steps on the data. 
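As a sketch of the pattern these bots follow (assuming tweepy 3.x, the shared `twitterclient.py` helper, and an illustrative `home_timeline.jsonl` output file), a minimal bot could look like this:

```python
# Minimal bot sketch: pull a few tweets from the authenticated account's home
# timeline and store them one JSON document per line (.jsonl), the same format
# the other bots in this repository read back in.
import json
from tweepy import Cursor
from twitterclient import get_twitter_client

client = get_twitter_client()
with open('home_timeline.jsonl', 'w') as f:
    for status in Cursor(client.home_timeline).items(10):
        f.write(json.dumps(status._json) + "\n")
```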
14 | 15 | ## Dynamic Map demonstration 16 | Here is the [demo](http://www.paulowe.com/tweet_map.html) of the dynamic map visualization. 17 | 18 | 19 | 20 | -------------------------------------------------------------------------------- /twitter_map_sample.py: -------------------------------------------------------------------------------- 1 | from argparse import ArgumentParser 2 | import folium 3 | 4 | def get_parser(): 5 | parser = ArgumentParser() 6 | parser.add_argument('--map') 7 | return parser 8 | 9 | def make_map(map_file): 10 | #Create a Map object centred at latitude 50, longitude 5 11 | sample_map = folium.Map(location=[50, 5], zoom_start=50) 12 | 13 | #Create a marker for London 14 | london_marker = folium.Marker([51.5, -0.12], popup='London') 15 | 16 | london_marker.add_to(sample_map) 17 | 18 | #Create a marker for Paris 19 | 20 | paris_marker = folium.Marker([48.85, 2.35], popup='Paris') 21 | 22 | paris_marker.add_to(sample_map) 23 | 24 | #Save to HTML file 25 | sample_map.save(map_file) 26 | 27 | if __name__ == '__main__': 28 | parser = get_parser() 29 | args = parser.parse_args() 30 | make_map(args.map) 31 | -------------------------------------------------------------------------------- /twitter_map_clustered.py: -------------------------------------------------------------------------------- 1 | from argparse import ArgumentParser 2 | import folium 3 | from folium.plugins import MarkerCluster 4 | import json 5 | 6 | def get_parser(): 7 | parser = ArgumentParser() 8 | parser.add_argument('--geojson') 9 | parser.add_argument('--map') 10 | return parser 11 | 12 | def make_map(geojson_file, map_file): 13 | 14 | tweet_map = folium.Map(location=[50, 5], max_zoom=20) 15 | 16 | marker_cluster = MarkerCluster().add_to(tweet_map) 17 | 18 | geodata = json.load(open(geojson_file)) 19 | 20 | for tweet in geodata['features']: 21 | tweet['geometry']['coordinates'].reverse() 22 | marker = folium.Marker(tweet['geometry']['coordinates'], popup=tweet['properties']['text']) 23 | marker.add_to(marker_cluster) 24 | 25 | #Save to HTML map file 26 | tweet_map.save(map_file) 27 | 28 | if __name__ == '__main__': 29 | parser = get_parser() 30 | args = parser.parse_args() 31 | make_map(args.geojson, args.map) 32 | -------------------------------------------------------------------------------- /twitterclient.py: -------------------------------------------------------------------------------- 1 | import os 2 | import sys 3 | from tweepy import API 4 | from tweepy import OAuthHandler 5 | from dotenv import load_dotenv 6 | 7 | def get_twitter_auth(): 8 | #set up twitter authentication 9 | 10 | # Return: tweepy.OAuthHandler object 11 | 12 | try: 13 | load_dotenv() 14 | consumer_key = os.environ['TWITTER_CONSUMER_KEY'] 15 | consumer_secret = os.environ['TWITTER_CONSUMER_SECRET'] 16 | access_token = os.environ['TWITTER_ACCESS_TOKEN'] 17 | access_secret = os.environ['TWITTER_ACCESS_SECRET'] 18 | except KeyError: 19 | sys.stderr.write("TWITTER_* environment variables not set\n") 20 | sys.exit(1) 21 | auth = OAuthHandler(consumer_key, consumer_secret) 22 | auth.set_access_token(access_token, access_secret) 23 | return auth 24 | 25 | 26 | def get_twitter_client(): 27 | #Set up twitter API client.
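# Reads the TWITTER_* credentials via get_twitter_auth() and returns an authenticated client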
28 | 29 | # Return tweepy.API object 30 | 31 | auth = get_twitter_auth() 32 | client = API(auth) 33 | return client 34 | -------------------------------------------------------------------------------- /twitter_mention_frequency.py: -------------------------------------------------------------------------------- 1 | #This script can be run to find out the most common tags from a timeline, a users page or any jsonl file we posess 2 | 3 | import sys 4 | from collections import Counter 5 | import json 6 | 7 | def get_mentions(tweet): 8 | #receives list of hashtags from a tweet 9 | entities = tweet.get('entities', {}) #if no value is present, default {} to avoid key value error 10 | mentions = entities.get('user_mentions', []) 11 | return [tag['screen_name'].lower() for tag in mentions] 12 | 13 | if __name__ == '__main__': 14 | fname = sys.argv[1] #script takes one cli argument fname 15 | with open(fname, 'r') as f: 16 | mentionfreq = Counter() #special dictionary object with strings as key and respective frequency(count) as values. this is an ordered collection and a subclass of dict() 17 | for line in f: #reads each line, and each line contains a json document (tweet) 18 | tweet = json.loads(line) #loads each json document into tweet variable 19 | mentions_in_tweet = get_mentions(tweet) #helper function extracts a list of hashtags 20 | mentionfreq.update(mentions_in_tweet) #binds the keys, value pairs/ our dictionary 21 | for tag, count in mentionfreq.most_common(20): 22 | print("{}: {}".format(tag, count)) 23 | -------------------------------------------------------------------------------- /twitter_hashtag_frequency.py: -------------------------------------------------------------------------------- 1 | import sys 2 | from collections import Counter 3 | import json 4 | import matplotlib.pyplot as plt 5 | 6 | def get_hastags(tweet): 7 | #receives list of hashtags from a tweet 8 | entities = tweet.get('entities', {}) #if no value is present, default {} to avoid key value error 9 | hashtags = entities.get('hashtags', []) 10 | return [tag['text'].lower() for tag in hashtags] 11 | 12 | if __name__ == '__main__': 13 | fname = sys.argv[1] #script takes one cli argument fname 14 | with open(fname, 'r') as f: 15 | hashtags = Counter() #special dictionary object with strings/tags as key and respective frequency as values. 
this is an ordered collection and a subclass of dict() 16 | for line in f: #reads each line, and each line contains a json document (tweet) 17 | tweet = json.loads(line) #loads each json document into tweet variable 18 | hashtags_in_tweet = get_hastags(tweet) #helper function extracts a list of hashtags 19 | hashtags.update(hashtags_in_tweet) 20 | for tag, count in hashtags.most_common(20): 21 | print("{}: {}".format(tag, count)) 22 | 23 | y = [count for tag, count in hashtags.most_common(20)] 24 | x = range(1, len(y)+1) 25 | 26 | plt.bar(x, y) 27 | plt.title("Term frequencies used in US-Iran Stream Data") 28 | plt.ylabel("Frequency") 29 | plt.savefig('us-iran-tag-distn.png') 30 | -------------------------------------------------------------------------------- /twitter_time_series.py: -------------------------------------------------------------------------------- 1 | import sys 2 | import json 3 | from datetime import datetime 4 | import matplotlib.pyplot as plt 5 | import matplotlib.dates as mdates 6 | import pandas as pd 7 | import numpy as np 8 | import pickle 9 | 10 | if __name__ == '__main__': 11 | fname = sys.argv[1] 12 | with open (fname, 'r') as f: 13 | all_dates = [] 14 | for line in f: 15 | tweet = json.loads(line) 16 | all_dates.append(tweet.get('created_at')) 17 | idx = pd.DatetimeIndex(all_dates) 18 | ones = np.ones(len(all_dates)) 19 | 20 | my_series = pd.Series(ones, index=idx) 21 | 22 | #Resampling/bucketing into 1-minute buckets 23 | 24 | per_minute = my_series.resample('1Min').sum().fillna(0) 25 | 26 | #Plot the Series 27 | fig, ax = plt.subplots() 28 | ax.grid(True) 29 | ax.set_title("Tweet Frequencies") 30 | hours = mdates.MinuteLocator(interval=20) 31 | date_formatter = mdates.DateFormatter('%H:%M') 32 | 33 | datemin = datetime(2020, 1, 9, 10, 0) 34 | datemax = datetime(2020, 1, 9, 12, 0) 35 | 36 | ax.xaxis.set_major_locator(hours) 37 | ax.xaxis.set_major_formatter(date_formatter) 38 | ax.set_xlim(datemin, datemax) 39 | max_freq = per_minute.max() 40 | ax.set_ylim(0, max_freq) 41 | ax.plot(per_minute.index, per_minute) 42 | 43 | plt.savefig('tweet_time_series.png') 44 | -------------------------------------------------------------------------------- /twitter_term_frequency.py: -------------------------------------------------------------------------------- 1 | import sys 2 | import string 3 | import json 4 | from collections import Counter 5 | from nltk.tokenize import TweetTokenizer 6 | from nltk.corpus import stopwords 7 | import matplotlib.pyplot as plt 8 | 9 | def process(text, tokenizer=TweetTokenizer(), stopwords=[]): 10 | """ Process tweet text: 11 | - Lowercase 12 | - tokenize 13 | - stopword removal 14 | - digits removal 15 | Return: list of strings 16 | """ 17 | 18 | text = text.lower() 19 | tokens = tokenizer.tokenize(text) 20 | return [tok for tok in tokens if tok not in stopwords and not tok.isdigit()] 21 | 22 | if __name__ == '__main__': 23 | fname = sys.argv[1] 24 | tweet_tokenizer = TweetTokenizer() 25 | punct = list(string.punctuation) 26 | stopword_list = stopwords.words('english') + punct + ['rt', 'via', '...'] 27 | 28 | tf = Counter() 29 | with open(fname, 'r') as f: 30 | for line in f: 31 | tweet = json.loads(line) 32 | tokens = process(text=tweet['text'], tokenizer=tweet_tokenizer, stopwords=stopword_list) 33 | tf.update(tokens) 34 | for tag, count in tf.most_common(20): 35 | 36 | print("{}: {}".format(tag, count)) 37 | 38 | 39 | 40 | 41 | y = [count for tag, count in tf.most_common(20)] 42 | x = range(1, len(y)+1) 43 | 44 | plt.bar(x, y) 45 | 
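# Label and save a bar chart of the 20 most frequent terms (x positions are ranks 1-20)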
plt.title("Term frequencies used in US-Iran Stream Data") 46 | plt.ylabel("Frequency") 47 | plt.savefig('us-iran-term-distn.png') 48 | -------------------------------------------------------------------------------- /twitter_geojson.py: -------------------------------------------------------------------------------- 1 | #read dataset of tweets in jsonl file and produce a geoson file associted with geographical data 2 | import json 3 | from argparse import ArgumentParser 4 | 5 | def get_parser(): 6 | parser = ArgumentParser() 7 | parser.add_argument('--tweets') 8 | parser.add_argument('--geojson') 9 | return parser 10 | 11 | if __name__ == "__main__": 12 | parser = get_parser() 13 | args = parser.parse_args() 14 | 15 | #Read tweet collection and build geo data structure. 16 | with open(args.tweets, 'r') as f: #read dataset of tweets 17 | geo_data = { 18 | 'type' : "FeatureCollection", 19 | 'features' : [] 20 | } 21 | 22 | for line in f: 23 | tweet = json.loads(line) 24 | try: 25 | if tweet['coordinates']: 26 | geo_json_feature = { 27 | "type" : "Feature", 28 | "geometry": { 29 | "type" : "Point", 30 | "coordinates" : tweet['coordinates']['coordinates'] 31 | }, 32 | "properties": { 33 | "text" : tweet['text'], 34 | "created_at" : tweet['created_at'] 35 | } 36 | } 37 | geo_data['features'].append(geo_json_feature) 38 | except KeyError: 39 | #Skip if json doc is not a tweet (errors, etc) 40 | continue 41 | #Save geo data 42 | with open(args.geojson, 'w') as fout: 43 | fout.write(json.dumps(geo_data, indent=4)) 44 | -------------------------------------------------------------------------------- /twitter_followers_stats_set.py: -------------------------------------------------------------------------------- 1 | import sys 2 | import json 3 | import time 4 | 5 | def usage(): 6 | print("Usage: ") 7 | print("python {} ".format(sys.argv[0])) 8 | 9 | if __name__ == '__main__': 10 | if len(sys.argv) != 2: 11 | usage() 12 | sys.exit(1) 13 | screen_name = sys.argv[1] 14 | followers_file = "Twitter_Profiles/{}/followers.jsonl".format(screen_name) 15 | friends_file = "Twitter_Profiles/{}/friends.jsonl".format(screen_name) 16 | with open(followers_file) as f1, open(friends_file) as f2: 17 | t0 = time.time() 18 | followers = set() 19 | friends = set() 20 | for line in f1: 21 | profile = json.loads(line) 22 | followers.add(profile['screen_name']) 23 | for line in f2: 24 | profile = json.loads(line) 25 | friends.add(profile['screen_name']) 26 | t1 = time.time() 27 | mutual_friends = friends.intersection(followers) 28 | followers_not_following = followers.difference(friends) 29 | friends_not_following = friends.difference(followers) 30 | t2 = time.time() 31 | print("------TIMING-----") 32 | print("Initialize data: {}".format(t1-t0)) 33 | print("Set-based operations: {}".format(t2-t1)) 34 | print("Total time: {}".format(t2-t0)) 35 | """print("-------STATISTICS-------") 36 | print("{} has {} followers".format(screen_name, len(followers))) 37 | print("{} is following {} friends".format(screen_name, len(friends))) 38 | print("{} has {} mutal followers".format(screen_name, len(mutual_friends))) 39 | print("{} friends are not following {} back".format(len(friends_not_following), screen_name)) 40 | print("{} is not following {} of his followers".format(screen_name, len(followers_not_following)))""" 41 | -------------------------------------------------------------------------------- /twitter_conversations.py: -------------------------------------------------------------------------------- 1 | import sys 2 | import 
json 3 | from operator import itemgetter 4 | import networkx as nx 5 | 6 | def usage(): 7 | print("Usage") 8 | print("python {} ".format(sys.argv[0])) 9 | 10 | if __name__ == "__main__": 11 | if len(sys.argv) != 2: 12 | usage() 13 | sys.exit(1) 14 | 15 | fname = sys.argv[1] 16 | with open(fname) as f: #takes in a jsonl file of tweets as input 17 | graph = nx.DiGraph() 18 | for line in f: 19 | tweet = json.loads(line) 20 | if 'id' in tweet: 21 | graph.add_node(tweet['id'], 22 | tweet=tweet['text'], 23 | author=tweet['user']['screen_name'], 24 | created_at=tweet['created_at']) 25 | if tweet['in_reply_to_status_id']: 26 | reply_to = tweet['in_reply_to_status_id'] 27 | if reply_to in graph and tweet['user']['screen_name'] != graph.node[reply_to]['author']: #if the user is not replying to themselves 28 | graph.add_edge(tweet['in_reply_to_status_id'], tweet['id']) 29 | #Print some basic stats 30 | 31 | print(nx.info(graph)) 32 | 33 | #Find most replied tweet (most indegree) 34 | sorted_replied = sorted(graph.degree(), key=itemgetter(1), reverse=True) 35 | 36 | most_replied_id, replies = sorted_replied[0] 37 | print("Most replied tweet ({} replies:".format(replies)) 38 | print(graph.node[most_replied_id]) 39 | 40 | #Find longest conversation (longest path) 41 | print("Longest discussion:") 42 | longest_path = nx.dag_longest_path(graph) 43 | for tweet_id in longest_path: 44 | node = graph.node[tweet_id] 45 | print("{} (by {} at {})".format(node['tweet'], node['author'], node['created_at'])) 46 | -------------------------------------------------------------------------------- /twitter_hashtag_stats.py: -------------------------------------------------------------------------------- 1 | import sys 2 | from collections import defaultdict 3 | import json 4 | 5 | def get_hashtags(tweet): 6 | #receives list of hashtags from a tweet 7 | entities = tweet.get('entities', {}) #if no value is present, default {} to avoid key value error 8 | hashtags = entities.get('hashtags', []) 9 | return [tag['text'].lower() for tag in hashtags] 10 | 11 | def usage(): 12 | print("Usage:") 13 | print("python {} ".format(sys.argv[0])) 14 | 15 | if __name__ == '__main__': 16 | if len(sys.argv) != 2: 17 | usage() 18 | sys.exit(1) 19 | fname = sys.argv[1] 20 | with open(fname, 'r') as f: 21 | hashtag_count = defaultdict(int) 22 | for line in f: 23 | tweet = json.loads(line) 24 | hashtags_in_tweet = get_hashtags(tweet) 25 | n_of_hashtags = len(hashtags_in_tweet) 26 | hashtag_count[n_of_hashtags] += 1 27 | tweets_with_hashtags = sum([count for n_of_tags, count in hashtag_count.items() if n_of_tags > 0]) 28 | tweets_no_hashtags = hashtag_count[0] 29 | tweets_total = tweets_no_hashtags + tweets_with_hashtags 30 | tweets_with_hashtags_percent = "%.2f" % (tweets_with_hashtags / tweets_total * 100) 31 | tweets_no_hashtags_percent = "%.2f" % (tweets_no_hashtags / tweets_total * 100) 32 | 33 | print("{} tweets without hashtags ({}%)".format(tweets_no_hashtags, tweets_no_hashtags_percent)) 34 | print("{} tweets with at least one hashtag ({}%)".format(tweets_with_hashtags, tweets_with_hashtags_percent)) 35 | 36 | for tag_count, tweet_count in hashtag_count.items(): 37 | if tag_count > 0: 38 | percent_total = "%.2f" % (tweet_count / tweets_total * 100) 39 | percent_elite = "%.2f" % (tweet_count / tweets_with_hashtags * 100) 40 | print("{} tweets with {} hashtags ({}% total {}% elite)".format(tweet_count, tag_count, percent_total, percent_elite)) 41 | -------------------------------------------------------------------------------- 
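To make the "elite" percentage in twitter_hashtag_stats.py concrete, here is a toy run of the same arithmetic with hypothetical counts (not real data):

```python
# Toy example of the statistics computed by twitter_hashtag_stats.py.
hashtag_count = {0: 60, 1: 25, 2: 10, 3: 5}   # n_of_hashtags -> number of tweets
tweets_with_hashtags = sum(c for n, c in hashtag_count.items() if n > 0)   # 40
tweets_total = hashtag_count[0] + tweets_with_hashtags                     # 100
print("%.2f%% of tweets carry at least one hashtag" % (tweets_with_hashtags / tweets_total * 100))   # 40.00%
# "elite" = share among tagged tweets only: 25 / 40 = 62.50% have exactly one hashtag
print("%.2f%% of tagged tweets have exactly one hashtag" % (hashtag_count[1] / tweets_with_hashtags * 100))
```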
/twitter_get_user.py: -------------------------------------------------------------------------------- 1 | import os 2 | import sys 3 | import time 4 | import json 5 | import math 6 | from twitterclient import get_twitter_client 7 | from tweepy import Cursor 8 | 9 | MAX_FRIENDS = 15000 10 | 11 | def usage(): 12 | print("Usage: ") 13 | print("python {} <".format(sys.argv[0])) 14 | 15 | def paginate(items, n): 16 | """ 17 | Generate n-sized chunks from Items 18 | 19 | """ 20 | for i in range(0, len(items), n): 21 | yield items[i:i+n] 22 | 23 | if __name__ == '__main__': 24 | if len(sys.argv) != 2: 25 | usage() 26 | sys.exit(1) 27 | screen_name = sys.argv[1] 28 | client = get_twitter_client() 29 | dirname = "Twitter_Profiles/{}".format(screen_name) 30 | max_pages = math.ceil(MAX_FRIENDS/ 5000) 31 | try: 32 | os.makedirs(dirname, mode=0o755, exist_ok=True) 33 | except OSError: 34 | print("Directory {} already exists".format(dirname)) 35 | except Exception as e: 36 | print("Error while creating directory {}".format(dirname)) 37 | print(e) 38 | sys.exit(1) 39 | 40 | #get followers for a given user 41 | 42 | fname = "Twitter_Profiles/{}/followers.jsonl".format(screen_name) 43 | with open(fname, 'w') as f: 44 | for followers in Cursor(client.followers_ids, screen_name=screen_name).pages(max_pages): 45 | for chunk in paginate(followers, 100): 46 | users = client.lookup_users(user_ids=chunk) 47 | for user in users: 48 | f.write(json.dumps(user._json)+"\n") 49 | if len(followers) == 5000: 50 | print("More results available. Sleeping for 60 seconds to avoid rate limit") 51 | time.sleep(60) 52 | 53 | #get friends for a given user 54 | 55 | fname = "Twitter_Profiles/{}/friends.jsonl".format(screen_name) 56 | with open(fname, 'w') as f: 57 | for friends in Cursor(client.friends_ids, screen_name=screen_name).pages(max_pages): 58 | for chunk in paginate(friends, 100): 59 | users = client.lookup_users(user_ids=chunk) 60 | for user in users: 61 | f.write(json.dumps(user._json)+"\n") 62 | if len(friends) == 5000: 63 | print("More results available. Sleeping for 60 seconds to avoid rate limit") 64 | time.sleep(60) 65 | 66 | # get user's profile 67 | fname = "Twitter_Profiles/{}/user_profile.json".format(screen_name) 68 | with open(fname, 'w') as f: 69 | profile = client.get_user(screen_name=screen_name) 70 | f.write(json.dumps(profile._json, indent=4)) 71 | 72 | # https://www.geeksforgeeks.org/convert-json-to-csv-in-python/ -------------------------------------------------------------------------------- /twitter_streaming.py: -------------------------------------------------------------------------------- 1 | import sys 2 | import string 3 | import time 4 | from tweepy import Stream 5 | from tweepy.streaming import StreamListener 6 | from twitterclient import get_twitter_auth 7 | 8 | 9 | class CustomListener(StreamListener): 10 | """ Custom StreamListener for streaming twitter data.""" 11 | 12 | def __init__(self, fname): 13 | safe_fname = format_filename(fname) 14 | self.outfile = "stream_%s.jsonl" % safe_fname 15 | 16 | def on_data(self, data): 17 | #called when data is coming through. This method simply stores data as it is received in a .jsonl file. 
Each line in this file will contain a single tweet in json format 18 | # return True after data is written, any other errors will be caught and we will write it to our log file, put the application to sleep for 5 seconds and return true to continue execution 19 | try: 20 | with open(self.outfile, 'a') as f: 21 | f.write(data) 22 | return True 23 | except BaseException as e: 24 | sys.stderr.write("Error on_data: {}\n".format(e)) 25 | time.sleep(5) 26 | return True 27 | 28 | def on_error(self, status): 29 | #this method will deal with explicit errors from twitter. Theres a complete list of error codes and responses on the twitter API 30 | #we are specifically looking to stop execution only on error 420 - Rate limit 31 | if status == 420: 32 | sys.stderr.write("Rate limit exceeded\n") 33 | return False #stops execution only on fail 34 | else: 35 | sys.stderr.write("Error {}\n".format(status)) 36 | return True 37 | 38 | 39 | def format_filename(fname): 40 | """ Convert fname into a safe string fo a file name. 41 | Return string. 42 | """ 43 | 44 | return ''.join(convert_valid(one_char) for one_char in fname) 45 | 46 | def convert_valid(one_char): 47 | 48 | """ Converts a character into '_' if "invalid". 49 | Return string. 50 | """ 51 | 52 | valid_chars = "-_.%s%s" % (string.ascii_letters, string.digits) 53 | if one_char in valid_chars: 54 | return one_char 55 | else: 56 | return '_' 57 | 58 | # when we run this script we have to provide arguments in the command line, separated by whitespace. 59 | #For example in your CLI you can run: python3 twitter_streaming.py \#popularhashtag1 \#popularhashtag2 search_keyword** 60 | #Stream for about 1 hour and retrieve results in your jsonl file 61 | if __name__ == '__main__': 62 | query = sys.argv[1:] # list of CLI argumentsquery_fname 63 | query_fname = ' '.join(query) # string 64 | auth = get_twitter_auth() 65 | twitter_stream = Stream(auth, CustomListener(query_fname)) 66 | twitter_stream.filter(track=query, is_async=True) 67 | -------------------------------------------------------------------------------- /twitter_cluster_users.py: -------------------------------------------------------------------------------- 1 | import sys 2 | import json 3 | from argparse import ArgumentParser 4 | from collections import defaultdict 5 | from sklearn.feature_extraction.text import TfidfVectorizer 6 | from sklearn.cluster import KMeans 7 | 8 | def get_parser(): 9 | parser = ArgumentParser("Clustering for followers") 10 | parser.add_argument('--filename') #path to the jsonl file that we want to analyze 11 | parser.add_argument('--k', type=int) #number of clusters we want 12 | parser.add_argument('--min-df', type=int, default=2) #minimum document frequency for a feature, default = 2 13 | parser.add_argument('--max-df', type=float, default=0.8) #max document frequency for a feature, default = 0.8 14 | parser.add_argument('--max-features', type=int, default=None) # max number of feature 15 | parser.add_argument('--no-idf', dest='user_idf', default=True, action='store_false') #flags whether we want to switch the idf weight off, using only the tf, idf is used by default 16 | parser.add_argument('--min-ngram', type=int, default=1) # lowebound for ngram to be extracted, default = 1 17 | parser.add_argument('--max-ngram', type=int, default=1) #upperbound for ngram to be extracted, default = 1 18 | return parser 19 | 20 | if __name__ == '__main__': 21 | parser = get_parser() 22 | args = parser.parse_args() 23 | if args.min_ngram > args.max_ngram: 24 | print("Error: 
incorrect value for --min--ngram ({}): it cant be higher than \ 25 | --max--value ({})".format(args.min_ngram, args.max_ngram)) 26 | sys.exit(1) 27 | with open(args.filename) as f: 28 | #load data 29 | 30 | users = [] 31 | for line in f: 32 | profile = json.loads(line) 33 | users.append(profile['description']) 34 | #create vectorizer 35 | vectorizer = TfidfVectorizer(max_df=args.max_df, 36 | min_df=args.min_df, 37 | max_features=args.max_features, 38 | stop_words='english', 39 | ngram_range=(args.min_ngram, args.max_ngram), 40 | use_idf=args.user_idf) 41 | 42 | #fit data 43 | X = vectorizer.fit_transform(users) 44 | print("Data dimensions: {}".format(X.shape)) 45 | 46 | #perform clustering 47 | km = KMeans(n_clusters=args.k) 48 | km.fit(X) 49 | clusters = defaultdict(list) 50 | for i, label in enumerate(km.labels_): 51 | clusters[label].append(users[i]) 52 | 53 | #print 10 user description of this cluster 54 | 55 | for label, description in clusters.items(): 56 | print("--------- Cluster {}".format(label+i)) 57 | for desc in description[:10]: 58 | print(desc) 59 | -------------------------------------------------------------------------------- /twitter_get_ff_csv.py: -------------------------------------------------------------------------------- 1 | import os 2 | import sys 3 | import time 4 | import json, csv 5 | import math 6 | from twitterclient import get_twitter_client 7 | from tweepy import Cursor 8 | 9 | MAX_FRIENDS = 15000 10 | 11 | ''' 12 | Usage of the script 13 | ''' 14 | 15 | def usage(): 16 | print("Usage: ") 17 | print("python {} <".format(sys.argv[0])) 18 | 19 | ''' 20 | Handle paginating results 21 | ''' 22 | def paginate(items, n): 23 | """ 24 | Generate n-sized chunks from Items 25 | 26 | """ 27 | for i in range(0, len(items), n): 28 | yield items[i:i+n] 29 | 30 | ''' 31 | Main logic for retrieving followers/friends 32 | ''' 33 | 34 | if __name__ == '__main__': 35 | ''' 36 | if len(sys.argv) != 2: 37 | usage() 38 | sys.exit(1) 39 | 40 | 41 | screen_name = sys.argv[1] 42 | ''' 43 | client = get_twitter_client() 44 | 45 | max_pages = math.ceil(MAX_FRIENDS/ 5000) 46 | 47 | 48 | # Create directory to store results 49 | ''' 50 | dirname = "Twitter_Profiles/{}".format(screen_name) 51 | try: 52 | os.makedirs(dirname, mode=0o755, exist_ok=True) 53 | except OSError: 54 | print("Directory {} already exists".format(dirname)) 55 | except Exception as e: 56 | print("Error while creating directory {}".format(dirname)) 57 | print(e) 58 | sys.exit(1) 59 | ''' 60 | 61 | 62 | #get followers for a given user 63 | list_of_users = ['1paulowe', 'Reem65850115', '0brainlab'] 64 | #loop through all users and write their followers to csv 65 | for user in list_of_users: 66 | 67 | fname = "data/network-edges.csv" 68 | fieldnames = ['From', 'To'] 69 | with open(fname, 'a') as f: 70 | 71 | csv_writer = csv.DictWriter(f, fieldnames=fieldnames) 72 | 73 | # get a user's followers list 74 | for followers in Cursor(client.followers_ids, screen_name=user).pages(max_pages): 75 | for chunk in paginate(followers, 100): 76 | userFollowers = client.lookup_users(user_ids=chunk) 77 | 78 | for follower in userFollowers: 79 | 80 | #print(len(users)) 81 | 82 | #print(follower.screen_name) 83 | csv_writer.writerow({'From': follower.screen_name, 'To': user}) 84 | 85 | # f.write(json.dumps(user._json)+"\n") 86 | if len(followers) == 5000: 87 | print("More results available. 
Sleeping for 60 seconds to avoid rate limit") 88 | time.sleep(60) 89 | 90 | # get a user's following list 91 | for friends in Cursor(client.friends_ids, screen_name=user).pages(max_pages): 92 | 93 | for chunk in paginate(friends, 100): 94 | userFollowing = client.lookup_users(user_ids=chunk) 95 | for followed in userFollowing: 96 | csv_writer.writerow({'From': user, 'To': followed.screen_name}) 97 | if len(friends) == 5000: 98 | print("More results available. Sleeping for 60 seconds to avoid rate limit") 99 | time.sleep(60) 100 | 101 | f.close() 102 | 103 | ''' 104 | #get friends for a given user 105 | 106 | fname = "Twitter_Profiles/{}/friends.jsonl".format(screen_name) 107 | with open(fname, 'w') as f: 108 | for friends in Cursor(client.friends_ids, screen_name=screen_name).pages(max_pages): 109 | for chunk in paginate(friends, 100): 110 | users = client.lookup_users(user_ids=chunk) 111 | for user in users: 112 | f.write(json.dumps(user._json)+"\n") 113 | if len(friends) == 5000: 114 | print("More results available. Sleeping for 60 seconds to avoid rate limit") 115 | time.sleep(60) 116 | 117 | # get user's profile 118 | fname = "Twitter_Profiles/{}/user_profile.json".format(screen_name) 119 | with open(fname, 'w') as f: 120 | profile = client.get_user(screen_name=screen_name) 121 | f.write(json.dumps(profile._json, indent=4)) 122 | 123 | # https://www.geeksforgeeks.org/convert-json-to-csv-in-python/ 124 | ''' -------------------------------------------------------------------------------- /twitter_influence.py: -------------------------------------------------------------------------------- 1 | import sys 2 | import json 3 | 4 | def usage(): 5 | print('Usage: ') 6 | print("python {} ".format(sys.argv[0])) 7 | 8 | if __name__ == "__main__": 9 | if len(sys.argv) != 3: 10 | usage() 11 | sys.exit(1) 12 | 13 | screen_name1 = sys.argv[1] 14 | screen_name2 = sys.argv[2] 15 | 16 | #Build up a list of followers 17 | followers_file1 = 'Twitter_Profiles/{}/followers.jsonl'.format(screen_name1) 18 | followers_file2 = 'Twitter_Profiles/{}/followers.jsonl'.format(screen_name2) 19 | 20 | with open(followers_file1) as f1, open(followers_file2) as f2: 21 | reach1 = [] 22 | reach2 = [] 23 | for line in f1: 24 | profile = json.loads(line) 25 | reach1.append((profile['screen_name'], profile['followers_count'])) 26 | for line in f2: 27 | profile = json.loads(line) 28 | reach2.append((profile['screen_name'], profile['followers_count'])) 29 | 30 | 31 | #Load basic statistics 32 | profile_file1 = 'Twitter_Profiles/{}/user_profile.json'.format(screen_name1) 33 | profile_file2 = 'Twitter_Profiles/{}/user_profile.json'.format(screen_name2) 34 | with open(profile_file1) as f1, open(profile_file2) as f2: 35 | profile1 = json.load(f1) 36 | profile2 = json.load(f2) 37 | 38 | followers1 = profile1['followers_count'] 39 | followers2 = profile2['followers_count'] 40 | 41 | tweets1 = profile['statuses_count'] 42 | tweets2 = profile['statuses_count'] 43 | 44 | sum_reach1 = sum([x[1] for x in reach1]) #sum up all of a user's followers, followers 45 | sum_reach2 = sum([x[1] for x in reach2]) 46 | avg_followers1 = round(sum_reach1/ followers1, 2) 47 | avg_followers2 = round(sum_reach2/ followers2, 2) 48 | 49 | #Load the timelines for the two users to observe the number of times their tweets have been favorited 50 | timeline_file1 = 'user_timeline_{}.jsonl'.format(screen_name1) 51 | timeline_file2 = 'user_timeline_{}.jsonl'.format(screen_name2) 52 | with open(timeline_file1) as f1, open(timeline_file2) as f2: 53 | 
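# Collect per-tweet favorite and retweet counts from each user's timeline dump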
favorited_count1, retweet_count1 = [], [] 54 | favorited_count2, retweet_count2 = [], [] 55 | for line in f1: 56 | tweet = json.loads(line) 57 | favorited_count1.append(tweet['favorite_count']) 58 | retweet_count1.append(tweet['retweet_count']) 59 | 60 | for line in f2: 61 | tweet = json.loads(line) 62 | favorited_count2.append(tweet['favorite_count']) 63 | retweet_count2.append(tweet['retweet_count']) 64 | 65 | #Average number of favorites and retweets 66 | 67 | avg_favorite1 = round(sum(favorited_count1)/ tweets1, 2) 68 | avg_favorite2 = round(sum(favorited_count2)/ tweets2, 2) #avg favorite per tweet 69 | avg_retweet1 = round(sum(retweet_count1)/ tweets1, 2) 70 | avg_retweet2 = round(sum(retweet_count2)/ tweets2, 2) 71 | favorite_per_user1 = round(sum(favorited_count1)/ followers1, 2) # your avg follower gives u x favorites in all of the likes u have accumulated 72 | favorite_per_user2 = round(sum(favorited_count2)/ followers2, 2) 73 | retweet_per_user1 = round(sum(retweet_count1)/ followers1, 2) # avg retweet per follower 74 | retweet_per_user2 = round(sum(retweet_count2)/ followers2, 2) 75 | 76 | print("------------ Stats for {} ---------------------".format(screen_name1)) 77 | print("{} follwers".format(followers1)) 78 | print("{} users can be reached by 1-degree connections.".format(sum_reach1)) 79 | print("{}'s followers averagely have: {} followers each".format(screen_name1, avg_followers1)) 80 | #print("Favorited {} times ({} favorites per tweet, and {} per user)".format(sum(favorited_count1), avg_favorite1, favorite_per_user1)) 81 | print("Retweeted {} times ({} retweets per tweet, and {} per user)".format(sum(retweet_count1), avg_retweet1, retweet_per_user1)) 82 | 83 | print("---------------------------- Next User ----------------------------") 84 | 85 | print("------------ Stats for {} ---------------------".format(screen_name2)) 86 | print("{} follwers".format(followers2)) 87 | print("{} users can be reached by 1-degree connections.".format(sum_reach2)) 88 | print("{}'s followers averagely have: {} followers each".format(screen_name2, avg_followers2)) 89 | #print("Favorited {} times ({} favorites per tweet, and {} per user)".format(sum(favorited_count2), avg_favorite2, favorite_per_user2)) 90 | print("Retweeted {} times ({} retweets per tweet, and {} per user)".format(sum(retweet_count2), avg_retweet2, retweet_per_user2)) 91 | -------------------------------------------------------------------------------- /Twitter_Profiles/1paulowe/followers.csv: -------------------------------------------------------------------------------- 1 | jeanmarie_johnm 2 | KingJef93973237 3 | yourdonally2 4 | CloudComputin11 5 | OCutetomi 6 | summit_cloud 7 | ArielMunafo 8 | MelonOnly 9 | Ababiorichmond6 10 | AhmedMalekX 11 | BaOkesse 12 | STEPHENEONYIBOR 13 | Rine_A_11 14 | jr_peller 15 | kwvme_poku 16 | Reem65850115 17 | WekaInvest 18 | SHOCKYGH1 19 | Millionton323 20 | _SneakerHouse 21 | TFC_Blog 22 | TwitterDev 23 | Naa_Dedeix 24 | yoavz_ 25 | Ezekiel33572991 26 | 0brainlab 27 | AbigailBrazy 28 | UchescoAugusti2 29 | senanu_t 30 | Discord_Child 31 | ydktweet 32 | MTrendn 33 | Kataals 34 | AinebyonaDaniel 35 | WindowsDocs 36 | EmsonFewdy 37 | Strategy_Gal 38 | LBJJNR1 39 | TanyoScott199 40 | iamDjShadow_SA 41 | andrew_antwi21 42 | j_aryeetey 43 | samuelkumi_ 44 | SireDrew 45 | k_bawura 46 | JustNaarki 47 | SaintJanney 48 | GraceKabukie 49 | JustTestingBB2 50 | _Christhell_ 51 | Computers_MDPI 52 | three_cube 53 | richswappo 54 | ALukman77 55 | freewayTY 56 | TupacKweku 57 | AduniahC 58 | 
PriscitaS 59 | Martyr_Entertai 60 | Ike66600695 61 | TwittGhana 62 | kelvinselassie1 63 | NANAWANZY 64 | Reigner_Boamah 65 | Xsue_ 66 | that_fine_gen 67 | Lipscy_dede 68 | felix_benni 69 | Richard71033697 70 | BismarkAnsahKw1 71 | KUKUMUSCLA 72 | paakofi406 73 | INyrako 74 | ashimiyu7 75 | GhKings 76 | AmaingM 77 | DanielL63644229 78 | PapaVanderpuye 79 | bko_si 80 | iamswill_ 81 | kojo_diamond 82 | IzuekeAnthony 83 | _addotey 84 | iamchrisbill 85 | bbygirldarks 86 | Henry07614909 87 | skillz8figure 88 | AdjoahAggrey 89 | KwasiNobi 90 | Obrempongfrimps 91 | ezinne__am 92 | marveldss 93 | floydft9 94 | ChervyQuame 95 | SemanhyiaPrince 96 | _LEJJ 97 | efua_neizer 98 | Tamarab35507639 99 | Hollison_03 100 | kausijustice 101 | JimmySongcrypto 102 | blackillusionnn 103 | DanielAntwiEff1 104 | okunadetolulop9 105 | kokudon 106 | oo0_0oooo 107 | essuman_nicole 108 | DavidAn53002148 109 | TIS_Media_House 110 | NanaSik29949599 111 | TheRealKKO 112 | qveen_ivy_ 113 | _adehye 114 | xaneasiamah 115 | EricAmpah 116 | d_dcee 117 | amoh_justice 118 | toluwaanimi 119 | ABDULKA62745386 120 | Desi_rogers99 121 | chalewotte 122 | _akrashie 123 | Nsayorku 124 | a_temaboy 125 | MirageWhite2 126 | airport_view 127 | WlgmLouis 128 | dollardollar11 129 | OyinkaAY 130 | Maxkud1990 131 | Charled32 132 | stratulatsimona 133 | dadaboattt 134 | XinZhoi 135 | Jbyronnn 136 | girlsunitedgh 137 | Lucil_lle 138 | 3mmanuelAshun 139 | anfoevi 140 | Im_also_a_We 141 | Acelamar1996 142 | pksims 143 | IdsKaren 144 | sneetsamuell 145 | cantbemont_ 146 | ramiie___ 147 | ArabellaBrown1 148 | who_is_merlin 149 | _utz1 150 | ladypearlbs 151 | _Soundoftriumph 152 | ohamsboy 153 | bts_smithh 154 | japhethdarpoh 155 | Viceroycoby84 156 | FajekMife 157 | AttafuahNana 158 | 1offiza 159 | Gilberte_xo 160 | Mickk_Youtube 161 | Isatuuuu 162 | Metrostraw42 163 | FokalFruits 164 | suramaking 165 | santo_punk1 166 | Santiey__ 167 | ForeverNellie 168 | KFirempong 169 | madgiee_ 170 | ____elss 171 | abena__x 172 | owusu_erin 173 | ZoeSawyerr 174 | notseannnn 175 | tony__poe 176 | clefg012 177 | ante_pipi 178 | madyriss 179 | fashionvillain1 180 | CureAidGhana 181 | Channette_ 182 | serwaa_ya 183 | Kevin_WIJ 184 | Mfonn_JL 185 | MawuenaG_ 186 | RealRamyCarrey 187 | jaredquampah 188 | chantelle_rex 189 | jukusenuattionu 190 | Germ_aine 191 | keziah_xo 192 | XCSTASY00 193 | konadu_aa 194 | cherryberry_360 195 | ___jnn 196 | __heman 197 | essienukunu 198 | mauricequartey_ 199 | Radjiie_ 200 | TheCatInSky 201 | nissi__a 202 | _seyy__ 203 | bettydeposoy 204 | worla06 205 | Nquartey1 206 | swaanzyy 207 | babygirlemmss 208 | pozentsubtdels1 209 | yourdonally 210 | BroDocwood 211 | mchemans 212 | ioneskudowor 213 | pano9 214 | gyasiwa_ 215 | nanay_oa 216 | SamanthaAzu 217 | FionaKufuor 218 | Jackie_tetteh 219 | yun99d 220 | brooklynnnxvo 221 | Adade_XX 222 | small_aryete 223 | Kelionair 224 | mekye6ft 225 | Terianjela 226 | ishi_carreyXVI 227 | ATMBenard 228 | youngbrowny1 229 | Bomaaa_x 230 | _Skate___ 231 | OniwaaFound 232 | Leon_Jamil 233 | dekmadee 234 | ElikplimAbledu 235 | df_antonio97 236 | benjikyem 237 | cedrriic_ 238 | VgGracia 239 | omaridenzel 240 | Basi3005 241 | C_Mendss 242 | tanaakam 243 | brimahprince 244 | Laurethee 245 | mensah__jr 246 | itsmovies 247 | benji_bks 248 | JTFmusic1 249 | REEZOfficial 250 | itsYawkwakwa 251 | thequietson 252 | WVillars 253 | Mayanie_16 254 | AhriaBrihanna 255 | YKorantema_ 256 | amuzu__ 257 | nanabbz 258 | eWe_MaN 259 | kdb100_ 260 | AJ_VII 261 | marylnefelix83 262 | Kwasi_fb 263 | sowahd 264 | danielosei 
265 | _JxstJ 266 | dacrvz 267 | KwamJnr 268 | _Remzy 269 | koBBy___ 270 | kingtorku 271 | Misft88 272 | Gustus_ 273 | AlmightyTrei 274 | FremaSefa 275 | TroyJeffrey_ 276 | samuelina_t 277 | eileen_akorfa 278 | ryanapreala 279 | JacquesDelorme 280 | reggyjamablack 281 | Simply_Melinda 282 | OhenebaQuarshie 283 | trigga_drai 284 | bar___bara 285 | Sunchild_1 286 | Cyrian_HD 287 | Calvin_W 288 | _math_hieu 289 | Akua_kyei_ 290 | kokui_xx 291 | ewube 292 | lordinaa_ 293 | ENAsante 294 | -------------------------------------------------------------------------------- /data/network-edges-demo.csv: -------------------------------------------------------------------------------- 1 | jeanmarie_johnm,1paulowe 2 | KingJef93973237,1paulowe 3 | yourdonally2,1paulowe 4 | CloudComputin11,1paulowe 5 | OCutetomi,1paulowe 6 | summit_cloud,1paulowe 7 | ArielMunafo,1paulowe 8 | MelonOnly,1paulowe 9 | Ababiorichmond6,1paulowe 10 | AhmedMalekX,1paulowe 11 | BaOkesse,1paulowe 12 | STEPHENEONYIBOR,1paulowe 13 | Rine_A_11,1paulowe 14 | jr_peller,1paulowe 15 | kwvme_poku,1paulowe 16 | Reem65850115,1paulowe 17 | WekaInvest,1paulowe 18 | SHOCKYGH1,1paulowe 19 | Millionton323,1paulowe 20 | _SneakerHouse,1paulowe 21 | TFC_Blog,1paulowe 22 | TwitterDev,1paulowe 23 | Naa_Dedeix,1paulowe 24 | yoavz_,1paulowe 25 | Ezekiel33572991,1paulowe 26 | 0brainlab,1paulowe 27 | AbigailBrazy,1paulowe 28 | UchescoAugusti2,1paulowe 29 | senanu_t,1paulowe 30 | Discord_Child,1paulowe 31 | ydktweet,1paulowe 32 | MTrendn,1paulowe 33 | Kataals,1paulowe 34 | AinebyonaDaniel,1paulowe 35 | WindowsDocs,1paulowe 36 | EmsonFewdy,1paulowe 37 | Strategy_Gal,1paulowe 38 | LBJJNR1,1paulowe 39 | TanyoScott199,1paulowe 40 | iamDjShadow_SA,1paulowe 41 | andrew_antwi21,1paulowe 42 | j_aryeetey,1paulowe 43 | samuelkumi_,1paulowe 44 | SireDrew,1paulowe 45 | k_bawura,1paulowe 46 | JustNaarki,1paulowe 47 | SaintJanney,1paulowe 48 | GraceKabukie,1paulowe 49 | JustTestingBB2,1paulowe 50 | _Christhell_,1paulowe 51 | Computers_MDPI,1paulowe 52 | three_cube,1paulowe 53 | richswappo,1paulowe 54 | ALukman77,1paulowe 55 | freewayTY,1paulowe 56 | TupacKweku,1paulowe 57 | AduniahC,1paulowe 58 | PriscitaS,1paulowe 59 | Martyr_Entertai,1paulowe 60 | Ike66600695,1paulowe 61 | TwittGhana,1paulowe 62 | kelvinselassie1,1paulowe 63 | NANAWANZY,1paulowe 64 | Reigner_Boamah,1paulowe 65 | Xsue_,1paulowe 66 | that_fine_gen,1paulowe 67 | Lipscy_dede,1paulowe 68 | felix_benni,1paulowe 69 | Richard71033697,1paulowe 70 | BismarkAnsahKw1,1paulowe 71 | KUKUMUSCLA,1paulowe 72 | paakofi406,1paulowe 73 | INyrako,1paulowe 74 | ashimiyu7,1paulowe 75 | GhKings,1paulowe 76 | AmaingM,1paulowe 77 | DanielL63644229,1paulowe 78 | PapaVanderpuye,1paulowe 79 | bko_si,1paulowe 80 | iamswill_,1paulowe 81 | kojo_diamond,1paulowe 82 | IzuekeAnthony,1paulowe 83 | _addotey,1paulowe 84 | iamchrisbill,1paulowe 85 | bbygirldarks,1paulowe 86 | Henry07614909,1paulowe 87 | skillz8figure,1paulowe 88 | AdjoahAggrey,1paulowe 89 | KwasiNobi,1paulowe 90 | Obrempongfrimps,1paulowe 91 | ezinne__am,1paulowe 92 | marveldss,1paulowe 93 | floydft9,1paulowe 94 | ChervyQuame,1paulowe 95 | SemanhyiaPrince,1paulowe 96 | _LEJJ,1paulowe 97 | efua_neizer,1paulowe 98 | Tamarab35507639,1paulowe 99 | Hollison_03,1paulowe 100 | kausijustice,1paulowe 101 | JimmySongcrypto,1paulowe 102 | blackillusionnn,1paulowe 103 | DanielAntwiEff1,1paulowe 104 | okunadetolulop9,1paulowe 105 | kokudon,1paulowe 106 | oo0_0oooo,1paulowe 107 | essuman_nicole,1paulowe 108 | DavidAn53002148,1paulowe 109 | TIS_Media_House,1paulowe 110 | 
NanaSik29949599,1paulowe 111 | TheRealKKO,1paulowe 112 | qveen_ivy_,1paulowe 113 | _adehye,1paulowe 114 | xaneasiamah,1paulowe 115 | EricAmpah,1paulowe 116 | d_dcee,1paulowe 117 | amoh_justice,1paulowe 118 | toluwaanimi,1paulowe 119 | ABDULKA62745386,1paulowe 120 | Desi_rogers99,1paulowe 121 | chalewotte,1paulowe 122 | _akrashie,1paulowe 123 | Nsayorku,1paulowe 124 | a_temaboy,1paulowe 125 | MirageWhite2,1paulowe 126 | airport_view,1paulowe 127 | WlgmLouis,1paulowe 128 | dollardollar11,1paulowe 129 | OyinkaAY,1paulowe 130 | Maxkud1990,1paulowe 131 | Charled32,1paulowe 132 | stratulatsimona,1paulowe 133 | dadaboattt,1paulowe 134 | XinZhoi,1paulowe 135 | Jbyronnn,1paulowe 136 | girlsunitedgh,1paulowe 137 | Lucil_lle,1paulowe 138 | 3mmanuelAshun,1paulowe 139 | anfoevi,1paulowe 140 | Im_also_a_We,1paulowe 141 | Acelamar1996,1paulowe 142 | pksims,1paulowe 143 | IdsKaren,1paulowe 144 | sneetsamuell,1paulowe 145 | cantbemont_,1paulowe 146 | ramiie___,1paulowe 147 | ArabellaBrown1,1paulowe 148 | who_is_merlin,1paulowe 149 | _utz1,1paulowe 150 | ladypearlbs,1paulowe 151 | _Soundoftriumph,1paulowe 152 | ohamsboy,1paulowe 153 | bts_smithh,1paulowe 154 | japhethdarpoh,1paulowe 155 | Viceroycoby84,1paulowe 156 | FajekMife,1paulowe 157 | AttafuahNana,1paulowe 158 | 1offiza,1paulowe 159 | Gilberte_xo,1paulowe 160 | Mickk_Youtube,1paulowe 161 | Isatuuuu,1paulowe 162 | Metrostraw42,1paulowe 163 | FokalFruits,1paulowe 164 | suramaking,1paulowe 165 | santo_punk1,1paulowe 166 | Santiey__,1paulowe 167 | ForeverNellie,1paulowe 168 | KFirempong,1paulowe 169 | madgiee_,1paulowe 170 | ____elss,1paulowe 171 | abena__x,1paulowe 172 | owusu_erin,1paulowe 173 | ZoeSawyerr,1paulowe 174 | notseannnn,1paulowe 175 | tony__poe,1paulowe 176 | clefg012,1paulowe 177 | ante_pipi,1paulowe 178 | madyriss,1paulowe 179 | fashionvillain1,1paulowe 180 | CureAidGhana,1paulowe 181 | Channette_,1paulowe 182 | serwaa_ya,1paulowe 183 | Kevin_WIJ,1paulowe 184 | Mfonn_JL,1paulowe 185 | MawuenaG_,1paulowe 186 | RealRamyCarrey,1paulowe 187 | jaredquampah,1paulowe 188 | chantelle_rex,1paulowe 189 | jukusenuattionu,1paulowe 190 | Germ_aine,1paulowe 191 | keziah_xo,1paulowe 192 | XCSTASY00,1paulowe 193 | konadu_aa,1paulowe 194 | cherryberry_360,1paulowe 195 | ___jnn,1paulowe 196 | __heman,1paulowe 197 | essienukunu,1paulowe 198 | mauricequartey_,1paulowe 199 | Radjiie_,1paulowe 200 | TheCatInSky,1paulowe 201 | nissi__a,1paulowe 202 | _seyy__,1paulowe 203 | bettydeposoy,1paulowe 204 | worla06,1paulowe 205 | Nquartey1,1paulowe 206 | swaanzyy,1paulowe 207 | babygirlemmss,1paulowe 208 | pozentsubtdels1,1paulowe 209 | yourdonally,1paulowe 210 | BroDocwood,1paulowe 211 | mchemans,1paulowe 212 | ioneskudowor,1paulowe 213 | pano9,1paulowe 214 | gyasiwa_,1paulowe 215 | nanay_oa,1paulowe 216 | SamanthaAzu,1paulowe 217 | FionaKufuor,1paulowe 218 | Jackie_tetteh,1paulowe 219 | yun99d,1paulowe 220 | brooklynnnxvo,1paulowe 221 | Adade_XX,1paulowe 222 | small_aryete,1paulowe 223 | Kelionair,1paulowe 224 | mekye6ft,1paulowe 225 | Terianjela,1paulowe 226 | ishi_carreyXVI,1paulowe 227 | ATMBenard,1paulowe 228 | youngbrowny1,1paulowe 229 | Bomaaa_x,1paulowe 230 | _Skate___,1paulowe 231 | OniwaaFound,1paulowe 232 | Leon_Jamil,1paulowe 233 | dekmadee,1paulowe 234 | ElikplimAbledu,1paulowe 235 | df_antonio97,1paulowe 236 | benjikyem,1paulowe 237 | cedrriic_,1paulowe 238 | VgGracia,1paulowe 239 | omaridenzel,1paulowe 240 | Basi3005,1paulowe 241 | C_Mendss,1paulowe 242 | tanaakam,1paulowe 243 | brimahprince,1paulowe 244 | Laurethee,1paulowe 245 | mensah__jr,1paulowe 
246 | itsmovies,1paulowe 247 | benji_bks,1paulowe 248 | JTFmusic1,1paulowe 249 | REEZOfficial,1paulowe 250 | itsYawkwakwa,1paulowe 251 | thequietson,1paulowe 252 | WVillars,1paulowe 253 | Mayanie_16,1paulowe 254 | AhriaBrihanna,1paulowe 255 | YKorantema_,1paulowe 256 | amuzu__,1paulowe 257 | nanabbz,1paulowe 258 | eWe_MaN,1paulowe 259 | kdb100_,1paulowe 260 | AJ_VII,1paulowe 261 | marylnefelix83,1paulowe 262 | Kwasi_fb,1paulowe 263 | sowahd,1paulowe 264 | danielosei,1paulowe 265 | _JxstJ,1paulowe 266 | dacrvz,1paulowe 267 | KwamJnr,1paulowe 268 | _Remzy,1paulowe 269 | koBBy___,1paulowe 270 | kingtorku,1paulowe 271 | Misft88,1paulowe 272 | Gustus_,1paulowe 273 | AlmightyTrei,1paulowe 274 | FremaSefa,1paulowe 275 | TroyJeffrey_,1paulowe 276 | samuelina_t,1paulowe 277 | eileen_akorfa,1paulowe 278 | ryanapreala,1paulowe 279 | JacquesDelorme,1paulowe 280 | reggyjamablack,1paulowe 281 | Simply_Melinda,1paulowe 282 | OhenebaQuarshie,1paulowe 283 | trigga_drai,1paulowe 284 | bar___bara,1paulowe 285 | Sunchild_1,1paulowe 286 | Cyrian_HD,1paulowe 287 | Calvin_W,1paulowe 288 | _math_hieu,1paulowe 289 | Akua_kyei_,1paulowe 290 | kokui_xx,1paulowe 291 | ewube,1paulowe 292 | lordinaa_,1paulowe 293 | ENAsante,1paulowe 294 | 1paulowe,Reem65850115 295 | 1paulowe,0brainlab 296 | -------------------------------------------------------------------------------- /data/network-edges.csv: -------------------------------------------------------------------------------- 1 | jeanmarie_johnm,1paulowe 2 | KingJef93973237,1paulowe 3 | yourdonally2,1paulowe 4 | CloudComputin11,1paulowe 5 | OCutetomi,1paulowe 6 | summit_cloud,1paulowe 7 | ArielMunafo,1paulowe 8 | MelonOnly,1paulowe 9 | Ababiorichmond6,1paulowe 10 | AhmedMalekX,1paulowe 11 | BaOkesse,1paulowe 12 | STEPHENEONYIBOR,1paulowe 13 | Rine_A_11,1paulowe 14 | jr_peller,1paulowe 15 | kwvme_poku,1paulowe 16 | Reem65850115,1paulowe 17 | WekaInvest,1paulowe 18 | SHOCKYGH1,1paulowe 19 | Millionton323,1paulowe 20 | _SneakerHouse,1paulowe 21 | TFC_Blog,1paulowe 22 | TwitterDev,1paulowe 23 | Naa_Dedeix,1paulowe 24 | yoavz_,1paulowe 25 | Ezekiel33572991,1paulowe 26 | 0brainlab,1paulowe 27 | AbigailBrazy,1paulowe 28 | UchescoAugusti2,1paulowe 29 | senanu_t,1paulowe 30 | Discord_Child,1paulowe 31 | ydktweet,1paulowe 32 | MTrendn,1paulowe 33 | Kataals,1paulowe 34 | AinebyonaDaniel,1paulowe 35 | WindowsDocs,1paulowe 36 | EmsonFewdy,1paulowe 37 | Strategy_Gal,1paulowe 38 | LBJJNR1,1paulowe 39 | TanyoScott199,1paulowe 40 | iamDjShadow_SA,1paulowe 41 | andrew_antwi21,1paulowe 42 | j_aryeetey,1paulowe 43 | samuelkumi_,1paulowe 44 | SireDrew,1paulowe 45 | k_bawura,1paulowe 46 | JustNaarki,1paulowe 47 | SaintJanney,1paulowe 48 | GraceKabukie,1paulowe 49 | JustTestingBB2,1paulowe 50 | _Christhell_,1paulowe 51 | Computers_MDPI,1paulowe 52 | three_cube,1paulowe 53 | richswappo,1paulowe 54 | ALukman77,1paulowe 55 | freewayTY,1paulowe 56 | TupacKweku,1paulowe 57 | AduniahC,1paulowe 58 | PriscitaS,1paulowe 59 | Martyr_Entertai,1paulowe 60 | Ike66600695,1paulowe 61 | TwittGhana,1paulowe 62 | kelvinselassie1,1paulowe 63 | NANAWANZY,1paulowe 64 | Reigner_Boamah,1paulowe 65 | Xsue_,1paulowe 66 | that_fine_gen,1paulowe 67 | Lipscy_dede,1paulowe 68 | felix_benni,1paulowe 69 | Richard71033697,1paulowe 70 | BismarkAnsahKw1,1paulowe 71 | KUKUMUSCLA,1paulowe 72 | paakofi406,1paulowe 73 | INyrako,1paulowe 74 | ashimiyu7,1paulowe 75 | GhKings,1paulowe 76 | AmaingM,1paulowe 77 | DanielL63644229,1paulowe 78 | PapaVanderpuye,1paulowe 79 | bko_si,1paulowe 80 | iamswill_,1paulowe 81 | 
kojo_diamond,1paulowe 82 | IzuekeAnthony,1paulowe 83 | _addotey,1paulowe 84 | iamchrisbill,1paulowe 85 | bbygirldarks,1paulowe 86 | Henry07614909,1paulowe 87 | skillz8figure,1paulowe 88 | AdjoahAggrey,1paulowe 89 | KwasiNobi,1paulowe 90 | Obrempongfrimps,1paulowe 91 | ezinne__am,1paulowe 92 | marveldss,1paulowe 93 | floydft9,1paulowe 94 | ChervyQuame,1paulowe 95 | SemanhyiaPrince,1paulowe 96 | _LEJJ,1paulowe 97 | efua_neizer,1paulowe 98 | Tamarab35507639,1paulowe 99 | Hollison_03,1paulowe 100 | kausijustice,1paulowe 101 | JimmySongcrypto,1paulowe 102 | blackillusionnn,1paulowe 103 | DanielAntwiEff1,1paulowe 104 | okunadetolulop9,1paulowe 105 | kokudon,1paulowe 106 | oo0_0oooo,1paulowe 107 | essuman_nicole,1paulowe 108 | DavidAn53002148,1paulowe 109 | TIS_Media_House,1paulowe 110 | NanaSik29949599,1paulowe 111 | TheRealKKO,1paulowe 112 | qveen_ivy_,1paulowe 113 | _adehye,1paulowe 114 | xaneasiamah,1paulowe 115 | EricAmpah,1paulowe 116 | d_dcee,1paulowe 117 | amoh_justice,1paulowe 118 | toluwaanimi,1paulowe 119 | ABDULKA62745386,1paulowe 120 | Desi_rogers99,1paulowe 121 | chalewotte,1paulowe 122 | _akrashie,1paulowe 123 | Nsayorku,1paulowe 124 | a_temaboy,1paulowe 125 | MirageWhite2,1paulowe 126 | airport_view,1paulowe 127 | WlgmLouis,1paulowe 128 | dollardollar11,1paulowe 129 | OyinkaAY,1paulowe 130 | Maxkud1990,1paulowe 131 | Charled32,1paulowe 132 | stratulatsimona,1paulowe 133 | dadaboattt,1paulowe 134 | XinZhoi,1paulowe 135 | Jbyronnn,1paulowe 136 | girlsunitedgh,1paulowe 137 | Lucil_lle,1paulowe 138 | 3mmanuelAshun,1paulowe 139 | anfoevi,1paulowe 140 | Im_also_a_We,1paulowe 141 | Acelamar1996,1paulowe 142 | pksims,1paulowe 143 | IdsKaren,1paulowe 144 | sneetsamuell,1paulowe 145 | cantbemont_,1paulowe 146 | ramiie___,1paulowe 147 | ArabellaBrown1,1paulowe 148 | who_is_merlin,1paulowe 149 | _utz1,1paulowe 150 | ladypearlbs,1paulowe 151 | _Soundoftriumph,1paulowe 152 | ohamsboy,1paulowe 153 | bts_smithh,1paulowe 154 | japhethdarpoh,1paulowe 155 | Viceroycoby84,1paulowe 156 | FajekMife,1paulowe 157 | AttafuahNana,1paulowe 158 | 1offiza,1paulowe 159 | Gilberte_xo,1paulowe 160 | Mickk_Youtube,1paulowe 161 | Isatuuuu,1paulowe 162 | Metrostraw42,1paulowe 163 | FokalFruits,1paulowe 164 | suramaking,1paulowe 165 | santo_punk1,1paulowe 166 | Santiey__,1paulowe 167 | ForeverNellie,1paulowe 168 | KFirempong,1paulowe 169 | madgiee_,1paulowe 170 | ____elss,1paulowe 171 | abena__x,1paulowe 172 | owusu_erin,1paulowe 173 | ZoeSawyerr,1paulowe 174 | notseannnn,1paulowe 175 | tony__poe,1paulowe 176 | clefg012,1paulowe 177 | ante_pipi,1paulowe 178 | madyriss,1paulowe 179 | fashionvillain1,1paulowe 180 | CureAidGhana,1paulowe 181 | Channette_,1paulowe 182 | serwaa_ya,1paulowe 183 | Kevin_WIJ,1paulowe 184 | Mfonn_JL,1paulowe 185 | MawuenaG_,1paulowe 186 | RealRamyCarrey,1paulowe 187 | jaredquampah,1paulowe 188 | chantelle_rex,1paulowe 189 | jukusenuattionu,1paulowe 190 | Germ_aine,1paulowe 191 | keziah_xo,1paulowe 192 | XCSTASY00,1paulowe 193 | konadu_aa,1paulowe 194 | cherryberry_360,1paulowe 195 | ___jnn,1paulowe 196 | __heman,1paulowe 197 | essienukunu,1paulowe 198 | mauricequartey_,1paulowe 199 | Radjiie_,1paulowe 200 | TheCatInSky,1paulowe 201 | nissi__a,1paulowe 202 | _seyy__,1paulowe 203 | bettydeposoy,1paulowe 204 | worla06,1paulowe 205 | Nquartey1,1paulowe 206 | swaanzyy,1paulowe 207 | babygirlemmss,1paulowe 208 | pozentsubtdels1,1paulowe 209 | yourdonally,1paulowe 210 | BroDocwood,1paulowe 211 | mchemans,1paulowe 212 | ioneskudowor,1paulowe 213 | pano9,1paulowe 214 | gyasiwa_,1paulowe 215 | 
nanay_oa,1paulowe 216 | SamanthaAzu,1paulowe 217 | FionaKufuor,1paulowe 218 | Jackie_tetteh,1paulowe 219 | yun99d,1paulowe 220 | brooklynnnxvo,1paulowe 221 | Adade_XX,1paulowe 222 | small_aryete,1paulowe 223 | Kelionair,1paulowe 224 | mekye6ft,1paulowe 225 | Terianjela,1paulowe 226 | ishi_carreyXVI,1paulowe 227 | ATMBenard,1paulowe 228 | youngbrowny1,1paulowe 229 | Bomaaa_x,1paulowe 230 | _Skate___,1paulowe 231 | OniwaaFound,1paulowe 232 | Leon_Jamil,1paulowe 233 | dekmadee,1paulowe 234 | ElikplimAbledu,1paulowe 235 | df_antonio97,1paulowe 236 | benjikyem,1paulowe 237 | cedrriic_,1paulowe 238 | VgGracia,1paulowe 239 | omaridenzel,1paulowe 240 | Basi3005,1paulowe 241 | C_Mendss,1paulowe 242 | tanaakam,1paulowe 243 | brimahprince,1paulowe 244 | Laurethee,1paulowe 245 | mensah__jr,1paulowe 246 | itsmovies,1paulowe 247 | benji_bks,1paulowe 248 | JTFmusic1,1paulowe 249 | REEZOfficial,1paulowe 250 | itsYawkwakwa,1paulowe 251 | thequietson,1paulowe 252 | WVillars,1paulowe 253 | Mayanie_16,1paulowe 254 | AhriaBrihanna,1paulowe 255 | YKorantema_,1paulowe 256 | amuzu__,1paulowe 257 | nanabbz,1paulowe 258 | eWe_MaN,1paulowe 259 | kdb100_,1paulowe 260 | AJ_VII,1paulowe 261 | marylnefelix83,1paulowe 262 | Kwasi_fb,1paulowe 263 | sowahd,1paulowe 264 | danielosei,1paulowe 265 | _JxstJ,1paulowe 266 | dacrvz,1paulowe 267 | KwamJnr,1paulowe 268 | _Remzy,1paulowe 269 | koBBy___,1paulowe 270 | kingtorku,1paulowe 271 | Misft88,1paulowe 272 | Gustus_,1paulowe 273 | AlmightyTrei,1paulowe 274 | FremaSefa,1paulowe 275 | TroyJeffrey_,1paulowe 276 | samuelina_t,1paulowe 277 | eileen_akorfa,1paulowe 278 | ryanapreala,1paulowe 279 | JacquesDelorme,1paulowe 280 | reggyjamablack,1paulowe 281 | Simply_Melinda,1paulowe 282 | OhenebaQuarshie,1paulowe 283 | trigga_drai,1paulowe 284 | bar___bara,1paulowe 285 | Sunchild_1,1paulowe 286 | Cyrian_HD,1paulowe 287 | Calvin_W,1paulowe 288 | _math_hieu,1paulowe 289 | Akua_kyei_,1paulowe 290 | kokui_xx,1paulowe 291 | ewube,1paulowe 292 | lordinaa_,1paulowe 293 | ENAsante,1paulowe 294 | 1paulowe,jeanmarie_johnm 295 | 1paulowe,vercel 296 | 1paulowe,adamnash 297 | 1paulowe,alejandrocrosa 298 | 1paulowe,impronunciable 299 | 1paulowe,DFInstitute 300 | 1paulowe,UofT_DCSIL 301 | 1paulowe,UofTCompSci 302 | 1paulowe,brian_lovin 303 | 1paulowe,OpenStack 304 | 1paulowe,CloudStack 305 | 1paulowe,dynamodb 306 | 1paulowe,yourdonally2 307 | 1paulowe,mojombo 308 | 1paulowe,babygirlemmss 309 | 1paulowe,AWSCloudFormer 310 | 1paulowe,sebsto 311 | 1paulowe,Code4Africa 312 | 1paulowe,justinarenstein 313 | 1paulowe,dchaplot 314 | 1paulowe,Rine_A_11 315 | 1paulowe,TerryTangYuan 316 | 1paulowe,D2L_ai 317 | 1paulowe,kwvme_poku 318 | 1paulowe,Reem65850115 319 | 1paulowe,TheStoicEmperor 320 | 1paulowe,lexfridman 321 | 1paulowe,UofT 322 | 1paulowe,NandoDF 323 | 1paulowe,goodfellow_ian 324 | 1paulowe,kev_deyoungster 325 | 1paulowe,noahfschr 326 | 1paulowe,danilop 327 | 1paulowe,jeremy_daly 328 | 1paulowe,em__shea 329 | 1paulowe,jbesw 330 | 1paulowe,WekaInvest 331 | 1paulowe,fchollet 332 | 1paulowe,ylecun 333 | 1paulowe,demishassabis 334 | 1paulowe,OpenAI 335 | 1paulowe,jeffbarr 336 | 1paulowe,DataScienceNIG 337 | 1paulowe,AmazonScience 338 | 1paulowe,TwitterAPI 339 | 1paulowe,Naa_Dedeix 340 | 1paulowe,_modelzoo_ 341 | 1paulowe,yoavz_ 342 | 1paulowe,kdnuggets 343 | 1paulowe,kaggle 344 | 1paulowe,0brainlab 345 | 1paulowe,AinebyonaDaniel 346 | 1paulowe,geoffreyhinton 347 | 1paulowe,DeepLearningAI_ 348 | 1paulowe,DeepMind 349 | 1paulowe,facebookai 350 | 1paulowe,StanfordAILab 351 | 1paulowe,StanfordSVL 352 | 
1paulowe,AWS_Edu 353 | 1paulowe,AWS_Gov 354 | 1paulowe,julsimon 355 | 1paulowe,AbigailBrazy 356 | 1paulowe,UchescoAugusti2 357 | 1paulowe,Diop_IFC 358 | 1paulowe,Kataals 359 | 1paulowe,PGelsinger 360 | 1paulowe,spoonen 361 | 1paulowe,AWSstartups 362 | 1paulowe,FT 363 | 1paulowe,AWSOpen 364 | 1paulowe,Discord_Child 365 | 1paulowe,BillGates 366 | 1paulowe,JeffBezos 367 | 1paulowe,karpathy 368 | 1paulowe,AndrewYNg 369 | 1paulowe,senanu_t 370 | 1paulowe,TensorFlow 371 | 1paulowe,PyTorch 372 | 1paulowe,PyTorchPractice 373 | 1paulowe,SpaceX 374 | 1paulowe,NASA 375 | 1paulowe,POTUS45 376 | 1paulowe,BarackObama 377 | 1paulowe,awscloud 378 | 1paulowe,Werner 379 | 1paulowe,ajassy 380 | 1paulowe,IBMcloud 381 | 1paulowe,ShopifyData 382 | 1paulowe,IBMData 383 | 1paulowe,IBMWatson 384 | 1paulowe,SDSC_UCSD 385 | 1paulowe,SuperChristineK 386 | 1paulowe,TwitterU 387 | 1paulowe,TwitterCareers 388 | 1paulowe,TwitterData 389 | 1paulowe,TwitterDev 390 | 1paulowe,jack 391 | 1paulowe,TheCitizenTZ 392 | 1paulowe,Strategy_Gal 393 | 1paulowe,Carnage4Life 394 | 1paulowe,DataSciFact 395 | 1paulowe,deanabb 396 | 1paulowe,kncukier 397 | 1paulowe,cleantechnica 398 | 1paulowe,andrew_antwi21 399 | 1paulowe,j_aryeetey 400 | 1paulowe,samuelkumi_ 401 | 1paulowe,jmkikwete 402 | 1paulowe,MagufuliJP 403 | 1paulowe,Teradata 404 | 1paulowe,cnv0z 405 | 1paulowe,4thFromOurStar 406 | 1paulowe,bko_si 407 | 1paulowe,JustNaarki 408 | 1paulowe,elonmusk 409 | 1paulowe,Harvard 410 | 1paulowe,business 411 | 1paulowe,stevemagness 412 | 1paulowe,three_cube 413 | 1paulowe,DerickOmari 414 | 1paulowe,_addotey 415 | 1paulowe,skillz8figure 416 | 1paulowe,ezinne__am 417 | 1paulowe,floydft9 418 | 1paulowe,_LEJJ 419 | 1paulowe,kokudon 420 | 1paulowe,oo0_0oooo 421 | 1paulowe,TheRealKKO 422 | 1paulowe,EricAmpah 423 | 1paulowe,ishi_carreyXVI 424 | 1paulowe,3mmanuelAshun 425 | 1paulowe,Jackie_tetteh 426 | 1paulowe,nissi__a 427 | 1paulowe,Desi_rogers99 428 | 1paulowe,WlgmLouis 429 | 1paulowe,Skepta 430 | 1paulowe,Jbyronnn 431 | 1paulowe,Lucil_lle 432 | 1paulowe,pksims 433 | 1paulowe,IdsKaren 434 | 1paulowe,MawuenaG_ 435 | 1paulowe,sneetsamuell 436 | 1paulowe,ioneskudowor 437 | 1paulowe,ramiie___ 438 | 1paulowe,_utz1 439 | 1paulowe,bts_smithh 440 | 1paulowe,yung_d3mz 441 | 1paulowe,1offiza 442 | 1paulowe,Gilberte_xo 443 | 1paulowe,Isatuuuu 444 | 1paulowe,suramaking 445 | 1paulowe,Santiey__ 446 | 1paulowe,ForeverNellie 447 | 1paulowe,ValaAfshar 448 | 1paulowe,madgiee_ 449 | 1paulowe,____elss 450 | 1paulowe,abena__x 451 | 1paulowe,owusu_erin 452 | 1paulowe,Leon_Jamil 453 | 1paulowe,ZoeSawyerr 454 | 1paulowe,notseannnn 455 | 1paulowe,tony__poe 456 | 1paulowe,mauricequartey_ 457 | 1paulowe,Channette_ 458 | 1paulowe,serwaa_ya 459 | 1paulowe,Kevin_WIJ 460 | 1paulowe,Radjiie_ 461 | 1paulowe,kanyewest 462 | 1paulowe,Basi3005 463 | 1paulowe,Adade_XX 464 | 1paulowe,Mfonn_JL 465 | 1paulowe,chantelle_rex 466 | 1paulowe,keziah_xo 467 | 1paulowe,cherryberry_360 468 | 1paulowe,___jnn 469 | 1paulowe,__heman 470 | 1paulowe,essienukunu 471 | 1paulowe,brooklynnnxvo 472 | 1paulowe,yourdonally 473 | 1paulowe,gyasiwa_ 474 | 1paulowe,mchemans 475 | 1paulowe,SamanthaAzu 476 | 1paulowe,df_antonio97 477 | 1paulowe,cedrriic_ 478 | 1paulowe,tanaakam 479 | 1paulowe,C_Mendss 480 | 1paulowe,Laurethee 481 | 1paulowe,mzafiakyei3 482 | 1paulowe,benji_bks 483 | 1paulowe,Gustus_ 484 | 1paulowe,REEZOfficial 485 | 1paulowe,thequietson 486 | 1paulowe,kingtorku 487 | 1paulowe,nanabbz 488 | 1paulowe,Mayanie_16 489 | 1paulowe,KwamJnr 490 | 1paulowe,YKorantema_ 491 | 1paulowe,eWe_MaN 492 | 
1paulowe,amuzu__ 493 | 1paulowe,danielosei 494 | 1paulowe,kdb100_ 495 | 1paulowe,AJ_VII 496 | 1paulowe,Kwasi_fb 497 | 1paulowe,sowahd 498 | 1paulowe,_JxstJ 499 | 1paulowe,Kelionair 500 | 1paulowe,dacrvz 501 | 1paulowe,koBBy___ 502 | 1paulowe,ryanapreala 503 | 1paulowe,TroyJeffrey_ 504 | 1paulowe,samuelina_t 505 | 1paulowe,eileen_akorfa 506 | 1paulowe,AlmightyTrei 507 | 1paulowe,trigga_drai 508 | 1paulowe,Simply_Melinda 509 | 1paulowe,bar___bara 510 | 1paulowe,Sunchild_1 511 | 1paulowe,Akua_kyei_ 512 | 1paulowe,Calvin_W 513 | 1paulowe,_math_hieu 514 | 1paulowe,kokui_xx 515 | 1paulowe,ewube 516 | 1paulowe,lordinaa_ 517 | 1paulowe,ENAsante 518 | 1paulowe,Reem65850115 519 | Reem65850115,Independent 520 | Reem65850115,NatGeoTravel 521 | Reem65850115,FT 522 | Reem65850115,TheEconomist 523 | Reem65850115,TIME 524 | Reem65850115,nytimes 525 | Reem65850115,Forbes 526 | Reem65850115,AppleTV 527 | Reem65850115,BBCWorld 528 | Reem65850115,Fact 529 | Reem65850115,Apple 530 | Reem65850115,elonmusk 531 | Reem65850115,1paulowe 532 | 1paulowe,0brainlab 533 | 0brainlab,dpkingma 534 | 0brainlab,OriolVinyalsML 535 | 0brainlab,arkrause 536 | 0brainlab,BachFrancis 537 | 0brainlab,UofTCompSci 538 | 0brainlab,cjmaddison 539 | 0brainlab,kswersk 540 | 0brainlab,maithra_raghu 541 | 0brainlab,dchaplot 542 | 0brainlab,scottniekum 543 | 0brainlab,danilop 544 | 0brainlab,OpenAI 545 | 0brainlab,StanfordHAI 546 | 0brainlab,MIT_CSAIL 547 | 0brainlab,1paulowe 548 | 0brainlab,Harvard 549 | 0brainlab,Teradata 550 | 0brainlab,TwitterData 551 | 0brainlab,SDSC_UCSD 552 | 0brainlab,IBMData 553 | 0brainlab,ajassy 554 | 0brainlab,TensorFlow 555 | 0brainlab,PyTorch 556 | 0brainlab,karpathy 557 | 0brainlab,AndrewYNg 558 | 0brainlab,BillGates 559 | 0brainlab,JeffBezos 560 | 0brainlab,AWSstartups 561 | 0brainlab,julsimon 562 | 0brainlab,AWS_Edu 563 | 0brainlab,AWS_Gov 564 | 0brainlab,StanfordSVL 565 | 0brainlab,StanfordAILab 566 | 0brainlab,facebookai 567 | 0brainlab,DeepMind 568 | 0brainlab,DeepLearningAI_ 569 | 0brainlab,geoffreyhinton 570 | --------------------------------------------------------------------------------
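The edge lists above are header-less From,To rows produced by twitter_get_ff_csv.py, where each row means "From follows To". A minimal sketch for loading one of them into a directed graph (assuming networkx, which twitter_conversations.py already uses) could be:

```python
# Sketch: build a directed follower graph from data/network-edges.csv.
# Each row is "From,To", meaning the From account follows the To account.
import csv
import networkx as nx

graph = nx.DiGraph()
with open('data/network-edges.csv') as f:
    for row in csv.reader(f):
        if len(row) == 2:   # skip blank or malformed lines
            graph.add_edge(row[0], row[1])

print("{} accounts, {} follow relationships".format(graph.number_of_nodes(), graph.number_of_edges()))
```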