├── clearcache.py
├── badcron.sh
├── all_legendary.py
├── dedup.py
├── banlist.txt
├── LICENSE.md
├── initialize.py
├── gracefulstats.py
├── deckstats.py
├── README.md
├── kmeans.py
├── deckstatscom.py
├── mtgsalvation.py
├── tappedout.py
├── reddit.py
├── api.py
├── core.py
└── decks_sample.json
/clearcache.py:
--------------------------------------------------------------------------------
1 |
2 |
3 | import core
4 |
5 |
6 | r = core.get_redis()
7 |
8 | c = 0
9 | for k in r.keys('CACHE_*'):
10 | print 'DEL %s' % k
11 | c += 1
12 | r.delete(k)
13 |
14 | print 'deleted', c, 'keys.'
15 |
16 |
--------------------------------------------------------------------------------
/badcron.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | while :
4 | do
5 | echo "starting tasks " `date`
6 | python dedup.py
7 |
8 | python mtgsalvation.py
9 |
10 | sh backup.sh staging.edhrec.com
11 |
12 | echo "done with tasks" `date`
13 |
14 | sleep 3600 # run every hour
15 |
16 | done
17 |
--------------------------------------------------------------------------------
/all_legendary.py:
--------------------------------------------------------------------------------
1 |
2 | # go through AllCards.json and print out a list of all of the legendary creatures
3 |
4 | import json
5 |
6 | for card, conts in json.load(open('AllCards.json')).items():
7 | if not conts.has_key(u'types'): continue
8 | if not conts.has_key(u'supertypes'): continue
9 |
10 | if u'Legendary' in conts[u'supertypes'] and u'Creature' in conts[u'types']:
11 | print card.encode('utf-8')
12 |
13 |
14 |
--------------------------------------------------------------------------------
/dedup.py:
--------------------------------------------------------------------------------
1 | import core
2 | import json
3 |
4 | r=core.get_redis()
5 |
6 | strs = r.keys('DECKS_*')
7 |
8 |
9 | for s in strs:
10 | ds = core.get_decks(s, dedup=True)
11 | dds = core.dedup_decks(ds)
12 |
13 | r.delete('DDD_' + s)
14 | for deck in dds:
15 | r.rpush('DDD_' + s, json.dumps(deck))
16 |
17 | r.delete('OLD_' + s)
18 | r.rename(s, 'OLD_' + s)
19 | r.rename('DDD_' + s, s)
20 |
21 |
22 | print 'Removed %d decks (%d - %d)' % (r.llen('OLD_' + s) - r.llen(s), r.llen('OLD_' + s), r.llen(s))
23 |
--------------------------------------------------------------------------------
/banlist.txt:
--------------------------------------------------------------------------------
1 | Ancestral Recall
2 | Balance
3 | Biorhythm
4 | Black Lotus
5 | Coalition Victory
6 | Channel
7 | Emrakul, the Aeons Torn
8 | Fastbond
9 | Gifts Ungiven
10 | Karakas
11 | Library of Alexandria
12 | Limited Resources
13 | Sundering Titan
14 | Primeval Titan
15 | Sylvan Primordial
16 | Rofellos, Llanowar Emissary
17 | Erayo, Soratami Ascendant
18 | Mox Sapphire
19 | Mox Ruby
20 | Mox Pearl
21 | Mox Emerald
22 | Mox Jet
23 | Painter's Servant
24 | Panoptic Mirror
25 | Protean Hulk
26 | Recurring Nightmare
27 | Sway of the Stars
28 | Time Vault
29 | Time Walk
30 | Tinker
31 | Tolarian Academy
32 | Upheaval
33 | Yawgmoth's Bargain
34 | Griselbrand
35 | Worldfire
36 | Trade Secrets
37 | Braids, Cabal Minion
--------------------------------------------------------------------------------
/LICENSE.md:
--------------------------------------------------------------------------------
1 | The MIT License (MIT)
2 |
3 | Copyright (c) 2014 Donald P. Miner
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/initialize.py:
--------------------------------------------------------------------------------
1 | import core
2 | import json
3 | import logging
4 |
5 | def load_cards_from_json(file_path):
6 | r = core.get_redis()
7 |
8 | writecount = 0
9 |
10 | # Go through each card in the set...
11 | for card, conts in json.load(open(file_path)).items():
12 | # If it has names, it's a split card or fuse card or something
13 | if conts.has_key('names'):
14 | cardname = core.sanitize_cardname('/'.join(conts['names'])).lower()
15 |
16 | for name in conts['names']:
17 | r.hset('CARDS_JSON', core.sanitize_cardname(name), json.dumps(conts))
18 |
19 | r.hset('CARDS_JSON', core.sanitize_cardname(cardname), json.dumps(conts))
20 | r.hset('CARDS_JSON', core.sanitize_cardname(cardname.replace('/', ' // ')), json.dumps(conts))
21 | r.hset('CARDS_JSON', core.sanitize_cardname(cardname.replace('/', ' / ')), json.dumps(conts))
22 |
23 | else:
24 | cardname = core.sanitize_cardname(conts['name']).lower()
25 |
26 | r.hset('CARDS_JSON', core.sanitize_cardname(cardname), json.dumps(conts))
27 |
28 | writecount += 1
29 |
30 | logging.debug('We just wrote ' + str(writecount) + ' card entries into Redis.')
31 |
32 | load_cards_from_json('AllCards.json')
33 |
34 | #for deck in open('decks_sample.json').readlines():
35 | # core.add_deck(json.loads(deck))
36 |
37 | #for cc in [ core.sanitize_cardname(c) for c in open('banlist.txt').read().strip().split('\n') ]:
38 | # core.get_redis().sadd('BANNED', cc)
39 |
--------------------------------------------------------------------------------
/gracefulstats.py:
--------------------------------------------------------------------------------
1 | import re
2 | import json
3 | import urllib2
4 | import core
5 | import logging  # needed by the HTTPError handler in get_deck()
6 | URL_PATTERN = re.compile('.*(https?://(?:www\.)?(?:beta\.)?gracefulstats\.com/deck/view/([0-9]+)).*')
7 |
8 | name = "gracefulstats"
9 |
10 |
11 | # Given a gracefulstats deck ID, get me a deck dict with:
12 | # the commander, the deck contents (cards), and the date the deck was created
13 | def get_deck(id):
14 | id = str(id)
15 |
16 | url = 'http://api.gracefulstats.com/1.0/deck/view?id=' + id + '&cards=true'
17 | try:
18 | con = core.urlopen(url)
19 | except urllib2.HTTPError as e:
20 | logging.warning('Someone posted a bad URL: ' + url + ' (%s) ' % str(e))
21 | return None
22 |
23 | deck = set()
24 |
25 | deckObject = json.loads(con)
26 |
27 | colorIdentity = deckObject['deck']['color_identity']
28 | name = deckObject['deck']['name']
29 |
30 | deckFormat = deckObject['deck']['format']['name']
31 |
32 | if (deckFormat != 'Commander'):
33 | raise ValueError("This isn't a commander deck, try to change the type to commander")
34 |
35 | commander = deckObject['deck']['commander']['name']
36 |
37 | for card in deckObject['deck']['cards']:
38 | deck.add(core.sanitize_cardname(card['name']))
39 |
40 | out_deck = {
41 | 'commander': core.sanitize_cardname(commander),
42 | 'cards': sorted(list(deck)),
43 | 'date': deckObject['deck']['created'],
44 | 'ref': 'gracefulstats'
45 | }
46 |
47 | return out_deck
48 |
49 | def scrapedeck(url_str):
50 | m = URL_PATTERN.match(url_str)
51 |
52 | if m is None:
53 | raise ValueError("This doesn't seem to be a valid gracefulstats url")
54 |
55 | return get_deck(m.group(2))
56 |
57 |
58 | #print scrapedeck('http://www.gracefulstats.com/deck/view/9349')
59 |
--------------------------------------------------------------------------------
/deckstats.py:
--------------------------------------------------------------------------------
1 | import core
2 | import datetime
3 | import json
4 |
5 |
6 |
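# tally(decks) averages, across the given list of deck dicts, the counts of each card
# type, the mana curve, the color counts, and the number of nonland cards, and returns
# them in one stats dict ('lands' is derived as 99 - average nonlands, i.e. it assumes
# 99-card Commander lists).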
7 | def tally(decks):
8 | types = {u'Creature' : 0, u'Enchantment' : 0, u'Sorcery' : 0, \
9 | u'Instant' : 0, u'Artifact' : 0, u'Planeswalker' : 0}
10 | curve = {'0':0, '1':0, '2':0, '3':0, '4':0, '5':0, '6':0, '7':0, '8+':0}
11 | colors= {u'Red' : 0, u'Blue' : 0, u'Green' : 0, u'White' : 0, u'Black' : 0}
12 |
13 |
14 | nonland_counts = []
15 | c = 0
16 | for deck in decks:
17 | c += 1
18 |
19 | nonlands = 0
20 | for card in deck['cards']:
21 | cd = core.lookup_card(card)
22 |
23 | if cd is None:
24 | continue
25 |
26 | if not 'Land' in cd['types']:
27 | nonlands += 1
28 |
29 | for t in cd['types']:
30 | if not t in types.keys(): continue
31 | types[t] += 1
32 |
33 | if cd.has_key('cmc'):
34 | if cd['cmc'] >= 8:
35 | curve['8+'] += 1
36 | else:
37 | curve[str(cd['cmc'])] += 1
38 |
39 | if cd.has_key('colors'):
40 | if u'Land' in cd['types']: continue
41 | for col in cd['colors']:
42 | colors[col] += 1
43 | nonland_counts.append(nonlands)
44 |
45 | for key in types:
46 | types[key] /= c
47 | for key in curve:
48 | curve[key] /= c
49 | for key in colors:
50 | colors[key] /= c
51 | nonland_average = sum(nonland_counts) / len(nonland_counts)
52 |
53 | out = {}
54 | out['types'] = types
55 | out['curve'] = sorted(curve.items())
56 | out['colors'] = colors
57 | out['nonlands'] = nonland_average
58 | out['lands'] = 99 - nonland_average
59 | return out
60 |
61 | def get_global_stats():
62 | out = tally(core.get_all_decks())
63 | return out
64 |
65 | def get_commander_stats(commander):
66 |
67 | ds = []
68 | for deck in core.get_decks(core.color_identity(commander)):
69 | if deck['commander'] == core.sanitize_cardname(commander):
70 | ds.append(deck)
71 |
72 | out = tally(ds)
73 | out['commander'] = core.cap_cardname(commander)
74 |
75 | return out
76 |
77 |
78 | #print get_commander_stats('omnath, locus of mana')
79 | #print get_commander_stats('Mayael the Anima')
80 | #print get_commander_stats('animar, soul of elements')
81 |
82 |
83 |
84 | #r = core.get_redis()
85 | #r.rename('STATS_GLOBAL', 'OLD_STATS_GLOBAL')
86 | #r.set('STATS_GLOBAL', json.dumps(get_global_stats()))
87 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | EDHREC is a reddit bot that provides card recommendations for EDH (Commander) decks.
2 | It is built in Python, uses PRAW to interact with Reddit, and stores its data in Redis.
3 | It generates recommendations with an approach called collaborative filtering:
4 | it finds decks similar to yours and suggests the cards they play that yours is missing.
5 |
6 | DISCLAIMER: THIS CODE WILL POST TO REDDIT LIVE. PLEASE DON'T SPAM PEOPLE UNLESS YOU KNOW WHAT YOU ARE DOING.
7 |
8 | Original author: Donald Miner (/u/orangeoctopus)
9 |
10 | This code is licensed under The MIT License, which basically allows you to do whatever you want with it
11 | as long as you keep the original copyright around. I picked this license because I wanted to be flexible
12 | and encourage people to play with the code.
13 |
14 | Copyright 2014 Donald Miner
15 |
16 |
17 | Installation instructions
18 | =========================
19 |
20 | Check out the edhrec repository or download the source from https://github.com/donaldpminer/edhrec.git
21 | You should see files like reddit.py, tappedout.py
22 |
23 |
24 |
25 | Install the following dependencies used by the edhrec python program:
26 |
27 | PRAW is the reddit API module
28 | $ pip install praw
29 |
30 | The redis module... talks to redis
31 | $ pip install redis
32 |
33 |
34 |
35 | Get the latest stable release of redis, build it, then start it:
36 | http://redis.io/download
37 | Redis is an in-memory data store that edhrec uses
38 | ... or on some distributions you should be able to install the "redis-server" and "redis-cli" packages
39 |
40 |
41 |
42 | Download AllCards.json from http://mtgjson.com/
43 | $ curl http://mtgjson.com/json/AllCards.json > AllCards.json
44 | Make sure you get AllCards.json not AllSets.json
45 | Put this file in the same directory as reddit.py, core.py, etc.
46 |
47 |
48 |
49 | If you are running for the first time, you'll need to run initialize.py
50 | $ python initialize.py
51 | DO NOT continue if you get any errors. Things to check if this doesn't work:
52 | - is redis running?
53 | - is banlist.txt there?
54 | - is AllCards.json good to go?
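For example, one quick way to check that redis is up and answering (core.py expects it
on localhost:6379, the default) is:
   $ redis-cli ping
   PONG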
55 |
56 |
57 |
58 |
59 | Notice there is a file called "decks_sample.json".
60 | This shows you, by example, how a deck is formatted. It's up to you to go find deck data yourself...
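As a rough sketch (field names as produced by the scrapers in this repo; the card list
is shortened and the URL is just a placeholder), each line of that file is one JSON
object along the lines of:
   {"commander": "mayael the anima", "cards": ["birds of paradise", "boros signet", "..."], "date": 735546, "ref": "tappedout", "url": "http://tappedout.net/mtg-decks/xxxx"}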
61 |
62 |
63 |
64 |
65 | Create your login information for a reddit account in a file called login.txt.
66 | Put this on a single line, separated by a space. Username on the left, password on the right.
67 | For example:
68 | login.txt:
69 | BotAccount524 hunter2
70 |
71 |
72 |
73 | Make sure TESTING = True at the top of reddit.py or you are about to spam a bunch of people.
74 | The TESTING flag makes edhrec run in a mock manner: it doesn't actually post anything,
75 | but it will show you what it would be doing in the logs.
76 |
77 |
78 |
79 | Run the bot and hope for the best:
80 | $ python reddit.py
81 |
82 |
83 |
84 | Testing
85 | ======================================
86 |
87 | Please please please please please do not run this bot in /r/edh unless everyone is cool with it.
88 | We don't want a ton of bots spamming the subreddit. I'm always sensitive about this.
89 |
90 | Go wild in /r/edhrec
91 | You can change which subreddits your bot scans by changing this line in reddit.py
92 | subreddit = PRAW.get_subreddit('edhrec+edh+competitiveedh').get_new(limit=sublimit)
93 |
94 |
95 |
96 |
97 |
98 |
99 |
100 |
--------------------------------------------------------------------------------
/kmeans.py:
--------------------------------------------------------------------------------
1 | import numpy
2 | import core
3 | import sklearn.cluster
4 | import deckstats
5 | import random
6 |
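# kmeans(cmdr, k) clusters all of the stored decks for a given commander into k
# archetypes by running scikit-learn's KMeans over 0/1 card-presence vectors (lands
# and the commander itself are left out of the vectors). It returns a list of k dicts
# (each with its defining cards, top recommendations, deck count/percentage and
# tally() stats), sorted by the share of decks that fell into each cluster.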
7 | def kmeans(cmdr, k=4):
8 |
9 | # random.seed(52485)
10 |
11 | cmdr = core.sanitize_cardname(cmdr)
12 |
13 | card_to_idx = {}
14 | idx_to_card = {}
15 | dims = None
16 |
17 | decks = []
18 |
19 | i = 0
20 | for deck in core.get_all_decks():
21 | if deck['commander'] != cmdr:
22 | continue
23 |
24 | for card in deck['cards']:
25 | if card in ['island', 'swamp', 'mountain', 'forest', 'plains', cmdr]:
26 | continue
27 |
28 | lo = core.lookup_card(card)
29 | if lo is None or 'Land' in lo['types']: continue
30 |
31 | if card_to_idx.has_key(card): continue
32 |
33 | card_to_idx[card] = i
34 | idx_to_card[i] = card
35 | i += 1
36 |
37 | ll = numpy.zeros(i, dtype=int)
38 |
39 | idxs = []
40 | for card in deck['cards']:
41 | try:
42 | idxs.append(card_to_idx[card])
43 | except KeyError:
44 | continue
45 |
46 | for idx in idxs:
47 | ll[idx] = 1
48 |
49 | decks.append(ll)
50 |
51 | for idx, deck in enumerate(decks):
52 | decks[idx].resize(i, refcheck=False)
53 |
54 | decks = numpy.array(decks, dtype=int)
55 |
56 | kmc = sklearn.cluster.KMeans(n_clusters=k, init='k-means++', n_init=25, max_iter=300, tol=0.000001, precompute_distances=True, verbose=0, random_state=None, n_jobs=1)
57 |
58 | kmc.fit(decks)
59 |
60 | clusters = [ [] for i in range(k) ]
61 |
62 | out = []
63 |
64 | for idx, deck in enumerate(decks):
65 | clusters[kmc.labels_[idx]].append([idx_to_card[idx] for idx, v in enumerate(deck) if v == 1])
66 |
67 | for idx, cluster in enumerate(kmc.cluster_centers_):
68 | outc = {}
69 |
70 | sumdiff = sum([ cluster - other for other in kmc.cluster_centers_ ])
71 | defining = sorted( enumerate(sumdiff), key=lambda x: x[1], reverse=True)[:12]
72 | defining = [ {'score' : val, 'card_info' : {'name' : core.lookup_card(idx_to_card[jdx])['name'], \
73 | 'types' : core.lookup_card(idx_to_card[jdx])['types'], \
74 | 'colors' : core.lookup_card(idx_to_card[jdx]).get('colors', []), \
75 | 'cmc' : core.lookup_card(idx_to_card[jdx]).get('cmc', 0) } } for jdx, val in defining ]
76 |
77 | topc = sorted( [(val, idx_to_card[jdx] ) for jdx, val in enumerate(cluster)], reverse=True)[:125]
78 | topc = [ {'score' : val, 'card_info' : {'name' : core.lookup_card(card)['name'], \
79 | 'types' : core.lookup_card(card)['types'], \
80 | 'colors' : core.lookup_card(card).get('colors', []), \
81 | 'cmc' : core.lookup_card(card).get('cmc', 0) } } for val, card in topc ]
82 |
83 | outc['defining'] = defining
84 | outc['recs'] = topc
85 |
86 | outc['numdecks'] = len(clusters[idx])
87 | outc['percentdecks'] = int( len(clusters[idx]) / float(len(decks)) * 100 )
88 | outc['commander'] = cmdr
89 | outc['stats'] = deckstats.tally([ {'cards' : d } for d in clusters[idx] ])
90 | out.append(outc)
91 |
92 | return sorted(out, key=lambda x: x['percentdecks'], reverse=True)
93 |
94 |
95 |
96 |
97 |
--------------------------------------------------------------------------------
/deckstatscom.py:
--------------------------------------------------------------------------------
1 | import urllib2
2 | import core
3 | import logging
4 | import datetime
5 | import random
6 | import re
7 |
8 |
9 | EXPORT_APPEND = '?export_dec=1'
10 |
11 | URL_PATTERN = re.compile('.*(https?://w?w?w?.?deckstats.net/decks/[0-9]+/[0-9]+-.*?/).*')
12 | URL_PATTERN2 = re.compile('.*(https?://w?w?w?.?deckstats.net/deck-[0-9]+-[0-9a-f]+.html).*')
13 |
14 |
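# guess_commander() picks a commander for a pool of cards when none was given: it keeps
# the legendary creatures whose color identity matches the combined color identity of
# the pool, and if several remain it breaks the tie by how many words the card's name
# shares with the deck title text passed in.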
15 | def guess_commander(cards, text=''):
16 | text = text.lower().strip().replace('and', '').replace('or', '').replace('of', '').replace('the', '')
17 | text = ''.join( c for c in text if c.isalpha() or c == ' ' )
18 |
19 | candidates = []
20 | colors = set()
21 |
22 | for cardname in cards:
23 | card = core.lookup_card(cardname)
24 |
25 | if card is None:
26 | logging.warn('ignoring this card %s because i couldnt find it' % cardname)
27 | continue
28 |
29 | try:
30 | if 'Legendary' in card['supertypes'] and 'Creature' in card['types']:
31 | candidates.append(cardname)
32 |
33 | colors = colors.union(set(core.color_identity(cardname)))
34 | except KeyError:
35 | continue
36 |
37 | colors = sorted(colors)
38 |
39 | candidates = [ cardname for cardname in candidates if core.color_identity(cardname) == colors ]
40 |
41 | if len(candidates) == 0:
42 | raise ValueError("There is no good commander option for this pool of cards")
43 |
44 | if len(candidates) == 1:
45 | return candidates[0]
46 |
47 | wordmatch = []
48 | for cardname in candidates:
49 | ncardname = ''.join( c for c in cardname if c.isalpha() or c == ' ' )
50 | tokens = [ k.rstrip('s') for k in ncardname.split() ]
51 | texttokens = [ k.rstrip('s') for k in text.split() ]
52 | logging.debug(str(tokens) + ' vs. ' + str(texttokens) + ' (word match)')
53 | c = len( [t for t in tokens if t.rstrip('s') in texttokens] )
54 | wordmatch.append((c, cardname))
55 |
56 | wordmatch.sort(reverse=True)
57 |
58 | logging.debug("There are multiple candidates, these are the scores: %s" % str(wordmatch))
59 |
60 | return wordmatch[0][1]
61 |
62 | def scrapedeck(url_str):
63 | logging.debug('attempting to scrape the deckstats url: %s ' % url_str)
64 |
65 | url_fetch = url_str + EXPORT_APPEND
66 |
67 | logging.debug("going to go fetch '%s'" %url_fetch)
68 |
69 | try:
70 | content = urllib2.urlopen(url_fetch).readlines()
71 | except:
72 | raise ValueError("Invalid URL '%s'" % url_str)
73 |
74 | text = content[0][len('//NAME: '):-len('from DeckStats.net') - 2]
75 | logging.debug('The name of this deck is: %s' % text)
76 |
77 | cards = set()
78 | sideboard = set()
79 | for line in content:
80 | line = line.split('//')[0]
81 | line = line.split('#')[0]
82 | line = line.strip()
83 |
84 | if len(line) == 0:
85 | continue
86 |
87 | if line.startswith('SB:'):
88 | sideboard.add(core.sanitize_cardname(line.split(' ', 2)[2]))
89 | line = line[4:]
90 |
91 | if not line[0] in '0123456789':
92 | raise ValueError("This isn't a valid line of the form '# Card Name': %s " % line)
93 |
94 | cardname = core.sanitize_cardname(line.split(' ', 1)[1])
95 |
96 |
97 | cards.add(cardname)
98 |
99 | commander = None
100 | if len(sideboard) == 1:
101 | cardname = list(sideboard)[0]
102 | card = core.lookup_card(cardname)
103 |
104 | if card.has_key('supertypes') and 'Legendary' in card['supertypes']:
105 | commander = list(sideboard)[0]
106 |
107 | if commander is None:
108 | commander = guess_commander(cards, text)
109 |
110 | out = {}
111 |
112 | out['url'] = url_str
113 | out['scrapedate'] = str(datetime.datetime.now())
114 | out['commander'] = commander
115 | out['cards'] = sorted( cards )
116 | out['ref'] = 'deckstats'
117 |
118 |
119 | return out
120 |
121 | #print scrapedeck('http://deckstats.net/decks/11763/98275-athreos-god-of-passage/en')
122 | #print scrapedeck('http://deckstats.net/decks/20915/121652-mayael-lords-and-ladies/en')
123 |
--------------------------------------------------------------------------------
/mtgsalvation.py:
--------------------------------------------------------------------------------
1 |
2 |
3 | # a deck is made out of:
4 | # url let's make this T/O specific
5 | # mtgsalvation <-- this is the new URL
6 | # ip of the person who submitted it
7 | # scrapedate 2014-11-08 01:56:25.872182
8 | # commander "taniwha"
9 | # cards cleansed names
10 | # ref mtgsalvation
11 |
12 | import urllib2
13 | import BeautifulSoup as bs
14 | import json
15 | import core
16 | import datetime
17 | import logging
18 | import time
19 |
20 | def scrape_deck(url_str):
21 | logging.debug('scraping a deck for %s' % url_str)
22 |
23 | content = urllib2.urlopen(url_str).read()
24 | parsed = bs.BeautifulSoup(content)
25 | tables = parsed.findAll('table')
26 |
27 | deck = []
28 | # find the deck
29 | for t in tables:
30 | attrs = dict(t.attrs)
31 |         if attrs.get('class') != u'deck':  # .get() so tables with no class attribute are skipped rather than raising KeyError
32 | continue
33 |
34 | data = json.loads(attrs['data-card-list'])
35 |
36 | num_cards = 0
37 | for card in data['Deck']:
38 | num_cards += card['Qty']
39 | deck.append(core.sanitize_cardname(card['CardName']))
40 |
41 | if num_cards < 95 or num_cards > 102:
42 | # raise ValueError("This deck has %d cards... that's bad." % num_cards)
43 | pass
44 |
45 | if not core.lookup_card(deck[0]).has_key(u'supertypes') or not u'Legendary' in core.lookup_card(deck[0])[u'supertypes']:
46 | raise ValueError("The first card in this deck is not legendary.")
47 |
48 | break
49 | else:
50 | raise ValueError("I couldn't find a deck in this post")
51 |
52 | out = {}
53 | out['url'] = url_str
54 | out['mtgsalvation'] = url_str
55 | out['date'] = datetime.datetime.now().toordinal()
56 | out['scrapedate'] = str(datetime.datetime.now())
57 | out['commander'] = deck[0]
58 | out['cards'] = sorted(deck)
59 | out['ref'] = 'mtgsalvation'
60 |
61 | return out
62 |
63 |
64 | def frontpage(page=1):
65 | url = 'http://www.mtgsalvation.com/forums/the-game/commander-edh/multiplayer-commander-decklists?page=%d' % page
66 |
67 | logging.debug('Looking at page %d of the multiplayer decklists' % page)
68 |
69 | content = urllib2.urlopen(url).read()
70 | parsed = bs.BeautifulSoup(content)
71 | anchors = parsed.findAll('a')
72 |
73 | decklinks = []
74 | for a in anchors:
75 | attrs = dict(a.attrs)
76 | if not attrs.has_key('href'):
77 | continue
78 |
79 | href = attrs['href'].split('?',1)[0]
80 |
81 | if not '/forums/the-game/commander-edh/multiplayer-commander-decklists/' in href:
82 | continue
83 |
84 | if href in decklinks:
85 | continue
86 |
87 | decklinks.append(href)
88 |
89 | return decklinks
90 |
91 | def frontpages(startpage=1, endpage=100):
92 | outlinks = []
93 |     for i in range(endpage, startpage - 1, -1):  # walk from endpage down to startpage, inclusive
94 | for ol in frontpage(i):
95 | yield ol
96 |
97 |
98 | def onetimescrape():
99 | for link in frontpages(startpage=1, endpage=100):
100 | try:
101 | url = 'http://www.mtgsalvation.com' + link
102 |
103 | cachekey = 'CACHE_MTGSALVATION_%s' % url
104 | if not core.get_redis().get(cachekey) is None:
105 | continue
106 |
107 | core.get_redis().set(cachekey, str(datetime.datetime.now()), ex=60*60*4) # 4 hour cache
108 |
109 | deck = scrape_deck(url)
110 | core.add_deck(deck)
111 | #core.add_recent(url, core.cap_cardname(deck['commander']))
112 |
113 | logging.debug("added a deck, yay! %s" % deck['commander'])
114 |
115 | except Exception, e:
116 | logging.debug('for "%s" : %s' % (url, e))
117 |
118 |
119 | if __name__ == '__main__':
120 |
121 | for link in frontpage(page=1):
122 | try:
123 | url = 'http://www.mtgsalvation.com' + link
124 |
125 | cachekey = 'CACHE_MTGSALVATION_%s' % url
126 | if not core.get_redis().get(cachekey) is None:
127 | continue
128 |
129 | core.get_redis().set(cachekey, str(datetime.datetime.now()), ex=60*60*24*3) # 3 day cache
130 |
131 | deck = scrape_deck(url)
132 | core.add_deck(deck)
133 | core.add_recent(url, core.cap_cardname(deck['commander']))
134 |
135 | except Exception, e:
136 | logging.debug('for "%s" : %s' % (url, e))
137 |
138 |
139 |
--------------------------------------------------------------------------------
/tappedout.py:
--------------------------------------------------------------------------------
1 | import logging
2 | import core
3 | import urllib2
4 | import datetime
5 | import re
6 | import HTMLParser
7 |
8 | # This file contains the scraper code for tappedout.net
9 | # Note that there is no official api for tappedout and this is straight up scraping HTML...
10 | # it may get ugly. hold on.
11 |
12 | # Hypothetically, if we want to add another source besides tappedout, all that is used by reddit.py
13 | # is URL_PATTERN and get_deck. So, future scraper modules should expose these methods and then
14 | # we'll add them to some sort of list in reddit.py.
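# (gracefulstats.py and deckstatscom.py in this repo already follow roughly that shape:
#  a URL_PATTERN regex plus a function that turns a matched URL into a deck dict, with
#  reddit.py's find_url() checking each pattern and dispatching on the domain.)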
15 |
16 | URL_PATTERN = re.compile('.*(http://tappedout.net/mtg-decks/[a-zA-Z0-9-]+).*')
17 |
18 | # Given a tappedout URL, get me a deck dict with:
19 | # the commander, the deck contents (cards), and the date the deck was updated
20 | def get_deck(url):
21 | try:
22 |         # I tack on /?fmt=txt because it gives the list of cards in a somewhat nice
23 | # text format. If only there was an API...
24 | con = core.urlopen(url.rstrip('/') + '/?fmt=txt')
25 | except urllib2.HTTPError as e:
26 | # This will happen on 404 or any other error.
27 | logging.warning("Someone posted a bad URL: " + url + " (%s)" % str(e))
28 | return None
29 |
30 | deck = set()
31 |
32 | # For each line in the content of the web page....
33 | for line in con.splitlines():
34 | line = line.strip()
35 | if len(line) == 0:
36 | continue
37 |
38 | if not line[0] in '0123456789':
39 | continue
40 |
41 | # At this point, the line is not empty and the line starts with a number
42 | # This, we know, is a card
43 |
44 | # The line is tab delimited like this: "1\tAustere Command\n"
45 | card = line.split('\t')[1]
46 |
47 | try:
48 | deck.add(core.sanitize_cardname(card))
49 | except KeyError:
50 | pass
51 | except ValueError as e:
52 | logging.warning("Ignored this card because of some sort of bad value")
53 |
54 |
55 | # Call out to get_tappedout_info to grab the deck info
56 | cmdr, colors, date = get_tappedout_info(url)
57 |
58 | # if they didn't post the commander, i'm going to try to figure out who it is
59 | if cmdr is None:
60 | for card in deck:
61 | cd = core.lookup_card(card)
62 |
63 | if not cd.has_key('supertypes'):
64 | continue
65 |
66 | if 'Legendary' in cd['supertypes'] and sorted(list(core.color_identity(card))) == sorted(list(colors)):
67 |                 # ok, we've got a legendary with the colors i think the deck should be. i'll just stop here.
68 | cmdr = card
69 | break
70 | else:
71 | logging.warn("there was no legendary creature here.... and none was specified... something f'd up is going on")
72 | cmdr = 'jedit ojanen'
73 |
74 | deck.add(cmdr)
75 |
76 | out_deck = { 'commander' : cmdr, 'cards' : sorted(list(deck)), 'date' : date }
77 |
78 | return out_deck
79 |
80 | def get_tappedout_info(url, assume_now = True):
81 |
82 | con = core.urlopen(url).splitlines()
83 |
84 | # GET COMMANDER
85 | cmdr = None
86 | for line in con:
87 | # First, we need to find the commander we're talking about here.
88 |         # [NOTE: this block is garbled in this copy of the file: the HTML marker
89 |         #  strings it matched on were stripped out. What survives shows that it looked
90 |         #  for a marker line, scanned the following lines as `cline`, and pulled the
91 |         #  commander out of the matching one with
92 |         #      cmdr = core.sanitize_cardname(cline.split('>')[1].split('(')[0])
93 |         #  before breaking out of both loops.]
94 |         pass
102 |
103 | # GET COLORS
104 | # GET COLORS from the pie on the right
105 | colors = set([])
106 | for line in con:
107 | if line.strip().startswith('buildColorChart'):
108 | if 'Green' in line: colors.add('GREEN')
109 | if 'Red' in line: colors.add('RED')
110 | if 'Blue' in line: colors.add('BLUE')
111 | if 'Black' in line: colors.add('BLACK')
112 | if 'White' in line: colors.add('WHITE')
113 |
114 | break
115 |
116 | colors = sorted(list(colors))
117 |
118 | # override pie colors if we have a good commander
119 | if cmdr is not None:
120 | try:
121 | colors = core.color_identity(cmdr)
122 | except ValueError:
123 | logging.warn('I have a commander that I don\'t think should exist')
124 | cmdr = None
125 | # this will happen if the commander is one that tappedout sees but is not in allcards.json (i.e. if it is new)
126 | pass
127 |
128 | # GET DATE
129 | if assume_now:
130 | date = datetime.datetime.now().toordinal() # Note: we use ordinal dates to represent the day.
131 | else:
132 | # Go fetch the time. Tappedout makes it hard because they say "3 days ago" or "2 years ago"
133 | # and it's got so many options. So, this scraping is pretty fickle but seems to work fine.
134 | for line in con:
135 | line = line.lower()
136 |             if '<li>' in line and len(line) < 21 and ('day' in line or 'hour' in line or 'month' in line or 'year' in line):  # NOTE: the '<li>' literal is a guess; the original tag string was stripped from this copy of the file
137 | num, unit = line.strip()[4:].split('<')[0].split()
138 | num = int(num)
139 | unit = unit.strip('s')
140 |
141 | now = datetime.datetime.now().toordinal()
142 |
143 | if unit == 'hour':
144 | date = now
145 | elif unit == 'day':
146 | date = now - num
147 | elif unit == 'month':
148 | date = int(now - num * (365. / 12.))
149 | elif unit == 'year':
150 | date = now - num * 365
151 |
152 | break
153 |             else:
154 |                 date = datetime.datetime.now().toordinal()  # nothing matched; fall back to today's ordinal
155 |
156 | return cmdr, colors, date
157 |
--------------------------------------------------------------------------------
/reddit.py:
--------------------------------------------------------------------------------
1 | import core
2 | import tappedout
3 | import gracefulstats
4 | import praw
5 | import logging
6 | import time
7 | import traceback
8 | import datetime
9 | import getpass
10 | import deckstatscom
11 |
12 | TESTING = False
13 |
14 | PRAW = praw.Reddit(user_agent=core.USER_AGENT)
15 | PRAW.login(raw_input('user name: ').strip(), getpass.getpass().strip())
16 |
17 | BOT_NOTICE = """
18 | \n\nI'm a bot - visit me in /r/edhrec or [edhrec.com](http://edhrec.com)"""
19 |
20 | logging.basicConfig(filename='reddit.log')
21 |
22 | # The universal easy sleep command
23 | def sleep(t=5.0, x=1.0):
24 | time.sleep(t * x)
25 |
26 | # Given a submission, try to find a deck URL (tappedout, deckstats, or gracefulstats).
27 | # If there is one, return it. Otherwise, return None.
28 | def find_url(submission):
29 | op_text = submission.selftext.lower().replace('\n', ' ').strip()
30 |
31 | url = tappedout.URL_PATTERN.match(op_text) or tappedout.URL_PATTERN.match(submission.url) \
32 | or deckstatscom.URL_PATTERN.match(op_text) or deckstatscom.URL_PATTERN.match(submission.url) \
33 | or deckstatscom.URL_PATTERN2.match(op_text) or deckstatscom.URL_PATTERN2.match(submission.url) \
34 | or gracefulstats.URL_PATTERN.match(op_text) or gracefulstats.URL_PATTERN.match(submission.url)
35 |
36 | if url is None:
37 | return None
38 | else:
39 | return str(url.group(1))
40 |
41 | def linkify(cn):
42 | return '[%s](http://gatherer.wizards.com/Handlers/Image.ashx?name=%s&type=card&.jpg)' % (core.cap_cardname(cn), cn)
43 |
44 | # Go through recent submissions and try to find something I haven't seen before.
45 | # If there is something, post the recommendations. This is the default behavior
46 | # that edhrec does to respond to posts.
47 | def seek_submissions(sublimit=250):
48 | logging.debug('STARTING SUBMISSION SEEK AT ' + str(datetime.datetime.now()))
49 |
50 | # Scan edh and edhrec
51 | subreddit = PRAW.get_subreddit('edhrec+edh+competitiveedh').get_new(limit=sublimit)
52 |
53 | rds = core.get_redis()
54 |
55 | # For each submission in newness order...
56 | for submission in subreddit:
57 | # Check to see if I've scanned this already. If so, pass on it.
58 | if not TESTING:
59 | if rds.sismember('SEEN', submission.id):
60 | continue
61 | logging.debug("Scanning " + str(submission.id) + " - " + str(submission.title.encode('utf-8')))
62 |
63 | # Fetch the tappedout url
64 | url = find_url(submission)
65 |
66 | # If there was no tappedout URL, then let's pass over this one.
67 | if url is None:
68 | rds.sadd('SEEN', submission.id)
69 | continue
70 |
71 |         ## At this point, we have a deck we've never seen before that has been posted!
72 | #
73 | # ~*~ GET EXCITED ~*~
74 |
75 | logging.debug("I found a URL to scrape: " + str(url))
76 |
77 | # Scrape it
78 | if 'tappedout.net' in url:
79 | deck = tappedout.get_deck(url)
80 | elif 'deckstats.net' in url:
81 | deck = deckstatscom.scrapedeck(url)
82 | elif 'gracefulstats.com' in url:
83 | deck = gracefulstats.scrapedeck(url)
84 | else:
85 |             logging.error("something went seriously wrong. '%s' doesn't contain tappedout, deckstats, or gracefulstats in it" % url)
86 | raise ValueError("bad url")
87 |
88 |         if deck is None:
89 |             logging.warning('Skipping this URL because something went wrong. (' + submission.title.encode('utf-8') +')')
90 |             rds.sadd('SEEN', submission.id)
91 |             continue
92 |
93 |         deck['scrapedate'] = str(datetime.datetime.now())
94 |
95 | # Go get the recommendations
96 | newrecs, outrecs = core.recommend(deck)
97 |
98 | lands = []
99 | creatures =[]
100 | noncreatures = []
101 |
102 | for card, score in newrecs:
103 | # filter out basic lands from being recommendations
104 | if card in ['swamp', 'island', 'plains', 'mountain', 'forest']:
105 | continue # there is an annoying thing that happens when people use snow-covered basics
106 | # where edhrec will post basic lands as a recommendation. this prevents that
107 |
108 | if score < .3:
109 | continue
110 |
111 | score = int(score * 100) # make score easier to read
112 |
113 | try:
114 | types = core.lookup_card(card)['types']
115 | except:
116 |                 logging.warn('something went wrong with the card %s, ignoring it' % card)
117 | continue
118 |
119 |
120 | if 'Creature' in types:
121 | creatures.append((card, score))
122 | elif 'Land' in types:
123 | lands.append((card, score))
124 | else:
125 | noncreatures.append((card, score))
126 |
127 | # build the output string
128 | if str(submission.subreddit).lower() in ['edhrec', 'edh', 'competitiveedh']:
129 | out_str = ['Other decks like yours use:\n\nCreatures | Non-creatures | Lands | Unique in your deck\n:--------|:---------|:---------|:--------']
130 |
131 | for i in range(16):
132 | try:
133 | c = '[%d] %s ' % (creatures[i][1], linkify(creatures[i][0]))
134 | except IndexError:
135 | c = ' '
136 |
137 | try:
138 | n = '[%d] %s ' % (noncreatures[i][1], linkify(noncreatures[i][0]))
139 | except IndexError:
140 | n = ' '
141 |
142 | try:
143 | l = '[%d] %s ' % (lands[i][1], linkify(lands[i][0]))
144 | except IndexError:
145 | l = ' '
146 |
147 | try:
148 | u = '%s ' % linkify(outrecs[i][0])
149 | except IndexError:
150 | u = ' '
151 |
152 | if len(c + n + l) == 3:
153 | break
154 |
155 | out_str.append('%s | %s | %s | %s' % (c, n , l, u))
156 |
157 | out_str.append('\n\n[This deck on edhrec.com](http://edhrec.com/#/recommendations?q=' + url + ')')
158 |
159 | out_str.append(BOT_NOTICE)
160 |
161 | elif str(submission.subreddit).lower() == 'edh':
162 | pass
163 |
164 | elif str(submission.subreddit).lower() == 'competitiveedh':
165 | pass
166 |
167 | # Post the comment!
168 | if not TESTING and str(submission.subreddit).lower() in ['edh','edhrec'] and not 'noedhrec' in str(submission.selftext).lower():
169 | submission.add_comment('\n'.join(out_str))
170 | else:
171 | logging.debug("I'm not in edh or edhrec, so I'm just shooting blanks! (noedhrec is here: %s)" % str('noedhrec' in str(submission.selftext).lower()))
172 |
173 | logging.debug('comment i think I posted:\n' + '\n'.join(out_str))
174 |
175 | logging.debug("I posted a comment with recommendations!")
176 |
177 | deck['ref'] = 'reddit bot'
178 | deck['url'] = url
179 |
180 | core.add_recent(url, core.cap_cardname(deck['commander']), 'http://reddit.com/%s' % str(submission.id))
181 |
182 | # Keep track of the fact that I've now processed this deck.
183 | # It is important that this is last in case the scraping fails and
184 | # the problem is later fixed.
185 | if not TESTING:
186 | rds.sadd('SEEN', submission.id)
187 | core.add_deck(deck)
188 |
189 | sleep()
190 |
191 | logging.debug('DONE WITH SUBMISSION SEEK AT ' + str(datetime.datetime.now()))
192 |
193 |
194 | if __name__ == '__main__':
195 | while True:
196 | try:
197 | # Go find submissions
198 | seek_submissions()
199 | except Exception as e:
200 | logging.warning("Got an unhandled exception! " + str(e))
201 | logging.warning("traceback:" + str(traceback.format_exc()))
202 |
203 | core.flush_cache()
204 |
205 |         # Sleep between scans. Your requests/minute seem to reset every minute
206 |         # for Reddit, and the edh and edhrec subreddits don't have that much
207 |         # traffic, so it's no big deal if someone has to wait half a minute or
208 |         # so to get a response.
209 | sleep(t=30.0)
210 |
--------------------------------------------------------------------------------
/api.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | import cherrypy
4 | import json
5 | import core
6 | import tappedout
7 | import datetime
8 | import logging
9 | import deckstats
10 | import random
11 | import kmeans
12 | import mtgsalvation
13 | import deckstatscom
14 | import gracefulstats
15 |
16 | COMMANDERS = sorted( core.sanitize_cardname(cn.decode('utf-8').strip().lower()) for cn in open('commanders.txt').readlines() )
17 |
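# Resolve a possibly-partial commander name to a full name from commanders.txt: try an
# exact match, then a prefix match, then a substring match, and return None if nothing
# matches. For example, closest_commander('mayael') would come back as
# 'mayael the anima', assuming that name appears in commanders.txt.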
18 | def closest_commander(partial_name):
19 | pn = core.sanitize_cardname(partial_name)
20 |
21 | for cn in COMMANDERS:
22 | if pn == cn:
23 | return cn
24 |
25 | for cn in COMMANDERS:
26 | if cn.startswith(pn):
27 | return cn
28 |
29 | for cn in COMMANDERS:
30 | if pn in cn:
31 | return cn
32 |
33 |
34 | class API(object):
35 | _cp_config = {'tools.staticdir.on' : True,
36 | 'tools.staticdir.dir' : '/home/ubuntu/edhrec-site',
37 | 'tools.staticdir.index' : 'index.html',
38 | '/favicon.ico' : { 'tools.staticfile.on' : True, 'tools.staticfile.filename' : '/home/ubuntu/edhrec-site/favicon.ico' }
39 | }
40 |
41 |
42 | @cherrypy.expose
43 | def rec(self, to=None, ref=None):
44 | to = to[:500].strip()
45 |
46 | if ref is None:
47 | ref = "No ref"
48 |
49 | ref = ref[:20].strip()
50 |
51 | cherrypy.response.headers['Content-Type']= 'application/json'
52 | cherrypy.response.headers["Access-Control-Allow-Origin"] = "*"
53 |
54 | if not ('tappedout.net/mtg-decks' in to or 'mtgsalvation.com/forums/' in to \
55 | or 'deckstats.net/deck' in to or 'gracefulstats.com/de' in to):
56 | raise ValueError('invalid deck url %s . it should look like http://tappedout.net/mtg-decks/xxxx or http://www.mtgsalvation.com/forums/xxxx or http://deckstats.net/decks/xxxx/xxxx or http://www.gracefulstats.com/deck/view/xxxx' % to)
57 |
58 | ip = cherrypy.request.remote.ip
59 |
60 | r = core.get_redis()
61 |
62 |
63 | if r.exists('api' + str(ip)):
64 | logging.warn('%s ip is overloading' % str(ip))
65 | return json.dumps('Too many API calls. Try again in a few seconds.')
66 |
67 | r.set('api' + str(ip), '', ex=1)
68 |
69 | deck = None
70 | if 'tappedout' in to:
71 | deck = tappedout.get_deck(to)
72 | elif 'mtgsalvation' in to:
73 | deck = mtgsalvation.scrape_deck(to)
74 | elif 'deckstats' in to:
75 | deck = deckstatscom.scrapedeck(to)
76 | elif 'gracefulstats' in to:
77 | deck = gracefulstats.scrapedeck(to)
78 |
79 | deck['scrapedate'] = str(datetime.datetime.now())
80 |
81 | if deck['commander'] == 'jedit ojanen':
82 | raise ValueError('You input a deck without a valid commander. Please go back and add it to the web interface.')
83 |
84 | core.add_recent(to, \
85 | core.cap_cardname(deck['commander']))
86 |
87 |
88 | hashkey = 'CACHE_REC_' + core.hash_pyobj([deck['cards']] + [deck['commander']])
89 |
90 | if r.exists(hashkey):
91 | return r.get(hashkey)
92 |
93 | newrecs, outrecs, topk = core.recommend(deck, returnk=True)
94 |
95 | outnewrecs = []
96 | for cn, sc in newrecs:
97 | if sc < .3:
98 | continue
99 | try:
100 | cd = { 'score' : sc, 'card_info' : {'name': core.lookup_card(cn)['name'], 'types': core.lookup_card(cn)['types']} }
101 | except TypeError:
102 | logging.warn('The card %s failed to do lookup card.' % cn)
103 | continue
104 | outnewrecs.append(cd)
105 |
106 | outoutrecs = []
107 | for cn, sc in outrecs:
108 | if sc < .5:
109 | continue
110 | try:
111 | cd = { 'score' : sc, 'card_info' : {'name': core.lookup_card(cn)['name'], 'types': core.lookup_card(cn)['types']} }
112 | except TypeError:
113 | logging.warn('The card %s failed to do lookup card.' % cn)
114 | continue
115 | outoutrecs.append(cd)
116 |
117 |
118 | deck['url'] = to
119 |
120 | if ref is not None:
121 | deck['ref'] = ref
122 | else:
123 | deck['ref'] = 'non-ref api call'
124 |
125 | deck['ip'] = str(ip)
126 | try:
127 | deck['headref'] = cherrypy.request.headerMap['Referer']
128 | except AttributeError:
129 | pass
130 |
131 | core.add_deck(deck)
132 |
133 | stats = deckstats.tally([deck])
134 | kstats = deckstats.tally(topk)
135 | cstats = deckstats.get_commander_stats(deck['commander'])
136 |
137 | output_json = json.dumps({'url' : to, 'recs' : outnewrecs, 'cuts' : outoutrecs, \
138 | 'stats' : stats, 'kstats' : kstats, 'cstats' : cstats}, indent=4)
139 |
140 | r.set(hashkey, output_json, ex=60*60*24*3) # 3 days expiration
141 |
142 | ckey = 'CACHE_COMMANDER_' + deck['commander'].replace(' ', '_')
143 | r.delete(ckey)
144 |
145 | return output_json
146 |
147 | @cherrypy.expose
148 | def cmdr(self, commander, nolog=False):
149 | commander = commander[:50]
150 |
151 | cherrypy.response.headers['Content-Type']= 'application/json'
152 | cherrypy.response.headers['Access-Control-Allow-Origin'] = "*"
153 |
154 | r = core.get_redis()
155 |
156 | commander = core.sanitize_cardname(commander)
157 |
158 | commander = closest_commander(commander)
159 |
160 | r = core.get_redis()
161 |
162 | if not cherrypy.session.has_key('id'):
163 | cherrypy.session['id'] = ''.join(random.choice('0123456789abcdefghijklmnopqrstuvwxyz') for i in range(8))
164 |
165 | if not nolog:
166 | r.sadd("SESSION_CMDRSEARCH_" +cherrypy.session['id'], commander)
167 |
168 | ckey = 'CACHE_COMMANDER_' + commander.replace(' ', '_')
169 | if r.exists(ckey):
170 | return r.get(ckey)
171 |
172 | colors = core.color_identity(commander)
173 |
174 | decks = [ deck for deck in core.get_decks(colors) if deck['commander'] == commander]
175 |
176 | if len(decks) < 3:
177 | return json.dumps({'error_code' : 'NOT_ENOUGH_DATA', 'message' : 'There are not enough decks in my database to generate recommendations for %s' % commander})
178 |
179 | out = {}
180 | out['numdecks'] = len(decks)
181 |
182 | cards = {}
183 | for deck in decks:
184 | for card in deck['cards']:
185 |
186 | try:
187 | cards[card] = {'count' : 0, 'card_info' : {'name' : core.lookup_card(card)['name'], \
188 | 'types' : core.lookup_card(card)['types'], \
189 | 'colors' : core.lookup_card(card).get('colors', []), \
190 | 'cmc' : core.lookup_card(card).get('cmc', 0) \
191 | } }
192 | except TypeError:
193 | logging.warn("for some reason card %s could not be looked up, ignoring." % card)
194 | continue
195 |
196 |
197 | for deck in decks:
198 | for card in deck['cards']:
199 | if card == commander: continue
200 | if card in ['swamp', 'island', 'mountain', 'forest', 'plains']: continue
201 |
202 | try:
203 | cards[card]['count'] += 1
204 | except KeyError:
205 | continue
206 |
207 | #out['recs'] = [ pp for pp in sorted(cards.values(), key = (lambda x: -1 * x['count'])) if pp['count'] > 1 and pp['count'] > .1 * len(decks) ]
208 | out['recs'] = [ pp for pp in sorted(cards.values(), key = (lambda x: -1 * x['count'])) if pp['count'] > 1 ][:125]
209 |
210 | out['commander'] = core.cap_cardname(commander)
211 |
212 | out['stats'] = deckstats.get_commander_stats(commander)
213 |
214 | # kmeans output for subtopics
215 | if len(decks) > 15:
216 | out['archetypes'] = kmeans.kmeans(commander)
217 |
218 | r.set(ckey, json.dumps(out), ex=60*60*24*2) # 2 day cache
219 |
220 | return json.dumps(out)
221 |
222 | @cherrypy.expose
223 | def cmdrdeck(self, commander):
224 | commander = commander[:50]
225 | cherrypy.response.headers['Access-Control-Allow-Origin'] = "*"
226 | cherrypy.response.headers['Content-Type']= 'application/json'
227 |
228 |
229 | try:
230 | cmdr_out = json.loads(self.cmdr(commander))
231 | except ZeroDivisionError:
232 | return "Unfortunately, there are not enough decks in my database for '%s', so I can't generate a list" % commander
233 |
234 | colors = [ clr for clr, cnt in cmdr_out['stats']['colors'].items() if cnt > 0 ]
235 |
236 | lands = cmdr_out['stats']['lands']
237 | nonlands = cmdr_out['stats']['nonlands']
238 | outdeck = []
239 |
240 | landmap = {'Red' : 'Mountain', 'Blue' : 'Island', 'Black' : 'Swamp', 'White' : 'Plains', 'Green' : 'Forest' }
241 | lands -= len(colors)
242 |
243 | for rec_dict in cmdr_out['recs']:
244 | if 'Land' in rec_dict['card_info']['types'] and lands > 0:
245 | outdeck.append(rec_dict)
246 | lands -= 1
247 | continue
248 |
249 | if (not 'Land' in rec_dict['card_info']['types']) and nonlands > 0:
250 | outdeck.append(rec_dict)
251 | nonlands -= 1
252 | continue
253 |
254 | # fill out the lands
255 | total = sum( cnt for clr, cnt in cmdr_out['stats']['colors'].items() )
256 | old_lands = lands
257 | basics = dict(zip(colors, [0] * len(colors)))
258 |
259 | for color, count in cmdr_out['stats']['colors'].items():
260 | if count == 0 : continue
261 | num = int( float(count) / total * old_lands ) + 1
262 | lands -= num
263 | basics[color] += num
264 |
265 |         # if there are still empty slots left over, pad them with extra basics below
266 |         if lands + nonlands > 2:
267 |             logging.warn("deck built had less than 98 cards... that's weird... %d" % len(outdeck))
268 |
269 | while lands + nonlands > 0:
270 | basics[random.choice(colors)] += 1
271 | lands -= 1
272 |
273 | out = {}
274 | out['cards'] = outdeck
275 | out['basics'] = [ (landmap[color], count) for color, count in basics.items()]
276 | out['commander'] = cmdr_out['commander']
277 |
278 | out['stats'] = deckstats.tally([ { 'cards' : [ core.sanitize_cardname(c['card_info']['name']) for c in out['cards'] ] } ])
279 |
280 | return json.dumps(out)
281 |
282 | @cherrypy.expose
283 | def recent(self):
284 | cherrypy.response.headers['Access-Control-Allow-Origin'] = "*"
285 | cherrypy.response.headers['Content-Type']= 'application/json'
286 |
287 | return core.get_recent_json()
288 |
289 | @cherrypy.expose
290 | def stats(self):
291 | cherrypy.response.headers['Access-Control-Allow-Origin'] = "*"
292 | cherrypy.response.headers['Content-Type']= 'application/json'
293 |
294 | r = core.get_redis()
295 |
296 | ckey = 'CACHE_STATS'
297 | if r.exists(ckey):
298 | return r.get(ckey)
299 |
300 | out = {}
301 |
302 | w_counts = {}
303 | m_counts = {}
304 | for d in core.get_all_decks():
305 | if not d.has_key('scrapedate'): continue
306 |
307 | try:
308 | if d['scrapedate'] < '2014-11-28 10:52:53.525961' and d['scrapedate'] > '2014-11-28 03:52:53.525961' and d['ref'] == 'mtgsalvation': continue # this is to prevent the mass load I did from impacting stats
309 | except KeyError: pass
310 |
311 | datedelta = (datetime.datetime.now() - core.date_from_str(d['scrapedate'])).days
312 |
313 | if datedelta <= 30:
314 | m_counts.setdefault(core.cap_cardname(d['commander']), 0)
315 | m_counts[core.cap_cardname(d['commander'])] += 1
316 | if datedelta <= 7:
317 | w_counts.setdefault(core.cap_cardname(d['commander']), 0)
318 | w_counts[core.cap_cardname(d['commander'])] += 1
319 |
320 | out['topweek'] = sorted(w_counts.items(), key= lambda x: x[1], reverse=True)[:25]
321 | out['topmonth'] = sorted(m_counts.items(), key= lambda x: x[1], reverse=True)[:25]
322 |
323 | alltime_counts = {}
324 | for d in core.get_all_decks():
325 | alltime_counts.setdefault(core.cap_cardname(d['commander']), 0)
326 |
327 | alltime_counts[core.cap_cardname(d['commander'])] += 1
328 |
329 | out['topalltime'] = sorted(alltime_counts.items(), key= lambda x: x[1], reverse=True)[:25]
330 |
331 | out['deckcount'] = len(core.get_all_decks())
332 |
333 | r.set(ckey, json.dumps(out), ex=60*60*3) # 3 hour cache
334 | return json.dumps(out)
335 |
336 | @cherrypy.expose
337 | def randomcmdr(self):
338 | cherrypy.response.headers['Access-Control-Allow-Origin'] = "*"
339 | cherrypy.response.headers['Content-Type']= 'application/json'
340 |
341 | r = core.get_redis()
342 |
343 | ckey = 'CACHE_COMMANDER_COUNTS'
344 | o = r.get(ckey)
345 |
346 | if o is None:
347 | alltime_counts = {}
348 | for d in core.get_all_decks():
349 | alltime_counts.setdefault(d['commander'], 0)
350 | alltime_counts[d['commander']] += 1
351 |
352 | options = [ cmdr for cmdr, cnt in alltime_counts.items() if cnt > 4 ]
353 | r.set(ckey, json.dumps(options), ex=60*60*24*5) # 5 day cache
354 |
355 | else:
356 | options = json.loads(o)
357 |
358 |
359 |
360 | return self.cmdr(random.choice(options), nolog=True)
361 |
362 |
363 | if __name__ == "__main__":
364 | cherrypy.config.update({'server.socket_host': raw_input('your ip: ').strip(),
365 | 'server.socket_port': int(raw_input('port to host on: ').strip()),
366 | 'environment': 'production',
367 | 'tools.sessions.on': True,
368 | 'tools.sessions.timeout' : 60 * 24 * 3 # keep sessions live for 3 days
369 | })
370 |
371 |
372 | logging.basicConfig(filename='api.log')
373 |
374 | cherrypy.tree.mount(API(), '/')
375 | cherrypy.engine.start()
376 | cherrypy.engine.block()
377 |
378 |
379 |
380 |
381 |
382 |
383 |
--------------------------------------------------------------------------------
/core.py:
--------------------------------------------------------------------------------
1 | import redis
2 | import logging
3 | import json
4 | import time
5 | import datetime
6 | import itertools
7 | import random
8 | import urllib2
9 | import HTMLParser
10 | import hashlib
11 | import re
12 | import deckstats
13 |
14 | # this keeps all the code that is shared amongst most of the modules and future modules
15 | # it mostly contains redis storage and the recommendation engine stuff
16 |
17 |
18 | # This is the redis configurations.
19 | # Note: 6379 is the default Redis port, so if you have any other apps
20 | # hitting against redis, you might want to stand up your own.
21 | REDIS_HOST = 'localhost'
22 | REDIS_PORT = 6379
23 |
24 | logging.getLogger().setLevel(logging.DEBUG)
25 |
26 | # A global variable that pools a Redis connection.
27 | _REDIS = redis.Redis(host=REDIS_HOST, port=REDIS_PORT, db=2)
28 |
29 | # this is what the scraper will post places
30 | USER_AGENT = "reddit.com/r/edh recommender by /u/orangeoctopus v2.0"
31 |
32 |
33 | ############# UTILITY FUNCTIONS ###############
34 |
35 | # Returns a redis instance. This checks to see if the connection is open
36 | # and if not creates a new one. Using this function makes it so we don't
37 | # have to recreate the redis connection every time we want to use it.
38 | def get_redis():
39 |     '''Returns a redis instance using the defaults provided at the top of core.py'''
40 | global _REDIS
41 |
42 | try:
43 | _REDIS.ping()
44 | except redis.ConnectionError as ce:
45 | _REDIS = redis.Redis(host=REDIS_HOST, port=REDIS_PORT, db=2)
46 |
47 | return _REDIS
48 |
49 | CARDS = get_redis().hgetall('CARDS_JSON')
50 | for key in CARDS:
51 | CARDS[key] = json.loads(CARDS[key])
52 |
53 |
54 |
55 |
56 | def hash_pyobj(python_obj):
57 | return hashlib.sha256(json.dumps(python_obj)).hexdigest()
58 |
59 | # Nasty hack of a function that removes all the characters that annoy me in
60 | # magic cards.
61 | def strip_accents(s):
62 | return s.replace(u'\xc3', 'ae').replace(u'\xe6', 'ae').replace(u'\xc6', 'ae').replace(u'\xe9', 'e').replace(u'\xf6', 'o') \
63 | .replace(u'\xfb', 'u').replace(u'\xe2', 'a').replace(u'\xe1', 'a').replace(u'\xe0', 'a') \
64 | .replace(u'\xae', 'r').replace(u'\xfa', 'u').replace(u'\xed', 'i')
65 |
66 | # The official sanitization function. Any cardnames should be sent through this before
67 | # hitting the data store or whatever.
68 | def sanitize_cardname(cn):
69 | cn = u_to_str(cn)
70 | return HTMLParser.HTMLParser().unescape(strip_accents(cn.strip().lower())).encode('utf-8')
71 |
72 | def u_to_str(ustr):
73 | return ''.join(c for c in ustr if ord(c) < 128).encode('utf-8')
74 |
75 | def date_from_str(dstr):
76 | return datetime.datetime(*[ int(p) for p in re.split('[ \.:-]', dstr)[:-1]])
77 |
78 | # Undoes most of what sanitize_cardname does. This is used for presentation purposes.
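# e.g. cap_cardname('avacyn, angel of hope') gives 'Avacyn, Angel of Hope'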
79 | def cap_cardname(cn):
80 | return cn.strip().lower().title().replace("'S", "'s").replace(' The', ' the').replace(' Of', ' of')
81 |
82 | # looks up the cardname cn in the CARDS dictionary (loaded from the redis data store at import time). It returns a dictionary parsed from the card's JSON.
83 | def lookup_card(cn):
84 | cn = sanitize_cardname(cn)
85 |
86 | try:
87 | card_obj = CARDS[str(cn)]
88 | except KeyError:
89 | logging.warn('I couldn\'t find this card: ' + str(cn))
90 | return None
91 |
92 | return card_obj
93 |
94 | # figures out the color identity for a particular card
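# e.g. color_identity('mayael the anima') should come back as ['GREEN', 'RED', 'WHITE']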
95 | def color_identity(cn):
96 | card = lookup_card(cn)
97 |
98 | if card is None:
99 | raise ValueError('Card doesnt exist ' + str(cn))
100 |
101 | colors = { '{W}' : 'WHITE' , '{B}' : 'BLACK' , '{U}' : 'BLUE', '{R}' : 'RED', '{G}' : 'GREEN' }
102 | oc = set()
103 |
104 | for colorsig in colors:
105 | if card.has_key('manaCost') and colorsig in card['manaCost'].replace('/', '}{'):
106 | oc.add(colors[colorsig])
107 | elif card.has_key('text') and colorsig in ' '.join(card['text'].replace(')', '(').split('(')[::2]).replace('/', '}{'):
108 | oc.add(colors[colorsig])
109 |
110 | return sorted(list(oc))
111 |
112 | # returns true if the card is banned
113 | def is_banned(cn):
114 | return get_redis().sismember('BANNED', sanitize_cardname(cn))
115 |
116 | # adds a deck to the redis data store
117 | def add_deck(deck_dict):
118 | try:
119 | # prepare the name of the key in redis (it's DECKS_ followed by sorted colors in the color identity, all caps)
120 | color_str = 'DECKS_' + '_'.join(color_identity(deck_dict['commander']))
121 | except ValueError:
122 | logging.warn("This commander doesn't exist, not adding it to my corpus: " + deck_dict['commander'])
123 | return
124 |
125 | logging.debug('Adding the deck with the commander ' + deck_dict['commander'])
126 |
127 | if deck_dict['commander'] == 'jedit ojanen':
128 | logging.warn('jedit ojanen means someone submitted a deck without a commander. Im not going to add it')
129 | return
130 |
131 | # check to see if this exact deck exists already:
132 | for deck in get_decks(color_identity(deck_dict['commander'])):
133 | if deck['cards'] == deck_dict['cards']:
134 | logging.debug('this deck is an exact dup. I\'m not going to add it at all.')
135 | return False
136 | else:
137 | # add it to the beginning of the list
138 | get_redis().lpush(color_str, json.dumps(deck_dict))
139 |
140 | return True
141 |
142 | # Returns all of the decks for a particular color. Turn dedup on if you want to remove dups
143 | def get_decks(colors, dedup=False):
144 | if type(colors) is str:
145 | color_str = colors
146 | else:
147 | color_str = 'DECKS_' + '_'.join(sorted(c.upper() for c in colors))
148 |
149 | logging.debug('Retrieving all decks from ' + color_str)
150 |
151 | out =[ json.loads(d) for d in get_redis().lrange(color_str, 0, -1) ]
152 |
153 | if dedup:
154 | out = dedup_decks(out)
155 |
156 | return out
157 |
158 | def get_all_decks(dedup=False):
159 | deck_strs = get_redis().keys('DECKS_*')
160 |
161 | logging.debug('Retrieving ALLLL decks')
162 |
163 | out = []
164 | for ds in deck_strs:
165 | decks = [ json.loads(d) for d in get_redis().lrange(ds, 0, -1) ]
166 | if dedup:
167 | decks = dedup_decks(decks)
168 | out.extend(decks)
169 |
170 | return out
171 |
172 |
173 | # This function wraps a URL get request with a cache.
174 | def urlopen(url):
175 | # The cache is stored in Redis
176 | r = get_redis()
177 |
178 | # TODO: I liked how I did caching before. Here I keep everything
179 | # in one key, URL_CACHE. Unfortunately, I can't set expirations per
180 | # key in a hash, I can only expire top-level keys. This makes it so
181 | # you have to flush the cache manually
182 |
183 | if r.hexists('URL_CACHE', url):
184 | logging.debug("Cache hit on " + url)
185 | return r.hget('URL_CACHE', url)
186 |
187 | logging.debug("Cache miss on " + url)
188 |
189 | req = urllib2.Request(url, headers={'User-Agent' : USER_AGENT})
190 | con = urllib2.urlopen(req).read()
191 |
192 | r.hset('URL_CACHE', url, con)
193 |
194 | return con
195 |
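# Sketch of the per-URL caching the TODO above alludes to (an assumption about the
# older approach, not what urlopen does today): giving every response its own
# top-level key lets Redis expire entries individually instead of forcing a manual
# flush of the URL_CACHE hash.
#
#   def urlopen_with_ttl(url, ttl=86400):          # hypothetical helper, one-day TTL
#       r = get_redis()
#       key = 'CACHE_' + url                       # hypothetical key prefix
#       cached = r.get(key)
#       if cached is not None:
#           return cached
#       req = urllib2.Request(url, headers={'User-Agent': USER_AGENT})
#       con = urllib2.urlopen(req).read()
#       r.setex(key, con, ttl)                     # old redis-py argument order: (name, value, time)
#       return con
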
196 | # flushes the entire cache
197 | def flush_cache():
198 | get_redis().delete('URL_CACHE')
199 |
200 |
201 | def add_recent(url_ref, commander, reddit_ref = None):
202 | r = get_redis()
203 |
204 | out = {'url' : url_ref.strip('/'), 'commander' : commander}
205 |
206 |
207 | if reddit_ref is not None:
208 | s = json.dumps(out)
209 | r.lrem('RECENT', s, 0) # try to remove the non-reddit ref if it exists
210 | out['reddit'] = reddit_ref
211 | else:
212 | # see if there is already a reddit based one...
213 | for it in get_redis().lrange('RECENT', 0, -1):
214 | o = json.loads(it)
215 | if o.has_key('reddit') and o['url'] == out['url']:
216 | r.lrem('RECENT', json.dumps(out), 0)
217 | out = o
218 | r.lrem('RECENT', it, 0)
219 |
220 | break
221 |
222 | s = json.dumps(out)
223 |
224 | r.lrem('RECENT', s, 0)
225 |
226 | r.lpush('RECENT', s)
227 |
228 | r.ltrim('RECENT', 0, 99)
229 |
230 | def get_recent_json():
231 |
232 |
233 | return json.dumps(get_redis().lrange('RECENT', 0, -1))
234 |
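# Example (hypothetical URLs): RECENT keeps the 100 most recently seen decks,
# newest first. A plain submission stores just the deck URL and commander; if a
# reddit thread for the same URL is seen (before or after), the entry keeps its
# 'reddit' reference instead of being duplicated.
#
#   add_recent('http://example.com/decks/my-talrand-deck', 'talrand, sky summoner')
#   add_recent('http://example.com/decks/my-talrand-deck', 'talrand, sky summoner',
#              reddit_ref='http://example.com/some-reddit-thread')
#   get_recent_json()   # -> JSON array of the stored entry strings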
235 |
236 |
237 | ################# RECOMMENDATION ENGINE ######################
238 |
239 |
240 | # This is one of the most important functions.
241 | # It says how close two decks are. The higher the number,
242 | # the closer deck1 and deck2 are. The recommendation
243 | # engine uses this closeness score to compute the nearest
244 | # neighbors.
245 | def rec_deck_closeness(deck1, deck2):
246 | r = get_redis()
247 |
248 | d1stat = deckstats.tally([deck1])
249 | d2stat = deckstats.tally([deck2])
250 |
251 | minsum = 0
252 | maxsum = 0
253 | for d1t, d2t in zip(sorted(d1stat['types'].items()), sorted(d2stat['types'].items())):
254 | minsum += min(d1t[1], d2t[1])
255 | maxsum += max(d1t[1], d2t[1])
256 |
257 | typescore = float(minsum) / maxsum
258 |
259 |
260 | minsum = 0
261 | maxsum = 0
262 | for d1c, d2c in zip(sorted(d1stat['colors'].items()), sorted(d2stat['colors'].items())):
263 | minsum += min(d1c[1], d2c[1])
264 | maxsum += max(d1c[1], d2c[1])
265 |
266 | colorscore = float(minsum) / maxsum
267 |
268 |
269 | minsum = 0
270 | maxsum = 0
271 | for d1m, d2m in zip(sorted(d1stat['curve']), sorted(d2stat['curve'])):
272 | minsum += min(d1m[1], d2m[1])
273 | maxsum += max(d1m[1], d2m[1])
274 |
275 | curvescore = float(minsum) / maxsum
276 |
277 |
278 | cards1 = set(deck1['cards'])
279 | cards2 = set(deck2['cards'])
280 | d1ind2 = 0
281 |
282 | for c in cards1:
283 | if c in cards2:
284 | d1ind2 += 1
285 | d1ind2 /= float(len(cards1))
286 |
287 |
288 | d2ind1 = 0
289 | for c in cards2:
290 | if c in cards1:
291 | d2ind1 += 1
292 | d2ind1 /= float(len(cards2))
293 |
294 |
295 | # Find how many non-land cards they have in common
296 | #lenint = 0
297 | #for c in set(deck1['cards']).intersection(set(deck2['cards'])):
298 | # try:
299 | # if 'Land' in lookup_card(c)['types']:
300 | # continue
301 | # except TypeError:
302 | # continue
303 |
304 | # lenint += 1.0
305 |
306 | # If they share the same commander, give the score a bit of a boost
307 | # The rationale is that we want decks with the same commander first,
308 | # with perhaps some help from other similar commanders if we can't
309 | # find enough.
310 | if deck1['commander'] == deck2['commander']:
311 | same_cmdr_bonus = 1.0
312 | else:
313 | same_cmdr_bonus = 0.0
314 |
315 | # Give a bit of a bonus if the decks are similar in age. If they are
316 |     # within 90 days of each other (roughly a new set is released every 90 days),
317 | # it just gets a 1.0. Otherwise, it slowly degrades.
318 | # The rationale here is that we don't want to be basing recommendations
319 | # on decks that are 3 years old because they aren't up to date.
320 | #if deck1['date'] - deck2['date'] < 90:
321 | # newness_bonus = 1.0
322 | #else:
323 | # newness_bonus = .99 ** ((deck1['date'] - deck2['date']) / 366.)
324 |
325 | if deck1.has_key('scrapedate') and deck2.has_key('scrapedate'):
326 | d1date = datetime.datetime.strptime(deck1['scrapedate'], '%Y-%m-%d %H:%M:%S.%f')
327 | d2date = datetime.datetime.strptime(deck2['scrapedate'], '%Y-%m-%d %H:%M:%S.%f')
328 | ddelta = abs(d1date - d2date).days
329 |
330 |
331 | if ddelta < 30: #if decks are within a month (super recent):
332 | dscore = 1.0
333 |         elif ddelta < 90: #if decks are within 90 days (most recent set):
334 | dscore = .9
335 | elif ddelta < 365: #if decks are within one year (most recent block):
336 | dscore = .5
337 | else:
338 | dscore = 0.0
339 | else:
340 | dscore = 0.0
341 |
342 |
343 | # Compute the final score and return it!
344 |
345 | weights = ((1.0, d1ind2), (1.0, d2ind1), (.15, same_cmdr_bonus), (.35, typescore), (.35, colorscore), (.1, curvescore), (.1, dscore))
346 |
347 | out = sum(w * s for w, s in weights) / sum(w for w, s in weights)
348 |
349 | return out
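
# Worked example for rec_deck_closeness (hypothetical component scores): with
# d1ind2 = 0.6, d2ind1 = 0.5, same_cmdr_bonus = 1.0, typescore = 0.8,
# colorscore = 0.9, curvescore = 0.7 and dscore = 1.0 the weighted average is
#   (1.0*0.6 + 1.0*0.5 + .15*1.0 + .35*0.8 + .35*0.9 + .1*0.7 + .1*1.0) / 3.05
#   = 2.015 / 3.05 ~= 0.66
# so raw card overlap dominates, with the commander/type/color/curve/date terms
# nudging the score up or down.
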
350 | # Determines if two decks are duplicates
351 | # You can make duplicate detection stricter by raising threshold (more shared cards required),
352 | # or more aggressive by lowering it. Threshold is the fraction of cards the decks must share.
353 | def decks_are_dups(deck1, deck2, threshold = .7):
354 | if deck1['commander'] != deck2['commander']:
355 | return False
356 |
357 | try:
358 | if deck1['url'] == deck2['url']:
359 | return True
360 | except KeyError:
361 | pass
362 |
363 |     # Find out if the fraction of cards the decks share exceeds the threshold. If it does, it's a dup.
364 | avg_size = (len(deck1['cards']) + len(deck2['cards'])) / 2.0
365 |
366 | in_common = len(set(deck1['cards']).intersection(set(deck2['cards'])))
367 |
368 | if in_common / avg_size > threshold:
369 | #print avg_size, in_common
370 | return True
371 | else:
372 | return False
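
# Worked example (hypothetical counts): two 100-card lists with the same commander
# that share 75 cards give avg_size = 100.0 and in_common = 75, so
# 75 / 100.0 = 0.75 > 0.7 and they are treated as duplicates; at 65 shared cards
# (0.65) they are not.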
373 |
374 | # For a list of decks, deduplicate ones that are near duplicates of others in the list
375 | def dedup_decks(decks, threshold = .7):
376 |     sdecks = sorted(decks, key=lambda x: int(x['date'] if x.has_key('date') else 0), reverse=True)
377 |
378 | badlist = []
379 | for (i1, d1), (i2, d2) in itertools.combinations(enumerate(sdecks), 2):
380 |         if i1 in badlist or i2 in badlist: # badlist holds indices of decks already flagged as dups
381 | continue
382 |
383 | if decks_are_dups(d1, d2, threshold = threshold):
384 | badlist.append(i2)
385 | continue
386 |
387 | #for k in badlist: print k, '!!!'
388 |
389 | return [ d for i, d in enumerate(sdecks) if i not in badlist ]
390 |
391 |
392 | # This function generates the recommendations for a deck.
393 | # The optional parameter k tells you how far out to cast your net
394 | # for similar decks. Smaller numbers will have more variance and bias,
395 | # but larger numbers will degenerate into "goodstuff.dec" for those particular colors.
396 | # See "Collaborative Filtering" on the Google. This approach is based on that.
397 | def recommend(deck, k=15, returnk=False):
398 | nn = datetime.datetime.now()
399 | logging.debug("Finding recommendations for deck with general " + str(deck['commander']))
400 |
401 | # Go calculate all of the closeness scores for this deck to all of the other decks in the corpus.
402 | scores = []
403 | for deck2 in get_decks(color_identity(deck['commander'])):
404 | if decks_are_dups(deck, deck2):
405 | logging.debug("The deck submitted here is a duplicate of another deck in my corpus...")
406 | continue
407 |
408 | d = rec_deck_closeness(deck, deck2) ** 2 # notice that I square the score.
409 | # squaring the score makes closer decks weighted higher. I found empirically this gives better results.
410 |
411 | # Keep the score around but also keep the cards that were different.
412 | scores.append((d, deck2, set(deck2['cards']) - set(deck['cards']), set(deck['cards']) - set(deck2['cards'])))
413 |
414 | # Pull off the top K highest scores. Break ties randomly.
415 | topk = sorted(scores, key=lambda x: (x[0], random.random()), reverse=True)[:k]
416 |
417 | for dd in topk:
418 | logging.debug("Deck similar to this one: " + str(strip_accents(dd[1]['commander'])) + ' score: %.2f' % dd[0] )
419 |
420 | total_score = float(sum(ee[0] for ee in topk))
421 |
422 | card_counts = {}
423 | uniq_counts = {}
424 |
425 | # go through each deck in the top k and tally some cards
426 | for dist, deck2, newcards, uniqcards in topk:
427 | for nc in newcards:
428 | if is_banned(nc):
429 | continue
430 |
431 | if not nc in card_counts:
432 | card_counts[nc] = 0.0
433 |
434 | card_counts[nc] += ( dist / total_score ) # dist / total score is what gives weight.
435 |
436 | for uc in uniqcards:
437 | if uc == deck['commander']:
438 | continue
439 |
440 | if not uc in uniq_counts:
441 | uniq_counts[uc] = 0.0
442 |
443 | uniq_counts[uc] += ( dist / total_score )
444 |
445 | # Get ordered lists of card counts
446 | newrecs = sorted(card_counts.items(), key=lambda x:x[1], reverse=True)
447 | outrecs = sorted(uniq_counts.items(), key=lambda x:x[1], reverse=True)
448 |
449 | logging.debug("Done finding recommendations for deck with general " + str(deck['commander']) + " (took %s time)" % str(datetime.datetime.now() - nn))
450 |
451 | if returnk:
452 | return newrecs, outrecs, [ deck for _, deck, _, _ in topk ]
453 | else:
454 | return newrecs, outrecs
455 |
456 |
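# Usage sketch (hypothetical deck; card names borrowed from decks_sample.json):
#
#   my_deck = {'commander': 'talrand, sky summoner',
#              'cards': ['island', 'counterspell', 'sol ring', 'cyclonic rift']}
#   newrecs, outrecs = recommend(my_deck, k=15)
#   for card, weight in newrecs[:10]:
#       print card, weight   # weighted suggestions the similar decks play and my_deck lacks
#
# outrecs are the cards in my_deck that the nearest decks mostly skip, weighted the
# same way.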
--------------------------------------------------------------------------------
/decks_sample.json:
--------------------------------------------------------------------------------
1 | {"cards": ["adaptive automaton", "ajani, mentor of heroes", "alpha status", "amoeboid changeling", "ancient ziggurat", "arcane sanctum", "belbe's portal", "blood crypt", "bonescythe sliver", "breeding pool", "brood sliver", "cavern of souls", "chameleon colossus", "city of brass", "clot sliver", "coat of arms", "command tower", "constricting sliver", "coordinated barrage", "creeping renaissance", "crosis's catacombs", "crumbling necropolis", "darigaaz's caldera", "descendants' path", "distant melody", "door of destinies", "dromar's cavern", "essence sliver", "fungus sliver", "gaea's cradle", "galerider sliver", "garruk, caller of beasts", "gavony township", "gemhide sliver", "gilded lotus", "godless shrine", "grand coliseum", "hallowed fountain", "haunted fengraf", "homing sliver", "horned sliver", "hunter sliver", "jungle shrine", "konda's banner", "leeching sliver", "luminescent rain", "lurking predators", "mana confluence", "mana echoes", "manaweft sliver", "megantic sliver", "muscle sliver", "mutavault", "necrotic sliver", "overgrown tomb", "pack's disdain", "predatory sliver", "prophet of kruphix", "quest for renewal", "quick sliver", "renewing touch", "riptide replicator", "rith's grove", "roar of the crowd", "rupture spire", "sacred foundry", "savage lands", "seaside citadel", "sentinel sliver", "shared animosity", "sinew sliver", "sliver hive", "sliver hivelord", "sliver legion", "sliver overlord", "sliver queen", "steam vents", "stomping ground", "striking sliver", "synapse sliver", "syphon sliver", "talon sliver", "taurean mauler", "temple garden", "thorncaster sliver", "transguild promenade", "treva's ruins", "urza's incubator", "vampiric sliver", "vault of the archangel", "venom sliver", "vivid crag", "vivid creek", "vivid grove", "vivid marsh", "vivid meadow", "volrath's stronghold", "watery grave", "wild pair", "winged sliver"], "date": 735439, "colors": ["BLACK", "BLUE", "GREEN", "RED", "WHITE"], "commander": "sliver overlord"}
2 | {"cards": ["academy ruins", "adarkar wastes", "ajani, caller of the pride", "angelic destiny", "archetype of aggression", "aurelia's fury", "aurelia, the warleader", "azorius signet", "batterskull", "battlefield forge", "benevolent bodyguard", "boros charm", "boros signet", "champion's helm", "city of brass", "clifftop retreat", "command tower", "consecrated sphinx", "counterflux", "counterspell", "cyclonic rift", "dack's duplicate", "darksteel plate", "detention sphere", "elspeth, knight-errant", "enlightened tutor", "fellwar stone", "ghostly prison", "glacial fortress", "grand abolisher", "gratuitous violence", "hallowed fountain", "hindering light", "iroas, god of victory", "island", "izzet signet", "lightning greaves", "loxodon warhammer", "minamo, school at water's edge", "mistveil plains", "mother of runes", "mountain", "mystic barrier", "not of this world", "obsidian battle-axe", "path to exile", "phyrexian metamorph", "plains", "propaganda", "render silent", "return to dust", "rewind", "rogue's passage", "ruhan of the fomori", "sacred foundry", "seize the day", "serra ascendant", "shivan reef", "silent arbiter", "slayers' stronghold", "sol ring", "solemn simulacrum", "spell crumple", "steam vents", "steel of the godhead", "steelshaper's gift", "stonecloaker", "stoneforge mystic", "stonehewer giant", "suffocating blast", "sulfur falls", "sunforger", "sunhome, fortress of the legion", "supreme verdict", "sword of feast and famine", "swords to plowshares", "temple of enlightenment", "temple of epiphany", "temple of triumph", "thassa, god of the sea", "umezawa's jitte", "urabrask the hidden", "waves of aggression", "weathered wayfarer", "wrath of god"], "date": 735439, "colors": ["BLUE", "RED", "WHITE"], "commander": "ruhan of the fomori"}
3 | {"cards": ["altar of dementia", "arcbound bruiser", "arcbound crusher", "arcbound hybrid", "arcbound overseer", "arcbound ravager", "arcbound reclaimer", "arcbound stinger", "arcbound worker", "basalt monolith", "burnished hart", "clock of omens", "codex shredder", "coldsteel heart", "crystal ball", "darksteel citadel", "darksteel ingot", "decimator web", "dormant volcano", "dreamstone hedron", "dross scorpion", "energy chamber", "everflowing chalice", "forgotten cave", "goblin welder", "great furnace", "grindclock", "grindstone", "hair-strung koto", "hammer of purphoros", "ichor wellspring", "iron myr", "karn, silver golem", "keening stone", "kiki-jiki, mirror breaker", "krark-clan ironworks", "kurkesh, onakke ancient", "lightning coils", "liquimetal coating", "millstone", "mimic vat", "mirrodin's core", "mirrorworks", "mountain", "mycosynth golem", "mycosynth lattice", "mycosynth wellspring", "myr battlesphere", "myr retriever", "myr turbine", "nim deathmantle", "palladium myr", "reliquary tower", "sculpting steel", "shimmer myr", "skullclamp", "slobad, goblin tinkerer", "sol ring", "solemn simulacrum", "staff of nin", "star compass", "steel hellkite", "steel overseer", "thopter assembly", "thought dissector", "thran dynamo", "trading post", "unwinding clock", "urza's factory", "urza's mine", "urza's power plant", "urza's tower", "valakut, the molten pinnacle", "vandalblast", "voltaic key", "workhorse", "zealous conscripts"], "date": 735439, "colors": ["RED"], "commander": "slobad, goblin tinkerer"}
4 | {"cards": ["anarchist", "archaeomancer", "augur of bolas", "caldera lake", "chandra ablaze", "chaos warp", "charmbreaker devils", "clockspinning", "command tower", "confusion in the ranks", "conundrum sphinx", "counterlash", "dakra mystic", "delay", "desolate lighthouse", "drift of phantasms", "eerie procession", "ethereal usher", "felhide spiritbinder", "fiery gambit", "grip of chaos", "guard gomazoa", "halimar depths", "heed the mists", "hive mind", "interpret the signs", "island", "izzet boilerworks", "izzet charm", "izzet chronarch", "izzet guildgate", "izzet signet", "jhoira of the ghitu", "mass polymorph", "mountain", "murmurs from beyond", "mystical tutor", "noggle ransacker", "nucklavee", "omenspeaker", "peer through depths", "perplexing chimera", "planar chaos", "planeswalker's mischief", "polymorph", "possibility storm", "psychic battle", "psychic theft", "radiate", "ral zarek", "reliquary tower", "reweave", "riptide laboratory", "riptide shapeshifter", "rite of replication", "sage of epityr", "scrambleverse", "shifting borders", "shivan reef", "sol ring", "spellshift", "split decision", "steam vents", "stitch in time", "sulfur falls", "temple of epiphany", "thieves' auction", "tibalt, the fiend-blooded", "timebender", "uncovered clues", "wandering eye", "warp world", "whims of the fates"], "date": 735439, "colors": ["BLUE", "RED"], "commander": "jhoira of the ghitu"}
5 | {"cards": ["acidic slime", "aetherling", "anger", "archaeomancer", "avenger of zendikar", "beast within", "birthing pod", "blatant thievery", "body double", "boundless realms", "buried ruin", "ceta sanctuary", "chronozoa", "clone", "coiling oracle", "command tower", "conjurer's closet", "consecrated sphinx", "crystal shard", "cultivate", "cyclonic rift", "dawntreader elk", "deadeye navigator", "diluvian primordial", "elvish piper", "evolving wilds", "explosive vegetation", "fabricate", "fact or fiction", "farhaven elf", "farseek", "fathom mage", "fauna shaman", "fierce empath", "flametongue kavu", "forest", "ghost quarter", "gilded drake", "glen elendra archmage", "green sun's zenith", "haunted fengraf", "inferno titan", "island", "ixidron", "kessig cagebreakers", "kessig wolf run", "kira, great glass-spinner", "kodama's reach", "mimic vat", "molten primordial", "mountain", "mulldrifter", "nostalgic dreams", "overwhelming stampede", "phyrexian metamorph", "pongify", "prime speaker zegana", "progenitor mimic", "prophet of kruphix", "proteus staff", "rampant growth", "rapid hybridization", "recurring insight", "regrowth", "riku of two reflections", "rite of replication", "rupture spire", "skullclamp", "sol ring", "somberwald sage", "sphinx of uthuun", "spike weaver", "steel hellkite", "terastodon", "terramorphic expanse", "transguild promenade", "trinket mage", "voyager staff", "worldly tutor", "zealous conscripts"], "date": 735439, "colors": ["BLUE", "GREEN", "RED"], "commander": "riku of two reflections"}
6 | {"cards": ["academy ruins", "arcane denial", "archaeomancer", "augur of bolas", "azami, lady of scrolls", "blue sun's zenith", "brainstorm", "brittle effigy", "caged sun", "capsize", "coldsteel heart", "commandeer", "counterspell", "dakra mystic", "darksteel ingot", "deranged assistant", "dismiss", "dissipate", "dissolve", "ertai, wizard adept", "fabricate", "fact or fiction", "future sight", "glen elendra archmage", "hapless researcher", "hinder", "into the roil", "island", "jushi apprentice", "knowledge exploitation", "laboratory maniac", "lonely sandbar", "master of waves", "merchant scroll", "mind stone", "miscalculation", "misdirection", "mystical tutor", "nykthos, shrine to nyx", "oblivion stone", "patron wizard", "perilous vault", "pithing needle", "ponder", "reliquary tower", "rewind", "riptide laboratory", "sage's dousing", "sea gate oracle", "seat of the synod", "sky diamond", "sol ring", "spellstutter sprite", "star compass", "steel hellkite", "stonybrook banneret", "surgespanner", "talrand, sky summoner", "temporal adept", "thespian's stage", "time warp", "tolaria west", "treachery", "treasure mage", "trinket mage", "turnabout", "vedalken aethermage", "venser, shaper savant", "voidmage apprentice", "voidmage husher", "voidmage prodigy", "willbender", "winding canyons"], "date": 735438, "colors": ["BLUE"], "commander": "azami, lady of scrolls"}
7 | {"cards": ["arcane denial", "bident of thassa", "boomerang", "bosium strip", "brainstorm", "buried ruin", "caged sun", "capsize", "coastal piracy", "coat of arms", "counterspell", "cyclonic rift", "dissipate", "dissolve", "echoing truth", "elixir of immortality", "fabricate", "fact or fiction", "faerie trickery", "foil", "forbid", "frantic search", "gauntlet of power", "gitaxian probe", "gravitational shift", "high tide", "hinder", "impulse", "into the roil", "island", "isochron scepter", "jin-gitaxias, core augur", "keep watch", "leyline of anticipation", "mana leak", "muddle the mixture", "mystical tutor", "negate", "opposition", "opt", "pact of negation", "peer through depths", "ponder", "portent", "preordain", "proteus staff", "reliquary tower", "rewind", "rhystic study", "riptide laboratory", "sapphire medallion", "serum visions", "sleight of hand", "snap", "sol ring", "spell crumple", "spell syphon", "swan song", "talrand, sky summoner", "tamiyo, the moon sage", "telling time", "think twice", "thought scour", "tidespout tyrant", "unsummon", "vapor snag", "vedalken shackles", "wash out", "whiplash trap", "windreader sphinx"], "date": 735438, "colors": ["BLUE"], "commander": "talrand, sky summoner"}
8 | {"cards": ["acidic slime", "archive trap", "body double", "buried alive", "butcher of malakir", "coalition relic", "command tower", "counterspell", "creeping tar pit", "cultivate", "damia, sage of stone", "death's shadow", "defense of the heart", "diabolic intent", "dimir aqueduct", "dimir signet", "doomgape", "drowned catacomb", "entomb", "eternal witness", "evolving wilds", "exhume", "explosive vegetation", "fauna shaman", "flooded grove", "forbid", "forest", "forgotten ancient", "gaea's revenge", "ghoultree", "golgari rot farm", "golgari signet", "grave pact", "greater good", "green sun's zenith", "grimoire of the dead", "hinterland harbor", "hydra omnivore", "increasing ambition", "inkwell leviathan", "island", "jace, memory adept", "jokulmorder", "jwar isle refuge", "kodama's reach", "krosan cloudscraper", "liliana of the veil", "lord of extinction", "mesmeric orb", "mindcrank", "mortivore", "mosswort bridge", "necrotic ooze", "phyrexian metamorph", "plague wind", "polar kraken", "praetor's counsel", "primal command", "profane command", "putrefy", "reanimate", "sewer nemesis", "simic growth chamber", "simic signet", "simic sky swallower", "sol ring", "spell crumple", "spirit of the night", "swamp", "swiftfoot boots", "terastodon", "terramorphic expanse", "the mimeoplasm", "thorn elemental", "thrun, the last troll", "tooth and nail", "trygon predator", "twilight mire", "urborg elf", "vivid creek", "vivid grove", "vivid marsh", "vorosh, the hunter", "woodland cemetery", "worldly tutor"], "date": 735438, "colors": ["BLACK", "BLUE", "GREEN"], "commander": "the mimeoplasm"}
9 | {"cards": ["acidic slime", "archaeomancer", "army of the damned", "astral cornucopia", "birthing pod", "bojuka bog", "boundless realms", "brawn", "buried ruin", "colossus of akros", "command tower", "consecrated sphinx", "contagion clasp", "contagion engine", "corpsejack menace", "corrupted conscience", "counterspell", "cultivate", "cyclonic rift", "damia, sage of stone", "deadbridge chant", "deadeye navigator", "diabolic tutor", "dimir guildgate", "doom blade", "doubling season", "elvish mystic", "evolution vat", "evolving wilds", "explore", "forest", "garruk wildspeaker", "gilded lotus", "glen elendra archmage", "golgari charm", "golgari guildgate", "grafted exoskeleton", "grisly salvage", "halimar depths", "isochron scepter", "jace beleren", "jace, memory adept", "kiora's follower", "kruphix, god of horizons", "leyline of anticipation", "liliana vess", "llanowar elves", "mimic vat", "mistcutter hydra", "mystic snake", "nim deathmantle", "opal palace", "phyrexian metamorph", "phyrexian reclamation", "prime speaker zegana", "prophet of kruphix", "rampant growth", "rapid hybridization", "reclamation sage", "reins of power", "reliquary tower", "rise from the grave", "rite of replication", "rupture spire", "sakura-tribe elder", "seedborn muse", "sensei's divining top", "sepulchral primordial", "shimmering grotto", "simic guildgate", "snow-covered island", "sol ring", "spell crumple", "sudden spoiling", "swamp", "sylvan caryatid", "tamiyo, the moon sage", "temporal mastery", "tezzeret the seeker", "thassa, god of the sea", "transguild promenade", "underworld connections", "vivid creek", "yisan, the wanderer bard"], "date": 735438, "colors": ["BLACK", "BLUE", "GREEN"], "commander": "damia, sage of stone"}
10 | {"cards": ["animate dead", "artisan of kozilek", "asceticism", "avatar of woe", "brawn", "breeding pool", "buried alive", "command tower", "consecrated sphinx", "consuming aberration", "corpsejack menace", "cultivate", "darksteel ingot", "darksteel plate", "deadbridge chant", "dimir aqueduct", "dimir signet", "dismember", "doom blade", "drowned catacomb", "essence harvest", "eternal witness", "evil twin", "fact or fiction", "forest", "frost marsh", "genesis wave", "geth, lord of the vault", "ghoultree", "gilded lotus", "go for the throat", "golgari rot farm", "golgari signet", "grave pact", "gravedigger", "havengul lich", "island", "jarad's orders", "jin-gitaxias, core augur", "kira, great glass-spinner", "krosan grip", "life from the loam", "lightning greaves", "liliana vess", "living death", "lord of extinction", "maelstrom pulse", "memory erosion", "mossbridge troll", "murder", "nim deathmantle", "overgrown tomb", "painful quandary", "plague wind", "predator ooze", "prime speaker zegana", "primordial hydra", "putrefax", "putrefy", "rampant growth", "rancor", "reanimate", "rupture spire", "sensei's divining top", "sewer nemesis", "sheoldred, whispering one", "simic growth chamber", "skithiryx, the blight dragon", "slaughter pact", "sol ring", "solemn simulacrum", "stitch together", "swamp", "swiftfoot boots", "syphon flesh", "syphon mind", "tainted wood", "temple of the false god", "the mimeoplasm", "triskelion", "tropical island", "vivid creek", "vivid grove", "vivid marsh", "vraska the unseen", "watery grave", "whispersilk cloak", "windfall", "wonder", "woodfall primus", "worn powerstone", "wrexial, the risen deep", "yavimaya elder"], "date": 735438, "colors": ["BLACK", "BLUE", "GREEN"], "commander": "the mimeoplasm"}
11 | {"cards": ["acidic slime", "alchemist's refuge", "angel of finality", "archaeomancer", "aura shards", "avenger of zendikar", "azorius chancery", "azorius guildgate", "bant panorama", "birthing pod", "brago, king eternal", "brainstorm", "brutalizer exarch", "cauldron of souls", "chord of calling", "clone", "cloudshift", "coiling oracle", "command tower", "conjurer's closet", "dauntless escort", "deadeye navigator", "diluvian primordial", "elfhame palace", "eternal witness", "evolving wilds", "farhaven elf", "fierce empath", "flickerwisp", "forest", "galepowder mage", "garruk relentless", "ghostly flicker", "ghostway", "graypelt refuge", "hornet queen", "island", "ixidron", "karmic guide", "knight-captain of eos", "kor hookmaster", "lavinia of the tenth", "loxodon hierarch", "lyev skyknight", "momentary blink", "mosswort bridge", "mulldrifter", "new benalia", "palinchron", "peregrine drake", "pithing needle", "plains", "plaxmanta", "ponder", "progenitor mimic", "prophet of kruphix", "reclamation sage", "reliquary tower", "resolute archangel", "restoration angel", "roon of the hidden realm", "rupture spire", "rush of knowledge", "sage of epityr", "seaside citadel", "secluded steppe", "seedborn muse", "sejiri refuge", "selesnya guildgate", "selesnya sanctuary", "simic growth chamber", "simic guildgate", "sol ring", "soul of the harvest", "spike weaver", "stonecloaker", "stonehorn dignitary", "suture priest", "sylvan ranger", "temple of plenty", "temple of the false god", "tempt with discovery", "terramorphic expanse", "thragtusk", "tranquil thicket", "transguild promenade", "venser, the sojourner", "wall of omens", "wild pair", "woodfall primus"], "date": 735438, "colors": ["BLUE", "GREEN", "WHITE"], "commander": "roon of the hidden realm"}
12 | {"cards": ["all suns' dawn", "angel's grace", "arcane denial", "avatar of discord", "avatar of fury", "avatar of hope", "avatar of might", "avatar of slaughter", "avatar of will", "avatar of woe", "bant charm", "black sun's zenith", "bonfire of the damned", "child of alara", "clifftop retreat", "counterspell", "countersquall", "crime/punishment", "crypt incursion", "dack's duplicate", "debt to the deathless", "decree of pain", "deus of calamity", "divinity of pride", "dominus of fealty", "dragonskull summit", "drowned catacomb", "elspeth, knight-errant", "excruciator", "faith's fetters", "faith's reward", "favor of the overbeing", "fellwar stone", "figure of destiny", "forest", "frenzied tilling", "genju of the realm", "ghastlord of fugue", "godhead of awe", "hallowed burial", "herald of leshrac", "hex", "hindering light", "homeward path", "increasing ambition", "island", "isolated chapel", "maelstrom pulse", "manalith", "merciless eviction", "mirari", "mirari's wake", "mortify", "mountain", "nobilis of war", "overbeing of myth", "oversoul of dusk", "personal sanctuary", "plains", "progenitus", "putrefy", "reap and sow", "revenge of the hunted", "righteousness", "rout", "serra avatar", "shape stealer", "skyshroud claim", "slave of bolas", "sol ring", "spectral searchlight", "swamp", "terminus", "transcendent master", "unmake", "unscythe, killer of kings", "woodland cemetery", "world queller", "wrath of god"], "date": 735438, "colors": ["BLACK", "BLUE", "GREEN", "RED", "WHITE"], "commander": "progenitus"}
13 | {"cards": ["acidic slime", "aegis of the gods", "ajani's chosen", "argothian enchantress", "armada wurm", "aura of silence", "avacyn's pilgrim", "banishing light", "birthing pod", "blind obedience", "bow of nylea", "brushland", "celestial archon", "collective blessing", "command tower", "courser of kruphix", "crystal chimes", "doubling season", "eidolon of blossoms", "enchantress's presence", "eternal witness", "ethereal armor", "exploration", "fiendslayer paladin", "font of fertility", "forest", "gavony township", "ghost quarter", "ghostly prison", "gift of immortality", "greater auramancy", "green sun's zenith", "grove of the guardian", "heliod's emissary", "heliod, god of the sun", "hopeful eidolon", "indestructibility", "intangible virtue", "karametra, god of harvests", "kitchen finks", "knight of the reliquary", "knightly valor", "land tax", "leafcrown dryad", "loxodon smiter", "luminarch ascension", "mana bloom", "maze of ith", "mesa enchantress", "mirari's wake", "mirri's guile", "nylea's emissary", "nylea, god of the hunt", "oblivion ring", "observant alseid", "path to exile", "plains", "primeval bounty", "rancor", "replenish", "rest in peace", "sakura-tribe elder", "savannah", "scavenging ooze", "scroll rack", "selesnya guildgate", "selvala, explorer returned", "serra's sanctum", "sigarda, host of herons", "sigil of the empty throne", "silverblade paladin", "sphere of safety", "squirrel nest", "stony silence", "sublime archangel", "sun titan", "sunpetal grove", "suppression field", "swords to plowshares", "sylvan caryatid", "sylvan library", "temple garden", "terminus", "thespian's stage", "trostani, selesnya's voice", "urza's factory", "vedalken orrery", "verduran enchantress", "vivid grove", "vivid meadow", "voyaging satyr", "wall of omens", "windbrisk heights", "witchstalker"], "date": 735438, "colors": ["GREEN", "WHITE"], "commander": "karametra, god of harvests"}
14 | {"cards": ["acidic slime", "adarkar wastes", "alchemist's refuge", "arcane denial", "archaeomancer", "austere command", "avacyn's pilgrim", "avenger of zendikar", "bant charm", "birds of paradise", "blue sun's zenith", "brainstorm", "breeding pool", "brushland", "champion's helm", "command tower", "condemn", "counterspell", "craterhoof behemoth", "cryptic command", "cultivate", "cyclonic rift", "darksteel plate", "deadeye navigator", "dissipate", "eladamri's call", "elvish mystic", "eternal witness", "evolving wilds", "fact or fiction", "farhaven elf", "finest hour", "forest", "gavony township", "glacial fortress", "green sun's zenith", "halimar depths", "hallowed fountain", "hinder", "hinterland harbor", "island", "karmic guide", "kodama's reach", "krosan grip", "llanowar elves", "mulldrifter", "mystical tutor", "nimbus maze", "path to exile", "plains", "plasm capture", "prophet of kruphix", "qasali pridemage", "rafiq of the many", "reflecting pool", "reliquary tower", "render silent", "rhystic study", "rite of replication", "seaside citadel", "skycloud expanse", "snapcaster mage", "sol ring", "spell crumple", "stoic angel", "stoneforge mystic", "strip mine", "sun titan", "sungrass prairie", "sunpetal grove", "supreme verdict", "swiftfoot boots", "sword of feast and famine", "sword of fire and ice", "swords to plowshares", "tamiyo, the moon sage", "temple garden", "temple of enlightenment", "temple of mystery", "temple of plenty", "terminus", "tooth and nail", "unexpectedly absent", "urban evolution", "venser, shaper savant", "voidslime", "wargate", "wood elves", "worldly tutor", "yavimaya coast"], "date": 735438, "colors": ["BLUE", "GREEN", "WHITE"], "commander": "rafiq of the many"}
15 | {"cards": ["bloodchief ascension", "bloodgift demon", "bojuka bog", "bottomless pit", "butcher of malakir", "cabal coffers", "caged sun", "charcoal diamond", "corrupt", "crypt of agadeem", "cunning lethemancer", "damnation", "death cloud", "demonic tutor", "diabolic tutor", "dictate of erebos", "dimir house guard", "enslave", "entomber exarch", "erebos, god of the dead", "expedition map", "exsanguinate", "extraplanar lens", "fate unraveler", "font of mythos", "geth's grimoire", "grave betrayal", "grave pact", "gray merchant of asphodel", "helldozer", "howling mine", "hypnotic specter", "increasing ambition", "iron maiden", "kagemaro, first to suffer", "killing wave", "liliana of the dark realms", "liliana vess", "liliana's caress", "lodestone golem", "magus of the coffers", "master of the feast", "megrim", "memory jar", "mikokoro, center of the sea", "mind shatter", "mind stone", "mind twist", "mutilate", "myojin of night's reach", "necrogen mists", "nether void", "nevinyrral's disk", "no mercy", "ob nixilis, unshackled", "oblivion stone", "painful quandary", "phyrexian arena", "phyrexian gargantua", "pox", "price of knowledge", "psychosis crawler", "reliquary tower", "rune-scarred demon", "sangromancer", "scythe specter", "seizan, perverter of truth", "silent specter", "skinrender", "sol ring", "solemn simulacrum", "sorin markov", "sphere of resistance", "steel hellkite", "strip mine", "suffer the past", "swamp", "syphon mind", "teferi's puzzle box", "temple bell", "thorn of amethyst", "toxic deluge", "underworld dreams", "urborg, tomb of yawgmoth", "venser's journal", "vesuva", "viseling", "waste not", "words of waste", "worn powerstone"], "date": 735438, "colors": ["BLACK"], "commander": "seizan, perverter of truth"}
16 | {"cards": ["adaptive automaton", "anger of the gods", "battle hymn", "beetleback chief", "blasphemous act", "blood moon", "boggart shenanigans", "breath of fury", "brightstone ritual", "browbeat", "burning earth", "caged sun", "chancellor of the forge", "chandra, the firebrand", "coat of arms", "dictate of the twin gods", "dragon fodder", "druidic satchel", "empty the warrens", "extraplanar lens", "fanatic of mogis", "fervor", "firecat blitz", "forgotten cave", "furnace of rath", "gauntlet of power", "gempalm incinerator", "goblin assault", "goblin chieftain", "goblin king", "goblin matron", "goblin offensive", "goblin piledriver", "goblin rabblemaster", "goblin rally", "goblin recruiter", "goblin ringleader", "goblin war strike", "goblin warchief", "goblin warrens", "gratuitous violence", "guttersnipe", "hammer of purphoros", "horde of boggarts", "ib halfheart, goblin tactician", "increasing vengeance", "kher keep", "koth of the hammer", "krenko's command", "krenko, mob boss", "mizzium mortars", "mogg infestation", "mogg war marshal", "mountain", "nykthos, shrine to nyx", "pandemonium", "past in flames", "purphoros, god of the forge", "reckless one", "recoup", "reforge the soul", "reiterate", "roar of the crowd", "ruination", "seer's sundial", "shared animosity", "siege-gang commander", "skirk fire marshal", "skirk prospector", "smoldering crater", "smoldering spires", "snake basket", "sol ring", "solemn simulacrum", "spinerock knoll", "staff of nin", "tempt with vengeance", "urabrask the hidden", "valakut, the molten pinnacle", "vandalblast", "warstorm surge", "wild guess"], "date": 735438, "colors": ["RED"], "commander": "purphoros, god of the forge"}
17 | {"cards": ["aeon chronicler", "arcane denial", "blasphemous act", "blast of genius", "blue sun's zenith", "brainstorm", "capsize", "cerebral vortex", "colossus of akros", "command tower", "counterflux", "counterspell", "cyclonic rift", "darksteel ingot", "darksteel sentinel", "discombobulate", "dismiss", "dissolve", "djinn illuminatus", "elixir of immortality", "essence backlash", "fabricate", "future sight", "galvanoth", "halimar depths", "increasing vengeance", "inner fire", "into the roil", "island", "izzet boilerworks", "izzet cluestone", "izzet guildgate", "izzet keyrune", "izzet signet", "jace's archivist", "jace, memory adept", "jhoira of the ghitu", "keranos, god of storms", "laboratory maniac", "library of leng", "mana geyser", "melek, izzet paragon", "mercurial chemister", "mind spring", "mizzium mortars", "mountain", "myojin of seeing winds", "mystic retrieval", "nin, the pain artist", "niv-mizzet, the firemind", "prosperity", "psychosis crawler", "ral zarek", "reiterate", "reminisce", "reverberate", "runeflare trap", "skyscribing", "sol ring", "spellbook", "spin into myth", "stuffy doll", "swiftfoot boots", "talrand, sky summoner", "temple bell", "temple of epiphany", "vandalblast", "vision skeins", "whispersilk cloak", "words of wisdom"], "date": 735438, "colors": ["BLUE", "RED"], "commander": "nin, the pain artist"}
18 | {"cards": ["akoum refuge", "annihilate", "archmage ascension", "armillary sphere", "augur of bolas", "baleful strix", "bearer of the heavens", "blackcleave cliffs", "blood crypt", "bloodstained mire", "bojuka bog", "capsize", "charmbreaker devils", "command tower", "crosis's charm", "cruel ultimatum", "crumbling necropolis", "curse of chaos", "darkslick shores", "decree of pain", "diabolic revelation", "dimir guildgate", "dismiss", "dissolve", "diviner spirit", "dragonskull summit", "drowned catacomb", "evolving wilds", "fissure vent", "font of mythos", "foresee", "gem of becoming", "gomazoa", "grixis charm", "grixis panorama", "guul draz assassin", "hex", "howling mine", "incendiary command", "infest", "island", "izzet boilerworks", "izzet guildgate", "jace's archivist", "lightning greaves", "mnemonic wall", "molten disaster", "molten slagheap", "mountain", "nekusar, the mindrazer", "nivix guildmage", "obelisk of grixis", "opal palace", "opportunity", "polluted delta", "praetor's grasp", "price of knowledge", "promise of power", "propaganda", "prosperity", "psychic intrusion", "rakdos carnarium", "rakdos guildgate", "runeflare trap", "rupture spire", "scalding tarn", "shadowblood ridge", "silence the believers", "skyscribing", "sol ring", "soul manipulation", "sphinx of magosi", "spiteful visions", "starstorm", "steam vents", "stormbreath dragon", "strategic planning", "sudden spoiling", "sulfur falls", "swamp", "swiftfoot boots", "temple bell", "temple of deceit", "temple of epiphany", "temple of malice", "temple of the false god", "terminate", "thraximundar", "vampire nighthawk", "viseling", "vision skeins", "vivid creek", "vivid marsh", "watery grave", "wild ricochet"], "date": 735438, "colors": ["BLACK", "BLUE", "RED"], "commander": "nekusar, the mindrazer"}
19 | {"cards": ["arcanis the omnipotent", "basalt monolith", "blue sun's zenith", "bribery", "caged sun", "consecrated sphinx", "counterspell", "cyclonic rift", "dismiss", "elixir of immortality", "everflowing chalice", "extraplanar lens", "fabricate", "fact or fiction", "foil", "forbid", "freed from the real", "gauntlet of power", "gilded lotus", "halimar depths", "high tide", "hinder", "island", "jace beleren", "jin-gitaxias, core augur", "laboratory maniac", "last word", "leyline of anticipation", "lightning greaves", "long-term plans", "mind over matter", "mind stone", "mindbreak trap", "muddle the mixture", "omniscience", "pact of negation", "pemmin's aura", "phyrexian metamorph", "preordain", "propaganda", "psychosis crawler", "reliquary tower", "rewind", "rhystic study", "sapphire medallion", "scroll rack", "sensei's divining top", "sol ring", "solemn simulacrum", "standstill", "swan song", "swiftfoot boots", "teferi, mage of zhalfir", "temple of the false god", "tezzeret the seeker", "thought reflection", "thousand-year elixir", "thran dynamo", "time stop", "time stretch", "time warp", "torpor orb", "treachery", "trinket mage", "venser's journal", "venser, shaper savant"], "date": 735438, "colors": ["BLUE"], "commander": "arcanis the omnipotent"}
20 |
--------------------------------------------------------------------------------