├── .gitattributes
├── .gitignore
├── license_parser.py
├── README.md
├── vdf.py
└── missing_cover_downloader.py
/.gitattributes:
--------------------------------------------------------------------------------
1 | # Auto detect text files and perform LF normalization
2 | * text=auto
3 |
4 | # Custom for Visual Studio
5 | *.cs diff=csharp
6 |
7 | # Standard to msysgit
8 | *.doc diff=astextplain
9 | *.DOC diff=astextplain
10 | *.docx diff=astextplain
11 | *.DOCX diff=astextplain
12 | *.dot diff=astextplain
13 | *.DOT diff=astextplain
14 | *.pdf diff=astextplain
15 | *.PDF diff=astextplain
16 | *.rtf diff=astextplain
17 | *.RTF diff=astextplain
18 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Windows image file caches
2 | Thumbs.db
3 | ehthumbs.db
4 |
5 | # Folder config file
6 | Desktop.ini
7 |
8 | # Recycle Bin used on file shares
9 | $RECYCLE.BIN/
10 |
11 | # Windows Installer files
12 | *.cab
13 | *.msi
14 | *.msm
15 | *.msp
16 |
17 | # Windows shortcuts
18 | *.lnk
19 |
20 | # =========================
21 | # Operating System Files
22 | # =========================
23 |
24 | # OSX
25 | # =========================
26 |
27 | .DS_Store
28 | .AppleDouble
29 | .LSOverride
30 |
31 | # Thumbnails
32 | ._*
33 |
34 | # Files that might appear in the root of a volume
35 | .DocumentRevisions-V100
36 | .fseventsd
37 | .Spotlight-V100
38 | .TemporaryItems
39 | .Trashes
40 | .VolumeIcon.icns
41 |
42 | # Directories potentially created on remote AFP share
43 | .AppleDB
44 | .AppleDesktop
45 | Network Trash Folder
46 | Temporary Items
47 | .apdisk
48 |
--------------------------------------------------------------------------------
/license_parser.py:
--------------------------------------------------------------------------------
1 | from steam.protobufs.steammessages_clientserver_pb2 import CMsgClientLicenseList
2 |
3 | NTAB = 32
4 | IA = 16807
5 | IM = 2147483647
6 | IQ = 127773
7 | IR = 2836
8 | NDIV = (1+(IM-1)//NTAB)
9 | MAX_RANDOM_RANGE = 0x7FFFFFFF
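10 | 
11 | # Python port of Valve's uniform random stream (the classic "ran1" generator
12 | # from Numerical Recipes, same NTAB/IA/IM/IQ/IR constants). Steam uses its
13 | # printable-ASCII output as an XOR keystream; see decrypt_data() below.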
10 | class RandomStream:
11 | def __init__(self):
12 | self.set_seed(0)
13 |
14 | def set_seed(self, iSeed):
15 | self.m_idum = iSeed if ( iSeed < 0 ) else -iSeed
16 | self.m_iy = 0
17 | self.m_iv = [0 for _ in range(NTAB)]
18 |
19 | def generate_random_number(self):
20 | if self.m_idum <= 0 or not self.m_iy:
21 | if -(self.m_idum) < 1:
22 | self.m_idum = 1
23 | else:
24 | self.m_idum = -(self.m_idum)
25 | for j in range(NTAB+7,-1,-1):
26 | k = (self.m_idum)//IQ
27 | self.m_idum = IA*(self.m_idum-k*IQ)-IR*k
28 | if self.m_idum < 0:
29 | self.m_idum += IM
30 | if j < NTAB:
31 | self.m_iv[j] = self.m_idum
32 | self.m_iy=self.m_iv[0]
33 |
34 | k=(self.m_idum)//IQ
35 | self.m_idum=IA*(self.m_idum-k*IQ)-IR*k
36 | if (self.m_idum < 0):
37 | self.m_idum += IM
38 | j=self.m_iy//NDIV
39 |
40 | if j >= NTAB or j < 0:
41 | j = ( j % NTAB ) & 0x7fffffff
42 |
43 | self.m_iy=self.m_iv[j]
44 | self.m_iv[j] = self.m_idum
45 |
46 | return self.m_iy
47 |
48 | def random_int(self, iLow, iHigh):
49 | x = iHigh-iLow+1
50 | if x <= 1 or MAX_RANDOM_RANGE < x-1:
51 | return iLow
52 |
53 | maxAcceptable = MAX_RANDOM_RANGE - ((MAX_RANDOM_RANGE+1) % x )
54 | while True:
55 | n = self.generate_random_number()
56 | if n <= maxAcceptable:
57 | break
58 |
59 | return iLow + (n % x)
60 |
61 | def random_char(self):
62 | return self.random_int(32,126)
63 |
64 | def decrypt_data(self, key, data):
65 | self.set_seed(key)
66 | result = bytearray(data)
67 | for i in range(len(data)):
68 | byte = self.random_char()
69 |             # XOR each ciphertext byte with the next keystream byte; the
70 |             # same operation performs both encryption and decryption.
71 | result[i] = data[i] ^ byte
72 | return result
73 |
74 | def parse(path, steamid):
75 | with open(path,'rb') as f:
76 | encrypted = f.read()
77 |
78 | random = RandomStream()
79 | decrypted = random.decrypt_data(steamid, encrypted)
80 |
81 | msg = CMsgClientLicenseList()
82 | msg.ParseFromString(bytes(decrypted[:-4]))
83 | return msg
84 |
85 |
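86 | # Example usage (a sketch; the cache file path and the SteamID seed are
87 | # supplied by the caller, and nothing here validates them):
88 | #
89 | #     msg = parse('licensecache', 76561197960287930)
90 | #     for lic in msg.licenses:
91 | #         print(lic.package_id)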
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Steam Missing Cover Downloader
2 | 
3 | Downloads missing portrait covers in your library for the new Steam library UI (the Steam beta).
4 | Covers are downloaded from steamgriddb.com.
5 |
6 | ## Getting Started
7 |
8 | ### Prerequisites
9 |
10 | Python 3.7+
11 |
12 | Libraries:
13 |
14 | * aiohttp
15 | * [steam](https://github.com/ValvePython/steam)
16 |
17 | Install using the commands:
18 | ```
19 | pip install aiohttp
20 | pip install steam
21 | ```
22 |
23 | ### Running
24 |
25 | ```
26 | python missing_cover_downloader.py
27 | ```
28 |
29 | #### Command Line Options
30 | ```
31 | usage: missing_cover_downloader.py [-h] [-p STEAM_PATH] [-l] [-r]
32 |                                    [-m MIN_SCORE] [-s STYLES] [-o] [-d]
33 |
34 | Downloads missing covers for new steam UI. Covers are downloaded from
35 | steamgriddb.com
36 |
37 | optional arguments:
38 |   -h, --help            show this help message and exit
39 |   -p STEAM_PATH, --path STEAM_PATH
40 |                         Set Steam installation path.
39 | -l, --local Local mode, this is the default operation.
40 | -r, --remote Remote mode, if both local and remote are specified,
41 | will try local mode first.
42 | -m MIN_SCORE, --minscore MIN_SCORE
43 | Set min score for a cover to be downloaded.
44 | -s STYLES, --styles STYLES
45 | Set styles of cover, can be comma separated list of
46 | alternate, blurred, white_logo, material or no_logo.
47 | -o, --overwrite Overwrite covers that are already present in local
48 | steam grid path.
49 | -d, --delete-local Delete local covers for games that already have
50 | official ones.
51 | ```
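52 | 
53 | For example, to overwrite existing covers while only accepting covers in the
54 | no_logo style with a score of at least 10:
55 | 
56 | ```
57 | python missing_cover_downloader.py -o -m 10 -s no_logo
58 | ```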
52 |
53 |
54 |
55 | ## Troubleshooting
56 |
57 | | Error | Solution |
58 | | ------------------------------------------------------------ | ------------------------------------------------------------ |
59 | | `ModuleNotFoundError: No module named 'google'` | Check if `protobuf` Python library is installed via `pip list`, if not, run `pip install protobuf` |
60 | | `File "asyncio\base_events.py", line 508, in _check_closed`<br>`RuntimeError: Event loop is closed` | Possibly too many images were queued for download at once. Try grabbing some images manually from `steamgriddb.com` and placing them in `Steam\userdata\[user id]\config\grid`.<br>Also try running `missing_cover_downloader.py` with the `-m` argument, starting at `20` and working down (`missing_cover_downloader.py -m 20`, then `missing_cover_downloader.py -m 15`, etc.). |
61 | | `Cannot connect to host www.steamgriddb.com:443 ssl:default` | Your proxy settings may be preventing you from downloading images from steamgriddb.<br>On Windows, go to *Internet Options -> Connections -> LAN settings*.<br>Under *Automatic configuration*, check *Automatically detect settings*; under *Proxy server*, uncheck *Use a proxy server for your LAN*. |
62 |
63 | ## Update History
64 |
65 | 1.0.0
66 | * Initial release
67 |
68 | 1.2.0
69 | * Added support for reading data from the local appcache.
70 | * Fixed an issue where steamgriddb stopped returning correct covers.
71 | * Added Mac support (Thanks to [UKMeng](https://github.com/UKMeng))
72 |
73 | 1.5.0
74 |
75 | * Significantly improved performance using asynchronous requests.
76 | * Refactored code
77 | * Added Linux support (Thanks to [KrystianoXPL](https://github.com/KrystianoXPL))
78 | * Fixed a bug where some games in the library were not returned.
79 | * Fixed a bug where games in the appcache but not in the game library were returned.
80 |
81 | 1.6.0
82 | * The script now uses SGDB API 2.3.0, which supports filtering by size. Scraping the site is no longer needed.
83 | * Added support for switching between local and remote mode.
84 | * Added support to set the minimum score for a cover to be downloaded.
85 |
86 | 1.6.2
87 | * Added option to overwrite existing covers.
88 | * Added option to select cover styles.
89 | * Added option to delete custom covers when official covers are available.
90 |
--------------------------------------------------------------------------------
/vdf.py:
--------------------------------------------------------------------------------
1 | """
2 | Module for deserializing/serializing to and from VDF
3 | """
4 | __version__ = "3.2"
5 | __author__ = "Rossen Georgiev"
6 |
7 | import re
8 | import sys
9 | import struct
10 | from binascii import crc32
11 | from io import StringIO as unicodeIO
12 | from collections import namedtuple
13 |
14 | # Py2 & Py3 compatibility
15 | if sys.version_info[0] >= 3:
16 | string_type = str
17 | int_type = int
18 | BOMS = '\ufffe\ufeff'
19 |
20 | def strip_bom(line):
21 | return line.lstrip(BOMS)
22 | else:
23 | from StringIO import StringIO as strIO
24 | string_type = basestring
25 | int_type = long
26 | BOMS = '\xef\xbb\xbf\xff\xfe\xfe\xff'
27 | BOMS_UNICODE = '\\ufffe\\ufeff'.decode('unicode-escape')
28 |
29 | def strip_bom(line):
30 | return line.lstrip(BOMS if isinstance(line, str) else BOMS_UNICODE)
31 |
32 | # string escaping
33 | _unescape_char_map = {
34 | r"\n": "\n",
35 | r"\t": "\t",
36 | r"\v": "\v",
37 | r"\b": "\b",
38 | r"\r": "\r",
39 | r"\f": "\f",
40 | r"\a": "\a",
41 | r"\\": "\\",
42 | r"\?": "?",
43 | r"\"": "\"",
44 | r"\'": "\'",
45 | }
46 | _escape_char_map = {v: k for k, v in _unescape_char_map.items()}
47 |
48 | def _re_escape_match(m):
49 | return _escape_char_map[m.group()]
50 |
51 | def _re_unescape_match(m):
52 | return _unescape_char_map[m.group()]
53 |
54 | def _escape(text):
55 | return re.sub(r"[\n\t\v\b\r\f\a\\\?\"']", _re_escape_match, text)
56 |
57 | def _unescape(text):
58 | return re.sub(r"(\\n|\\t|\\v|\\b|\\r|\\f|\\a|\\\\|\\\?|\\\"|\\')", _re_unescape_match, text)
59 |
60 | # parsing and dumping for KV1
61 | def parse(fp, mapper=dict, merge_duplicate_keys=True, escaped=True):
62 | """
63 |     Deserialize a VDF document read from ``fp`` (a ``.readline()``-supporting
64 |     file-like object) to a Python object.
65 | 
66 |     ``mapper`` specifies the Python object used after deserialization. ``dict`` is
67 |     used by default. Alternatively, ``collections.OrderedDict`` can be used if you
68 |     wish to preserve key order. Or any object that acts like a ``dict``.
69 | 
70 |     ``merge_duplicate_keys`` when ``True`` will merge multiple KeyValue lists with the
71 |     same key into one instead of overwriting. You can set this to ``False`` if you are
72 |     using ``VDFDict`` and need to preserve the duplicates.
73 | """
74 | if not issubclass(mapper, dict):
75 | raise TypeError("Expected mapper to be subclass of dict, got %s" % type(mapper))
76 | if not hasattr(fp, 'readline'):
77 | raise TypeError("Expected fp to be a file-like object supporting line iteration")
78 |
79 | stack = [mapper()]
80 | expect_bracket = False
81 |
82 |     re_keyvalue = re.compile(r'^("(?P<qkey>(?:\\.|[^\\"])+)"|(?P<key>#?[a-z0-9\-\_\\\?]+))'
83 |                              r'([ \t]*('
84 |                              r'"(?P<qval>(?:\\.|[^\\"])*)(?P<vq_end>")?'
85 |                              r'|(?P<val>[a-z0-9\-\_\\\?\*\.]+)'
86 |                              r'))?',
87 |                              flags=re.I)
88 |
89 | for idx, line in enumerate(fp):
90 | if idx == 0:
91 | line = strip_bom(line)
92 |
93 | line = line.lstrip()
94 |
95 | # skip empty and comment lines
96 | if line == "" or line[0] == '/':
97 | continue
98 |
99 | # one level deeper
100 | if line[0] == "{":
101 | expect_bracket = False
102 | continue
103 |
104 | if expect_bracket:
105 |             raise SyntaxError("vdf.parse: expected opening bracket (line %d)" % (idx + 1))
106 |
107 | # one level back
108 | if line[0] == "}":
109 | if len(stack) > 1:
110 | stack.pop()
111 | continue
112 |
113 |             raise SyntaxError("vdf.parse: one too many closing braces (line %d)" % (idx + 1))
114 |
115 | # parse keyvalue pairs
116 | while True:
117 | match = re_keyvalue.match(line)
118 |
119 | if not match:
120 | try:
121 | line += next(fp)
122 | continue
123 | except StopIteration:
124 | raise SyntaxError("vdf.parse: unexpected EOF (open key quote?)")
125 |
126 | key = match.group('key') if match.group('qkey') is None else match.group('qkey')
127 | val = match.group('val') if match.group('qval') is None else match.group('qval')
128 |
129 | if escaped:
130 | key = _unescape(key)
131 |
132 | # we have a key with value in parenthesis, so we make a new dict obj (level deeper)
133 | if val is None:
134 | if merge_duplicate_keys and key in stack[-1]:
135 | _m = stack[-1][key]
136 | else:
137 | _m = mapper()
138 | stack[-1][key] = _m
139 |
140 | stack.append(_m)
141 | expect_bracket = True
142 |
143 | # we've matched a simple keyvalue pair, map it to the last dict obj in the stack
144 | else:
145 |                 # if the quoted value spans multiple lines, consume one more line
146 |                 # and try to match again until we get the full KeyValue pair
147 | if match.group('vq_end') is None and match.group('qval') is not None:
148 | try:
149 | line += next(fp)
150 | continue
151 | except StopIteration:
152 | raise SyntaxError("vdf.parse: unexpected EOF (open value quote?)")
153 |
154 | stack[-1][key] = _unescape(val) if escaped else val
155 |
156 | # exit the loop
157 | break
158 |
159 | if len(stack) != 1:
160 |         raise SyntaxError("vdf.parse: unclosed braces or quotes (EOF)")
161 |
162 | return stack.pop()
163 |
164 |
165 | def loads(s, **kwargs):
166 | """
167 |     Deserialize ``s`` (a ``str`` or ``unicode`` instance containing a VDF
168 |     document) to a Python object.
169 | """
170 | if not isinstance(s, string_type):
171 | raise TypeError("Expected s to be a str, got %s" % type(s))
172 |
173 | try:
174 | fp = unicodeIO(s)
175 | except TypeError:
176 | fp = strIO(s)
177 |
178 | return parse(fp, **kwargs)
179 |
180 |
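181 | # A minimal round trip through the text KV1 API (doctest-style sketch):
182 | #
183 | #     >>> loads('"root"\n{\n\t"key" "value"\n}\n')
184 | #     {'root': {'key': 'value'}}
185 | #     >>> dumps({'root': {'key': 'value'}}, pretty=True)
186 | #     '"root"\n{\n\t"key" "value"\n}\n'
187 | 
188 | 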
181 | def load(fp, **kwargs):
182 | """
183 |     Deserialize ``fp`` (a ``.readline()``-supporting file-like object containing
184 |     a VDF document) to a Python object.
185 | """
186 | return parse(fp, **kwargs)
187 |
188 |
189 | def dumps(obj, pretty=False, escaped=True):
190 | """
191 | Serialize ``obj`` to a VDF formatted ``str``.
192 | """
193 | if not isinstance(obj, dict):
194 |         raise TypeError("Expected data to be an instance of ``dict``")
195 | if not isinstance(pretty, bool):
196 | raise TypeError("Expected pretty to be of type bool")
197 | if not isinstance(escaped, bool):
198 | raise TypeError("Expected escaped to be of type bool")
199 |
200 | return ''.join(_dump_gen(obj, pretty, escaped))
201 |
202 |
203 | def dump(obj, fp, pretty=False, escaped=True):
204 | """
205 | Serialize ``obj`` as a VDF formatted stream to ``fp`` (a
206 | ``.write()``-supporting file-like object).
207 | """
208 | if not isinstance(obj, dict):
209 |         raise TypeError("Expected data to be an instance of ``dict``")
210 | if not hasattr(fp, 'write'):
211 | raise TypeError("Expected fp to have write() method")
212 | if not isinstance(pretty, bool):
213 | raise TypeError("Expected pretty to be of type bool")
214 | if not isinstance(escaped, bool):
215 | raise TypeError("Expected escaped to be of type bool")
216 |
217 | for chunk in _dump_gen(obj, pretty, escaped):
218 | fp.write(chunk)
219 |
220 |
221 | def _dump_gen(data, pretty=False, escaped=True, level=0):
222 | indent = "\t"
223 | line_indent = ""
224 |
225 | if pretty:
226 | line_indent = indent * level
227 |
228 | for key, value in data.items():
229 | if escaped and isinstance(key, string_type):
230 | key = _escape(key)
231 |
232 | if isinstance(value, dict):
233 | yield '%s"%s"\n%s{\n' % (line_indent, key, line_indent)
234 | for chunk in _dump_gen(value, pretty, escaped, level+1):
235 | yield chunk
236 | yield "%s}\n" % line_indent
237 | else:
238 | if escaped and isinstance(value, string_type):
239 | value = _escape(value)
240 |
241 | yield '%s"%s" "%s"\n' % (line_indent, key, value)
242 |
243 |
244 | # binary VDF
245 | class BASE_INT(int_type):
246 | def __repr__(self):
247 | return "%s(%d)" % (self.__class__.__name__, self)
248 |
249 | class UINT_64(BASE_INT):
250 | pass
251 |
252 | class INT_64(BASE_INT):
253 | pass
254 |
255 | class POINTER(BASE_INT):
256 | pass
257 |
258 | class COLOR(BASE_INT):
259 | pass
260 |
261 | BIN_NONE = b'\x00'
262 | BIN_STRING = b'\x01'
263 | BIN_INT32 = b'\x02'
264 | BIN_FLOAT32 = b'\x03'
265 | BIN_POINTER = b'\x04'
266 | BIN_WIDESTRING = b'\x05'
267 | BIN_COLOR = b'\x06'
268 | BIN_UINT64 = b'\x07'
269 | BIN_END = b'\x08'
270 | BIN_INT64 = b'\x0A'
271 | BIN_END_ALT = b'\x0B'
272 |
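273 | # Wire-format sketch: each entry is [type byte][key cstring][payload], and a
274 | # BIN_END byte (BIN_END_ALT in the alt format) closes the current level, e.g.:
275 | #
276 | #     binary_loads(b'\x00root\x00\x01key\x00value\x00\x08\x08')
277 | #     == {'root': {'key': 'value'}}
278 | 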
273 | def binary_loads(s, mapper=dict, merge_duplicate_keys=True, alt_format=False):
274 | result, idx = binary_loads_at(s,0,mapper,merge_duplicate_keys,alt_format)
275 |
276 | if len(s) != idx:
277 | raise SyntaxError("Binary VDF ended at index %d, but length is %d" % (idx, len(s)))
278 |
279 | return result
280 |
281 |
282 | def binary_loads_at(s, idx=0, mapper=dict, merge_duplicate_keys=True, alt_format=False):
283 | """
284 | Deserialize ``s`` (``bytes`` containing a VDF in "binary form")
285 | to a Python object.
286 |
287 |     ``mapper`` specifies the Python object used after deserialization. ``dict`` is
288 |     used by default. Alternatively, ``collections.OrderedDict`` can be used if you
289 |     wish to preserve key order. Or any object that acts like a ``dict``.
290 | 
291 |     ``merge_duplicate_keys`` when ``True`` will merge multiple KeyValue lists with the
292 |     same key into one instead of overwriting. You can set this to ``False`` if you are
293 | using ``VDFDict`` and need to preserve the duplicates.
294 | """
295 | if not isinstance(s, bytes):
296 | raise TypeError("Expected s to be bytes, got %s" % type(s))
297 | if not issubclass(mapper, dict):
298 | raise TypeError("Expected mapper to be subclass of dict, got %s" % type(mapper))
299 |
300 | # helpers
301 |     int32 = struct.Struct('<i')
302 |     uint64 = struct.Struct('<Q')
303 |     int64 = struct.Struct('<q')
304 |     float32 = struct.Struct('<f')
305 | 
306 |     def read_string(s, idx, wide=False):
307 |         if wide:
308 |             end = s.find(b'\x00\x00', idx)
309 |             if (end - idx) % 2 != 0:
310 |                 end += 1
311 |         else:
312 |             end = s.find(b'\x00', idx)
313 | 
314 |         if end == -1:
315 |             raise SyntaxError("Unterminated cstring (offset: %d)" % idx)
316 | 
317 |         result = s[idx:end]
318 |         if wide:
319 |             result = result.decode('utf-16')
320 |         elif bytes is not str:
321 |             result = result.decode('utf-8', 'replace')
322 |         else:
323 |             try:
324 |                 result.decode('ascii')
325 |             except UnicodeDecodeError:
326 |                 result = result.decode('utf-8', 'replace')
327 | 
328 |         return result, end + (2 if wide else 1)
329 | 
330 |     stack = [mapper()]
331 |     CURRENT_BIN_END = BIN_END if not alt_format else BIN_END_ALT
332 |     while len(s) > idx:
333 | t = s[idx:idx+1]
334 | idx += 1
335 |
336 | if t == CURRENT_BIN_END:
337 | if len(stack) > 1:
338 | stack.pop()
339 | continue
340 | break
341 |
342 | key, idx = read_string(s, idx)
343 |
344 | if t == BIN_NONE:
345 | if merge_duplicate_keys and key in stack[-1]:
346 | _m = stack[-1][key]
347 | else:
348 | _m = mapper()
349 | stack[-1][key] = _m
350 | stack.append(_m)
351 | elif t == BIN_STRING:
352 | stack[-1][key], idx = read_string(s, idx)
353 | elif t == BIN_WIDESTRING:
354 | stack[-1][key], idx = read_string(s, idx, wide=True)
355 | elif t in (BIN_INT32, BIN_POINTER, BIN_COLOR):
356 | val = int32.unpack_from(s, idx)[0]
357 |
358 | if t == BIN_POINTER:
359 | val = POINTER(val)
360 | elif t == BIN_COLOR:
361 | val = COLOR(val)
362 |
363 | stack[-1][key] = val
364 | idx += int32.size
365 | elif t == BIN_UINT64:
366 | stack[-1][key] = UINT_64(uint64.unpack_from(s, idx)[0])
367 | idx += uint64.size
368 | elif t == BIN_INT64:
369 | stack[-1][key] = INT_64(int64.unpack_from(s, idx)[0])
370 | idx += int64.size
371 | elif t == BIN_FLOAT32:
372 | stack[-1][key] = float32.unpack_from(s, idx)[0]
373 | idx += float32.size
374 | else:
375 | raise SyntaxError("Unknown data type at index %d: %s" % (idx-1, repr(t)))
376 |
377 | if len(stack) != 1:
378 | raise SyntaxError("Binary VDF ended at index %d, but stack is not empty." % (idx))
379 |
380 | return stack.pop(), idx
381 |
382 | def binary_dumps(obj, alt_format=False):
383 | """
384 | Serialize ``obj`` to a binary VDF formatted ``bytes``.
385 | """
386 | return b''.join(_binary_dump_gen(obj, alt_format=alt_format))
387 |
388 | def _binary_dump_gen(obj, level=0, alt_format=False):
389 | if level == 0 and len(obj) == 0:
390 | return
391 |
392 |     int32 = struct.Struct('<i')
393 |     uint64 = struct.Struct('<Q')
394 |     int64 = struct.Struct('<q')
395 |     float32 = struct.Struct('<f')
396 | 
397 |     for key, value in obj.items():
398 |         if isinstance(key, string_type):
399 |             key = key.encode('utf-8')
400 |         else:
401 |             raise TypeError("dict keys must be of type str, got %s" % type(key))
402 | 
403 |         if isinstance(value, dict):
404 |             yield BIN_NONE + key + BIN_NONE
405 |             for chunk in _binary_dump_gen(value, level+1, alt_format=alt_format):
406 |                 yield chunk
407 |             yield BIN_END if not alt_format else BIN_END_ALT
408 |         elif isinstance(value, UINT_64):
409 |             yield BIN_UINT64 + key + BIN_NONE + uint64.pack(value)
410 |         elif isinstance(value, INT_64):
411 |             yield BIN_INT64 + key + BIN_NONE + int64.pack(value)
412 |         elif isinstance(value, string_type):
413 |             try:
414 |                 value = value.encode('utf-8') + BIN_NONE
415 |                 yield BIN_STRING
416 |             except UnicodeEncodeError:
417 |                 value = value.encode('utf-16') + BIN_NONE * 2
418 |                 yield BIN_WIDESTRING
419 |             yield key + BIN_NONE + value
420 |         elif isinstance(value, float):
421 |             yield BIN_FLOAT32 + key + BIN_NONE + float32.pack(value)
422 |         elif isinstance(value, (COLOR, POINTER, int_type)):
423 |             if isinstance(value, COLOR):
424 |                 yield BIN_COLOR
425 |             elif isinstance(value, POINTER):
426 |                 yield BIN_POINTER
427 |             else:
428 |                 yield BIN_INT32
429 |             yield key + BIN_NONE + int32.pack(value)
430 |         else:
431 |             raise TypeError("Unsupported type: %s" % type(value))
432 | 
433 |     if level == 0:
434 |         yield BIN_END if not alt_format else BIN_END_ALT
435 | 
436 | 
437 | def vbkv_loads(s, mapper=dict, merge_duplicate_keys=True):
438 |     """
439 |     Deserialize ``s`` (``bytes`` containing a VBKV) to a Python object.
440 |     """
441 |     if s[:4] != b'VBKV':
442 |         raise ValueError("Invalid header")
443 | 
444 |     checksum, = struct.unpack('<I', s[4:8])
445 | 
446 |     if checksum != crc32(s[8:]) & 0xFFFFFFFF:
447 |         raise ValueError("Invalid checksum")
448 | 
449 |     return binary_loads(s[8:], mapper, merge_duplicate_keys, alt_format=True)
450 | 
451 | 
452 | def vbkv_dumps(obj):
453 |     """
454 |     Serialize ``obj`` to a VBKV formatted ``bytes``.
455 |     """
456 |     data = b''.join(_binary_dump_gen(obj, alt_format=True))
457 |     checksum = crc32(data) & 0xFFFFFFFF
458 | 
459 |     return b'VBKV' + struct.pack('<I', checksum) + data
460 | 
--------------------------------------------------------------------------------
/missing_cover_downloader.py:
--------------------------------------------------------------------------------
[... lines 1-245 (module header, imports, Steam data readers and helpers) did not survive in this dump ...]
246 | def quick_get_image_size(data):
247 |     # Sniff PNG/JPEG headers for (width, height) without an imaging library.
248 |     # (This def line and the next are inferred from the call sites below.)
249 |     size = len(data)
250 |     if size >= 24 and data.startswith(b'\211PNG\r\n\032\n') and data[12:16] == b'IHDR':
251 | try:
252 | width, height = struct.unpack(">LL", data[16:24])
253 | except struct.error:
254 | raise ValueError("Invalid PNG file")
255 | # Maybe this is for an older PNG version.
256 | elif size >= 16 and data.startswith(b'\211PNG\r\n\032\n'):
257 |         # No IHDR check; assume the dimensions sit right after the signature.
258 | try:
259 | width, height = struct.unpack(">LL", data[8:16])
260 | except struct.error:
261 | raise ValueError("Invalid PNG file")
262 | # handle JPEGs
263 | elif size >= 2 and data.startswith(b'\377\330'):
264 | try:
265 | index = 0
266 | size = 2
267 | ftype = 0
268 | while not 0xc0 <= ftype <= 0xcf or ftype in [0xc4, 0xc8, 0xcc]:
269 | index+=size
270 | while data[index] == 0xff:
271 | index += 1
272 | ftype = data[index]
273 | index += 1
274 | size = struct.unpack('>H', data[index:index+2])[0]
275 | # We are at a SOFn block
276 | index+=3 # Skip `precision' byte.
277 | height, width = struct.unpack('>HH', data[index:index+4])
278 | except struct.error:
279 | raise ValueError("Invalid JPEG file")
280 | else:
281 | raise ValueError("Unsupported format")
282 |
283 | return width, height
284 |
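285 | # Sketch: the downloader only keeps 600x900 portrait covers, i.e. it expects
286 | # quick_get_image_size(data) == (600, 900) for any image it saves.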
285 |
286 |
287 | async def fetch_url(url, session:aiohttp.ClientSession,returntype='bin',**kwargs):
288 | resp = await session.get(url,**kwargs)
289 | resp.raise_for_status()
290 | if returntype == 'bin':
291 | return await resp.read()
292 | elif returntype == 'html':
293 | return await resp.text()
294 | elif returntype == 'json':
295 | return await resp.json()
296 | raise ValueError("Unsupported return type")
297 |
298 |
299 | async def download_image(url,gridpath,appid,session,retrycount=3):
300 | try:
301 | data, success = await retry_func_async(lambda:fetch_url(url,session,'bin'),
302 | lambda ex: print("Download error: {}, retry".format(ex)),retrycount)
303 | if not success:
304 | return False
305 | width, height = quick_get_image_size(data)
306 | if width == 600 and height == 900:
307 | filename = "{}p{}".format(appid,url[-4:])
308 | with open(os.path.join(gridpath,filename),"wb") as f:
309 | f.write(data)
310 | print("Saved to",filename)
311 | return True
312 | else:
313 | print("Image size incorrect:",width,height)
314 | except:
315 | traceback.print_exc()
316 |
317 | return False
318 |
319 |
320 | async def download_cover(appid,path,session,args,excludeid=-1,retrycount=3):
321 |
322 | try:
323 | rst = await query_cover_for_apps(appid,session,args.styles)
324 |     except:
325 |         print("Failed to retrieve cover data")
326 | return False
327 | if rst["success"]:
328 | # sort by score
329 | covers = rst["data"]
330 | covers.sort(key=lambda x:x["score"],reverse=True)
331 | print("Found {} covers".format(len(covers)))
332 | for value in covers:
333 | if value["id"] == excludeid:
334 | continue
335 | print("Downloading cover {} by {}, url: {}".format(value["id"],value["author"]["name"],value["url"]))
336 | success = await download_image(value["url"],path,appid,session)
337 | if success:
338 | return True
339 | return False
340 |
341 | async def download_covers(appids,gridpath,namedict,args):
342 |
343 | batch_query_data = []
344 | query_size = 50
345 | tasks = []
346 | proxies = urllib.request.getproxies()
347 | result = {'total_downloaded':0}
348 | if 'http' in proxies:
349 | os.environ['HTTP_PROXY'] = proxies['http']
350 | os.environ['HTTPS_PROXY'] = proxies['http']
351 | async with aiohttp.ClientSession(trust_env=True) as session:
352 | for index,sublist in enumerate(split_list(appids,query_size)):
353 | sublist = [str(appid) for appid in sublist]
354 | print('Querying covers {}-{}'.format(index*query_size+1,index*query_size+len(sublist)))
355 | query_covers = lambda lst: lambda :query_cover_for_apps(lst,session,args.styles)
356 | tasks.append(asyncio.create_task(retry_func_async(query_covers(sublist))))
357 |
358 | rsts = await asyncio.gather(*tasks)
359 | for rst, success in rsts:
360 | if success and rst['success']:
361 | batch_query_data.extend(rst['data'])
362 | else:
363 | print("Failed to retrieve cover info")
364 | sys.exit(4)
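365 |     # Producer/consumer: the best candidate for each app is queued below, and
366 |     # 20 consumer tasks drain the queue concurrently.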
365 | async def task(queue,downloadresult):
366 | while True:
367 | appid,queryresult = await queue.get()
368 | print("Found most voted cover for {} {} by {}".format(appid,namedict[appid],queryresult["author"]["name"]))
369 | print("Downloading cover {}, url: {}".format(queryresult["id"],queryresult["url"]))
370 | try:
371 | success = await download_image(queryresult['url'],gridpath,appid,session)
372 | if not success:
373 | print("Finding all covers for {} {}".format(appid,namedict[int(appid)]))
374 |                     success = await download_cover(appid,gridpath,session,args,queryresult['id'])
375 | if success:
376 | downloadresult['total_downloaded'] += 1
377 | except Exception as ex:
378 | print(ex)
379 | queue.task_done()
380 | tasks = []
381 | queue=asyncio.Queue()
382 |
383 | number_jobs = 0
384 |
385 | for appid,queryresult in batch_query_data:
386 | appid = int(appid)
387 | if not queryresult['success']:
388 | print("Error finding cover for {}, {}".format(appid,' '.join(queryresult['errors'])))
389 | elif len(queryresult['data']) == 0:
390 | print("No cover found for {} {}".format(appid,namedict[appid]))
391 |         elif args.min_score is not None and queryresult['data'][0]['score'] < args.min_score:
392 |             print("Most voted cover for {} {} has score of {} < {}, skipping.".format(appid,namedict[appid],queryresult['data'][0]['score'],args.min_score))
393 | else:
394 | number_jobs += 1
395 | queue.put_nowait((appid,queryresult['data'][0]))
396 |
397 | if number_jobs:
398 | print("Found {} covers, downloading...".format(number_jobs))
399 |
400 | consumers = [asyncio.create_task(task(queue,result)) for i in range(20)]
401 |
402 | await queue.join()
403 | for c in consumers:
404 | c.cancel()
405 | return result['total_downloaded']
406 |
407 |
408 | async def query_cover_for_app_html(appid,session):
409 | try:
410 | jsondata, success = await retry_func_async(lambda:query_sgdbid_for_appid(appid,session),
411 | lambda ex: print("Error getting sgdb id for {}: {}, retry".format(appid,ex)))
412 | if success and jsondata['success']:
413 | gameid=jsondata['data']['id']
414 | url = 'https://www.steamgriddb.com/game/{}'.format(gameid)
415 | html, success = await retry_func_async(lambda:fetch_url(url,session,'html'),
416 | lambda ex: print("Error getting html {}: {}, retry".format(url,ex)))
417 | if not success:
418 |             print("Failed to retrieve grids for {} from steamgriddb".format(appid))
419 | return None, 0
420 |         from bs4 import BeautifulSoup; soup = BeautifulSoup(html, 'html.parser')  # import here: bs4 was otherwise only imported inside download_covers_temp
421 | result = []
422 | grids = soup.select(".grid")
423 | for grid in grids:
424 | if len(grid.select("img.d600x900")) != 0:
425 | result.append(
426 | {
427 | 'id':int(grid['data-id']),
428 | 'url':grid.select('.dload')[0]['href'],
429 | 'score':0,
430 | 'author':grid.select('.details a')[0].text.strip()
431 | }
432 | )
433 | if len(result) == 0:
434 |             return None, len(grids)
435 | result.sort(key=lambda x:x["score"],reverse=True)
436 | return result[0],len(grids)
437 | except:
438 | pass
439 | return None,0
440 |
441 |
442 |
443 | async def download_covers_temp(appids,gridpath,namedict):
444 | from bs4 import BeautifulSoup
445 |
446 | queue=asyncio.Queue()
447 |
448 | proxies = urllib.request.getproxies()
449 | if 'http' in proxies:
450 | os.environ['HTTP_PROXY'] = proxies['http']
451 | os.environ['HTTPS_PROXY'] = proxies['http']
452 |
453 | async with aiohttp.ClientSession(trust_env=True) as session:
454 | async def get_url(sublist,queue):
455 | for appid in sublist:
456 | print("Finding cover for {} {}".format(appid,namedict[appid]))
457 | cover,total = await query_cover_for_app_html(appid,session)
458 | if not cover:
459 | print("No cover found for {} {}".format(appid,namedict[appid]))
460 | continue
461 |
462 | await queue.put((appid, cover, total, namedict[appid]))
463 |
464 |         producers = [asyncio.create_task(get_url(sublist,queue)) for sublist in split_list(appids,max(1,len(appids)//20))]
465 |
466 | #use dict to pass by reference
467 | result = {'total_downloaded':0}
468 |
469 | async def download_img(queue,result):
470 | while True:
471 | appid, cover, total, name = await queue.get()
472 | print("Found {} covers for {} {}".format(total,appid,name))
473 | print("Downloading cover with highest score, id: {} score:{} by {}, url: {}".format(cover["id"],cover["score"],cover["author"],cover["url"]))
474 | success = await download_image(cover["url"],gridpath,appid,session)
475 | if success:
476 | result['total_downloaded'] += 1
477 | queue.task_done()
478 |
479 | consumers = [asyncio.create_task(download_img(queue,result)) for i in range(20)]
480 | await asyncio.gather(*producers)
481 | await queue.join()
482 | for c in consumers:
483 | c.cancel()
484 |
485 | return result['total_downloaded']
486 |
487 | def main():
488 |
489 | parser = argparse.ArgumentParser(description='Downloads missing covers for new steam UI. Covers are downloaded from steamgriddb.com')
490 | parser.add_argument('-p','--path', dest='steam_path', type=str, default=None,
491 | help='Set Steam installation path.')
492 | parser.add_argument('-l','--local', action='store_true', dest='local_mode',
493 | help='Local mode, this is the default operation.')
494 | parser.add_argument('-r','--remote', action='store_true', dest='remote_mode',
495 | help='Remote mode, if both local and remote are specified, will try local mode first.')
496 | parser.add_argument('-m','--minscore', dest='min_score', type=int, default=None,
497 | help='Set min score for a cover to be downloaded.')
498 | parser.add_argument('-s','--styles', dest='styles', type=str, default=None,
499 | help='Set styles of cover, can be comma separated list of alternate, blurred, white_logo, material or no_logo.')
500 | parser.add_argument('-o','--overwrite', action='store_true', dest='overwrite',
501 | help='Overwrite covers that are already present in local steam grid path.')
502 | parser.add_argument('-d','--delete-local', action='store_true', dest='delete_local',
503 | help='Delete local covers for games that already have official ones.')
504 |
505 | args = parser.parse_args()
506 |
507 | try:
508 | if args.steam_path:
509 | steam_path = args.steam_path
510 | else:
511 | steam_path = SteamDataReader.get_steam_installpath()
512 | except:
513 | print("Could not find steam install path")
514 | sys.exit(1)
515 | print("Steam path:",steam_path)
516 |
517 | local_mode = True
518 | remote_fallback = True
519 | if not args.local_mode and args.remote_mode:
520 | local_mode = False
521 | elif args.local_mode and not args.remote_mode:
522 | remote_fallback = False
523 |
524 | if local_mode:
525 | steam_data_reader = SteamDataReaderLocal(steam_path)
526 | try:
527 | steamid = steam_data_reader.get_steam_id()
528 | if not steamid.is_valid():
529 | steamid = SteamID(input_steamid())
530 | if not steamid.is_valid():
531 | print("Invalid steam id")
532 | sys.exit(2)
533 | print("SteamID:",steamid.as_32)
534 |
535 |
536 | except Exception as error:
537 | print(error)
538 | if remote_fallback:
539 | print("Switch to remote mode")
540 | local_mode = False
541 | else:
542 | sys.exit(2)
543 |
544 |
545 | if not local_mode:
546 | client = SteamClient()
547 | if client.cli_login() != EResult.OK:
548 | print("Login Error")
549 | sys.exit(3)
550 | else:
551 | print("Login Success")
552 |
553 | steam_data_reader = SteamDataReaderRemote(client)
554 |
555 | steamid = client.steam_id
556 | print("SteamID:",steamid.as_32)
557 |
558 | steam_grid_path = STEAM_GRIDPATH.format(steam_path,steamid.as_32)
559 | if not os.path.isdir(steam_grid_path):
560 | os.mkdir(steam_grid_path)
561 | print("Steam grid path:",steam_grid_path)
562 | missing_cover_app_dict = steam_data_reader.get_missing_cover_app_dict(not local_mode)
563 |
564 | print("Total games missing cover in library:",len(missing_cover_app_dict))
565 |     local_cover_map = {int(file[:len(file)-5]):file for file in os.listdir(steam_grid_path) if re.match(r"^\d+p\.(png|jpg)$",file)}
566 | local_cover_appids = set(local_cover_map.keys())
567 | print("Total local covers found:",len(local_cover_appids))
568 | local_missing_cover_appids = missing_cover_app_dict.keys() - local_cover_appids
569 | print("Total missing covers locally:",len(local_missing_cover_appids))
570 | if args.overwrite:
571 | local_missing_cover_appids = set(missing_cover_app_dict.keys())
572 |
573 | if args.delete_local:
574 | local_duplicate_cover_appids = local_cover_appids - missing_cover_app_dict.keys()
575 |         print(f'Found {len(local_duplicate_cover_appids)} games that already have official covers.')
576 | for appid in local_duplicate_cover_appids:
577 | path = os.path.join(steam_grid_path,local_cover_map[appid])
578 | print(f'Deleting file {path}')
579 | os.remove(path)
580 |
581 |
582 | print("Finding covers from steamgriddb.com")
583 | local_missing_cover_appids = list(local_missing_cover_appids)
584 | local_missing_cover_appids.sort()
585 |
586 | total_downloaded = asyncio.run(download_covers(local_missing_cover_appids,steam_grid_path,missing_cover_app_dict,args))
587 |     print("Total covers downloaded:",total_downloaded)
588 |
589 |
590 | if __name__ == "__main__":
591 | main()
592 |
--------------------------------------------------------------------------------