├── theme-shop
│   ├── requirements.txt
│   ├── README.md
│   ├── boss.py
│   └── download.py
├── super-mario-maker
│   ├── example.env
│   ├── requirements.txt
│   ├── .gitignore
│   ├── README.md
│   └── archive.py
├── animal-crossing-new-leaf
│   ├── example.env
│   ├── requirements.txt
│   ├── create-database.py
│   ├── .gitignore
│   ├── README.md
│   └── archive.py
├── mario-sonic-rio-2016-3ds
│   ├── example.env
│   ├── requirements.txt
│   ├── README.md
│   ├── .gitignore
│   └── archive.py
├── mario-sonic-rio-2016-wiiu
│   ├── requirements.txt
│   ├── example.config.json
│   ├── .gitignore
│   ├── README.md
│   └── archive.py
├── mario-sonic-sochi-2014-wiiu
│   ├── requirements.txt
│   ├── example.config.json
│   ├── .gitignore
│   ├── README.md
│   └── archive.py
├── README.md
├── idbe
│   ├── package.json
│   ├── certs
│   │   ├── wiiu-common.key
│   │   └── wiiu-common.crt
│   ├── scrape.js
│   ├── README.md
│   ├── .gitignore
│   ├── get_versions.js
│   ├── package-lock.json
│   └── LICENSE
└── spotpass
    ├── util.js
    ├── package.json
    ├── scrape.js
    ├── read-boss-db-3ds.js
    ├── constants.js
    ├── database.js
    ├── certs
    │   ├── wiiu-common.key
    │   └── wiiu-common.crt
    ├── build-database.js
    ├── read-boss-db-wiiu.js
    ├── scrape-wiiu.js
    ├── 3ds-tasks-to-tid.js
    ├── .gitignore
    ├── scrape-3ds.js
    ├── README.md
    └── wup-boss-apps.json
/theme-shop/requirements.txt:
--------------------------------------------------------------------------------
1 | pyctr
2 | pycurl
3 |
--------------------------------------------------------------------------------
/super-mario-maker/example.env:
--------------------------------------------------------------------------------
1 | NEX_USERNAME=1234567890
2 | NEX_PASSWORD=abcdefghijklmnop
--------------------------------------------------------------------------------
/animal-crossing-new-leaf/example.env:
--------------------------------------------------------------------------------
1 | NEX_3DS_USERNAME=1234567890
2 | NEX_3DS_PASSWORD=abcdefghijklmnop
--------------------------------------------------------------------------------
/mario-sonic-rio-2016-3ds/example.env:
--------------------------------------------------------------------------------
1 | NEX_3DS_USERNAME=1234567890
2 | NEX_3DS_PASSWORD=abcdefghijklmnop
--------------------------------------------------------------------------------
/mario-sonic-rio-2016-wiiu/requirements.txt:
--------------------------------------------------------------------------------
1 | nintendoclients==0.0.5
2 | anyio==3.6.2
3 | anynet==0.0.18
--------------------------------------------------------------------------------
/mario-sonic-sochi-2014-wiiu/requirements.txt:
--------------------------------------------------------------------------------
1 | nintendoclients==0.0.5
2 | anyio==3.6.2
3 | anynet==0.0.18
--------------------------------------------------------------------------------
/super-mario-maker/requirements.txt:
--------------------------------------------------------------------------------
1 | nintendoclients==0.0.5
2 | anyio==3.6.2
3 | anynet==0.0.18
4 | python-dotenv==1.0.0
--------------------------------------------------------------------------------
/animal-crossing-new-leaf/requirements.txt:
--------------------------------------------------------------------------------
1 | nintendoclients==0.0.5
2 | anyio==3.6.2
3 | anynet==0.0.18
4 | python-dotenv==1.0.0
--------------------------------------------------------------------------------
/mario-sonic-rio-2016-3ds/requirements.txt:
--------------------------------------------------------------------------------
1 | nintendoclients==0.0.5
2 | anyio==3.6.2
3 | anynet==0.0.18
4 | python-dotenv==1.0.0
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Archival Tool
2 |
3 | ## What is this?
4 | Archival Tools is a collection of tools dedicated to archiving several types of data from many Nintendo Wii U and 3DS games. Each tool resides in its own folder. See each tool's folder for more details.
--------------------------------------------------------------------------------
/mario-sonic-rio-2016-wiiu/example.config.json:
--------------------------------------------------------------------------------
1 | {
2 | "DEVICE_ID": 0,
3 | "SERIAL_NUMBER": "",
4 | "SYSTEM_VERSION": 608,
5 | "REGION_ID": 4,
6 | "COUNTRY_NAME": "US",
7 | "LANGUAGE": "en",
8 | "USERNAME": "",
9 | "PASSWORD": ""
10 | }
--------------------------------------------------------------------------------
/mario-sonic-sochi-2014-wiiu/example.config.json:
--------------------------------------------------------------------------------
1 | {
2 | "DEVICE_ID": 0,
3 | "SERIAL_NUMBER": "",
4 | "SYSTEM_VERSION": 608,
5 | "REGION_ID": 4,
6 | "COUNTRY_NAME": "US",
7 | "LANGUAGE": "en",
8 | "USERNAME": "",
9 | "PASSWORD": ""
10 | }
--------------------------------------------------------------------------------
/idbe/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "idbe",
3 | "version": "1.0.0",
4 | "description": "",
5 | "main": "index.js",
6 | "scripts": {
7 | "test": "echo \"Error: no test specified\" && exit 1"
8 | },
9 | "keywords": [],
10 | "author": "",
11 | "license": "ISC",
12 | "dependencies": {
13 | "axios": "^1.6.7",
14 | "cheerio": "^1.0.0-rc.12",
15 | "fs-extra": "^11.2.0",
16 | "xmlbuilder2": "^3.1.1"
17 | }
18 | }
19 |
--------------------------------------------------------------------------------
/spotpass/util.js:
--------------------------------------------------------------------------------
1 | function millisecondsToString(ms) {
2 | const seconds = Math.floor((ms / 1000) % 60);
3 | const minutes = Math.floor((ms / 1000 / 60) % 60);
4 | 	const hours = Math.floor((ms / 1000 / 3600) % 24);
5 |
6 | return [
7 | `${hours.toString().padStart(2, '0')}h`,
8 | `${minutes.toString().padStart(2, '0')}m`,
9 | `${seconds.toString().padStart(2, '0')}s`
10 | ].join(':');
11 | }
12 |
13 | module.exports = {
14 | millisecondsToString
15 | };
--------------------------------------------------------------------------------
/spotpass/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "spotpass",
3 | "version": "1.0.0",
4 | "description": "",
5 | "main": "index.js",
6 | "scripts": {
7 | "test": "echo \"Error: no test specified\" && exit 1"
8 | },
9 | "keywords": [],
10 | "author": "",
11 | "license": "ISC",
12 | "dependencies": {
13 | "axios": "^1.6.7",
14 | "fs-extra": "^11.2.0",
15 | "sqlite": "^5.1.1",
16 | "sqlite3": "^5.1.7",
17 | "xmlbuilder2": "^3.1.1"
18 | }
19 | }
20 |
--------------------------------------------------------------------------------
/theme-shop/README.md:
--------------------------------------------------------------------------------
1 | # Nintendo 3DS Theme Shop
2 |
3 | ## Prerequisites
4 | * Python 3
5 | * libcurl 7.19.0 or greater
6 | * 3DS Common Prod Cert from any 3DS system
7 | * boot9.bin from any 3DS system
8 |
9 | ## Usage
10 | 1. Set these environment variables, like so:
11 |
12 | ```bash
13 | export CTR_PROD_3=/path/to/your/3ds_common_cert.pem
14 | export BOOT9_PATH=/path/to/your/boot9.bin
15 | ```
16 |
17 | 2. Run `pip install -r requirements.txt` to install dependencies
18 | 3. Run `python3 download.py`
19 |
--------------------------------------------------------------------------------
/spotpass/scrape.js:
--------------------------------------------------------------------------------
1 | const { performance } = require('node:perf_hooks');
2 | const fs = require('fs-extra');
3 | const scrapeWiiU = require('./scrape-wiiu');
4 | const scrape3DS = require('./scrape-3ds');
5 | const database = require('./database');
6 | const { millisecondsToString } = require('./util');
7 | const { COUNTRIES, LANGUAGES } = require('./constants');
8 |
9 | async function scrape() {
10 | await database.connect();
11 |
12 | const time = new Date().toISOString().split('T')[0]; // * YYYY-MM-DD
13 | const downloadBase = `data/${time}`;
14 |
15 | for (const country of COUNTRIES) {
16 | for (const language of LANGUAGES) {
17 | fs.ensureDirSync(`${downloadBase}/${country}/${language}`);
18 | }
19 | }
20 |
21 | // * Run both at the same time
22 | const startTime = performance.now();
23 |
24 | await Promise.all([
25 | scrapeWiiU(downloadBase),
26 | scrape3DS(downloadBase)
27 | ]);
28 |
29 | const endTime = performance.now();
30 | const executionTime = millisecondsToString(endTime - startTime);
31 |
32 | console.log(`Archived in ${executionTime}`);
33 |
34 | await database.close();
35 | }
36 |
37 | scrape();
--------------------------------------------------------------------------------
/spotpass/read-boss-db-3ds.js:
--------------------------------------------------------------------------------
1 | const fs = require('fs-extra');
2 | const apps = require('./ctr-boss-apps.json');
3 |
4 | const REGEX = /(?:npdl|npfl)\.(?:cdn|c\.app)\.nintendowifi\.net\/p01\/(?:nsa|filelist)\/([A-Za-z0-9]{16})\/(\w*)/g;
5 |
6 | const db = fs.readFileSync('./partitionA.bin', {
7 | encoding: 'utf8'
8 | });
9 | const matches = [...db.matchAll(REGEX)];
10 |
11 | const appsLengthBefore = apps.length;
12 | let newTasks = 0;
13 |
14 | // * Merge every matched app ID/task pair into the known BOSS apps list
15 | for (const match of matches) {
16 | const appID = match[1];
17 | const task = match[2];
18 |
19 | let found = false;
20 |
21 | for (const app of apps) {
22 | if (app.app_id === appID) {
23 | found = true;
24 |
25 | if (!app.tasks.includes(task)) {
26 | app.tasks.push(task);
27 | newTasks += 1;
28 | }
29 |
30 | break;
31 | }
32 | }
33 |
34 | if (!found) {
35 | apps.push({
36 | app_id: appID,
37 | tasks: [ task ]
38 | });
39 |
40 | newTasks += 1;
41 | }
42 | }
43 |
44 | console.log(`Found ${apps.length-appsLengthBefore} new BOSS apps and ${newTasks} new tasks`);
45 |
46 | fs.writeJSONSync('./ctr-boss-apps.json', apps, {
47 | spaces: '\t'
48 | });
49 |
--------------------------------------------------------------------------------
/spotpass/constants.js:
--------------------------------------------------------------------------------
1 | // * All country codes, courtesy of https://www.3dbrew.org/wiki/Country_Code_List
2 | const COUNTRIES = [
3 | 'GB', 'US', 'IT', 'NL', 'DE', 'CA', 'FR', 'HU', 'CR', 'AU',
4 | 'BR', 'RO', 'CL', 'MX', 'RU', 'ES', 'JP', 'CZ', 'PT', 'MT',
5 | 'AR', 'SE', 'PL', 'IE', 'BE', 'HT', 'NO', 'FI', 'GR', 'BO',
6 | 'AT', 'VE', 'PA', 'PE', 'GF', 'SA', 'CO', 'LT', 'NA', 'CH',
7 | 'CY', 'RS', 'KY', 'GP', 'DK', 'KR', 'LU', 'SV', 'VA', 'GT',
8 | 'SK', 'HR', 'ZA', 'DO', 'UY', 'LV', 'HN', 'JM', 'TR', 'IN',
9 | 'ER', 'AW', 'NZ', 'EC', 'TW', 'EE', 'CN', 'SI', 'AI', 'BG',
10 | 'NI', 'IS', 'MQ', 'BZ', 'BA', 'MY', 'AZ', 'ZW', 'AL', 'IM',
11 | 'VG', 'VI', 'BM', 'GY', 'SR', 'MS', 'TC', 'BB', 'TT', 'AG',
12 | 'BS', 'DM', 'GD', 'AN', 'PY', 'KN', 'LC', 'VC', 'BW', 'LS',
13 | 'LI', 'MK', 'ME', 'MZ', 'SZ', 'ZM', 'MR', 'ML', 'NE', 'TD',
14 | 'SD', 'DJ', 'SO', 'AD', 'GI', 'JE', 'MC', 'HK', 'MO', 'ID',
15 | 'SG', 'TH', 'PH', 'AE', 'EG', 'OM', 'QA', 'KW', 'SY', 'BH',
16 | 'JO', 'SM', 'GG'
17 | ];
18 |
19 | const LANGUAGES = [
20 | 'en', 'it', 'de', 'fr', 'es', 'pt', 'ru', 'ja',
21 | 'nl', 'ko', 'zh', 'tw'
22 | ];
23 |
24 | module.exports = {
25 | COUNTRIES,
26 | LANGUAGES
27 | };
28 |
--------------------------------------------------------------------------------
/spotpass/database.js:
--------------------------------------------------------------------------------
1 | const sqlite = require('sqlite');
2 | const sqlite3 = require('sqlite3');
3 |
4 | let database;
5 |
6 | async function connect() {
7 | database = await sqlite.open({
8 | filename: 'tasks.db',
9 | driver: sqlite3.Database
10 | });
11 | }
12 |
13 | function verifyConnected() {
14 | if (!database) {
15 | throw new Error('Tried to interact with unopened database');
16 | }
17 | }
18 |
19 | function all(query, ...args) {
20 | 	verifyConnected();
21 | 
22 | 	return database.all(query, args);
23 | }
24 |
25 | function exec(query) {
26 | verifyConnected();
27 |
28 | return database.exec(query);
29 | }
30 |
31 | function run(query, ...args) {
32 | 	verifyConnected();
33 | 
34 | 	return database.run(query, args);
35 | }
36 |
37 | function getNextBatch(platform, size=50) {
38 | return all('SELECT * FROM tasks WHERE platform = ? AND processed = FALSE LIMIT ?', platform, size);
39 | }
40 |
41 | function rowProcessed(id) {
42 | return run('UPDATE tasks SET processed = TRUE WHERE id = ?', id);
43 | }
44 |
45 | function close() {
46 | verifyConnected();
47 |
48 | return database.close();
49 | }
50 |
51 | module.exports = {
52 | connect,
53 | all,
54 | exec,
55 | run,
56 | getNextBatch,
57 | rowProcessed,
58 | close
59 | };
--------------------------------------------------------------------------------
/idbe/certs/wiiu-common.key:
--------------------------------------------------------------------------------
1 | -----BEGIN RSA PRIVATE KEY-----
2 | MIIEpgIBAAKCAQEA81Vzs324jZwcNpbFESgDNooVTRP1TlxvYwz8bbHnJHhImjEJ
3 | NO29YSTpjmF7wonczooeKXfE/Ry2+ey9mk92UhzSnvuSHQ6P2zFBbcPnE8eBi73o
4 | DnErgixiWe1TKP1G5LvwOqrEkVmXLN/qnLrsfFp4QNyFc+PLvJ9IAfRSBwdRJHAi
5 | SgE9nB9eI7AGcM6DCw7+p9zEz6rNRHUVRc5I132wJpQa8aoWaqPW7LE8exEC3VSf
6 | DHRVPjZUMRhfoBVSi2NfiA3xYsqkv+Ct3E+bzW8y1aAQ7wIshQ/RGcLtVZE+tkoA
7 | znXewVLdKtcC67Vy4awhJ/BqK1tvc26qV3zIJwIDAQABAoIBAQDBTk4m9iYJoU2s
8 | dCPDmFTNG+8GF2fVw4rdVjCmeCDWkROkInZc7Mx4gtljub+WcOzPy1tgt/vu08Ps
9 | UYziLGQjoTAVCmct3CaeC8gdifZleSVJvSi/aFoXBGlxZR6ePm72QPL7uDOGAHUf
10 | OhboQXqi40AKzuTZhsqQYrzSiKQtXa8M0jMj3XWb7q+wLEyCtGJvUsFjfXiuxdrg
11 | ZMwJ3jroDkxAIatmZzzAcjbY6U78P574DQAeFJM/6KJrZXgmsPdkqEpUJWBs1Zro
12 | b7eEKjkM0tVfk6GrlIZgD5lhBxdyCci9UomO1JjJakKBhVeiB+HMC4sJtcJPyBdb
13 | ZH70dIJhAoGBAP74UyWJ/xnd12NrZrybOab5pCSYDcaoLpzghWcaxsX+AS70BpEZ
14 | wZU+DMxWtSfGTbVOlTlW0LEu3A9JHtisr+a3bf+ytv2zXRuDnJ3fhnFqRLRfopNs
15 | BEAePBUL2EkJ/8OTSsiZcmDoVyhnt14U4sleawsIi30T/KcQDtMS81YXAoGBAPRR
16 | F/oZcWHYNm1X8foHiPbltAji4u5M3McZT4wb3RZbWLJbUjSz/tATah36dxuT1v1z
17 | EwHWZ0vdN94MuIy1OnwIHvBXJy8m4Rq6VTQpzGPgy0clbZbtX3Cih+fQSvxJUe2m
18 | psaicsNSTtmd+btudZyG5qzILOU8afGmeB1wr3hxAoGBANZkcm3XSoUyj8FOdxXS
19 | pDiuI4KNxM+tbXyGIkZfMpMbkV0s3jS2ZpuakGJl6m/mhEMXL80GHfdOwsWro19o
20 | XYRv6vOeD9bmMj1HfrMVWFQXmmvdGrRBmJVdlwHPcu9/k+uc974ToSSxWVBlXb+j
21 | aksOtI2Tgs8KtmC31O9ROQHDAoGBAI0OnPdK5UmGmbX7xruCyjMyYAWZaUgInJdf
22 | J6xPEhCsYMNpMkc3fPEJpIT2bPpBGylt3RV8glsst+q+EXc70y51SdedmgQBQIo7
23 | 9qGNWHJ6ASNsmp8/IZFYZXsTqZeLhX/ebf/VHsliph/Cs8LhfYoH4Pr0/+bCQLDC
24 | Wis1OjohAoGBAJSivkSk2MKrg/t4CqNYLBiPp0fegm9ZL3187I/leVJGXIXsNqTq
25 | oa6qT5JxiJRQnkw2IzfI6icl+BJYydRU3oH4X0nvl0eyjiIfG/nfn4H7ckSKdzP/
26 | wVBdRbcC+/PX/1WpGiJM+kXrf/vLQN1B6iihrYEkS/ZIkzFWMuuKQRvQ
27 | -----END RSA PRIVATE KEY-----
--------------------------------------------------------------------------------
/spotpass/certs/wiiu-common.key:
--------------------------------------------------------------------------------
1 | -----BEGIN RSA PRIVATE KEY-----
2 | MIIEpgIBAAKCAQEA81Vzs324jZwcNpbFESgDNooVTRP1TlxvYwz8bbHnJHhImjEJ
3 | NO29YSTpjmF7wonczooeKXfE/Ry2+ey9mk92UhzSnvuSHQ6P2zFBbcPnE8eBi73o
4 | DnErgixiWe1TKP1G5LvwOqrEkVmXLN/qnLrsfFp4QNyFc+PLvJ9IAfRSBwdRJHAi
5 | SgE9nB9eI7AGcM6DCw7+p9zEz6rNRHUVRc5I132wJpQa8aoWaqPW7LE8exEC3VSf
6 | DHRVPjZUMRhfoBVSi2NfiA3xYsqkv+Ct3E+bzW8y1aAQ7wIshQ/RGcLtVZE+tkoA
7 | znXewVLdKtcC67Vy4awhJ/BqK1tvc26qV3zIJwIDAQABAoIBAQDBTk4m9iYJoU2s
8 | dCPDmFTNG+8GF2fVw4rdVjCmeCDWkROkInZc7Mx4gtljub+WcOzPy1tgt/vu08Ps
9 | UYziLGQjoTAVCmct3CaeC8gdifZleSVJvSi/aFoXBGlxZR6ePm72QPL7uDOGAHUf
10 | OhboQXqi40AKzuTZhsqQYrzSiKQtXa8M0jMj3XWb7q+wLEyCtGJvUsFjfXiuxdrg
11 | ZMwJ3jroDkxAIatmZzzAcjbY6U78P574DQAeFJM/6KJrZXgmsPdkqEpUJWBs1Zro
12 | b7eEKjkM0tVfk6GrlIZgD5lhBxdyCci9UomO1JjJakKBhVeiB+HMC4sJtcJPyBdb
13 | ZH70dIJhAoGBAP74UyWJ/xnd12NrZrybOab5pCSYDcaoLpzghWcaxsX+AS70BpEZ
14 | wZU+DMxWtSfGTbVOlTlW0LEu3A9JHtisr+a3bf+ytv2zXRuDnJ3fhnFqRLRfopNs
15 | BEAePBUL2EkJ/8OTSsiZcmDoVyhnt14U4sleawsIi30T/KcQDtMS81YXAoGBAPRR
16 | F/oZcWHYNm1X8foHiPbltAji4u5M3McZT4wb3RZbWLJbUjSz/tATah36dxuT1v1z
17 | EwHWZ0vdN94MuIy1OnwIHvBXJy8m4Rq6VTQpzGPgy0clbZbtX3Cih+fQSvxJUe2m
18 | psaicsNSTtmd+btudZyG5qzILOU8afGmeB1wr3hxAoGBANZkcm3XSoUyj8FOdxXS
19 | pDiuI4KNxM+tbXyGIkZfMpMbkV0s3jS2ZpuakGJl6m/mhEMXL80GHfdOwsWro19o
20 | XYRv6vOeD9bmMj1HfrMVWFQXmmvdGrRBmJVdlwHPcu9/k+uc974ToSSxWVBlXb+j
21 | aksOtI2Tgs8KtmC31O9ROQHDAoGBAI0OnPdK5UmGmbX7xruCyjMyYAWZaUgInJdf
22 | J6xPEhCsYMNpMkc3fPEJpIT2bPpBGylt3RV8glsst+q+EXc70y51SdedmgQBQIo7
23 | 9qGNWHJ6ASNsmp8/IZFYZXsTqZeLhX/ebf/VHsliph/Cs8LhfYoH4Pr0/+bCQLDC
24 | Wis1OjohAoGBAJSivkSk2MKrg/t4CqNYLBiPp0fegm9ZL3187I/leVJGXIXsNqTq
25 | oa6qT5JxiJRQnkw2IzfI6icl+BJYydRU3oH4X0nvl0eyjiIfG/nfn4H7ckSKdzP/
26 | wVBdRbcC+/PX/1WpGiJM+kXrf/vLQN1B6iihrYEkS/ZIkzFWMuuKQRvQ
27 | -----END RSA PRIVATE KEY-----
--------------------------------------------------------------------------------
/idbe/certs/wiiu-common.crt:
--------------------------------------------------------------------------------
1 | -----BEGIN CERTIFICATE-----
2 | MIIEwzCCA6ugAwIBAgIBBjANBgkqhkiG9w0BAQsFADBtMQswCQYDVQQGEwJVUzET
3 | MBEGA1UECBMKV2FzaGluZ3RvbjEhMB8GA1UEChMYTmludGVuZG8gb2YgQW1lcmlj
4 | YSBJbmMuMQswCQYDVQQLEwJJUzEZMBcGA1UEAxMQTmludGVuZG8gQ0EgLSBHMzAe
5 | Fw0xMDA1MTMxOTE5NDZaFw0zNzEyMjIxOTE5NDZaMIGlMQswCQYDVQQGEwJVUzET
6 | MBEGA1UECBMKV2FzaGluZ3RvbjEQMA4GA1UEBxMHUmVkbW9uZDEiMCAGA1UEChMZ
7 | TmludGVuZG8gb2YgQW1lcmljYSwgSW5jLjELMAkGA1UECxMCSVMxGjAYBgNVBAMT
8 | EUNUUiBDb21tb24gUHJvZCAxMSIwIAYJKoZIhvcNAQkBFhNjYUBub2EubmludGVu
9 | ZG8uY29tMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA81Vzs324jZwc
10 | NpbFESgDNooVTRP1TlxvYwz8bbHnJHhImjEJNO29YSTpjmF7wonczooeKXfE/Ry2
11 | +ey9mk92UhzSnvuSHQ6P2zFBbcPnE8eBi73oDnErgixiWe1TKP1G5LvwOqrEkVmX
12 | LN/qnLrsfFp4QNyFc+PLvJ9IAfRSBwdRJHAiSgE9nB9eI7AGcM6DCw7+p9zEz6rN
13 | RHUVRc5I132wJpQa8aoWaqPW7LE8exEC3VSfDHRVPjZUMRhfoBVSi2NfiA3xYsqk
14 | v+Ct3E+bzW8y1aAQ7wIshQ/RGcLtVZE+tkoAznXewVLdKtcC67Vy4awhJ/BqK1tv
15 | c26qV3zIJwIDAQABo4IBMzCCAS8wCQYDVR0TBAIwADAsBglghkgBhvhCAQ0EHxYd
16 | T3BlblNTTCBHZW5lcmF0ZWQgQ2VydGlmaWNhdGUwHQYDVR0OBBYEFIzG7XO5Ojx2
17 | G45r5dTszWF1rcFtMIGXBgNVHSMEgY8wgYyAFATT3tP98MjrwlmSh/sf1z5y+O35
18 | oXGkbzBtMQswCQYDVQQGEwJVUzETMBEGA1UECBMKV2FzaGluZ3RvbjEhMB8GA1UE
19 | ChMYTmludGVuZG8gb2YgQW1lcmljYSBJbmMuMQswCQYDVQQLEwJJUzEZMBcGA1UE
20 | AxMQTmludGVuZG8gQ0EgLSBHM4IBATA7BgNVHR8ENDAyMDCgLqAshipodHRwOi8v
21 | Y3JsLm5pbnRlbmRvLmNvbS9uaW50ZW5kby1jYS1nMy5jcmwwDQYJKoZIhvcNAQEL
22 | BQADggEBAEOXZ/3IkNuFUfdxHpP0vrcSCTnDqMk8gsLVbN39BJT8Wqm8e3MFNhS/
23 | Y1YOWgoIPtJp4cd2tXM3cXWzUZgm3SKd1XX/B81PFLEYlk+metUqB4jpF0ApCZs6
24 | RNoXDBTx6XzsC07CA3uaxEdeWjC5Nl29AHuZ1YC/Z+7Da57TwBaa+/APj4y5mGUa
25 | ahbvwpe1t3GSNOS5nBDSeCHAKLmzfnXpliA5qQZxo94RSXIVWK8hilXoFDQCL904
26 | OGpgZnAhz4p3rcJYTq9ub8n6NYr9OJKKbWXfJY1QK4pXFVcIuAph0o/EyzDIEXuT
27 | J4Q4b2km8uI0H4yxsQwUX9Epw6Vbujc=
28 | -----END CERTIFICATE-----
--------------------------------------------------------------------------------
/spotpass/certs/wiiu-common.crt:
--------------------------------------------------------------------------------
1 | -----BEGIN CERTIFICATE-----
2 | MIIEwzCCA6ugAwIBAgIBBjANBgkqhkiG9w0BAQsFADBtMQswCQYDVQQGEwJVUzET
3 | MBEGA1UECBMKV2FzaGluZ3RvbjEhMB8GA1UEChMYTmludGVuZG8gb2YgQW1lcmlj
4 | YSBJbmMuMQswCQYDVQQLEwJJUzEZMBcGA1UEAxMQTmludGVuZG8gQ0EgLSBHMzAe
5 | Fw0xMDA1MTMxOTE5NDZaFw0zNzEyMjIxOTE5NDZaMIGlMQswCQYDVQQGEwJVUzET
6 | MBEGA1UECBMKV2FzaGluZ3RvbjEQMA4GA1UEBxMHUmVkbW9uZDEiMCAGA1UEChMZ
7 | TmludGVuZG8gb2YgQW1lcmljYSwgSW5jLjELMAkGA1UECxMCSVMxGjAYBgNVBAMT
8 | EUNUUiBDb21tb24gUHJvZCAxMSIwIAYJKoZIhvcNAQkBFhNjYUBub2EubmludGVu
9 | ZG8uY29tMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA81Vzs324jZwc
10 | NpbFESgDNooVTRP1TlxvYwz8bbHnJHhImjEJNO29YSTpjmF7wonczooeKXfE/Ry2
11 | +ey9mk92UhzSnvuSHQ6P2zFBbcPnE8eBi73oDnErgixiWe1TKP1G5LvwOqrEkVmX
12 | LN/qnLrsfFp4QNyFc+PLvJ9IAfRSBwdRJHAiSgE9nB9eI7AGcM6DCw7+p9zEz6rN
13 | RHUVRc5I132wJpQa8aoWaqPW7LE8exEC3VSfDHRVPjZUMRhfoBVSi2NfiA3xYsqk
14 | v+Ct3E+bzW8y1aAQ7wIshQ/RGcLtVZE+tkoAznXewVLdKtcC67Vy4awhJ/BqK1tv
15 | c26qV3zIJwIDAQABo4IBMzCCAS8wCQYDVR0TBAIwADAsBglghkgBhvhCAQ0EHxYd
16 | T3BlblNTTCBHZW5lcmF0ZWQgQ2VydGlmaWNhdGUwHQYDVR0OBBYEFIzG7XO5Ojx2
17 | G45r5dTszWF1rcFtMIGXBgNVHSMEgY8wgYyAFATT3tP98MjrwlmSh/sf1z5y+O35
18 | oXGkbzBtMQswCQYDVQQGEwJVUzETMBEGA1UECBMKV2FzaGluZ3RvbjEhMB8GA1UE
19 | ChMYTmludGVuZG8gb2YgQW1lcmljYSBJbmMuMQswCQYDVQQLEwJJUzEZMBcGA1UE
20 | AxMQTmludGVuZG8gQ0EgLSBHM4IBATA7BgNVHR8ENDAyMDCgLqAshipodHRwOi8v
21 | Y3JsLm5pbnRlbmRvLmNvbS9uaW50ZW5kby1jYS1nMy5jcmwwDQYJKoZIhvcNAQEL
22 | BQADggEBAEOXZ/3IkNuFUfdxHpP0vrcSCTnDqMk8gsLVbN39BJT8Wqm8e3MFNhS/
23 | Y1YOWgoIPtJp4cd2tXM3cXWzUZgm3SKd1XX/B81PFLEYlk+metUqB4jpF0ApCZs6
24 | RNoXDBTx6XzsC07CA3uaxEdeWjC5Nl29AHuZ1YC/Z+7Da57TwBaa+/APj4y5mGUa
25 | ahbvwpe1t3GSNOS5nBDSeCHAKLmzfnXpliA5qQZxo94RSXIVWK8hilXoFDQCL904
26 | OGpgZnAhz4p3rcJYTq9ub8n6NYr9OJKKbWXfJY1QK4pXFVcIuAph0o/EyzDIEXuT
27 | J4Q4b2km8uI0H4yxsQwUX9Epw6Vbujc=
28 | -----END CERTIFICATE-----
--------------------------------------------------------------------------------
/idbe/scrape.js:
--------------------------------------------------------------------------------
1 | const https = require('node:https');
2 | const fs = require('fs-extra');
3 | const axios = require('axios');
4 | const titles = require('./title-versions.json');
5 |
6 | const httpsAgent = new https.Agent({
7 | rejectUnauthorized: false,
8 | cert: fs.readFileSync('./certs/wiiu-common.crt'),
9 | key: fs.readFileSync('./certs/wiiu-common.key'),
10 | });
11 |
12 | const failed = [];
13 |
14 | async function downloadIcon(name, path, retries=0) {
15 | if (retries === 5) {
16 | failed.push(name);
17 | return;
18 | }
19 |
20 | try {
21 | // * ID field is unchecked, leave as 00 for now.
22 | // * Wii U server has icons for 3DS as well, leave as WUP for now.
23 | const response = await axios.get(`https://idbe-wup.cdn.nintendo.net/icondata/00/${name}.idbe`, {
24 | responseType: 'arraybuffer',
25 | validateStatus: () => {
26 | return true;
27 | },
28 | httpsAgent
29 | });
30 |
31 | // * Not all titles/versions have dedicated IDBE icons
32 | if (response.status !== 200) {
33 | return;
34 | }
35 |
36 | fs.writeFileSync(path, response.data);
37 | } catch {
38 | 		await downloadIcon(name, path, retries + 1);
39 | }
40 | }
41 |
42 | async function main() {
43 | for (const titleID in titles) {
44 | const basePath = `./icons/${titleID}`;
45 |
46 | fs.ensureDirSync(basePath);
47 |
48 | // * Download the latest icon
49 | await downloadIcon(titleID, `${basePath}/latest.idbe`);
50 |
51 | // * Try all possible versions
52 | const versions = titles[titleID];
53 |
54 | for (const version of versions) {
55 | await downloadIcon(`${titleID}-${version}`, `${basePath}/${version}.idbe`);
56 | }
57 |
58 | // * Clean up empty folders
59 | const files = fs.readdirSync(basePath);
60 |
61 | if (files.length === 0) {
62 | fs.removeSync(basePath);
63 | }
64 | }
65 |
66 | console.log(failed);
67 | }
68 |
69 | main();
--------------------------------------------------------------------------------
/spotpass/build-database.js:
--------------------------------------------------------------------------------
1 | const { performance } = require('node:perf_hooks');
2 | const database = require('./database');
3 | const { millisecondsToString } = require('./util');
4 | const { COUNTRIES, LANGUAGES } = require('./constants');
5 | const appsWiiU = require('./wup-boss-apps.json');
6 | const apps3DS = require('./ctr-boss-apps.json');
7 |
8 | const apps = [
9 | ...appsWiiU.map(app => {
10 | app.platform = 'wup';
11 |
12 | return app;
13 | }),
14 | ...apps3DS.map(app => {
15 | app.platform = 'ctr';
16 |
17 | return app;
18 | })
19 | ];
20 |
21 | async function build() {
22 | await database.connect();
23 |
24 | await database.exec(`
25 | CREATE TABLE IF NOT EXISTS tasks (
26 | id INTEGER PRIMARY KEY,
27 | platform TEXT,
28 | app_id TEXT,
29 | task TEXT,
30 | country TEXT,
31 | language TEXT,
32 | processed BOOLEAN DEFAULT FALSE,
33 | UNIQUE (platform, app_id, task, country, language)
34 | )
35 | `);
36 |
37 | const startTime = performance.now();
38 |
39 | const values = [];
40 |
41 | for (const app of apps) {
42 | for (const task of app.tasks) {
43 | for (const country of COUNTRIES) {
44 | for (const language of LANGUAGES) {
45 | values.push(`("${app.platform}", "${app.app_id}", "${task}", "${country}", "${language}")`);
46 | }
47 | }
48 | }
49 | }
50 |
51 | // * Inserting all rows at once is basically instant.
52 | // * No point in filtering since this isn't user input
53 | const query = `INSERT INTO tasks (platform, app_id, task, country, language) VALUES ${values.join(', ')} ON CONFLICT(platform, app_id, task, country, language) DO NOTHING`;
54 |
55 | await database.run(query);
56 |
57 | const endTime = performance.now();
58 | const executionTime = millisecondsToString(endTime - startTime);
59 |
60 | console.log(`Database built in ${executionTime}`);
61 |
62 | await database.close();
63 | }
64 |
65 | build();
--------------------------------------------------------------------------------
/mario-sonic-rio-2016-3ds/README.md:
--------------------------------------------------------------------------------
1 | # Mario & Sonic Rio 2016 (3DS)
2 | ## Download all leaderboard data for all available events
3 |
4 | # Usage
5 | Create `.env` from `example.env` and fill in your 3DS NEX details. To get your username and password, use this homebrew tool: https://github.com/Stary2001/nex-dissector/tree/master/get_3ds_pid_password
6 |
7 | Run `python3 archive.py`
8 |
9 | # Meta Data
10 | This script will store the leaderboard data in the `data` directory. Each folder inside `data` is named after the leaderboard's event ID.
11 | 
12 | Each event folder contains a `rankings.json.gz` file. Due to its large size, the data is compressed with gzip at level 9. When decompressed, each file is a JSON array of ranking objects.
13 | 
14 | The data is stored exactly as the server sends it:
15 | 
16 | - `category` - The ranking category (event ID in this case)
17 | - `common_data` - Metadata about the user *at the time of upload*. Unknown use. Seems to contain Mii data?
18 | - `groups` - Ranking groups. Unknown use. Other M&S games use this for character and country IDs
19 | - `param` - Flags used when uploading the score. Unknown use
20 | - `pid` - NEX PID of the user who owns the ranking
21 | - `rank` - Global leaderboard ranking
22 | - `score` - The actual ranking value. How this is used is context specific
23 | - `unique_id` - Unknown use. Usually 0
24 | - `update_time` - Date the score was uploaded. This is the only field not stored exactly as the server sends it. Converted from the timestamp to a readable date
25 |
26 | Example:
27 |
28 | ```json
29 | {
30 | "category": 0,
31 | "common_data": "UgBlAG0AeQAAAAAAAAAAAAAAAAAAAEAARwAqAQMAADBBP9/LsE9jgJn/R9iYQVyHQgoAAG5BUgBlAG0AeQAAAAAAAAAAAAAAAABAQCQAMwcfJcMW7jLFEI0OD2YPAAApAFJIUHIAZQBtAHkAAAAAAAAAAAAAAAAAAABh8w==",
32 | "groups": [
33 | 0,
34 | 0
35 | ],
36 | "param": 0,
37 | "pid": 1876712771,
38 | "rank": 1,
39 | "score": 82511,
40 | "unique_id": 0,
41 | "update_time": "2022-08-07T12:57:40+00:00"
42 | }
43 | ```
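
A minimal sketch, assuming the layout described above, for loading one event's archive and printing a few entries (the event ID `0` is purely illustrative):

```python
import gzip
import json

# * Each folder under data/ is named after a leaderboard event ID (illustrative value here)
event_id = "0"

# * rankings.json.gz decompresses to a JSON array of ranking objects
with gzip.open(f"data/{event_id}/rankings.json.gz", "rt", encoding="utf-8") as rankings_file:
    rankings = json.load(rankings_file)

# * Fields are stored as the server sends them (see the list above)
for ranking in rankings[:10]:
    print(ranking["rank"], ranking["pid"], ranking["score"])
```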
--------------------------------------------------------------------------------
/idbe/README.md:
--------------------------------------------------------------------------------
1 | # IDBE
2 | ## Download all known IDBE icons
3 |
4 | ## IDBE Files
5 | IDBE files contain icon graphics and metadata for a title. These are used by titles like Download Management on the Wii U, and by the friends server on both platforms, to show a title's icon if it's not already installed locally. See https://nintendo-wiki.pretendo.network/docs/idbe for documentation on the file structure.
6 |
7 | ## Usage
8 | - Install [NodeJS](https://nodejs.org)
9 | - `npm i`
10 | - `node get_versions.js` (Only if `title-versions.json` needs to be created/updated)
11 | - `node scrape.js`
12 |
13 | ## Downloads
14 | Files are downloaded into the `icons` directory. Each subfolder is named after the title's title ID. Inside each folder will be at least one file, `latest.idbe`, which is the icon for the latest version of the title. Optionally there may be any number of other `.idbe` files, whose names are previous versions of the title. The highest-numbered version is identical to `latest.idbe`.
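
As a rough illustration of that layout, here is a minimal Python sketch (Python is used purely for illustration; the tool itself is JavaScript) that walks the `icons` directory and lists the archived versions for each title:

```python
import os

# * Each subfolder of icons/ is a title ID; each file inside is latest.idbe or <version>.idbe
for title_id in sorted(os.listdir("icons")):
    icon_dir = os.path.join("icons", title_id)
    versions = [name[:-len(".idbe")] for name in sorted(os.listdir(icon_dir)) if name.endswith(".idbe")]
    print(title_id, versions)
```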
15 |
16 | ## Title versions
17 | The IDBE server stores icons for both current and past releases of all titles. In order to get past releases, a title's previous version numbers must be known. These versions are scraped from various sources:
18 |
19 | - https://wiiubrew.org/wiki/Title_database - Contains lots of Wii U title versions not found in Tagaya.
20 | - https://yls8.mtheall.com/ninupdates/eshop/verlist_parser.php - The only known place to get previous 3DS title versions.
21 | - [Tagaya](https://nintendo-wiki.pretendo.network/docs/tagaya) (Wii U) - The Wii U Tagaya server contains all past version lists, which are used to fill in any versions missing from wiiubrew.
22 | - [Tagaya](https://nintendo-wiki.pretendo.network/docs/tagaya) (3DS) - The 3DS Tagaya server only contains a version list of the most recent title versions. This is why Yellows8's site is used.
23 |
24 | Yellows8's site only goes back to 2015, 4 years after the 3DS's launch. Because of this, there are 4 years' worth of 3DS title versions missing from this archive unless another source that contains them is found.
--------------------------------------------------------------------------------
/spotpass/read-boss-db-wiiu.js:
--------------------------------------------------------------------------------
1 | const fs = require('fs-extra');
2 | const apps = require('./wup-boss-apps.json');
3 |
4 | const TASK_SIZE = 0x1000; // * Size of each task entry
5 | const OFFSET_BASE = 0x103; // * Seems to have a 0x103 header
6 |
7 | const VALID_APP_ID_REGEX = /[A-Za-z0-9]{16}/;
8 |
9 | // * task.db is a database of all registered BOSS tasks for a user.
10 | // * The file is preallocated to 0x00100103 bytes. Each task entry
11 | // * is 0x1000 bytes, so removing the first 0x103 bytes from the
12 | // * file (which is mostly empty) results in 256 possible slots for
13 | // * tasks.
14 | const db = fs.readFileSync('task.db');
15 |
16 | const appsLengthBefore = apps.length;
17 | let newTasks = 0;
18 |
19 | // * 256 possible tasks
20 | for (let i = 0; i < 256; i++) {
21 | const offset = OFFSET_BASE + (i * TASK_SIZE);
22 | const entry = db.subarray(offset, offset + TASK_SIZE);
23 |
24 | const task = entry.subarray(0x21, 0x2A).toString().replace(/\0/g, '');
25 | const appID = entry.subarray(0x7C1, 0x7D1).toString().replace(/\0/g, '');
26 |
27 | // * Not all entries are populated
28 | if (!task || !appID) {
29 | continue;
30 | }
31 |
32 | // * Not all BOSS tasks are for downloading content from the BOSS server.
33 | // * Some tasks upload content, some tasks download from a 3rd party server,
34 | // * etc. In these cases, there is no application ID in the entry. I'm sure
35 | // * there's some way to detect this in the entry, like through some flags,
36 | // * but this is enough for our purposes
37 | if (!VALID_APP_ID_REGEX.test(appID)) {
38 | continue;
39 | }
40 |
41 | let found = false;
42 |
43 | for (const app of apps) {
44 | if (app.app_id === appID) {
45 | found = true;
46 |
47 | if (!app.tasks.includes(task)) {
48 | app.tasks.push(task);
49 | newTasks += 1;
50 | }
51 |
52 | break;
53 | }
54 | }
55 |
56 | if (!found) {
57 | apps.push({
58 | app_id: appID,
59 | tasks: [ task ]
60 | });
61 |
62 | newTasks += 1;
63 | }
64 | }
65 |
66 | console.log(`Found ${apps.length-appsLengthBefore} new BOSS apps and ${newTasks} new tasks`);
67 |
68 | fs.writeJSONSync('./wup-boss-apps.json', apps, {
69 | spaces: '\t'
70 | });
--------------------------------------------------------------------------------
/animal-crossing-new-leaf/create-database.py:
--------------------------------------------------------------------------------
1 | import os
2 | import anyio
3 | import sqlite3
4 | from dotenv import load_dotenv
5 | from nintendo.nex import backend, datastore, settings
6 |
7 | load_dotenv()
8 |
9 | # * Dump using https://github.com/Stary2001/nex-dissector/tree/master/get_3ds_pid_password
10 | NEX_USERNAME = os.getenv("NEX_3DS_USERNAME")
11 | NEX_PASSWORD = os.getenv("NEX_3DS_PASSWORD")
12 | datastore_client = None # * Gets set later
13 |
14 | conn = sqlite3.connect("./objects.db")
15 | cursor = conn.cursor()
16 |
17 | cursor.execute('''
18 | CREATE TABLE IF NOT EXISTS objects (
19 | id INTEGER PRIMARY KEY,
20 | processed BOOLEAN DEFAULT 0
21 | )
22 | ''')
23 | conn.commit()
24 |
25 | async def main():
26 | s = settings.default()
27 | s.configure("d6f08b40", 31017)
28 |
29 | async with backend.connect(s, "52.40.192.64", "60000") as be: # Skip NASC
30 | async with be.login(NEX_USERNAME, NEX_PASSWORD) as client:
31 | global datastore_client
32 |
33 | datastore_client = datastore.DataStoreClient(client)
34 |
35 |             cursor.execute("SELECT MAX(id) FROM objects")
36 | start_data_id = cursor.fetchone()[0]
37 |
38 | if start_data_id is None:
39 | param = datastore.DataStoreSearchParam()
40 |
41 | param.result_order = 0 # * Ascending
42 | param.result_range.offset = 0
43 | param.result_range.size = 1
44 | param.result_option = 0
45 |
46 | search_object_response = await datastore_client.search_object(param)
47 | objects = search_object_response.result
48 |
49 | start_data_id = objects[0].data_id
50 |
51 | param = datastore.DataStoreSearchParam()
52 |
53 | param.result_order = 1 # * Descending
54 | param.result_range.offset = 0
55 | param.result_range.size = 1
56 | param.result_option = 0
57 |
58 | search_object_response = await datastore_client.search_object(param)
59 | objects = search_object_response.result
60 |
61 | end_data_id = objects[0].data_id
62 |
63 | await client.disconnect()
64 |
65 | for data_id in range(start_data_id, end_data_id+1):
66 |         cursor.execute("INSERT INTO objects (id) VALUES (?) ON CONFLICT (id) DO NOTHING", (data_id,))
67 |
68 | conn.commit()
69 |
70 | anyio.run(main)
71 |
--------------------------------------------------------------------------------
/spotpass/scrape-wiiu.js:
--------------------------------------------------------------------------------
1 | const https = require('node:https');
2 | const axios = require('axios');
3 | const fs = require('fs-extra');
4 | const { create: xmlParser } = require('xmlbuilder2');
5 | const database = require('./database');
6 |
7 | const TASK_SHEET_URL_BASE = 'https://npts.app.nintendo.net/p01/tasksheet/1';
8 |
9 | const httpsAgent = new https.Agent({
10 | rejectUnauthorized: false,
11 | cert: fs.readFileSync('./certs/wiiu-common.crt'),
12 | key: fs.readFileSync('./certs/wiiu-common.key'),
13 | });
14 |
15 | async function scrapeWiiU(downloadBase) {
16 | let batch = await database.getNextBatch('wup');
17 |
18 | while (batch.length !== 0) {
19 | await Promise.all(batch.map(async (task) => {
20 | await scrapeTask(downloadBase, task);
21 | await database.rowProcessed(task.id);
22 | }));
23 |
24 | batch = await database.getNextBatch('wup');
25 | }
26 | }
27 |
28 | async function scrapeTask(downloadBase, task) {
29 | const response = await axios.get(`${TASK_SHEET_URL_BASE}/${task.app_id}/${task.task}?c=${task.country}&l=${task.language}`, {
30 | validateStatus: () => {
31 | return true;
32 | },
33 | httpsAgent
34 | });
35 |
36 | if (!response.headers['content-type'] || !response.headers['content-type'].startsWith('application/xml')) {
37 | return;
38 | }
39 |
40 | fs.ensureDirSync(`${downloadBase}/${task.country}/${task.language}/${task.app_id}/${task.task}`);
41 | fs.writeFileSync(`${downloadBase}/${task.country}/${task.language}/${task.app_id}/${task.task}/tasksheet.xml`, response.data);
42 |
43 | const data = xmlParser(response.data).toObject();
44 |
45 | if (!data || !data.TaskSheet || !data.TaskSheet.Files || !data.TaskSheet.Files.File) {
46 | return;
47 | }
48 |
49 | let files = [];
50 |
51 | if (Array.isArray(data.TaskSheet.Files.File)) {
52 | files = data.TaskSheet.Files.File;
53 | } else {
54 | files.push(data.TaskSheet.Files.File);
55 | }
56 |
57 | for (const file of files) {
58 | const response = await axios.get(file.Url, {
59 | responseType: 'arraybuffer',
60 | httpsAgent
61 | });
62 | const fileData = Buffer.from(response.data, 'binary');
63 |
64 | fs.writeFileSync(`${downloadBase}/${task.country}/${task.language}/${task.app_id}/${task.task}/${file.Filename}.boss`, fileData);
65 | }
66 | }
67 |
68 | module.exports = scrapeWiiU;
--------------------------------------------------------------------------------
/spotpass/3ds-tasks-to-tid.js:
--------------------------------------------------------------------------------
1 | const https = require('node:https');
2 | const axios = require('axios');
3 | const fs = require('fs-extra');
4 | const { create: xmlParser } = require('xmlbuilder2');
5 | const { COUNTRIES, LANGUAGES } = require('./constants');
6 | const apps = require('./ctr-boss-apps.json');
7 |
8 | const TASK_SHEET_URL_BASE = 'https://npts.app.nintendo.net/p01/tasksheet/1';
9 |
10 | const httpsAgent = new https.Agent({
11 | rejectUnauthorized: false,
12 | cert: fs.readFileSync('./certs/wiiu-common.crt'),
13 | key: fs.readFileSync('./certs/wiiu-common.key'),
14 | });
15 |
16 | async function main() {
17 | for (const app of apps) {
18 | if (app.title_id) {
19 | // * Skip any titles who already have this set.
20 | // * This includes those set to "unknown"
21 | continue;
22 | }
23 |
24 | // * Many nested loops are used here.
25 | // * If a title ID is found, we need
26 | // * to bail out of them all
27 | let deepExit = false;
28 |
29 | for (const country of COUNTRIES) {
30 | if (deepExit) {
31 | break;
32 | }
33 |
34 | for (const language of LANGUAGES) {
35 | if (deepExit) {
36 | break;
37 | }
38 |
39 | // * Most BOSS apps are region-agnostic, but some require
40 | // * specific combinations. Try every country/language
41 | // * combination until a title ID is found
42 | const titleID = await getTitleID(app, country, language);
43 |
44 | if (titleID) {
45 | app.title_id = titleID;
46 | deepExit = true;
47 | }
48 | }
49 | }
50 |
51 | // * If there still wasn't one set, assume the server couldn't
52 | // * handle the BOSS application
53 | if (!app.title_id) {
54 | app.title_id = 'unknown';
55 | }
56 |
57 | fs.writeJSONSync('./ctr-boss-apps.json', apps, {
58 | spaces: '\t'
59 | });
60 | }
61 | }
62 |
63 | async function getTitleID(app, country, language) {
64 | // * Sometimes a task may not work, so try them all
65 | for (const task of app.tasks) {
66 | const response = await axios.get(`${TASK_SHEET_URL_BASE}/${app.app_id}/${task}?c=${country}&l=${language}`, {
67 | validateStatus: () => {
68 | return true;
69 | },
70 | httpsAgent
71 | });
72 |
73 | if (!response.headers['content-type'] || !response.headers['content-type'].startsWith('application/xml')) {
74 | continue;
75 | }
76 |
77 | const xml = xmlParser(response.data).toObject();
78 |
79 | if (!xml || !xml.TaskSheet || !xml.TaskSheet.TitleId) {
80 | continue;
81 | }
82 |
83 | const titleID = xml.TaskSheet.TitleId.toUpperCase();
84 |
85 | return titleID;
86 | }
87 | }
88 |
89 | main();
--------------------------------------------------------------------------------
/idbe/.gitignore:
--------------------------------------------------------------------------------
1 | # Logs
2 | logs
3 | *.log
4 | npm-debug.log*
5 | yarn-debug.log*
6 | yarn-error.log*
7 | lerna-debug.log*
8 | .pnpm-debug.log*
9 |
10 | # Diagnostic reports (https://nodejs.org/api/report.html)
11 | report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
12 |
13 | # Runtime data
14 | pids
15 | *.pid
16 | *.seed
17 | *.pid.lock
18 |
19 | # Directory for instrumented libs generated by jscoverage/JSCover
20 | lib-cov
21 |
22 | # Coverage directory used by tools like istanbul
23 | coverage
24 | *.lcov
25 |
26 | # nyc test coverage
27 | .nyc_output
28 |
29 | # Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
30 | .grunt
31 |
32 | # Bower dependency directory (https://bower.io/)
33 | bower_components
34 |
35 | # node-waf configuration
36 | .lock-wscript
37 |
38 | # Compiled binary addons (https://nodejs.org/api/addons.html)
39 | build/Release
40 |
41 | # Dependency directories
42 | node_modules/
43 | jspm_packages/
44 |
45 | # Snowpack dependency directory (https://snowpack.dev/)
46 | web_modules/
47 |
48 | # TypeScript cache
49 | *.tsbuildinfo
50 |
51 | # Optional npm cache directory
52 | .npm
53 |
54 | # Optional eslint cache
55 | .eslintcache
56 |
57 | # Optional stylelint cache
58 | .stylelintcache
59 |
60 | # Microbundle cache
61 | .rpt2_cache/
62 | .rts2_cache_cjs/
63 | .rts2_cache_es/
64 | .rts2_cache_umd/
65 |
66 | # Optional REPL history
67 | .node_repl_history
68 |
69 | # Output of 'npm pack'
70 | *.tgz
71 |
72 | # Yarn Integrity file
73 | .yarn-integrity
74 |
75 | # dotenv environment variable files
76 | .env
77 | .env.development.local
78 | .env.test.local
79 | .env.production.local
80 | .env.local
81 |
82 | # parcel-bundler cache (https://parceljs.org/)
83 | .cache
84 | .parcel-cache
85 |
86 | # Next.js build output
87 | .next
88 | out
89 |
90 | # Nuxt.js build / generate output
91 | .nuxt
92 | dist
93 |
94 | # Gatsby files
95 | .cache/
96 | # Comment in the public line in if your project uses Gatsby and not Next.js
97 | # https://nextjs.org/blog/next-9-1#public-directory-support
98 | # public
99 |
100 | # vuepress build output
101 | .vuepress/dist
102 |
103 | # vuepress v2.x temp and cache directory
104 | .temp
105 | .cache
106 |
107 | # Docusaurus cache and generated files
108 | .docusaurus
109 |
110 | # Serverless directories
111 | .serverless/
112 |
113 | # FuseBox cache
114 | .fusebox/
115 |
116 | # DynamoDB Local files
117 | .dynamodb/
118 |
119 | # TernJS port file
120 | .tern-port
121 |
122 | # Stores VSCode versions used for testing VSCode extensions
123 | .vscode-test
124 |
125 | # yarn v2
126 | .yarn/cache
127 | .yarn/unplugged
128 | .yarn/build-state.yml
129 | .yarn/install-state.gz
130 | .pnp.*
131 |
132 | # custom
133 | icons
--------------------------------------------------------------------------------
/spotpass/.gitignore:
--------------------------------------------------------------------------------
1 | # Logs
2 | logs
3 | *.log
4 | npm-debug.log*
5 | yarn-debug.log*
6 | yarn-error.log*
7 | lerna-debug.log*
8 | .pnpm-debug.log*
9 |
10 | # Diagnostic reports (https://nodejs.org/api/report.html)
11 | report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
12 |
13 | # Runtime data
14 | pids
15 | *.pid
16 | *.seed
17 | *.pid.lock
18 |
19 | # Directory for instrumented libs generated by jscoverage/JSCover
20 | lib-cov
21 |
22 | # Coverage directory used by tools like istanbul
23 | coverage
24 | *.lcov
25 |
26 | # nyc test coverage
27 | .nyc_output
28 |
29 | # Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
30 | .grunt
31 |
32 | # Bower dependency directory (https://bower.io/)
33 | bower_components
34 |
35 | # node-waf configuration
36 | .lock-wscript
37 |
38 | # Compiled binary addons (https://nodejs.org/api/addons.html)
39 | build/Release
40 |
41 | # Dependency directories
42 | node_modules/
43 | jspm_packages/
44 |
45 | # Snowpack dependency directory (https://snowpack.dev/)
46 | web_modules/
47 |
48 | # TypeScript cache
49 | *.tsbuildinfo
50 |
51 | # Optional npm cache directory
52 | .npm
53 |
54 | # Optional eslint cache
55 | .eslintcache
56 |
57 | # Optional stylelint cache
58 | .stylelintcache
59 |
60 | # Microbundle cache
61 | .rpt2_cache/
62 | .rts2_cache_cjs/
63 | .rts2_cache_es/
64 | .rts2_cache_umd/
65 |
66 | # Optional REPL history
67 | .node_repl_history
68 |
69 | # Output of 'npm pack'
70 | *.tgz
71 |
72 | # Yarn Integrity file
73 | .yarn-integrity
74 |
75 | # dotenv environment variable files
76 | .env
77 | .env.development.local
78 | .env.test.local
79 | .env.production.local
80 | .env.local
81 |
82 | # parcel-bundler cache (https://parceljs.org/)
83 | .cache
84 | .parcel-cache
85 |
86 | # Next.js build output
87 | .next
88 | out
89 |
90 | # Nuxt.js build / generate output
91 | .nuxt
92 | dist
93 |
94 | # Gatsby files
95 | .cache/
96 | # Comment in the public line in if your project uses Gatsby and not Next.js
97 | # https://nextjs.org/blog/next-9-1#public-directory-support
98 | # public
99 |
100 | # vuepress build output
101 | .vuepress/dist
102 |
103 | # vuepress v2.x temp and cache directory
104 | .temp
105 | .cache
106 |
107 | # Docusaurus cache and generated files
108 | .docusaurus
109 |
110 | # Serverless directories
111 | .serverless/
112 |
113 | # FuseBox cache
114 | .fusebox/
115 |
116 | # DynamoDB Local files
117 | .dynamodb/
118 |
119 | # TernJS port file
120 | .tern-port
121 |
122 | # Stores VSCode versions used for testing VSCode extensions
123 | .vscode-test
124 |
125 | # yarn v2
126 | .yarn/cache
127 | .yarn/unplugged
128 | .yarn/build-state.yml
129 | .yarn/install-state.gz
130 | .pnp.*
131 |
132 | # custom
133 | data
134 | *.db
135 | *.bin
--------------------------------------------------------------------------------
/mario-sonic-rio-2016-3ds/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Distribution / packaging
10 | .Python
11 | build/
12 | develop-eggs/
13 | dist/
14 | downloads/
15 | eggs/
16 | .eggs/
17 | lib/
18 | lib64/
19 | parts/
20 | sdist/
21 | var/
22 | wheels/
23 | share/python-wheels/
24 | *.egg-info/
25 | .installed.cfg
26 | *.egg
27 | MANIFEST
28 |
29 | # PyInstaller
30 | # Usually these files are written by a python script from a template
31 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
32 | *.manifest
33 | *.spec
34 |
35 | # Installer logs
36 | pip-log.txt
37 | pip-delete-this-directory.txt
38 |
39 | # Unit test / coverage reports
40 | htmlcov/
41 | .tox/
42 | .nox/
43 | .coverage
44 | .coverage.*
45 | .cache
46 | nosetests.xml
47 | coverage.xml
48 | *.cover
49 | *.py,cover
50 | .hypothesis/
51 | .pytest_cache/
52 | cover/
53 |
54 | # Translations
55 | *.mo
56 | *.pot
57 |
58 | # Django stuff:
59 | *.log
60 | local_settings.py
61 | db.sqlite3
62 | db.sqlite3-journal
63 |
64 | # Flask stuff:
65 | instance/
66 | .webassets-cache
67 |
68 | # Scrapy stuff:
69 | .scrapy
70 |
71 | # Sphinx documentation
72 | docs/_build/
73 |
74 | # PyBuilder
75 | .pybuilder/
76 | target/
77 |
78 | # Jupyter Notebook
79 | .ipynb_checkpoints
80 |
81 | # IPython
82 | profile_default/
83 | ipython_config.py
84 |
85 | # pyenv
86 | # For a library or package, you might want to ignore these files since the code is
87 | # intended to run in multiple environments; otherwise, check them in:
88 | # .python-version
89 |
90 | # pipenv
91 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
92 | # However, in case of collaboration, if having platform-specific dependencies or dependencies
93 | # having no cross-platform support, pipenv may install dependencies that don't work, or not
94 | # install all needed dependencies.
95 | #Pipfile.lock
96 |
97 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow
98 | __pypackages__/
99 |
100 | # Celery stuff
101 | celerybeat-schedule
102 | celerybeat.pid
103 |
104 | # SageMath parsed files
105 | *.sage.py
106 |
107 | # Environments
108 | .env
109 | .venv
110 | env/
111 | venv/
112 | ENV/
113 | env.bak/
114 | venv.bak/
115 |
116 | # Spyder project settings
117 | .spyderproject
118 | .spyproject
119 |
120 | # Rope project settings
121 | .ropeproject
122 |
123 | # mkdocs documentation
124 | /site
125 |
126 | # mypy
127 | .mypy_cache/
128 | .dmypy.json
129 | dmypy.json
130 |
131 | # Pyre type checker
132 | .pyre/
133 |
134 | # pytype static type analyzer
135 | .pytype/
136 |
137 | # Cython debug symbols
138 | cython_debug/
139 |
140 | # custom
141 | data
--------------------------------------------------------------------------------
/mario-sonic-rio-2016-wiiu/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Distribution / packaging
10 | .Python
11 | build/
12 | develop-eggs/
13 | dist/
14 | downloads/
15 | eggs/
16 | .eggs/
17 | lib/
18 | lib64/
19 | parts/
20 | sdist/
21 | var/
22 | wheels/
23 | share/python-wheels/
24 | *.egg-info/
25 | .installed.cfg
26 | *.egg
27 | MANIFEST
28 |
29 | # PyInstaller
30 | # Usually these files are written by a python script from a template
31 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
32 | *.manifest
33 | *.spec
34 |
35 | # Installer logs
36 | pip-log.txt
37 | pip-delete-this-directory.txt
38 |
39 | # Unit test / coverage reports
40 | htmlcov/
41 | .tox/
42 | .nox/
43 | .coverage
44 | .coverage.*
45 | .cache
46 | nosetests.xml
47 | coverage.xml
48 | *.cover
49 | *.py,cover
50 | .hypothesis/
51 | .pytest_cache/
52 | cover/
53 |
54 | # Translations
55 | *.mo
56 | *.pot
57 |
58 | # Django stuff:
59 | *.log
60 | local_settings.py
61 | db.sqlite3
62 | db.sqlite3-journal
63 |
64 | # Flask stuff:
65 | instance/
66 | .webassets-cache
67 |
68 | # Scrapy stuff:
69 | .scrapy
70 |
71 | # Sphinx documentation
72 | docs/_build/
73 |
74 | # PyBuilder
75 | .pybuilder/
76 | target/
77 |
78 | # Jupyter Notebook
79 | .ipynb_checkpoints
80 |
81 | # IPython
82 | profile_default/
83 | ipython_config.py
84 |
85 | # pyenv
86 | # For a library or package, you might want to ignore these files since the code is
87 | # intended to run in multiple environments; otherwise, check them in:
88 | # .python-version
89 |
90 | # pipenv
91 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
92 | # However, in case of collaboration, if having platform-specific dependencies or dependencies
93 | # having no cross-platform support, pipenv may install dependencies that don't work, or not
94 | # install all needed dependencies.
95 | #Pipfile.lock
96 |
97 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow
98 | __pypackages__/
99 |
100 | # Celery stuff
101 | celerybeat-schedule
102 | celerybeat.pid
103 |
104 | # SageMath parsed files
105 | *.sage.py
106 |
107 | # Environments
108 | .env
109 | .venv
110 | env/
111 | venv/
112 | ENV/
113 | env.bak/
114 | venv.bak/
115 |
116 | # Spyder project settings
117 | .spyderproject
118 | .spyproject
119 |
120 | # Rope project settings
121 | .ropeproject
122 |
123 | # mkdocs documentation
124 | /site
125 |
126 | # mypy
127 | .mypy_cache/
128 | .dmypy.json
129 | dmypy.json
130 |
131 | # Pyre type checker
132 | .pyre/
133 |
134 | # pytype static type analyzer
135 | .pytype/
136 |
137 | # Cython debug symbols
138 | cython_debug/
139 |
140 | # custom
141 | config.json
142 | data
--------------------------------------------------------------------------------
/mario-sonic-sochi-2014-wiiu/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Distribution / packaging
10 | .Python
11 | build/
12 | develop-eggs/
13 | dist/
14 | downloads/
15 | eggs/
16 | .eggs/
17 | lib/
18 | lib64/
19 | parts/
20 | sdist/
21 | var/
22 | wheels/
23 | share/python-wheels/
24 | *.egg-info/
25 | .installed.cfg
26 | *.egg
27 | MANIFEST
28 |
29 | # PyInstaller
30 | # Usually these files are written by a python script from a template
31 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
32 | *.manifest
33 | *.spec
34 |
35 | # Installer logs
36 | pip-log.txt
37 | pip-delete-this-directory.txt
38 |
39 | # Unit test / coverage reports
40 | htmlcov/
41 | .tox/
42 | .nox/
43 | .coverage
44 | .coverage.*
45 | .cache
46 | nosetests.xml
47 | coverage.xml
48 | *.cover
49 | *.py,cover
50 | .hypothesis/
51 | .pytest_cache/
52 | cover/
53 |
54 | # Translations
55 | *.mo
56 | *.pot
57 |
58 | # Django stuff:
59 | *.log
60 | local_settings.py
61 | db.sqlite3
62 | db.sqlite3-journal
63 |
64 | # Flask stuff:
65 | instance/
66 | .webassets-cache
67 |
68 | # Scrapy stuff:
69 | .scrapy
70 |
71 | # Sphinx documentation
72 | docs/_build/
73 |
74 | # PyBuilder
75 | .pybuilder/
76 | target/
77 |
78 | # Jupyter Notebook
79 | .ipynb_checkpoints
80 |
81 | # IPython
82 | profile_default/
83 | ipython_config.py
84 |
85 | # pyenv
86 | # For a library or package, you might want to ignore these files since the code is
87 | # intended to run in multiple environments; otherwise, check them in:
88 | # .python-version
89 |
90 | # pipenv
91 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
92 | # However, in case of collaboration, if having platform-specific dependencies or dependencies
93 | # having no cross-platform support, pipenv may install dependencies that don't work, or not
94 | # install all needed dependencies.
95 | #Pipfile.lock
96 |
97 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow
98 | __pypackages__/
99 |
100 | # Celery stuff
101 | celerybeat-schedule
102 | celerybeat.pid
103 |
104 | # SageMath parsed files
105 | *.sage.py
106 |
107 | # Environments
108 | .env
109 | .venv
110 | env/
111 | venv/
112 | ENV/
113 | env.bak/
114 | venv.bak/
115 |
116 | # Spyder project settings
117 | .spyderproject
118 | .spyproject
119 |
120 | # Rope project settings
121 | .ropeproject
122 |
123 | # mkdocs documentation
124 | /site
125 |
126 | # mypy
127 | .mypy_cache/
128 | .dmypy.json
129 | dmypy.json
130 |
131 | # Pyre type checker
132 | .pyre/
133 |
134 | # pytype static type analyzer
135 | .pytype/
136 |
137 | # Cython debug symbols
138 | cython_debug/
139 |
140 | # custom
141 | config.json
142 | data
--------------------------------------------------------------------------------
/animal-crossing-new-leaf/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Distribution / packaging
10 | .Python
11 | build/
12 | develop-eggs/
13 | dist/
14 | downloads/
15 | eggs/
16 | .eggs/
17 | lib/
18 | lib64/
19 | parts/
20 | sdist/
21 | var/
22 | wheels/
23 | share/python-wheels/
24 | *.egg-info/
25 | .installed.cfg
26 | *.egg
27 | MANIFEST
28 |
29 | # PyInstaller
30 | # Usually these files are written by a python script from a template
31 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
32 | *.manifest
33 | *.spec
34 |
35 | # Installer logs
36 | pip-log.txt
37 | pip-delete-this-directory.txt
38 |
39 | # Unit test / coverage reports
40 | htmlcov/
41 | .tox/
42 | .nox/
43 | .coverage
44 | .coverage.*
45 | .cache
46 | nosetests.xml
47 | coverage.xml
48 | *.cover
49 | *.py,cover
50 | .hypothesis/
51 | .pytest_cache/
52 | cover/
53 |
54 | # Translations
55 | *.mo
56 | *.pot
57 |
58 | # Django stuff:
59 | *.log
60 | local_settings.py
61 | db.sqlite3
62 | db.sqlite3-journal
63 |
64 | # Flask stuff:
65 | instance/
66 | .webassets-cache
67 |
68 | # Scrapy stuff:
69 | .scrapy
70 |
71 | # Sphinx documentation
72 | docs/_build/
73 |
74 | # PyBuilder
75 | .pybuilder/
76 | target/
77 |
78 | # Jupyter Notebook
79 | .ipynb_checkpoints
80 |
81 | # IPython
82 | profile_default/
83 | ipython_config.py
84 |
85 | # pyenv
86 | # For a library or package, you might want to ignore these files since the code is
87 | # intended to run in multiple environments; otherwise, check them in:
88 | # .python-version
89 |
90 | # pipenv
91 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
92 | # However, in case of collaboration, if having platform-specific dependencies or dependencies
93 | # having no cross-platform support, pipenv may install dependencies that don't work, or not
94 | # install all needed dependencies.
95 | #Pipfile.lock
96 |
97 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow
98 | __pypackages__/
99 |
100 | # Celery stuff
101 | celerybeat-schedule
102 | celerybeat.pid
103 |
104 | # SageMath parsed files
105 | *.sage.py
106 |
107 | # Environments
108 | .env
109 | .venv
110 | env/
111 | venv/
112 | ENV/
113 | env.bak/
114 | venv.bak/
115 |
116 | # Spyder project settings
117 | .spyderproject
118 | .spyproject
119 |
120 | # Rope project settings
121 | .ropeproject
122 |
123 | # mkdocs documentation
124 | /site
125 |
126 | # mypy
127 | .mypy_cache/
128 | .dmypy.json
129 | dmypy.json
130 |
131 | # Pyre type checker
132 | .pyre/
133 |
134 | # pytype static type analyzer
135 | .pytype/
136 |
137 | # Cython debug symbols
138 | cython_debug/
139 |
140 | # custom
141 | objects
142 | .env
143 | last-checked-offset.txt
144 | *.db
145 | *.db-journal
--------------------------------------------------------------------------------
/super-mario-maker/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Distribution / packaging
10 | .Python
11 | build/
12 | develop-eggs/
13 | dist/
14 | downloads/
15 | eggs/
16 | .eggs/
17 | lib/
18 | lib64/
19 | parts/
20 | sdist/
21 | var/
22 | wheels/
23 | share/python-wheels/
24 | *.egg-info/
25 | .installed.cfg
26 | *.egg
27 | MANIFEST
28 |
29 | # PyInstaller
30 | # Usually these files are written by a python script from a template
31 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
32 | *.manifest
33 | *.spec
34 |
35 | # Installer logs
36 | pip-log.txt
37 | pip-delete-this-directory.txt
38 |
39 | # Unit test / coverage reports
40 | htmlcov/
41 | .tox/
42 | .nox/
43 | .coverage
44 | .coverage.*
45 | .cache
46 | nosetests.xml
47 | coverage.xml
48 | *.cover
49 | *.py,cover
50 | .hypothesis/
51 | .pytest_cache/
52 | cover/
53 |
54 | # Translations
55 | *.mo
56 | *.pot
57 |
58 | # Django stuff:
59 | *.log
60 | local_settings.py
61 | db.sqlite3
62 | db.sqlite3-journal
63 |
64 | # Flask stuff:
65 | instance/
66 | .webassets-cache
67 |
68 | # Scrapy stuff:
69 | .scrapy
70 |
71 | # Sphinx documentation
72 | docs/_build/
73 |
74 | # PyBuilder
75 | .pybuilder/
76 | target/
77 |
78 | # Jupyter Notebook
79 | .ipynb_checkpoints
80 |
81 | # IPython
82 | profile_default/
83 | ipython_config.py
84 |
85 | # pyenv
86 | # For a library or package, you might want to ignore these files since the code is
87 | # intended to run in multiple environments; otherwise, check them in:
88 | # .python-version
89 |
90 | # pipenv
91 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
92 | # However, in case of collaboration, if having platform-specific dependencies or dependencies
93 | # having no cross-platform support, pipenv may install dependencies that don't work, or not
94 | # install all needed dependencies.
95 | #Pipfile.lock
96 |
97 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow
98 | __pypackages__/
99 |
100 | # Celery stuff
101 | celerybeat-schedule
102 | celerybeat.pid
103 |
104 | # SageMath parsed files
105 | *.sage.py
106 |
107 | # Environments
108 | .env
109 | .venv
110 | env/
111 | venv/
112 | ENV/
113 | env.bak/
114 | venv.bak/
115 |
116 | # Spyder project settings
117 | .spyderproject
118 | .spyproject
119 |
120 | # Rope project settings
121 | .ropeproject
122 |
123 | # mkdocs documentation
124 | /site
125 |
126 | # mypy
127 | .mypy_cache/
128 | .dmypy.json
129 | dmypy.json
130 |
131 | # Pyre type checker
132 | .pyre/
133 |
134 | # pytype static type analyzer
135 | .pytype/
136 |
137 | # Cython debug symbols
138 | cython_debug/
139 |
140 | # custom
141 | objects
142 | metadata
143 | custom-rankings
144 | buffer-queues
145 | course-records
146 | .env
147 | last-checked-timestamp.txt
--------------------------------------------------------------------------------
/animal-crossing-new-leaf/README.md:
--------------------------------------------------------------------------------
1 | # Animal Crossing: New Leaf
2 | ## Download all DataStore objects and their rankings
3 |
4 | # Usage
5 | Create `.env` from `example.env` and fill in your 3DS NEX details. To get your username and password, use this homebrew https://github.com/Stary2001/nex-dissector/tree/master/get_3ds_pid_password
6 |
7 | Run `python3 archive.py`
8 |
9 | # DataStore objects
10 | This script downloads all available objects from DataStore, assuming the object is allowed to be returned. Not all objects may be downloaded, as DataStore may block public access to them. Not all objects may be Dream Worlds. To know what type of object a given object is, refer to its metadata file
11 |
12 | # DataStore object versions
13 | DataStore objects can be updated. When this happens, the object's "version" number is incremented internally. The last number of the object's S3 key is the version number. DataStore only ever returns S3 URLs for the latest version, meaning all past versions are lost. This script will track the version number in the file name, allowing for multiple versions of the object to be downloaded if a newer object is uploaded, assuming this script is run multiple times
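
As an illustration, here is a minimal sketch of how the version ends up in the file name, mirroring what `archive.py` in this folder does (the URL below is made up for the example; the real one comes from the `prepare_get_object` response):

```python
# * Hypothetical URL for illustration only; the real one is returned by DataStore
s3_url = "https://example-datastore-bucket.example.com/acnl/1000001-2"
data_id = 1000001

# * The version is the last number of the object's S3 key, after the dash
object_version = int(s3_url.split("/")[-1].split("-")[1])

# * Keeping the version in the file name means older downloads are never overwritten
object_path = "./objects/%d_v%d.bin" % (data_id, object_version)
print(object_path)  # ./objects/1000001_v2.bin
```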
14 |
15 | # DataStore metadata
16 | For every object downloaded, an associated metadata file is also saved. The contents of this file are the object's `DataStoreMetaInfo` serialized as JSON. To know which type of object a given object is, see `data_type` in the metadata file
17 |
18 | ```json
19 | {
20 | "data_id": 1000001,
21 | "owner_id": 147204330,
22 | "size": 465056,
23 | "name": "Ninten@ninten",
24 | "data_type": 1,
25 | "meta_binary": "",
26 | "permission": {
27 | "permission": 0,
28 | "recipients": []
29 | },
30 | "delete_permission": {
31 | "permission": 3,
32 | "recipients": []
33 | },
34 | "create_time": {
35 | "original_value": 135337759007,
36 | "standard": "2016-11-01 05:04:31"
37 | },
38 | "update_time": {
39 | "original_value": 135337873922,
40 | "standard": "2016-11-02 01:08:02"
41 | },
42 | "period": 365,
43 | "status": 0,
44 | "referred_count": 973714,
45 | "refer_data_id": 0,
46 | "flag": 2,
47 | "referred_time": {
48 | "original_value": 135810914673,
49 | "standard": "2023-11-27 01:37:49"
50 | },
51 | "expire_time": {
52 | "original_value": 135877892465,
53 | "standard": "2024-11-26 01:37:49"
54 | },
55 | "tags": [
56 | "LNinten",
57 | "N00",
58 | "Paaron",
59 | "Pchloe",
60 | "Pkaren",
61 | "Pninten",
62 | "UL_A00010027",
63 | "UL_C0001",
64 | "V4254"
65 | ],
66 | "ratings": [
67 | {
68 | "slot": 0,
69 | "info": {
70 | "total_value": 0,
71 | "count": 0,
72 | "initial_value": 0
73 | }
74 | },
75 | {
76 | "slot": 1,
77 | "info": {
78 | "total_value": 0,
79 | "count": 0,
80 | "initial_value": 0
81 | }
82 | }
83 | ]
84 | }
85 | ```
--------------------------------------------------------------------------------
/spotpass/scrape-3ds.js:
--------------------------------------------------------------------------------
1 | const https = require('node:https');
2 | const axios = require('axios');
3 | const fs = require('fs-extra');
4 | const database = require('./database');
5 |
6 | const NPFL_URL_BASE = 'https://npfl.c.app.nintendowifi.net/p01/filelist';
7 | const NPDL_URL_BASE = 'https://npdl.cdn.nintendowifi.net/p01/nsa';
8 |
9 | const httpsAgent = new https.Agent({
10 | rejectUnauthorized: false,
11 | cert: fs.readFileSync('./certs/wiiu-common.crt'), // * Hey, it works lol
12 | key: fs.readFileSync('./certs/wiiu-common.key'),
13 | });
14 |
15 | async function scrape3DS(downloadBase) {
16 | let batch = await database.getNextBatch('ctr');
17 |
18 | while (batch.length !== 0) {
19 | await Promise.all(batch.map(async (task) => {
20 | await scrapeTask(downloadBase, task);
21 | await database.rowProcessed(task.id);
22 | }));
23 |
24 | batch = await database.getNextBatch('ctr');
25 | }
26 | }
27 |
28 | async function scrapeTask(downloadBase, task) {
29 | const response = await axios.get(`${NPFL_URL_BASE}/${task.app_id}/${task.task}?c=${task.country}&l=${task.language}`, {
30 | validateStatus: () => {
31 | return true;
32 | },
33 | httpsAgent
34 | });
35 |
36 | if (!response.headers['content-type'] || !response.headers['content-type'].startsWith('text/plain')) {
37 | return;
38 | }
39 |
40 | fs.ensureDirSync(`${downloadBase}/${task.country}/${task.language}/${task.app_id}/${task.task}`);
41 | fs.writeFileSync(`${downloadBase}/${task.country}/${task.language}/${task.app_id}/${task.task}/filelist.txt`, response.data);
42 |
43 | const lines = response.data.split('\r\n').filter(line => line);
44 | 	const files = lines.splice(2);
45 |
46 | // * There's like 5 ways the 3DS can format these download URLs, just pray this works I guess.
47 | // * Not sure any better way to do this.
48 | for (const file of files) {
49 | const parts = file.split('\t');
50 | const fileName = parts[0];
51 |
52 | // * There are 5 possible formats for NPDL URLs.
53 | // * This tries all of them, one after the other, from least
54 | // * specific to most specific. This should result in the most
55 | // * specific version of each file being downloaded, overwriting
56 | // * less specific ones. Not all files work with all formats, so
57 | // * we just have to try them all and pray.
58 | // * This is pretty slow, but it at least should get all the data.
59 | const downloadPath = `${downloadBase}/${task.country}/${task.language}/${task.app_id}/${task.task}/${fileName}.boss`;
60 |
61 | let success = await downloadContentFile(`${NPDL_URL_BASE}/${task.app_id}/${task.task}/${task.country}/${task.language}/${fileName}`, downloadPath);
62 |
63 | if (success) {
64 | continue;
65 | }
66 |
67 | success = await downloadContentFile(`${NPDL_URL_BASE}/${task.app_id}/${task.task}/${task.language}_${task.country}/${fileName}`, downloadPath);
68 |
69 | if (success) {
70 | continue;
71 | }
72 |
73 | success = await downloadContentFile(`${NPDL_URL_BASE}/${task.app_id}/${task.task}/${task.country}/${fileName}`, downloadPath);
74 |
75 | if (success) {
76 | continue;
77 | }
78 |
79 | success = await downloadContentFile(`${NPDL_URL_BASE}/${task.app_id}/${task.task}/${task.language}/${fileName}`, downloadPath);
80 |
81 | if (success) {
82 | continue;
83 | }
84 |
85 | await downloadContentFile(`${NPDL_URL_BASE}/${task.app_id}/${task.task}/${fileName}`, downloadPath);
86 | }
87 | }
88 |
89 | async function downloadContentFile(url, downloadPath) {
90 | const response = await axios.get(url, {
91 | responseType: 'arraybuffer',
92 | validateStatus: () => {
93 | return true;
94 | },
95 | httpsAgent
96 | });
97 |
98 | if (response.status !== 200) {
99 | return false;
100 | }
101 |
102 | const fileData = Buffer.from(response.data, 'binary');
103 |
104 | fs.writeFileSync(downloadPath, fileData);
105 |
106 | return true;
107 | }
108 |
109 | module.exports = scrape3DS;
110 |
--------------------------------------------------------------------------------
/mario-sonic-sochi-2014-wiiu/README.md:
--------------------------------------------------------------------------------
1 | # Mario & Sonic Sochi 2014 (Wii U)
2 | ## Download all leaderboard data for all available events
3 |
4 | ## THIS IS VERY SLOW. IT MAKES SEVERAL NEX REQUESTS AND DOWNLOADS FILES FROM AN S3 SERVER
5 | ## GETTING A FULL DUMP OF ALL DATA WILL TAKE SEVERAL DAYS, OR EVEN WEEKS, RUNNING NONSTOP
6 |
7 | # Usage
8 | Create `config.json` from `example.config.json` and fill in your console and NNID details
9 | Run `python3 archive.py`
10 |
11 | # Meta Data
12 | This script will store the leaderboard and user data in the `data` directory. The `data` folder contains the following folders
13 |
14 | - `data/meta_binaries` - MetaBinary blobs for objects in DataStore. File name is the DataStore DataID. Usually a user's user data
15 | - `data/objects` - Object blobs for objects in DataStore. File name is the DataStore DataID. Usually a user's best run for an event
16 | - `data/rankings` - Contains JSON files of all the rankings for a given event. File name is the event ID
17 |
18 | All data is compressed with gzip at level 9. When decompressed, each file in `data/rankings` is a JSON array of ranking objects
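
For reference, a minimal sketch of reading one of these files back (the path below is hypothetical; use whichever event ID you want to inspect):

```python
import gzip
import json

# * Hypothetical path; rankings files are named after the event ID
with gzip.open("data/rankings/10", "rt", encoding="utf-8") as rankings_file:
    rankings = json.load(rankings_file)

# * Each file is a JSON array of ranking objects
print(len(rankings), "entries")
print(rankings[0]["name"], rankings[0]["score"])  # * Assuming at least one entry
```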
19 |
20 | The format of a ranking object is as follows:
21 |
22 | - `event` - The event ID the record is for (same as the parent folder)
23 | - `name` - The user's in-game name
24 | - `pid` - The user's NNID PID
25 | - `score` - The score for the ranking. Different events format this value differently
26 | - `place` - The record's global ranking (`1` = 1st, `2` = 2nd, etc)
27 | - `mii_data` - The user's NNID Mii data. Extracted from the `BPFC` data
28 | - `meta_binary.id` - DataStore DataID for the user's meta binary. Contains unknown data
29 | - `meta_binary.created` - Time the user's data was created in DataStore
30 | - `meta_binary.updated` - Time the user's data was updated in DataStore
31 | - `completed_country.id` - The country (flag) ID the user was using at the time of making the entry
32 | - `completed_country.name` - The country (flag) name the user was using at the time of making the entry. "Unknown" if unknown
33 | - `completed_character.id` - The character ID the user was using at the time of making the entry
34 | - `completed_character.name` - The character name the user was using at the time of making the entry. "Unknown" if unknown
35 | - `bpfc_data` - The user's `BPFC` data. Contains a small header, followed by normal Mii data and a small footer. Header and footer contain unknown data
36 | - `best_run.id` - DataStore DataID for the user's best run. 0 if no best run available. Contains unknown data
37 | - `best_run.created` - Time the user's best run was created in DataStore. Empty string if no best run available
38 | - `best_run.updated` - Time the user's best run was updated in DataStore. Empty string if no best run available
39 | - `ranking_raw` - The raw ranking data as sent by the server
40 |
41 | Example:
42 |
43 | ```json
44 | {
45 | "event": 10,
46 | "name": "★Harrison☆",
47 | "pid": 1783814945,
48 | "score": 84510,
49 | "place": 1,
50 | "mii_data": "AwAAQODjGSQAhODQ3F6pumzhV6Qy9AAAYgIFJkgAYQByAHIAaQBzAG8AbgAGJkBAAgAFA6VmYxahNEUUYRQPZA4AACmoWUhQTQBlACAAbABvAGwAAABhAHcAYQAAALsN",
51 | "meta_binary": {
52 | "id": 1807365,
53 | "created": "2015-01-10T15:24:20+00:00",
54 | "updated": "2023-09-24T17:58:09+00:00"
55 | },
56 | "completed_country": {
57 | "id": 58,
58 | "name": "Great Britain"
59 | },
60 | "completed_character": {
61 | "id": 18,
62 | "name": "Metal Sonic"
63 | },
64 | "bpfc_data": "AwAAQODjGSQAhODQ3F6pumzhV6Qy9AAAYgIFJkgAYQByAHIAaQBzAG8AbgAGJkBAAgAFA6VmYxahNEUUYRQPZA4AACmoWUhQTQBlACAAbABvAGwAAABhAHcAYQAAALsN",
65 | "best_run": {
66 | "id": 2785933,
67 | "created": "2023-08-05T14:49:23+00:00",
68 | "updated": "2023-08-05T14:49:23+00:00"
69 | }
70 | }
71 | ```
72 |
73 | ## Notes
74 |
75 | 1. Not every character name is known. The ID is stored as well as the name. If the name is not known, `name` will be `Unknown`
76 | 2. Nintendo seems to have cases where several records have the same rank position. No filtering is done to prevent this; the data is saved exactly as Nintendo sends it
--------------------------------------------------------------------------------
/theme-shop/boss.py:
--------------------------------------------------------------------------------
1 | from pyctr.crypto.engine import CryptoEngine, Keyslot
2 | from io import BytesIO
3 | from typing import IO
4 | import os
5 |
6 | def readle(i: IO, s) -> int:
7 | return int.from_bytes(i.read(s), 'little')
8 |
9 | def readbe(i: IO, s) -> int:
10 | return int.from_bytes(i.read(s), 'big')
11 |
12 | class BOSSHeader:
13 | magic: str
14 | magic_num: int
15 | filesize: int
16 | unk_int: int # * BE bytes (well not BE but read as BE), does not seem to be a date
17 | reserved: int
18 | padding: int
19 | content_header_hash_type: int
20 | content_header_rsa_size: int
21 | initial_iv_bytes_part: bytes
22 |
23 | def load(self, fd: IO):
24 | with BytesIO(fd.read(self.SIZE)) as dt:
25 | self.magic = dt.read(4).decode('ascii')
26 |
27 | if self.magic != 'boss':
28 | raise Exception("boss header magic mismatch")
29 |
30 | self.magic_num = readbe(dt, 4)
31 |
32 | if self.magic_num != 0x10001:
33 | raise Exception("boss header magic number mismatch")
34 |
35 | self.filesize = readbe(dt, 4)
36 | self.unk_int = readbe(dt, 8)
37 | self.reserved = readbe(dt, 2)
38 | self.padding = readbe(dt, 2)
39 | self.content_header_hash_type = readbe(dt, 2)
40 | self.content_header_rsa_size = readbe(dt, 2)
41 | self.initial_iv_bytes_part = dt.read(12)
42 |
43 | SIZE = 40
44 |
45 | class BOSSContentHeader:
46 | unk_0x10_bytes: bytes
47 | filepath_part: bytes
48 | sha256_hash: bytes
49 | signature: bytes
50 |
51 | def load(self, fd: IO):
52 | with BytesIO(fd.read(self.SIZE)) as dt:
53 | self.unk_0x10_bytes = dt.read(0x10)
54 | self.filepath_part = dt.read(2)
55 | self.sha256_hash = dt.read(0x20)
56 | self.signature = dt.read(0x100)
57 |
58 | SIZE = 0x132
59 |
60 | class BOSSPayloadContentHeader:
61 | title_id: int
62 | unk0: bytes
63 | content_datatype: int
64 | payload_size: int
65 | ns_data_id: int
66 | maybe_version: int
67 | sha256_hash: bytes
68 | signature: bytes
69 |
70 | def load(self, fd: IO):
71 | with BytesIO(fd.read(self.SIZE)) as dt:
72 | self.title_id = readbe(dt, 8)
73 | self.unk0 = dt.read(4)
74 | self.content_datatype = readbe(dt, 4)
75 | self.ns_data_id = readbe(dt, 4)
76 | self.payload_size = readbe(dt, 4)
77 | self.maybe_version = readbe(dt, 4)
78 | self.sha256_hash = dt.read(0x20)
79 | self.signature = dt.read(0x100)
80 |
81 | SIZE = 0x13C
82 |
83 | class BOSSFile:
84 | header: BOSSHeader
85 | content_header: BOSSContentHeader
86 | payload_content_header: BOSSPayloadContentHeader
87 |
88 | SIZE = BOSSHeader.SIZE + BOSSContentHeader.SIZE + BOSSPayloadContentHeader.SIZE
89 |
90 | payload: bytes
91 |
92 | def load(self, path: str):
93 | if not os.path.isfile(path):
94 | raise FileNotFoundError(path)
95 |
96 | siz = os.path.getsize(path)
97 |
98 | if siz <= self.SIZE:
99 | raise Exception("File is too short for a BOSS container")
100 |
101 | with open(path, "rb") as f:
102 | self.header = BOSSHeader()
103 | self.header.load(f)
104 |
105 | if siz < self.header.filesize:
106 |                     raise Exception(f"incomplete boss file. expected {self.header.filesize} bytes but got {siz} bytes instead")
107 |
108 | encrypted_payload = f.read() # * read remaining encrypted data
109 |
110 | ce = CryptoEngine()
111 | iv = self.header.initial_iv_bytes_part + b'\x00\x00\x00\x01'
112 | cipher = ce.create_ctr_cipher(Keyslot.BOSS, int.from_bytes(iv, "big"))
113 | dec_data = cipher.decrypt(encrypted_payload)
114 | with BytesIO(dec_data) as decrypted_payload:
115 | self.content_header = BOSSContentHeader()
116 | self.content_header.load(decrypted_payload)
117 | self.payload_content_header = BOSSPayloadContentHeader()
118 | self.payload_content_header.load(decrypted_payload)
119 | payload_pos = decrypted_payload.tell()
120 | self.payload = dec_data[payload_pos:payload_pos + self.payload_content_header.payload_size]
121 |
122 | def export_decrypted_payload(self, path: str):
123 | with open(path, "wb") as f:
124 | f.write(self.payload)
125 |
--------------------------------------------------------------------------------
/spotpass/README.md:
--------------------------------------------------------------------------------
1 | # SpotPass (BOSS)
2 | ## Download all known SpotPass/BOSS content
3 |
4 | # Usage
5 | - Install NodeJS
6 | - Add any missing BOSS tasks to `ctr-boss-apps.json` (3DS) and/or `wup-boss-apps.json` (Wii U)
7 | - `npm i`
8 | - Create the tasks [database](#database)
9 | - `node scrape`
10 |
11 | # Database
12 | This scraper relies on a database of BOSS tasks to archive. This is done so archiving may be done in batches rather than all at once. The SQLite schema is as follows
13 |
14 | ```sql
15 | CREATE TABLE IF NOT EXISTS tasks (
16 | id INTEGER PRIMARY KEY,
17 | platform TEXT,
18 | app_id TEXT,
19 | task TEXT,
20 | country TEXT,
21 | language TEXT,
22 | processed BOOLEAN
23 | )
24 | ```
25 |
26 | The archiver pulls unprocessed rows in batches and processes them concurrently. This way the archiver may be stopped and started without losing progress or redownloading existing content. This is useful for batch archiving, or in the event that the tool crashes.
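
The real batching logic lives in `database.js`; purely to illustrate the idea, here is a rough sketch of the same pattern against the schema above, written in Python (the database file name and batch size are assumptions for the example):

```python
import sqlite3

conn = sqlite3.connect("tasks.db")  # * Hypothetical database file name

def get_next_batch(platform, batch_size=100):
    # * Pull a chunk of tasks that have not been archived yet
    cursor = conn.execute(
        "SELECT id, app_id, task, country, language FROM tasks "
        "WHERE platform = ? AND processed = 0 LIMIT ?",
        (platform, batch_size)
    )
    return cursor.fetchall()

def row_processed(row_id):
    # * Mark the row as done so a restart skips it
    conn.execute("UPDATE tasks SET processed = 1 WHERE id = ?", (row_id,))
    conn.commit()
```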
27 |
28 | To build the database:
29 |
30 | - Ensure both `ctr-boss-apps.json` (3DS) and `wup-boss-apps.json` (Wii U) contain all BOSS tasks to be archived
31 | - `node build-database`
32 |
33 | This creates a row for every task, in every app, for every possible country and language combination. The database will be somewhat large and take some time to build, as each task needs 1,157 rows.
34 |
35 | # SpotPass/BOSS content
36 | SpotPass, aka BOSS, content is region-specific data used by titles for title-specific tasks. There is nearly no overlap in BOSS file content between games. Because of this, each game must have all of its regions checked manually.
37 |
38 | # Tasks
39 | Each title has a BOSS application ID associated with it. Each BOSS application can register a number of tasks, and these tasks download the content/files. For example, Super Mario Maker uses the application ID `vGwChBW1ExOoHDsm` for the US region. This application uses a task named `CHARA`, which downloads the costumes used in game.
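
The entry for that application in `wup-boss-apps.json`, for example, looks like this:

```json
{
	"app_id": "vGwChBW1ExOoHDsm",
	"tasks": [
		"CHARA"
	]
}
```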
40 |
41 | There is no simple way to know a game's BOSS application ID and associated tasks without checking your network traffic. See https://pretendo.network/docs/network-dumps#spotpass for more information on how to dump your traffic. This repository contains JSON lists of known applications and tasks, but this is far from complete.
42 |
43 | # Console databases
44 | Both consoles have databases for storing lists of BOSS tasks. These can be used to build the JSON files by using either `read-boss-db-wiiu.js` or `read-boss-db-3ds.js` depending on your console.
45 |
46 | A BOSS task must be registered in order to appear in the database. Typically a game will register all of its tasks once SpotPass is enabled for the game. A game may require the user to be online before asking to enable SpotPass, but this depends on the game.
47 |
48 | ### Wii U BOSS database
49 | The Wii U stores a separate database of BOSS tasks per user. Each one must be dumped individually.
50 |
51 | - Connect to the Wii U using FTP
52 | - Navigate to `/storage_mlc/usr/save/system/boss`
53 | - Find the folder for the user you want to dump the database for
54 | - Dump the `task.db` file
55 | - Place the `task.db` file here and run `node read-boss-db-wiiu`
56 |
57 | ### 3DS BOSS database
58 | The 3DS stores BOSS tasks in a single save file in the BOSS sysmodule.
59 |
60 | - Launch GodMode9
61 | - Navigate to `SYSNAND CTRNAND > data > longstring > sysdata > 00010034`
62 | - Open `00000000`. If your file is not named `00000000` you may still continue, though we cannot guarantee this is the correct file. If you have more than one file, repeat the following steps for each
63 | - Select `Mount as DISA image`
64 | - Press `A` to mount and enter the image
65 | - Select `PartitionA.bin`. If your file is not named `PartitionA.bin` you may still continue, though we cannot guarantee this is the correct file. If you have more than one file, repeat the following steps for each
66 | - Select `Copy to 0:/gm9/out`
67 | - Turn off your console and eject the SD card
68 | - Open your SD card on your computer and place the `sd:/gm9/out/PartitionA.bin` file here
69 | - Run `node read-boss-db-3ds`
70 |
71 | # Downloads
72 | Content is downloaded into the `data` folder. Since BOSS content may update over time, each run of the scraper is placed into its own folder inside `data`, with the name being the current date in `YYYY-MM-DD` format. Since BOSS content is region specific, the following subdirectories are the country and language code. Finally, each BOSS application has its own folder, which has additional folders for each task. These folders contain the `.boss` content files, as well as a `filelist.txt` (3DS) or `tasksheet.xml` (Wii U) file depending on the console.
73 |
74 | An example download path would be `data/2024-01-27/GB/en/0hFlOFo7pNTU2dyE/RNG_EC1` which holds the GB-en region content for BOSS task `RNG_EC1` in application `0hFlOFo7pNTU2dyE`.
75 |
76 | Downloads brute-force the regions, so they will take a while to finish. This will create lots of duplicate data, resulting in very large archive sizes.
--------------------------------------------------------------------------------
/mario-sonic-rio-2016-3ds/archive.py:
--------------------------------------------------------------------------------
1 | '''
2 | Pretendo Network 2023
3 |
4 | This will download rankings from M&S Rio 2016 (3DS) using NEX to automate the process
5 |
6 | Use at your own risk; we are not responsible for any bans
7 |
8 | Requires Python 3 and https://github.com/Kinnay/NintendoClients
9 | '''
10 |
11 | import os
12 | import json
13 | import gzip
14 | import anyio
15 | import base64
16 | from dotenv import load_dotenv
17 | from nintendo.nex import backend, ranking, settings
18 | from anynet import http
19 |
20 | load_dotenv()
21 |
22 | # * Dump using https://github.com/Stary2001/nex-dissector/tree/master/get_3ds_pid_password
23 | NEX_USERNAME = os.getenv('NEX_3DS_USERNAME')
24 | NEX_PASSWORD = os.getenv('NEX_3DS_PASSWORD')
25 | NEX_VERSION = 30901 # * 3.9.1
26 | ACCESS_KEY = "a2dbfa39"
27 |
28 | ranking_client = None
29 |
30 | '''
31 | NintendoClients does not implement this properly
32 | '''
33 | def new_RankingRankData_load(self, stream, version):
34 | self.pid = stream.pid()
35 | self.unique_id = stream.u64()
36 | self.rank = stream.u32()
37 | self.category = stream.u32()
38 | self.score = stream.u32()
39 | self.groups = stream.list(stream.u8)
40 | self.param = stream.u64()
41 | self.common_data = stream.buffer()
42 | if version >= 1:
43 | self.update_time = stream.datetime()
44 |
45 | '''
46 | Gets rid of the "unexpected version" warning
47 | '''
48 | def new_RankingRankData_max_version(self, settings):
49 | return 1
50 |
51 | ranking.RankingRankData.load = new_RankingRankData_load
52 | ranking.RankingRankData.max_version = new_RankingRankData_max_version
53 |
54 | async def main():
55 | global ranking_client
56 |
57 | os.makedirs("./data", exist_ok=True)
58 |
59 | s = settings.default()
60 | s.configure(ACCESS_KEY, NEX_VERSION)
61 |
62 | # * Skip NASC
63 | async with backend.connect(s, "34.208.166.202", "40760") as be:
64 | async with be.login(NEX_USERNAME, NEX_PASSWORD) as client:
65 | ranking_client = ranking.RankingClient(client)
66 |
67 | await scrape()
68 |
69 | async def scrape():
70 | # * Ordered as they appear in-game
71 | categories = [
72 | 0x00,
73 | 0x01,
74 | 0x02,
75 | 0x03,
76 | 0x04,
77 | 0x05,
78 | 0x06,
79 | 0x07,
80 | 0x09,
81 | 0x08,
82 | 0x0B,
83 | 0x0A,
84 | 0x0D,
85 | 0x0C,
86 | 0x0E,
87 | 0x0F,
88 | 0x10,
89 | 0x11,
90 | 0x12,
91 | 0x13,
92 | 0x14,
93 | 0x15,
94 | 0x16,
95 | 0x17,
96 | 0x18
97 | ]
98 |
99 | for category in categories:
100 | '''
101 | Make an initial request to get the total number of rankings in the category.
102 | Using mode 0 to get the latest results
103 | '''
104 | mode = 0
105 | order_param = ranking.RankingOrderParam()
106 | unique_id = 0
107 | principal_id = 0
108 |
109 | order_param.offset = 0
110 | order_param.count = 1
111 |
112 | result = await ranking_client.get_ranking(mode, category, order_param, unique_id, principal_id)
113 |
114 | offset = 0
115 | total = result.total
116 | remaining = result.total
117 |
118 | principal_id = result.data[0].pid
119 |
120 | leaderboard = []
121 | seen_rankings = []
122 |
123 | while remaining > 0:
124 | print("Category {0} on offset {1}. {2}/{3} remaining".format(category, offset, remaining, total))
125 |
126 | '''
127 | Using mode 1 as a hack to get around the 1000 offset limit.
128 | Mode 1 selects entries around "your" entry, but the server
129 | does not verify if the currently logged in user is the same
130 | as the user being used in this mode. Thus we can pretend to
131 | be the last user and continue past the offset limit
132 | '''
133 | mode = 1
134 | order_param = ranking.RankingOrderParam()
135 | unique_id = 0
136 |
137 | order_param.offset = 0
138 | order_param.count = 0xFF # * Max we can do in one go
139 | order_param.order_calc = 1 # * Ordinal (1234) rankings. Prevents duplicate ranking positions (no ties)
140 |
141 | result = await ranking_client.get_ranking(mode, category, order_param, unique_id, principal_id)
142 | rankings = result.data
143 |
144 | for entry in rankings:
145 | ranking_entry = {
146 | "pid": entry.pid,
147 | "unique_id": entry.unique_id,
148 | "rank": entry.rank,
149 | "category": entry.category,
150 | "score": entry.score,
151 | "groups": entry.groups,
152 | "param": entry.param,
153 | "common_data": base64.b64encode(entry.common_data).decode("utf-8"),
154 | "update_time": entry.update_time.standard_datetime().isoformat(),
155 | }
156 |
157 | if ranking_entry in seen_rankings:
158 | # * Ignore duplicates
159 | continue
160 |
161 | leaderboard.append(ranking_entry)
162 | principal_id = entry.pid
163 | offset += 1
164 | remaining -= 1
165 | seen_rankings.append(ranking_entry)
166 |
167 | print("Writing ./data/{0}/rankings.json.gz".format(category))
168 | leaderboard_data = json.dumps(leaderboard)
169 | os.makedirs("./data/{0}".format(category), exist_ok=True)
170 | await write_to_file("./data/{0}/rankings.json.gz".format(category), leaderboard_data.encode("utf-8"))
171 |
172 | async def write_to_file(path, data):
173 | with gzip.open(path, "w", compresslevel=9) as f:
174 | f.write(data)
175 |
176 | anyio.run(main)
177 |
--------------------------------------------------------------------------------
/mario-sonic-rio-2016-wiiu/README.md:
--------------------------------------------------------------------------------
1 | # Mario & Sonic Rio 2016 (Wii U)
2 | ## Download all leaderboard data for all available events
3 |
4 | # Usage
5 | Create `config.json` from `example.config.json` and fill in your console and NNID details
6 | Run `python3 archive.py`
7 |
8 | # Meta Data
9 | This script will store the leaderboard data in the `data` directory. Each folder inside `data` is the leaderboard's event ID
10 |
11 | - 1 = BMX
12 | - 5 = 100m
13 | - 6 = Rhythmic Gymnastics
14 | - 9 = 4 x 100m Relay
15 | - 10 = Javelin Throw
16 | - 11 = Triple Jump
17 | - 12 = Swimming
18 | - 13 = Equestrian
19 | - 14 = Archery
20 |
21 | Each event folder contains a `rankings.json.gz` file. Due to its large size, the data is compressed with gzip at level 9. When decompressed, each file is a JSON array of ranking objects
22 |
23 | The format of a ranking object is as follows:
24 |
25 | - `event` - The event ID the record is for (same as the parent folder)
26 | - `name` - The user's in-game name
27 | - `pid` - The user's NNID PID
28 | - `score` - The score for the ranking. Different events format this value differently
29 | - `place` - The record's global ranking (`1` = 1st, `2` = 2nd, etc)
30 | - `update_time` - The time this record was created/updated
31 | - `mii_data` - The user's NNID Mii data
32 | - `completed_country.id` - The country (flag) ID the user was using at the time of making the entry
33 | - `completed_country.name` - The country (flag) name the user was using at the time of making the entry. "Unknown" if unknown
34 | - `completed_character.id` - The character ID the user was using at the time of making the entry
35 | - `completed_character.name` - The character name the user was using at the time of making the entry. "Unknown" if unknown
36 | - `user_country.id` - The country (flag) ID the user typically uses
37 | - `user_country.name` - The country (flag) name the user typically uses. "Unknown" if unknown
38 | - `tournaments.cleared` - The number of tournaments cleared
39 | - `tournaments.gold_medals` - The number of gold medals earned in tournaments
40 | - `leagues.cleared` - The number of leagues cleared
41 | - `leagues.gold_medals` - The number of gold medals earned in leagues
42 | - `favorite_event.id` - The event ID for the user's favorite event
43 | - `favorite_event.name` - The event name for the user's favorite event. "Unknown" if unknown
44 | - `favorite_character.id` - The character ID for the user's favorite character
45 | - `favorite_character.name` - The character name for the user's favorite character. "Unknown" if unknown
46 | - `total_coins_earned` - The total number of coins the user has earned
47 | - `total_rings_earned` - The total number of rings the user has earned
48 | - `clear_counts.special_prizes` - Number of special prizes
49 | - `clear_counts.ghost_match_victories` - Number of ghost matches the user has won
50 | - `clear_counts.carnival_challenges` - Number of carnival challenges the user has cleared
51 | - `clear_counts.guests` - Number of guests the user has unlocked
52 | - `collectables.flags` - Number of flags the user has collected
53 | - `collectables.tips` - Number of tips the user has collected
54 | - `collectables.mii_wear` - Number of Mii wear the user has collected
55 | - `collectables.music_tracks` - Number of music tracks the user has collected
56 | - `collectables.stamps` - Number of stamps the user has collected
57 | - `unknown_common_data` - The unknown section of the ranking's common data
58 | - `ranking_raw` - The raw ranking data as sent by the server
59 |
60 | Example:
61 |
62 | ```json
63 | {
64 | "event": 6,
65 | "name": "UniteKoopa",
66 | "pid": 1761782268,
67 | "score": 19855,
68 | "place": 1,
69 | "update_time": "2017-10-21T03:34:40+00:00",
70 | "mii_data": "AwAAQApkmkXgRHBA2QAdZWLzgumdlwAAAFhVAG4AaQB0AGUASwBvAG8AcABhAFI9AgAzByBpRBTvNEUMgRAIZg0AACkAUkhQQwBoAGEAcgBnAGUAAAAAAAAAAAAAAKYn",
71 | "completed_country": {
72 | "id": 77,
73 | "name": "Australia"
74 | },
75 | "completed_character": {
76 | "id": 11,
77 | "name": "Tails"
78 | },
79 | "user_country": {
80 | "id": 33,
81 | "name": "USA"
82 | },
83 | "tournaments": {
84 | "cleared": 68,
85 | "gold_medals": 68
86 | },
87 | "leagues": {
88 | "cleared": 0,
89 | "gold_medals": 0
90 | },
91 | "favorite_event": {
92 | "id": 6,
93 | "name": "Rhythmic Gynmastics"
94 | },
95 | "favorite_character": {
96 | "id": 11,
97 | "name": "Tails"
98 | },
99 | "total_coins_earned": 49599,
100 | "total_rings_earned": 67530,
101 | "clear_counts": {
102 | "special_prizes": 68,
103 | "ghost_match_victories": 335,
104 | "carnival_challenges": 68,
105 | "guests": 14
106 | },
107 | "collectables": {
108 | "flags": 113,
109 | "tips": 91,
110 | "mii_wear": 402,
111 | "music_tracks": 57,
112 | "stamps": 100
113 | }
114 | }
115 | ```
116 |
117 | ## Notes
118 |
119 | 1. Not every event name is known. The ID is stored as well as the name. If the name is not known, `name` will be `Unknown`
120 | 2. Not every character name is known. The ID is stored as well as the name. If the name is not known, `name` will be `Unknown`
121 | 3. Nintendo seems to have cases where several records have the same rank position. No filtering is done to prevent this; the data is saved exactly as Nintendo sends it
122 | 4. The character and flag the user was using at the time of making the record can differ from their favorite character and flag. As such it is stored separately
--------------------------------------------------------------------------------
/animal-crossing-new-leaf/archive.py:
--------------------------------------------------------------------------------
1 | import os
2 | import json
3 | import gzip
4 | import anyio
5 | import sqlite3
6 | import asyncio
7 | from dotenv import load_dotenv
8 | from nintendo.nex import backend, datastore, settings
9 | from anynet import http
10 |
11 | load_dotenv()
12 |
13 | # * Dump using https://github.com/Stary2001/nex-dissector/tree/master/get_3ds_pid_password
14 | NEX_USERNAME = os.getenv("NEX_3DS_USERNAME")
15 | NEX_PASSWORD = os.getenv("NEX_3DS_PASSWORD")
16 |
17 | datastore_client = None # * Gets set later
18 | conn = None # * Gets set later
19 | cursor = None # * Gets set later
20 |
21 | def is_valid_json_file(path: str) -> bool:
22 | 	try:
23 | 		# * Metadata files are written gzipped, so decompress while reading
24 | 		with gzip.open(path, "rt") as json_file:
25 | 			# * Attempt to load the JSON data
26 | 			json.load(json_file)
27 | 			return True
28 | 	except (json.JSONDecodeError, OSError):
29 | 		# * Missing, truncated, or non-gzip metadata counts as invalid
30 | 		return False
31 |
32 | def should_download_object(data_id: int, expected_object_size: int, expected_object_version: int) -> bool:
33 | object_path = "./objects/%d_v%d.bin" % (data_id, expected_object_version)
34 | 	metadata_path = "./objects/%d_v%d_metadata.json.gz" % (data_id, expected_object_version)
35 |
36 | if not os.path.exists(object_path):
37 | return True
38 |
39 | if not os.path.exists(metadata_path):
40 | return True
41 |
42 | if os.path.getsize(object_path) != expected_object_size:
43 | return True
44 |
45 | if not is_valid_json_file(metadata_path):
46 | return True
47 |
48 | return False # * If nothing bails early, assume the object does not need to be redownloaded
49 |
50 | async def process_datastore_object(obj: datastore.DataStoreMetaInfo):
51 | param = datastore.DataStorePrepareGetParam()
52 | param.data_id = obj.data_id
53 |
54 | get_object_response = await datastore_client.prepare_get_object(param)
55 |
56 | headers = {header.key: header.value for header in get_object_response.headers}
57 | s3_url = get_object_response.url
58 | object_version = int(s3_url.split("/")[-1].split("-")[1])
59 |
60 | if not should_download_object(get_object_response.data_id, get_object_response.size, object_version):
61 | # * Object data already downloaded
62 | print("Skipping %d" % get_object_response.data_id)
63 | return
64 |
65 | response = await http.get(s3_url, headers=headers)
66 |
67 | object_file = open("./objects/%d_v%d.bin" % (get_object_response.data_id, object_version), "wb")
68 | object_file.write(response.body)
69 | object_file.close()
70 |
71 | metadata = {
72 | "data_id": obj.data_id,
73 | "owner_id": obj.owner_id,
74 | "size": obj.size,
75 | "name": obj.name,
76 | "data_type": obj.data_type,
77 | "meta_binary": obj.meta_binary.hex(),
78 | "permission": {
79 | "permission": obj.permission.permission,
80 | "recipients": obj.permission.recipients
81 | },
82 | "delete_permission": {
83 | "permission": obj.delete_permission.permission,
84 | "recipients": obj.delete_permission.recipients
85 | },
86 | "create_time": {
87 | "original_value": obj.create_time.value(),
88 | "standard": obj.create_time.standard_datetime().strftime("%Y-%m-%d %H:%M:%S")
89 | },
90 | "update_time": {
91 | "original_value": obj.update_time.value(),
92 | "standard": obj.update_time.standard_datetime().strftime("%Y-%m-%d %H:%M:%S")
93 | },
94 | "period": obj.period,
95 | "status": obj.status,
96 | "referred_count": obj.referred_count,
97 | "refer_data_id": obj.refer_data_id,
98 | "flag": obj.flag,
99 | "referred_time": {
100 | "original_value": obj.referred_time.value(),
101 | "standard": obj.referred_time.standard_datetime().strftime("%Y-%m-%d %H:%M:%S")
102 | },
103 | "expire_time": {
104 | "original_value": obj.expire_time.value(),
105 | "standard": obj.expire_time.standard_datetime().strftime("%Y-%m-%d %H:%M:%S")
106 | },
107 | "tags": obj.tags,
108 | "ratings": [
109 | {
110 | "slot": rating.slot,
111 | "info": {
112 | "total_value": rating.info.total_value,
113 | "count": rating.info.count,
114 | "initial_value": rating.info.initial_value
115 | }
116 | }
117 | for rating in obj.ratings
118 | ]
119 | }
120 |
121 | with gzip.open("./objects/%d_v%d_metadata.json.gz" % (get_object_response.data_id, object_version), "wb") as metadata_file:
122 | metadata_file.write(json.dumps(metadata).encode("utf-8"))
123 |
124 | async def process_pending_objects():
125 | global cursor
126 |
127 | os.makedirs("./objects", exist_ok=True)
128 |
129 | cursor = conn.cursor()
130 |
131 | s = settings.default()
132 | s.configure("d6f08b40", 31017)
133 |
134 | async with backend.connect(s, "52.40.192.64", "60000") as be: # * Skip NASC
135 | async with be.login(NEX_USERNAME, NEX_PASSWORD) as client:
136 | global datastore_client
137 |
138 | datastore_client = datastore.DataStoreClient(client)
139 |
140 | while True:
141 | cursor.execute("SELECT id FROM objects WHERE processed = 0 LIMIT 100")
142 | rows = cursor.fetchall()
143 |
144 | if not rows:
145 | break
146 |
147 | print("Checking objects %d through %d" % (rows[0][0], rows[-1][0]))
148 |
149 | params = []
150 |
151 | for row in rows:
152 | data_id = row[0]
153 | param = datastore.DataStoreGetMetaParam()
154 | param.data_id = data_id
155 | param.result_option = 0xFF
156 |
157 | params.append(param)
158 |
159 | metas = await datastore_client.get_metas_multiple_param(params)
160 | objects = []
161 |
162 | for i in range(len(rows)):
163 | row = rows[i]
164 | data_id = row[0]
165 |
166 | obj = metas.infos[i]
167 |
168 | if obj.data_id == 0:
169 | cursor.execute("UPDATE objects SET processed = 1 WHERE id = %d" % data_id)
170 | else:
171 | objects.append(obj)
172 |
173 | async with anyio.create_task_group() as tg:
174 | for obj in objects:
175 | tg.start_soon(process_datastore_object, obj)
176 |
177 | cursor.execute("UPDATE objects SET processed = 1 WHERE id = %d" % obj.data_id)
178 |
179 | conn.commit()
180 |
181 | print("All objects processed")
182 |
183 | async def main():
184 | global conn
185 |
186 | conn = sqlite3.connect("./objects.db")
187 | cursor = conn.cursor()
188 |
189 | cursor.execute("SELECT COUNT(*) FROM objects WHERE processed = 0")
190 | objects_remaining = cursor.fetchone()[0]
191 |
192 | print("Number of objects left to check: %d" % objects_remaining)
193 |
194 | await process_pending_objects()
195 |
196 | conn.close()
197 |
198 | anyio.run(main)
199 |
--------------------------------------------------------------------------------
/super-mario-maker/README.md:
--------------------------------------------------------------------------------
1 | # Super Mario Maker
2 | ## Download all DataStore objects (makers, courses, etc) and their rankings/records
3 |
4 | # Usage
5 | Create `.env` from `example.env` and fill in your NEX details. There are multiple ways to get your NEX details. NEX details for both the WiiU and 3DS will work here:
6 |
7 | - 3DS: To get your username and password from a 3DS, use this homebrew https://github.com/Stary2001/nex-dissector/tree/master/get_3ds_pid_password
8 | - WiiU: To get your username and password from a WiiU, use a proxy server like Fiddler or Charles and look for the response from https://account.nintendo.net/v1/api/provider/nex_token/@me. Your username is the `pid` field, and your password is the `password` field
9 |
10 | Run `python3 archive.py`
11 |
12 | # DataStore objects
13 | This script downloads all available objects from DataStore, assuming the object is allowed to be returned. Not all objects may be downloaded, as DataStore may block public access to them. Not all objects are courses. To know what type of object a given object is, refer to its metadata file
14 |
15 | # DataStore object versions
16 | DataStore objects can be updated. When this happens, the object's "version" number is incremented internally. The last number of the object's S3 key is the version number. DataStore only ever returns S3 URLs for the latest version, meaning all past versions are lost. This script will track the version number in the file name, allowing for multiple versions of the object to be downloaded if a newer object is uploaded, assuming this script is run multiple times
17 |
18 | # DataStore metadata
19 | For every object downloaded, an associated metadata file is also saved. The contents of this file are the object's `DataStoreMetaInfo` serialized as JSON. To know which type of object a given object is, see `data_type` in the metadata file
20 |
21 | ```json
22 | {
23 | "data_id": 915012,
24 | "owner_id": 1770180745,
25 | "size": 37788,
26 | "name": "Key to My Heart - by SethBling",
27 | "data_type": 12,
28 | "meta_binary": "000000020000000200000df4000000f800002f240000558c00000002c24ba7925b08b3bc62880bf37206322f004b0065007900200074006f0020004d00790020004800650061007200740020002d002000620079002000530065007400680042006c0069006e0067000000000000004b0065007900200074006f0020004d00790020004800650061007200740020002d002000620079002000530065007400680042006c0069006e0067000000000000004c006100200063006c00e90020006400650020006d006f006e00200063015300750072002000640065002000530065007400680042006c0069006e006700000000004c006c00610076006500730020007000720065006300690061006400610073002c002000640065002000530065007400680042006c0069006e0067000000000000004b0065007900200074006f0020004d00790020004800650061007200740020002d002000620079002000530065007400680042006c0069006e0067000000000000004b0065007900200074006f0020004d00790020004800650061007200740020002d002000620079002000530065007400680042006c0069006e0067000000000000004b0065007900200074006f0020004d00790020004800650061007200740020002d002000620079002000530065007400680042006c0069006e0067000000000000004b0065007900200074006f0020004d00790020004800650061007200740020002d002000620079002000530065007400680042006c0069006e0067000000000000004b0065007900200074006f0020004d00790020004800650061007200740020002d002000620079002000530065007400680042006c0069006e0067000000000000004b0065007900200074006f0020004d00790020004800650061007200740020002d002000620079002000530065007400680042006c0069006e0067000000000000004b0065007900200074006f0020004d00790020004800650061007200740020002d002000620079002000530065007400680042006c0069006e0067000000000000004b0065007900200074006f0020004d00790020004800650061007200740020002d002000620079002000530065007400680042006c0069006e0067000000000000",
29 | "permission": {
30 | "permission": 0,
31 | "recipients": []
32 | },
33 | "delete_permission": {
34 | "permission": 3,
35 | "recipients": []
36 | },
37 | "create_time": {
38 | "original_value": 135308465418,
39 | "standard": "2016-04-01 21:20:10"
40 | },
41 | "update_time": {
42 | "original_value": 135308465418,
43 | "standard": "2016-04-01 21:20:10"
44 | },
45 | "period": 64306,
46 | "status": 0,
47 | "referred_count": 0,
48 | "refer_data_id": 0,
49 | "flag": 3840,
50 | "referred_time": {
51 | "original_value": 135308465418,
52 | "standard": "2016-04-01 21:20:10"
53 | },
54 | "expire_time": {
55 | "original_value": 671075926016,
56 | "standard": "9999-12-31 00:00:00"
57 | },
58 | "tags": [
59 | "AYMHAAACAAADVHlDoA9gFw"
60 | ],
61 | "ratings": [
62 | {
63 | "slot": 0,
64 | "info": {
65 | "total_value": 165896,
66 | "count": 165896,
67 | "initial_value": 0
68 | }
69 | },
70 | {
71 | "slot": 1,
72 | "info": {
73 | "total_value": 11908666,
74 | "count": 124183,
75 | "initial_value": 0
76 | }
77 | },
78 | {
79 | "slot": 2,
80 | "info": {
81 | "total_value": 23870,
82 | "count": 165896,
83 | "initial_value": 0
84 | }
85 | },
86 | {
87 | "slot": 3,
88 | "info": {
89 | "total_value": 1253174,
90 | "count": 165896,
91 | "initial_value": 0
92 | }
93 | },
94 | {
95 | "slot": 4,
96 | "info": {
97 | "total_value": 1229304,
98 | "count": 165896,
99 | "initial_value": 0
100 | }
101 | },
102 | {
103 | "slot": 5,
104 | "info": {
105 | "total_value": 137298,
106 | "count": 137298,
107 | "initial_value": 0
108 | }
109 | },
110 | {
111 | "slot": 6,
112 | "info": {
113 | "total_value": 847,
114 | "count": 847,
115 | "initial_value": 0
116 | }
117 | }
118 | ]
119 | }
120 | ```
121 |
122 | # DataStore ratings
123 | DataStore objects have any number of `ratings`. The meaning of each is context dependent, and changes based on the game and `data_type` of the object
124 |
125 | # Custom Rankings
126 | Super Mario Maker implements "custom rankings". These extend the DataStore rating system to more freely rank objects based on custom criteria, by using dynamically generated "application IDs". The meaning of each "application ID" also changes based on the `data_type` of the object, much like ratings
127 |
128 | # Buffer Queues
129 | Super Mario Maker implements "buffer queues" as a way to store some forms of arbitrary binary data for objects. An object can have any number of unique buffers in any of its buffer queue slots. The meaning of each slot, and its buffers, also changes based on the `data_type` of the object, much like ratings
130 |
131 | # Course Records
132 | A "course record" is downloaded for every object, even non-courses. This is expected to create many empty files, as only course objects have records. Since Super Mario Maker uses several different `data_type` values for courses, it's safer to just try to download a record for every object rather than check the objects type. This results in potentially millions of useless files, but ensures no data is missed
--------------------------------------------------------------------------------
/idbe/get_versions.js:
--------------------------------------------------------------------------------
1 | const https = require('node:https');
2 | const fs = require('fs-extra');
3 | const axios = require('axios');
4 | const cheerio = require('cheerio');
5 | const { create: xmlParser } = require('xmlbuilder2');
6 |
7 | const titles = {};
8 |
9 | function addVersionsToTitle(titleID, versions) {
10 | if (!titles[titleID]) {
11 | titles[titleID] = [0]; // * Always check for version 0. This exists for lots of titles, but is never in any version lists
12 | }
13 |
14 | for (let version of versions) {
15 | version = Number(version);
16 |
17 | if (!titles[titleID].includes(version)) {
18 | titles[titleID].push(version);
19 | }
20 | }
21 | }
22 |
23 | async function scrapeWiiUBrew() {
24 | const response = await axios.get('https://wiiubrew.org/wiki/Title_database');
25 | const $ = cheerio.load(response.data);
26 |
27 | const tables = $('.wikitable.sortable');
28 |
29 | for (const table of tables) {
30 | const rows = $(table).find('tr');
31 |
32 | // * Start at 1 to skip the header
33 | for (let i = 1; i < rows.length; i++) {
34 | const row = rows[i];
35 | const sections = $(row).text().split('\n\n').map(section => section.trim());
36 | let titleID = sections[0].replace('-', '').toUpperCase();
37 | let versionsIndex;
38 |
39 | switch (titleID.substring(0, 8)) {
40 | case '00050010':
41 | case '0005001B':
42 | case '00050030':
43 | case '0005000C':
44 | case '0005000E':
45 | case '00000007':
46 | case '00070002':
47 | case '00070008':
48 | versionsIndex = 3;
49 | break;
50 | case '00050000':
51 | case '00050002':
52 | versionsIndex = 5;
53 | break;
54 | default:
55 | throw new Error(titleID);
56 | }
57 |
58 | const versions = sections[versionsIndex].split(',').map(version => {
59 | return version
60 | .trim()
61 | .replace('v', '')
62 | .split(' ')[0]
63 | .split('(')[0]
64 | }).filter(version => version);
65 |
66 | addVersionsToTitle(titleID, versions);
67 |
68 | 			// * Catch any versions assigned to updates or DLC which aren't assigned to the base title
69 | if (titleID.startsWith('0005000E') || titleID.startsWith('0005000C')) {
70 | titleID = `00050000${titleID.substring(8)}`;
71 |
72 | addVersionsToTitle(titleID, versions);
73 | }
74 | }
75 | }
76 | }
77 |
78 | async function scrapeYellows8() {
79 | let response = await axios.get('https://yls8.mtheall.com/ninupdates/eshop/verlist_parser.php');
80 | let $ = cheerio.load(response.data);
81 | const anchors = $('table#table tbody tr td a');
82 | const links = anchors.toArray().map(anchor => `https://yls8.mtheall.com/ninupdates/eshop/verlist_parser.php${anchor.attribs.href}`);
83 |
84 | for (const link of links) {
85 | response = await axios.get(link);
86 | $ = cheerio.load(response.data);
87 | const tableDatas = $('td');
88 |
89 | 		// * Page is structured as a table of rows with 2 td's each
90 | for (let i = 0; i < tableDatas.length; i+=2) {
91 | let titleID = $(tableDatas[i]).text();
92 | const version = $(tableDatas[i+1]).text();
93 |
94 | addVersionsToTitle(titleID, [version]);
95 |
96 | 			// * Catch any versions assigned to updates or DLC which aren't assigned to the base title
97 | if (titleID.startsWith('0004000E') || titleID.startsWith('0004000C')) {
98 | titleID = `00040000${titleID.substring(8)}`;
99 |
100 | addVersionsToTitle(titleID, [version]);
101 | }
102 | }
103 | }
104 | }
105 |
106 |
107 | async function scrapeTagayaWUP() {
108 | const httpsAgent = new https.Agent({
109 | rejectUnauthorized: false,
110 | cert: fs.readFileSync('./certs/wiiu-common.crt'),
111 | key: fs.readFileSync('./certs/wiiu-common.key'),
112 | });
113 |
114 | // * Despite taking in a region and country, this server seems to
115 | // * ignore both and send the same data no matter what. Just use
116 | // * USA/US for now
117 | let response = await axios.get('https://tagaya-wup.cdn.nintendo.net/tagaya/versionlist/USA/US/latest_version', {
118 | validateStatus: () => {
119 | return true;
120 | },
121 | httpsAgent
122 | });
123 | const data = xmlParser(response.data).toObject();
124 | const latestVersion = Number(data.version_list_info.version);
125 |
126 | for (let i = 1; i < latestVersion; i++) {
127 | response = await axios.get(`https://tagaya-wup.cdn.nintendo.net/tagaya/versionlist/USA/US/list/${i}.versionlist`, {
128 | validateStatus: () => {
129 | return true;
130 | },
131 | httpsAgent
132 | });
133 |
134 | const xml = xmlParser(response.data).toObject();
135 |
136 | if (!xml.version_list) {
137 | continue;
138 | }
139 |
140 | let versionList = xml.version_list.titles.title;
141 |
142 | if (!versionList) {
143 | continue;
144 | }
145 |
146 | if (!Array.isArray(versionList)) {
147 | // * Only one item in the version list
148 | versionList = [versionList];
149 | }
150 |
151 | for (const title of versionList) {
152 | let titleID = title.id.replace('-', '').toUpperCase();
153 | const versions = [title.version];
154 |
155 | addVersionsToTitle(titleID, versions);
156 |
157 | 			// * Catch any versions assigned to updates or DLC which aren't assigned to the base title
158 | if (titleID.startsWith('0005000E') || titleID.startsWith('0005000C')) {
159 | titleID = `00050000${titleID.substring(8)}`;
160 |
161 | addVersionsToTitle(titleID, versions);
162 | }
163 | }
164 | }
165 | }
166 |
167 | async function scrapeTagayaCTR() {
168 | const httpsAgent = new https.Agent({
169 | rejectUnauthorized: false,
170 | cert: fs.readFileSync('./certs/wiiu-common.crt'),
171 | key: fs.readFileSync('./certs/wiiu-common.key'),
172 | });
173 |
174 | // * This server has no way of getting old title versions
175 | const response = await axios.get('https://tagaya-ctr.cdn.nintendo.net/tagaya/versionlist', {
176 | responseType: 'arraybuffer',
177 | validateStatus: () => {
178 | return true;
179 | },
180 | httpsAgent
181 | });
182 |
183 | const versionList = response.data;
184 |
185 | for (let i = 0; i < versionList.length; i+=0x10) {
186 | 		let titleID = versionList.readBigUInt64LE(i).toString(16).padStart(16, '0').toUpperCase();
187 | const version = versionList.readUInt32LE(i+8);
188 |
189 | addVersionsToTitle(titleID, [version]);
190 |
191 | 		// * Catch any versions assigned to updates or DLC which aren't assigned to the base title
192 | if (titleID.startsWith('0004000E') || titleID.startsWith('0004000C')) {
193 | titleID = `00040000${titleID.substring(8)}`;
194 |
195 | addVersionsToTitle(titleID, [version]);
196 | }
197 | }
198 | }
199 |
200 | async function main() {
201 | await scrapeWiiUBrew(); // * Initial Wii U versions which may be missing from Tagaya
202 | await scrapeYellows8(); // * Contains 3DS version info going back as far as 2015. Thank you yellows, sorry for scraping you :(
203 | await scrapeTagayaWUP(); // * Anything not on wiiubrew
204 | await scrapeTagayaCTR(); // * Latest 3DS version list (does not contain legacy version lists)
205 |
206 | fs.writeFileSync('./title-versions.json', JSON.stringify(titles));
207 | }
208 |
209 |
210 | main();
--------------------------------------------------------------------------------
/spotpass/wup-boss-apps.json:
--------------------------------------------------------------------------------
1 | [
2 | {
3 | "app_id": "WJDaV6ePVgrS0TRa",
4 | "tasks": [
5 | "olvinfo"
6 | ]
7 | },
8 | {
9 | "app_id": "VFoY6V7u7UUq1EG5",
10 | "tasks": [
11 | "olvinfo",
12 | "oltopic"
13 | ]
14 | },
15 | {
16 | "app_id": "8MNOVprfNVAJjfCM",
17 | "tasks": [
18 | "olvinfo"
19 | ]
20 | },
21 | {
22 | "app_id": "v1cqzWykBKUg0rHQ",
23 | "tasks": [
24 | "solv"
25 | ]
26 | },
27 | {
28 | "app_id": "bieC9ACJlisFg5xS",
29 | "tasks": [
30 | "solv"
31 | ]
32 | },
33 | {
34 | "app_id": "tOaQcoBLtPTgVN3Y",
35 | "tasks": [
36 | "solv"
37 | ]
38 | },
39 | {
40 | "app_id": "HX8a16MMNn6i1z0Y",
41 | "tasks": [
42 | "wood1",
43 | "woodBGM"
44 | ]
45 | },
46 | {
47 | "app_id": "07E3nY6lAwlwrQRo",
48 | "tasks": [
49 | "wood1",
50 | "woodBGM"
51 | ]
52 | },
53 | {
54 | "app_id": "8UsM86l8xgkjFk8z",
55 | "tasks": [
56 | "wood1",
57 | "woodBGM"
58 | ]
59 | },
60 | {
61 | "app_id": "IXmFUqR2qenXfF61",
62 | "tasks": [
63 | "promo1",
64 | "promo2",
65 | "promo3",
66 | "push"
67 | ]
68 | },
69 | {
70 | "app_id": "BMQAm5iUVtPsJVsU",
71 | "tasks": [
72 | "sysmsg1",
73 | "sysmsg2"
74 | ]
75 | },
76 | {
77 | "app_id": "LRmanFo4Tx3kEGDp",
78 | "tasks": [
79 | "sysmsg1",
80 | "sysmsg2"
81 | ]
82 | },
83 | {
84 | "app_id": "TZr27FE8wzKiEaTO",
85 | "tasks": [
86 | "sysmsg1",
87 | "sysmsg2"
88 | ]
89 | },
90 | {
91 | "app_id": "JnIrm9c4E9JBmxBo",
92 | "tasks": [
93 | "news"
94 | ]
95 | },
96 | {
97 | "app_id": "dadlI27Ww8H2d56x",
98 | "tasks": [
99 | "news",
100 | "plyrepo"
101 | ]
102 | },
103 | {
104 | "app_id": "RaPn5saabzliYrpo",
105 | "tasks": [
106 | "news",
107 | "plyrepo"
108 | ]
109 | },
110 | {
111 | "app_id": "14VFIK3rY2SP0WRE",
112 | "tasks": [
113 | "news",
114 | "plyrepo"
115 | ]
116 | },
117 | {
118 | "app_id": "RbEQ44t2AocC4rvu",
119 | "tasks": [
120 | "news"
121 | ]
122 | },
123 | {
124 | "app_id": "287gv3WZdxo1QRhl",
125 | "tasks": [
126 | "news"
127 | ]
128 | },
129 | {
130 | "app_id": "bb6tOEckvgZ50ciH",
131 | "tasks": [
132 | "optdat2",
133 | "schdat2",
134 | "schdata",
135 | "optdata"
136 | ]
137 | },
138 | {
139 | "app_id": "rjVlM7hUXPxmYQJh",
140 | "tasks": [
141 | "optdat2",
142 | "schdat2",
143 | "schdata",
144 | "optdata2",
145 | "schdata2",
146 | "test",
147 | "preport",
148 | "otpdata2",
149 | "scddata2",
150 | "otpdat2",
151 | "optdata"
152 | ]
153 | },
154 | {
155 | "app_id": "zvGSM4kOrXpkKnpT",
156 | "tasks": [
157 | "optdat2",
158 | "schdat2",
159 | "schdata",
160 | "optdata"
161 | ]
162 | },
163 | {
164 | "app_id": "m8KJPtmPweiPuETE",
165 | "tasks": [
166 | "sp1_ans",
167 | "sp1_rnk",
168 | "sp1_evt"
169 | ]
170 | },
171 | {
172 | "app_id": "pO72Hi5uqf5yuNd8",
173 | "tasks": [
174 | "sp1_ans",
175 | "sp1_rnk",
176 | "sp1_evt"
177 | ]
178 | },
179 | {
180 | "app_id": "4m8Xme1wKgzwslTJ",
181 | "tasks": [
182 | "sp1_ans",
183 | "sp1_rnk",
184 | "sp1_evt"
185 | ]
186 | },
187 | {
188 | "app_id": "ESLqtAhxS8KQU4eu",
189 | "tasks": [
190 | "CHARA"
191 | ]
192 | },
193 | {
194 | "app_id": "vGwChBW1ExOoHDsm",
195 | "tasks": [
196 | "CHARA"
197 | ]
198 | },
199 | {
200 | "app_id": "IeUc4hQsKKe9rJHB",
201 | "tasks": [
202 | "CHARA"
203 | ]
204 | },
205 | {
206 | "app_id": "4krJA4Gx3jF5nhQf",
207 | "tasks": [
208 | "histgrm"
209 | ]
210 | },
211 | {
212 | "app_id": "9jRZEoWYLc3OG9a8",
213 | "tasks": [
214 | "histgrm"
215 | ]
216 | },
217 | {
218 | "app_id": "VWqUTspR5YtjDjxa",
219 | "tasks": [
220 | "histgrm"
221 | ]
222 | },
223 | {
224 | "app_id": "Ge1KtMu8tYlf4AUM",
225 | "tasks": [
226 | "notice1"
227 | ]
228 | },
229 | {
230 | "app_id": "gycVtTzCouZmukZ6",
231 | "tasks": [
232 | "NEWS",
233 | "amiibo",
234 | "friend",
235 | "CONQ"
236 | ]
237 | },
238 | {
239 | "app_id": "o2Ug1pIp9Uhri6Nh",
240 | "tasks": [
241 | "amiibo",
242 | "NEWS",
243 | "friend",
244 | "CONQ"
245 | ]
246 | },
247 | {
248 | "app_id": "n6rAJ1nnfC1Sgcpl",
249 | "tasks": [
250 | "amiibo",
251 | "NEWS",
252 | "friend",
253 | "CONQ"
254 | ]
255 | },
256 | {
257 | "app_id": "CHUN6T1m7Xk4EBg4",
258 | "tasks": [
259 | "ptcbnws"
260 | ]
261 | },
262 | {
263 | "app_id": "zyXdCW9jGdi9rjaz",
264 | "tasks": [
265 | "news"
266 | ]
267 | },
268 | {
269 | "app_id": "jPHLlJr2fJyTzffp",
270 | "tasks": [
271 | "news"
272 | ]
273 | },
274 | {
275 | "app_id": "YsXB6IRGSI56tPxl",
276 | "tasks": [
277 | "news"
278 | ]
279 | },
280 | {
281 | "app_id": "Lbqp9Sg1i0xUzFFa",
282 | "tasks": [
283 | "PTS"
284 | ]
285 | },
286 | {
287 | "app_id": "DwU7n0FidGrLNiOo",
288 | "tasks": [
289 | "boss1",
290 | "boss2",
291 | "boss3"
292 | ]
293 | },
294 | {
295 | "app_id": "yIUkFmuGVkGP8pDb",
296 | "tasks": [
297 | "notice1"
298 | ]
299 | },
300 | {
301 | "app_id": "v4WRObSzD7VU3dcJ",
302 | "tasks": [
303 | "notice1"
304 | ]
305 | },
306 | {
307 | "app_id": "3zDjXIA57bSceyaw",
308 | "tasks": [
309 | "param"
310 | ]
311 | },
312 | {
313 | "app_id": "NL38jhExI2CQqhWd",
314 | "tasks": [
315 | "schdata",
316 | "optdata"
317 | ]
318 | },
319 | {
320 | "app_id": "sE6KwEpQYyg6tdU7",
321 | "tasks": [
322 | "schdata",
323 | "optdata"
324 | ]
325 | },
326 | {
327 | "app_id": "pTKZ9q5KrCP3gBag",
328 | "tasks": [
329 | "schdata",
330 | "optdata"
331 | ]
332 | },
333 | {
334 | "app_id": "CJT88RO008LAnD51",
335 | "tasks": [
336 | "PE_GAK",
337 | "PE_ZNG"
338 | ]
339 | },
340 | {
341 | "app_id": "FyyMFzEByuQJc6sJ",
342 | "tasks": [
343 | "PTS"
344 | ]
345 | },
346 | {
347 | "app_id": "A4yyXWKZZUToFtrt",
348 | "tasks": [
349 | "PTS"
350 | ]
351 | },
352 | {
353 | "app_id": "HauaFQ1sPsnQ6rBj",
354 | "tasks": [
355 | "annouce"
356 | ]
357 | },
358 | {
359 | "app_id": "qDUeFmk0Az71nHyD",
360 | "tasks": [
361 | "DLCINFO"
362 | ]
363 | },
364 | {
365 | "app_id": "yVsSPM2E0DEOxroT",
366 | "tasks": [
367 | "DLCINFO"
368 | ]
369 | },
370 | {
371 | "app_id": "Xw6OvZkQofQ3O8Bi",
372 | "tasks": [
373 | "DLCINFO"
374 | ]
375 | },
376 | {
377 | "app_id": "LUQX5swEjBUPQ8nR",
378 | "tasks": [
379 | "OR2H000"
380 | ]
381 | },
382 | {
383 | "app_id": "y4pXrgLe0JGao3No",
384 | "tasks": [
385 | "OR2H000"
386 | ]
387 | },
388 | {
389 | "app_id": "j01mRJ9sNe00MWPC",
390 | "tasks": [
391 | "CHR_GAK",
392 | "CHR_ZNG"
393 | ]
394 | },
395 | {
396 | "app_id": "P45xuCJjERf6MNWG",
397 | "tasks": [
398 | "movie"
399 | ]
400 | },
401 | {
402 | "app_id": "PQWAfUmDpVo0u9Fi",
403 | "tasks": [
404 | "Card"
405 | ]
406 | },
407 | {
408 | "app_id": "EA9wpEnmZmeX70YS",
409 | "tasks": [
410 | "ADDCHR0"
411 | ]
412 | },
413 | {
414 | "app_id": "Iq5CNAngvR9auXFO",
415 | "tasks": [
416 | "param"
417 | ]
418 | },
419 | {
420 | "app_id": "ZtwtVqJkmGE2LloD",
421 | "tasks": [
422 | "PTS"
423 | ]
424 | },
425 | {
426 | "app_id": "eAzIbHvwKNHwz85M",
427 | "tasks": [
428 | "news"
429 | ]
430 | },
431 | {
432 | "app_id": "z4d72slRF5GX0cEr",
433 | "tasks": [
434 | "annouce"
435 | ]
436 | },
437 | {
438 | "app_id": "5iKeqk6fQq3wwfgy",
439 | "tasks": [
440 | "OR2H000"
441 | ]
442 | },
443 | {
444 | "app_id": "rvI5oS5jSZ0aLpeo",
445 | "tasks": [
446 | "demo1"
447 | ]
448 | },
449 | {
450 | "app_id": "R5WU9gZtFShZlf6j",
451 | "tasks": [
452 | "movie",
453 | "Histo"
454 | ]
455 | },
456 | {
457 | "app_id": "78QqMzbyBbwEpzVg",
458 | "tasks": [
459 | "Tvars"
460 | ]
461 | },
462 | {
463 | "app_id": "I8IZTXQyDnUnFo77",
464 | "tasks": [
465 | "Tvars"
466 | ]
467 | },
468 | {
469 | "app_id": "XcawL2u1CU624gg3",
470 | "tasks": [
471 | "histgrm"
472 | ]
473 | },
474 | {
475 | "app_id": "uNRNThGetHLXasV9",
476 | "tasks": [
477 | "param"
478 | ]
479 | },
480 | {
481 | "app_id": "MBOU6MNVQRdTT1QA",
482 | "tasks": [
483 | "param"
484 | ]
485 | },
486 | {
487 | "app_id": "zBlJpj2pXcFeJYJI",
488 | "tasks": [
489 | "param"
490 | ]
491 | },
492 | {
493 | "app_id": "dHWbU7brnq9QKaKA",
494 | "tasks": [
495 | "param"
496 | ]
497 | }
498 | ]
499 |
--------------------------------------------------------------------------------
/theme-shop/download.py:
--------------------------------------------------------------------------------
1 | import pycurl, os, sys, binascii, hashlib, struct
2 | from io import BytesIO, SEEK_CUR
3 | from boss import BOSSFile
4 | from pathlib import Path
5 | from pycurl import Curl
6 |
7 | CTR_PEM_PATH = os.environ["CTR_PROD_3"]
8 | OUTPUT_PATH = os.curdir
9 | DL_MAX_CONN = 100
10 |
11 | COUNTRIES = [
12 | 'JP', 'AI', 'AG', 'AR', 'AW', 'BS', 'BB', 'BZ',
13 | 'BO', 'BR', 'VG', 'CA', 'KY', 'CL', 'CO', 'CR',
14 | 'DM', 'DO', 'EC', 'SV', 'GF', 'GD', 'GP', 'GT',
15 | 'GY', 'HT', 'HN', 'JM', 'MQ', 'MX', 'MS', 'AN',
16 | 'NI', 'PA', 'PY', 'PE', 'KN', 'LC', 'VC', 'SR',
17 | 'TT', 'TC', 'US', 'UY', 'VI', 'VE', 'AL', 'AU',
18 | 'AT', 'BE', 'BA', 'BW', 'BG', 'HR', 'CY', 'CZ',
19 | 'DK', 'EE', 'FI', 'FR', 'DE', 'GR', 'HU', 'IS',
20 | 'IE', 'IT', 'LV', 'LS', 'LI', 'LT', 'LU', 'MK',
21 | 'MT', 'ME', 'MZ', 'NA', 'NL', 'NZ', 'NO', 'PL',
22 | 'PT', 'RO', 'RU', 'RS', 'SK', 'SI', 'ZA', 'ES',
23 | 'SZ', 'SE', 'CH', 'TR', 'GB', 'ZM', 'ZW', 'AZ',
24 | 'MR', 'ML', 'NE', 'TD', 'SD', 'ER', 'DJ', 'SO',
25 | 'AD', 'GI', 'GG', 'IM', 'JE', 'MC', 'TW', 'KR',
26 | 'HK', 'MO', 'ID', 'SG', 'TH', 'PH', 'MY', 'CN',
27 | 'AE', 'IN', 'EG', 'OM', 'QA', 'KW', 'SA', 'SY',
28 | 'BH', 'JO', 'SM', 'VA', 'BM'
29 | ]
30 |
31 | LANGUAGES = [ 'ja', 'en', 'fr', 'de', 'it', 'es', 'zh', 'ko', 'nl', 'pt', 'ru', 'zh_trad' ]
32 |
33 | BOSS_IDS = {
34 | "EUR": "dMtiFHzm5OOf0y2O",
35 | "USA": "YapN7dMun6U6CVPx",
36 | "JPN": "110Rzo2E1vYSfAz6",
37 | }
38 |
39 | class DownloadTask:
40 | curl: Curl
41 | filename: str
42 | url: str
43 | region: str
44 | country: str
45 | language: str
46 | boss_id: str
47 | buffer: BytesIO
48 | header_buffer: BytesIO
49 |
50 | def __init__(self) -> None:
51 | self.curl = Curl()
52 |
53 | class FileListEntry:
54 | itemcode_1: str
55 | price: str
56 | itemcode_2: str
57 | unk_id: str
58 | unknum: int
59 | timestamp: int
60 |
61 | @classmethod
62 | def parse_from_tsv(cls, tsv_line: list[str]):
63 | o = cls()
64 | o.itemcode_1 = tsv_line[0]
65 | o.price = tsv_line[1]
66 | o.itemcode_2 = tsv_line[2]
67 | o.unk_id = tsv_line[3]
68 | o.unknum = int(tsv_line[4])
69 | o.timestamp = int(tsv_line[5])
70 | return o
71 |
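# * Filelist layout, as inferred from the parser below (an assumption, not an official spec):
# *   bytes 0-39  - ASCII hex SHA-1 of everything after the first 42 bytes
# *   bytes 40-41 - skipped (presumably the CRLF after the hash)
# *   remainder   - UTF-8 text whose first line is the expected total file size and whose
# *                 following CRLF-separated lines are tab-separated entries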
72 | def parse_filelist(path: str) -> list[FileListEntry]:
73 | actualsize = os.path.getsize(path)
74 |
75 | with open(path, "rb") as f:
76 | sha1_hash = binascii.unhexlify(f.read(40))
77 | f.seek(2, SEEK_CUR)
78 | actual_data = f.read().decode("utf-8")
79 |
80 | # * first verify the sha1 checksum of the data
81 | checksum = hashlib.sha1(actual_data.encode("utf-8")).digest()
82 | if checksum != sha1_hash:
83 | raise Exception(f"hash mismatch, expected {sha1_hash.hex()} but got {checksum.hex()} instead")
84 |
85 | lines = actual_data.rstrip("\r\n").split("\r\n") # * CRLF? really?
86 | expected_content_len = int(lines[0].lstrip(" "))
87 | lines = lines[1:]
88 |
89 | if not lines:
90 | return []
91 |
92 | # * verify that the file is of correct size
93 | if actualsize != expected_content_len:
94 | raise Exception(f"File is incomplete; expected size is {expected_content_len}, got {actualsize} instead")
95 |
96 | return [ FileListEntry.parse_from_tsv(line.split("\t")) for line in lines ]
97 |
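# * Rounds x up to the next multiple of y, assuming y is a power of two,
# * e.g. align(0x1F3, 16) == 0x200 and align(0x200, 16) == 0x200.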
98 | def align(x: int, y: int) -> int:
99 | return (x + y - 1) & ~(y - 1)
100 |
101 | class ThmTopCategory:
102 | name: str # * utf-16 name, wchar[96] (192 bytes)
103 | category_id: int # * u32
104 | unknown_id: int # * u32
105 | image_descriptor: tuple[int, int]
106 |
107 | def __init__(self) -> None:
108 | self.image_descriptor = (0,0)
109 |
110 | @classmethod
111 | def from_bytes(cls, b: bytes):
112 | a = cls()
113 | dt = struct.unpack("<192s4I" if len(b) == 208 else "<192s2I", b)
114 | a.name = dt[0].decode("utf-16-le").rstrip("\x00")
115 | a.category_id = dt[1]
116 | a.unknown_id = dt[2]
117 | if len(b) == 208:
118 | a.image_descriptor = (dt[3], dt[4])
119 | return a
120 |
121 | def __str__(self) -> str:
122 | return \
123 | f"Name: {self.name}\n" \
124 | f"Category ID: {self.category_id}\n" \
125 | f"Unknown ID: {self.unknown_id}\n" + \
126 |             ("Has image: No\n" if self.image_descriptor == (0,0) else f"Has image: Yes, offset: {hex(self.image_descriptor[1])}, size: {hex(self.image_descriptor[0])}\n")
127 |
128 |
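# * thmtop.bin payload layout, as reconstructed from load_from_bossfile() below
# * (an inference from this parser, not an official spec): a 16-byte header (see the
# * field comments), then the top-image (size, offset) pairs, then the "home" categories
# * (208 bytes each, icon descriptor included), and finally, aligned to 16 bytes past the
# * end of the last home icon, the "all" categories (200 bytes each).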
129 | class ThmTopFile:
130 | basefile: BOSSFile
131 |
132 | version: int # * u8
133 | topimg_count: int # * u8
134 |     home_theme_category_count: int # * u8, number of theme categories with icons that are shown when the theme shop is opened
135 |     all_theme_category_count: int # * u8, number of theme categories (without icons) that are shown when the user presses "show more"
136 | all_theme_category_offset: int # * u32
137 |
138 |     unk_int2: int # * u64; seems to be 0x1 for USA and 0x2 for JPN/EUR. Purpose unknown; doesn't affect the data, possibly changes how it's rendered on a 3DS
139 |
140 | topimg_descriptors: list[tuple[int, int]] # * these are pointing to the JPEG images shown on the top screen
141 | """
142 | tuple of (size, offset)
143 | """
144 |
145 | home_theme_categories: list[ThmTopCategory]
146 | all_theme_categories: list[ThmTopCategory]
147 |
148 | def __str__(self) -> str:
149 | return \
150 | f"Version: {self.version}\n" \
151 | f"Top image count: {self.topimg_count}\n" \
152 | f"Home theme category count: {self.home_theme_category_count}\n" \
153 | f"All theme category count: {self.all_theme_category_count}\n" \
154 | f"All theme category offset: 0x{self.all_theme_category_offset:X}\n" \
155 | f"Unknown: {self.unk_int2}\n"
156 |
157 | def load_from_bossfile(self, file: BOSSFile):
158 | self.basefile = file
159 |
160 | with BytesIO(file.payload) as payload:
161 | self.version = payload.read(1)[0]
162 | self.topimg_count = payload.read(1)[0]
163 | self.home_theme_category_count = payload.read(1)[0]
164 | self.all_theme_category_count = payload.read(1)[0]
165 | self.all_theme_category_offset = int.from_bytes(payload.read(4), "little")
166 | self.unk_int2 = int.from_bytes(payload.read(8), "little")
167 |
168 | self.topimg_descriptors = []
169 |
170 | for _ in range(self.topimg_count):
171 | b = struct.unpack("<2I", payload.read(8))
172 | self.topimg_descriptors.append(b)
173 |
174 | self.home_theme_categories = []
175 |
176 | for _ in range(self.home_theme_category_count):
177 | self.home_theme_categories.append(ThmTopCategory.from_bytes(payload.read(208)))
178 |
179 | self.all_theme_categories = []
180 |
181 |             # * apparently the "all categories" count can legitimately be zero
182 | if self.all_theme_category_count == 0:
183 | return
184 |
185 | # * must seek to the last category icon's end offset here, and align by 16, to get to the entire category list
186 | s = self.home_theme_categories[-1].image_descriptor
187 | allcategory_off = align(s[1] + s[0], 16)
188 | payload.seek(allcategory_off)
189 |
190 | for _ in range(self.all_theme_category_count):
191 | self.all_theme_categories.append(ThmTopCategory.from_bytes(payload.read(200)))
192 |
193 | def create_thmtop_indata(region: str, boss_id: str, country: str, language: str):
194 | _url = f"https://npdl.cdn.nintendowifi.net/p01/nsa/{boss_id}/thmtop/{country}/{language}/top?tm=4"
195 | _path = os.path.join(OUTPUT_PATH, region, country, language, "thmtop.bin")
196 | return (_url, _path, boss_id, region, country, language)
197 |
198 | def create_thmlist_indata(region: str, boss_id: str, country: str, language: str, category: str):
199 | _url = f"https://npdl.cdn.nintendowifi.net/p01/nsa/{boss_id}/thmlist/{country}/{language}/{category}?tm=4"
200 | _path = os.path.join(OUTPUT_PATH, region, country, language, f"ct_{category}.bin")
201 | return (_url, _path, boss_id, region, country, language)
202 |
203 | def create_single_theme_detail_indata(region: str, boss_id: str, country: str, language: str, thmid: str):
204 | _url = f"https://npdl.cdn.nintendowifi.net/p01/nsa/{boss_id}/thmdtls/{country}/{language}/{thmid}?tm=4"
205 | _path = os.path.join(OUTPUT_PATH, region, country, language, "thmdtls", f"{thmid}.bin")
206 | return (_url, _path, boss_id, region, country, language)
207 |
208 | def create_all_theme_detail_indata(region: str, boss_id: str, country: str, language: str, index: int):
209 | _url = f"https://npfl.c.app.nintendowifi.net/p01/filelist/{boss_id}/thmdtls?c={country}&l={language}&a3={index}"
210 | _path = os.path.join(OUTPUT_PATH, region, country, language, f"thmdtls_filelist_{index}.txt")
211 | return (_url, _path, boss_id, region, country, language)
212 |
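# * Simple pycurl multi-handle pool: a fixed set of easy handles is reused across the
# * queued URLs. On HTTP 200 the body and headers are written to <filename> and
# * <filename>_headers.txt, non-200 responses are discarded, and any transport error
# * aborts the whole run.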
213 | def run_downloader(queue: list[tuple[str, str, str, str, str, str]], num_conn: int = 50):
214 | num_urls = len(queue)
215 | num_conn = min(num_conn, num_urls)
216 |
217 | handles: list[DownloadTask] = []
218 | lookup: dict[Curl, DownloadTask] = { }
219 | m = pycurl.CurlMulti()
220 | for _ in range(num_conn):
221 | c = DownloadTask()
222 | c.curl.setopt(pycurl.SSL_VERIFYHOST, False)
223 | c.curl.setopt(pycurl.SSL_VERIFYSTATUS, False)
224 | c.curl.setopt(pycurl.SSL_VERIFYPEER, False)
225 | c.curl.setopt(pycurl.SSLCERT, CTR_PEM_PATH)
226 | handles.append(c)
227 | lookup[c.curl] = c
228 |
229 | freelist = handles[:]
230 | num_processed = 0
231 | while num_processed < num_urls:
232 | while queue and freelist:
233 | url, fname, boss_id, region, country, language = queue.pop(0)
234 | c = freelist.pop()
235 |             c.boss_id, c.region, c.country, c.language = boss_id, region, country, language
236 | c.buffer = BytesIO()
237 | c.filename = fname
238 | c.header_buffer = BytesIO()
239 | c.curl.setopt(pycurl.URL, url)
240 | c.curl.setopt(pycurl.WRITEFUNCTION, c.buffer.write)
241 | c.curl.setopt(pycurl.HEADERFUNCTION, c.header_buffer.write)
242 | m.add_handle(c.curl)
243 | c.url = url
244 | while 1:
245 | ret, _ = m.perform()
246 | if ret != pycurl.E_CALL_MULTI_PERFORM:
247 | break
248 | while 1:
249 | num_q, ok_list, err_list = m.info_read()
250 | for y in ok_list:
251 | c = lookup[y]
252 | m.remove_handle(c.curl)
253 | if c.curl.getinfo(pycurl.RESPONSE_CODE) == 200:
254 | Path(os.path.dirname(c.filename)).mkdir(parents=True, exist_ok=True)
255 | with open(c.filename, "wb") as f:
256 | f.write(c.buffer.getvalue())
257 | with open(f"{c.filename}_headers.txt", "wb") as f:
258 | f.write(c.header_buffer.getvalue())
259 | print("200:", c.filename, c.url)
260 | c.header_buffer.close()
261 | del c.header_buffer
262 | c.buffer.close()
263 | del c.buffer
264 | freelist.append(c)
265 | for y, errno, errmsg in err_list:
266 | c = lookup[y]
267 | c.buffer.close()
268 | del c.buffer
269 | c.header_buffer.close()
270 | del c.header_buffer
271 | m.remove_handle(c.curl)
272 | sys.exit("Failed: {} {} {} {}".format(c.filename, c.url, errno, errmsg))
273 | num_processed = num_processed + len(ok_list) + len(err_list)
274 | if num_q == 0:
275 | break
276 | m.select(1.0)
277 |
278 | for c in handles:
279 | if hasattr(c, "buffer") and c.buffer is not None:
280 | c.buffer.close()
281 | del c.buffer
282 | if hasattr(c, "header_buffer") and c.header_buffer is not None:
283 | c.header_buffer.close()
284 | del c.header_buffer
285 | c.curl.close()
286 | m.close()
287 |
288 | q = []
289 |
290 | for region, boss_id in BOSS_IDS.items():
291 | for country in COUNTRIES:
292 | for language in LANGUAGES:
293 | for i in range(10):
294 | # * we'll do like 10 max because there is no way in hell there will be more than 2000 themes in any given region
295 | q.append(create_all_theme_detail_indata(region, boss_id, country, language, i))
296 | q.append(create_thmtop_indata(region, boss_id, country, language))
297 |
298 | run_downloader(q, num_conn=DL_MAX_CONN)
299 |
300 | q.clear()
301 |
302 | for region in BOSS_IDS:
303 | countries_dled = os.listdir(os.path.join(OUTPUT_PATH, region))
304 | for country, country_path in [(x, os.path.join(OUTPUT_PATH, region, x)) for x in countries_dled]:
305 | languages_dled = os.listdir(country_path)
306 | for language, language_path in [(x, os.path.join(country_path, x)) for x in languages_dled]:
307 | thmtop_file = os.path.join(language_path, "thmtop.bin")
308 | thmdtls_filelists = [ os.path.join(language_path, x) for x in os.listdir(language_path) if x.startswith("thmdtls_filelist") and not "headers" in x ]
309 |
310 | if os.path.isfile(thmtop_file):
311 | bf = BOSSFile()
312 | bf.load(thmtop_file)
313 | thmtop = ThmTopFile()
314 | thmtop.load_from_bossfile(bf)
315 |
316 | for category in thmtop.all_theme_categories:
317 | q.append(create_thmlist_indata(region, BOSS_IDS[region], country, language, str(category.category_id)))
318 |
319 | if len(thmdtls_filelists) > 0:
320 | for thmdtls_filelist in thmdtls_filelists:
321 | filelist = parse_filelist(thmdtls_filelist)
322 | for filelist_entry in filelist:
323 |                             # * item code 1 and item code 2 were identical in every case I tested,
324 |                             # * but just to be safe, add them separately if they ever differ
325 |
326 | q.append(create_single_theme_detail_indata(region, BOSS_IDS[region], country, language, filelist_entry.itemcode_1))
327 | if filelist_entry.itemcode_1 != filelist_entry.itemcode_2:
328 | q.append(create_single_theme_detail_indata(region, BOSS_IDS[region], country, language, filelist_entry.itemcode_2))
329 |
330 | run_downloader(q, num_conn=DL_MAX_CONN)
331 |
--------------------------------------------------------------------------------
/mario-sonic-sochi-2014-wiiu/archive.py:
--------------------------------------------------------------------------------
1 | '''
2 | Pretendo Network 2023
3 |
4 | This will download rankings and "best runs" from M&S Sochi 2014 (WiiU) using NEX to automate the process
5 |
6 | Use at your own risk, we are not responsible for any bans
7 |
8 | Requires Python 3 and https://github.com/Kinnay/NintendoClients
9 | '''
10 |
11 | from nintendo.nex import backend, ranking, datastore, settings
12 | from nintendo import nnas
13 | from anynet import http
14 | import anyio
15 | import os
16 | import json
17 | import gzip
18 | import base64
19 | import logging
20 | import struct
21 |
22 | logging.basicConfig(level=logging.ERROR)
23 |
24 | json_file = open('config.json')
25 | config = json.load(json_file)
26 |
27 | # * Unique device info
28 | DEVICE_ID = config["DEVICE_ID"]
29 | SERIAL_NUMBER = config["SERIAL_NUMBER"]
30 | SYSTEM_VERSION = config["SYSTEM_VERSION"]
31 | REGION_ID = config["REGION_ID"]
32 | COUNTRY_NAME = config["COUNTRY_NAME"]
33 | LANGUAGE = config["LANGUAGE"]
34 |
35 | USERNAME = config["USERNAME"] # * Nintendo Network ID username
36 | PASSWORD = config["PASSWORD"] # * Nintendo Network ID password
37 |
38 | '''
39 | Globals, set later
40 | '''
41 | nex_token = None
42 | ranking_client = None
43 | datastore_client = None
44 |
45 | TITLE_ID_US = 0x0005000010106900
46 | TITLE_VERSION_US = 0x20
47 | GAME_SERVER_ID = 0x10106900
48 | NEX_VERSION = 30413 # * 3.4.13
49 | ACCESS_KEY = "585214a5"
50 |
51 | async def main():
52 | os.makedirs("./data", exist_ok=True)
53 | os.makedirs("./data/rankings", exist_ok=True) # * Stores ranking data
54 | os.makedirs("./data/objects", exist_ok=True) # * Stores "best run" DataStore objects
55 | os.makedirs("./data/meta_binaries", exist_ok=True) # * Stores the meta binary for DataStore objects
56 |
57 | await nas_login() # * login with NNID
58 | await backend_setup() # * setup the backend NEX client and start scraping
59 |
60 | async def nas_login():
61 | global nex_token
62 |
63 | nas = nnas.NNASClient()
64 | nas.set_device(DEVICE_ID, SERIAL_NUMBER, SYSTEM_VERSION)
65 | nas.set_title(TITLE_ID_US, TITLE_VERSION_US)
66 | nas.set_locale(REGION_ID, COUNTRY_NAME, LANGUAGE)
67 |
68 | access_token = await nas.login(USERNAME, PASSWORD)
69 | nex_token = await nas.get_nex_token(access_token.token, GAME_SERVER_ID)
70 |
71 | async def backend_setup():
72 | global ranking_client
73 | global datastore_client
74 |
75 | s = settings.default()
76 | s.configure(ACCESS_KEY, NEX_VERSION)
77 |
78 | async with backend.connect(s, nex_token.host, nex_token.port) as be:
79 | async with be.login(str(nex_token.pid), nex_token.password) as client:
80 | ranking_client = ranking.RankingClient(client)
81 | datastore_client = datastore.DataStoreClient(client)
82 |
83 | await scrape() # * start ripping courses
84 |
85 | async def scrape():
86 | events = {
87 | 0x0A: "Alpine Skiing Downhill",
88 | 0x0B: "Ski Jumping Large Hill",
89 | 0x0C: "Freestyle Ski Cross",
90 | 0x0D: "Biathlon",
91 | 0x0E: "Snowboard Parallel Giant Slalom",
92 | 0x0F: "Snowboard Cross",
93 | 0x10: "Speed Skating 500m",
94 | 0x11: "Short Track Speed Skating 1000m",
95 | 0x12: "Skeleton",
96 | 0x13: "4-man Bobsleigh",
97 | 0x14: "Winter Sports Champion Race",
98 | 0x15: "Groove Pipe Snowboard",
99 | 0x16: "Roller Coaster Bobsleigh",
100 | 0x17: "Bullet Bill Sledge Race",
101 | }
102 |
103 | characters = {
104 | 0x04: "Daisy",
105 | 0x08: "Yoshi",
106 | 0x09: "Donkey Kong",
107 | 0x0A: "Bowser Jr.",
108 | 0x0B: "Sonic",
109 | 0x10: "Shadow",
110 | 0x12: "Metal Sonic",
111 | 0x14: "Vector",
112 | 0x15: "Mii",
113 | }
114 |
115 | countries = {
116 | 0x01: "Algeria", # * This is a guess based on the flag order in Sochi 2014
117 | 0x02: "Angola", # * This is a guess based on the flag order in Sochi 2014
118 | 0x03: "Ivory Coast", # * This is a guess based on the flag order in Sochi 2014
119 | 0x04: "Egypt", # * This is a guess based on the flag order in Sochi 2014
120 | 0x05: "Ethiopia", # * This is a guess based on the flag order in Sochi 2014
121 | 0x06: "Gambia", # * This is a guess based on the flag order in Sochi 2014
122 | 0x07: "Ghana", # * This is a guess based on the flag order in Sochi 2014
123 | 0x08: "Guinea", # * This is a guess based on the flag order in Sochi 2014
124 | 0x09: "Kenya", # * This is a guess based on the flag order in Sochi 2014
125 | 0x0A: "Morocco",
126 | 0x0B: "Nigeria",
127 | 0x0C: "South Africa", # * This is a guess based on the flag order in Sochi 2014
128 | 0x0D: "Senegal", # * This is a guess based on the flag order in Sochi 2014
129 | 0x0E: "Togo", # * This is a guess based on the flag order in Sochi 2014
130 | 0x0F: "Tunisia", # * This is a guess based on the flag order in Sochi 2014
131 | 0x10: "Argentina", # * This is a guess based on the flag order in Sochi 2014
132 | 0x11: "Bahamas", # * This is a guess based on the flag order in Sochi 2014
133 | 0x12: "Bolivia", # * This is a guess based on the flag order in Sochi 2014
134 | 0x13: "Brazil",
135 | 0x14: "Canada", # * This is a guess based on the flag order in Sochi 2014
136 | 0x15: "Chile",
137 | 0x16: "Colombia",
138 | 0x17: "Costa Rica", # * This is a guess based on the flag order in Sochi 2014
139 | 0x18: "Cuba", # * This is a guess based on the flag order in Sochi 2014
140 | 0x19: "Ecuador", # * This is a guess based on the flag order in Sochi 2014
141 | 0x1A: "Honduras", # * This is a guess based on the flag order in Sochi 2014
142 | 0x1B: "Jamaica", # * This is a guess based on the flag order in Sochi 2014
143 | 0x1C: "Mexico",
144 | 0x1D: "Paraguay", # * This is a guess based on the flag order in Sochi 2014
145 | 0x1E: "Peru", # * This is a guess based on the flag order in Sochi 2014
146 | 0x1F: "Trinidad", # * This is a guess based on the flag order in Sochi 2014
147 | 0x20: "Uruguay", # * This is a guess based on the flag order in Sochi 2014
148 | 0x21: "USA",
149 | 0x22: "China", # * This is a guess based on the flag order in Sochi 2014
150 | 0x23: "Hong Kong", # * This is a guess based on the flag order in Sochi 2014
151 | 0x24: "Indonesia", # * This is a guess based on the flag order in Sochi 2014
152 | 0x25: "India", # * This is a guess based on the flag order in Sochi 2014
153 | 0x26: "Iran", # * This is a guess based on the flag order in Sochi 2014
154 | 0x27: "Japan",
155 | 0x28: "Korea", # * This is a guess based on the flag order in Sochi 2014
156 | 0x29: "Saudi Arabia", # * This is a guess based on the flag order in Sochi 2014
157 | 0x2A: "Malaysia", # * This is a guess based on the flag order in Sochi 2014
158 | 0x2B: "Pakistan", # * This is a guess based on the flag order in Sochi 2014
159 | 0x2C: "Philippines", # * This is a guess based on the flag order in Sochi 2014
160 | 0x2D: "Singapore", # * This is a guess based on the flag order in Sochi 2014
161 | 0x2E: "Thailand", # * This is a guess based on the flag order in Sochi 2014
162 | 0x2F: "United Arab Emirates",
163 | 0x30: "Uzbekistan", # * This is a guess based on the flag order in Sochi 2014
164 | 0x31: "Austria", # * This is a guess based on the flag order in Sochi 2014
165 | 0x32: "Belgium", # * This is a guess based on the flag order in Sochi 2014
166 | 0x33: "Bulgaria", # * This is a guess based on the flag order in Sochi 2014
167 | 0x34: "Croatia", # * This is a guess based on the flag order in Sochi 2014
168 | 0x35: "Czechia", # * This is a guess based on the flag order in Sochi 2014
169 | 0x36: "Denmark", # * This is a guess based on the flag order in Sochi 2014
170 | 0x37: "Spain", # * This is a guess based on the flag order in Sochi 2014
171 | 0x38: "Finland", # * This is a guess based on the flag order in Sochi 2014
172 | 0x39: "France",
173 | 0x3A: "Great Britain",
174 | 0x3B: "Germany",
175 | 0x3C: "Greece",
176 | 0x3D: "Hungary", # * This is a guess based on the flag order in Sochi 2014
177 | 0x3E: "Ireland", # * This is a guess based on the flag order in Sochi 2014
178 | 0x3F: "Israel", # * This is a guess based on the flag order in Sochi 2014
179 | 0x40: "Italy",
180 | 0x41: "Netherlands",
181 | 0x42: "Norway", # * This is a guess based on the flag order in Sochi 2014
182 | 0x43: "Poland", # * This is a guess based on the flag order in Sochi 2014
183 | 0x44: "Portugal", # * This is a guess based on the flag order in Sochi 2014
184 | 0x45: "Romania", # * This is a guess based on the flag order in Sochi 2014
185 | 0x46: "Russia", # * This is a guess based on the flag order in Sochi 2014
186 | 0x47: "Slovenia", # * This is a guess based on the flag order in Sochi 2014
187 | 0x48: "Switzerland", # * This is a guess based on the flag order in Sochi 2014
188 | 0x49: "Slovakia", # * This is a guess based on the flag order in Sochi 2014
189 | 0x4A: "Sweden", # * This is a guess based on the flag order in Sochi 2014
190 | 0x4B: "Turkey", # * This is a guess based on the flag order in Sochi 2014
191 | 0x4C: "Ukraine", # * This is a guess based on the flag order in Sochi 2014
192 | 0x4D: "Australia",
193 | 0x4E: "Fiji", # * This is a guess based on the flag order in Sochi 2014
194 | 0x4F: "New Zealand", # * This is a guess based on the flag order in Sochi 2014
195 | }
196 |
197 | categories = [
198 | 0x0A, # * Alpine Skiing Downhill
199 | 0x0B, # * Ski Jumping Large Hill
200 | 0x0C, # * Freestyle Ski Cross
201 | 0x0D, # * Biathlon
202 | 0x0E, # * Snowboard Parallel Giant Slalom
203 | 0x0F, # * Snowboard Cross
204 | 0x10, # * Speed Skating 500m
205 | 0x11, # * Short Track Speed Skating 1000m
206 | 0x12, # * Skeleton
207 | 0x13, # * 4-man Bobsleigh
208 | 0x14, # * Winter Sports Champion Race
209 | 0x15, # * Groove Pipe Snowboard
210 | 0x16, # * Roller Coaster Bobsleigh
211 | 0x17, # * Bullet Bill Sledge Race
212 | ]
213 |
214 | for category in categories:
215 | '''
216 | Make 1 request to get the total number of entries first.
217 | Using mode 0 to get the latest results
218 | '''
219 | mode = 0
220 | order_param = ranking.RankingOrderParam()
221 | unique_id = 0
222 | principal_id = 0
223 |
224 | order_param.offset = 0
225 | order_param.count = 1
226 |
227 | result = await ranking_client.get_ranking(mode, category, order_param, unique_id, principal_id)
228 |
229 | offset = 0
230 | total = result.total
231 | remaining = result.total
232 |
233 | leaderboard = []
234 | seen_rankings = []
235 |
236 | principal_id = result.data[0].pid
237 |
238 | while remaining > 0:
239 | print("{0} on offset {1}. {2}/{3} remaining".format(events[category], offset, remaining, total))
240 |
241 | '''
242 | Using mode 1 as a hack to get around the 1000 offset limit.
243 | Mode 1 selects entries around "your" entry, but the server
244 | does not verify if the currently logged in user is the same
245 | as the user being used in this mode. Thus we can pretend to
246 | be the last user and continue past the offset limit
247 | '''
248 | mode = 1
249 | order_param = ranking.RankingOrderParam()
250 | unique_id = 0
251 |
252 | order_param.offset = 0
253 | order_param.count = 0xFF # * Max we can do in one go
254 | order_param.order_calc = 1 # * Ordinal (1234) rankings. Prevents duplicate ranking positions (no ties)
255 |
256 | result = await ranking_client.get_ranking(mode, category, order_param, unique_id, principal_id)
257 | rankings = result.data
258 |
259 | for user in rankings:
260 | ranking_entry = {
261 | "pid": user.pid,
262 | "unique_id": user.unique_id,
263 | "rank": user.rank,
264 | "category": user.category,
265 | "score": user.score,
266 | "groups": user.groups,
267 | "param": user.param,
268 | "common_data": base64.b64encode(user.common_data).decode("utf-8")
269 | }
270 |
271 | if ranking_entry in seen_rankings:
272 | # * Ignore duplicates
273 | continue
274 |
275 | [completed_country, completed_character] = user.groups
276 |
277 | common_data = user.common_data
278 |
279 | name_block = common_data[0x0:0x18]
280 | bpfc = common_data[0x18:] # * Contains a header, Mii data, and a footer?
281 | mii_data = bpfc[0x18:0x78] # * For easier access
282 |
283 | name = name_block.split(b'\x00\x00')[0].decode("utf-16be", "replace")
284 |
285 | param = datastore.DataStoreGetMetaParam()
286 | param.persistence_target.owner_id = user.pid
287 | param.persistence_target.persistence_id = 14
288 | param.result_option = 4
289 |
290 | result = await datastore_client.get_meta(param)
291 |
292 | if len(result.meta_binary) != 0:
293 | await write_to_file("./data/meta_binaries/{0}.bin.gz".format(result.data_id), result.meta_binary)
294 |
295 | user_data = {
296 | "event": category,
297 | "name": name,
298 | "pid": user.pid,
299 | "score": user.score,
300 | "place": user.rank,
301 | "mii_data": base64.b64encode(mii_data).decode("utf-8"),
302 | "meta_binary": {
303 | "id": result.data_id,
304 | "created": result.create_time.standard_datetime().isoformat(),
305 | "updated": result.update_time.standard_datetime().isoformat(),
306 | },
307 | "completed_country": {
308 | "id": completed_country,
309 | "name": countries.get(completed_country)
310 | },
311 | "completed_character": {
312 | "id": completed_character,
313 | "name": characters.get(completed_character, "Unknown")
314 | },
315 |                         "bpfc_data": base64.b64encode(bpfc).decode("utf-8"),
316 | "best_run": {
317 | "id": user.param,
318 | "created": "",
319 | "updated": "",
320 | },
321 | "ranking_raw": ranking_entry
322 | }
323 |
324 | '''
325 | Entry has a "best run" object in DataStore
326 | '''
327 | if user.param != 0:
328 | param = datastore.DataStoreGetMetaParam()
329 | param.data_id = user.param
330 |
331 | result = await datastore_client.get_meta(param)
332 |
333 |                             user_data["best_run"]["created"] = result.create_time.standard_datetime().isoformat()
334 |                             user_data["best_run"]["updated"] = result.update_time.standard_datetime().isoformat()
335 |
336 | if len(result.meta_binary) != 0:
337 | await write_to_file("./data/meta_binaries/{0}.bin.gz".format(result.data_id), result.meta_binary)
338 |
339 | param = datastore.DataStorePrepareGetParam()
340 | param.data_id = user.param
341 |
342 | result = await datastore_client.prepare_get_object(param)
343 |
344 | headers = {header.key: header.value for header in result.headers}
345 | url = result.url
346 |
347 | response = await http.get(url, headers=headers)
348 |
349 | await write_to_file("./data/objects/{0}.bin.gz".format(user.param), response.body)
350 |
351 | leaderboard.append(user_data)
352 | principal_id = user.pid
353 | offset += 1
354 | remaining -= 1
355 | seen_rankings.append(ranking_entry)
356 |
357 | print("Writing ./data/rankings/{0}.json.gz".format(category))
358 | leaderboard_data = json.dumps(leaderboard)
359 | await write_to_file("./data/rankings/{0}.json.gz".format(category), leaderboard_data.encode("utf-8"))
360 |
361 | async def write_to_file(path, data):
362 | with gzip.open(path, "w", compresslevel=9) as f:
363 | f.write(data)
364 |
365 | anyio.run(main)
366 |
--------------------------------------------------------------------------------
/super-mario-maker/archive.py:
--------------------------------------------------------------------------------
1 | import os
2 | import json
3 | import gzip
4 | import anyio
5 | from dotenv import load_dotenv
6 | from nintendo.nex import common, rmc, backend, datastore_smm, settings, streams
7 | from anynet import http
8 |
9 | load_dotenv()
10 |
11 | """
12 | Beginning of everything not implemented in NintendoClients
13 | """
14 |
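# * The classes below follow NintendoClients' common.Structure pattern so the
# * SMM-specific DataStore methods (method IDs 50, 54 and 72), which the library
# * does not implement, can be called by building the RMC request streams by hand.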
15 | class DataStoreGetCustomRankingByDataIdParam(common.Structure):
16 | def __init__(self):
17 | super().__init__()
18 | self.application_id = None
19 | self.data_id_list = None
20 | self.result_option = None
21 |
22 | def load(self, stream: streams.StreamIn, version: int):
23 | self.application_id = stream.u32()
24 | self.data_id_list = stream.list(stream.u64)
25 | self.result_option = stream.u8()
26 |
27 |     def save(self, stream: streams.StreamOut, version: int):
28 | stream.u32(self.application_id)
29 | stream.list(self.data_id_list, stream.u64)
30 | stream.u8(self.result_option)
31 |
32 | class DataStoreCustomRankingResult(common.Structure):
33 | def __init__(self):
34 | super().__init__()
35 | self.order = None
36 | self.score = None
37 | self.meta_info = None
38 |
39 | def load(self, stream: streams.StreamIn, version: int):
40 | self.order = stream.u32()
41 | self.score = stream.u32()
42 | self.meta_info = stream.extract(datastore_smm.DataStoreMetaInfo)
43 |
44 |     def save(self, stream: streams.StreamOut, version: int):
45 | stream.u32(self.order)
46 | stream.u32(self.score)
47 | stream.add(self.meta_info)
48 |
49 | class BufferQueueParam(common.Structure):
50 | def __init__(self):
51 | super().__init__()
52 | self.data_id = None
53 | self.slot = None
54 |
55 | def load(self, stream: streams.StreamIn, version: int):
56 | self.data_id = stream.u64()
57 | self.slot = stream.u32()
58 |
59 |     def save(self, stream: streams.StreamOut, version: int):
60 | stream.u64(self.data_id)
61 | stream.u32(self.slot)
62 |
63 | class DataStoreGetCourseRecordParam(common.Structure):
64 | def __init__(self):
65 | super().__init__()
66 | self.data_id = None
67 | self.slot = None
68 |
69 | def load(self, stream: streams.StreamIn, version: int):
70 | self.data_id = stream.u64()
71 | self.slot = stream.u8()
72 |
73 |     def save(self, stream: streams.StreamOut, version: int):
74 | stream.u64(self.data_id)
75 | stream.u8(self.slot)
76 |
77 | class DataStoreGetCourseRecordResult(common.Structure):
78 | def __init__(self):
79 | super().__init__()
80 | self.data_id = None
81 | self.slot = None
82 | self.first_pid = None
83 | self.best_pid = None
84 | self.best_score = None
85 | self.created_time = None
86 | self.updated_time = None
87 |
88 | def load(self, stream: streams.StreamIn, version: int):
89 | self.data_id = stream.u64()
90 | self.slot = stream.u8()
91 | self.first_pid = stream.u32()
92 | self.best_pid = stream.u32()
93 | self.best_score = stream.s32()
94 | self.created_time = stream.datetime()
95 | self.updated_time = stream.datetime()
96 |
97 |     def save(self, stream: streams.StreamOut, version: int):
98 | stream.u64(self.data_id)
99 | stream.u8(self.slot)
100 | stream.u32(self.first_pid)
101 | stream.u32(self.best_pid)
102 | stream.s32(self.best_score)
103 | stream.datetime(self.created_time)
104 | stream.datetime(self.updated_time)
105 |
106 | async def get_custom_ranking_by_data_id(param: DataStoreGetCustomRankingByDataIdParam) -> rmc.RMCResponse:
107 | # * --- request ---
108 | stream = streams.StreamOut(datastore_smm_client.settings)
109 | stream.add(param)
110 | data = await datastore_smm_client.client.request(datastore_smm_client.PROTOCOL_ID, 50, stream.get())
111 |
112 | # * --- response ---
113 | stream = streams.StreamIn(data, datastore_smm_client.settings)
114 |
115 | obj = rmc.RMCResponse()
116 | obj.ranking_result = stream.list(DataStoreCustomRankingResult)
117 | obj.results = stream.list(common.Result)
118 |
119 | return obj
120 |
121 | async def get_buffer_queue(param: BufferQueueParam) -> list[bytes]:
122 | # * --- request ---
123 | stream = streams.StreamOut(datastore_smm_client.settings)
124 | stream.add(param)
125 | data = await datastore_smm_client.client.request(datastore_smm_client.PROTOCOL_ID, 54, stream.get())
126 |
127 | # * --- response ---
128 | stream = streams.StreamIn(data, datastore_smm_client.settings)
129 |
130 | result = stream.list(stream.qbuffer)
131 |
132 | return result
133 |
134 | async def get_course_record(param: DataStoreGetCourseRecordParam) -> DataStoreGetCourseRecordResult:
135 | # * --- request ---
136 | stream = streams.StreamOut(datastore_smm_client.settings)
137 | stream.add(param)
138 | data = await datastore_smm_client.client.request(datastore_smm_client.PROTOCOL_ID, 72, stream.get())
139 |
140 | # * --- response ---
141 | stream = streams.StreamIn(data, datastore_smm_client.settings)
142 |
143 | result = stream.extract(DataStoreGetCourseRecordResult)
144 |
145 | return result
146 |
147 | """
148 | End of everything not implemented in NintendoClients
149 | """
150 |
151 | # * Dump using https://github.com/Stary2001/nex-dissector/tree/master/get_3ds_pid_password or from network dumps
152 | NEX_USERNAME = os.getenv('NEX_USERNAME')
153 | NEX_PASSWORD = os.getenv('NEX_PASSWORD')
154 | datastore_smm_client = None # * Gets set later
155 |
156 | KNOWN_BUFFER_QUEUE_SLOTS = [ 0, 2, 3 ]
157 |
158 | # * These apply to all objects at all times
159 | # * SMM has time-specific application IDs but these don't matter here
160 | KNOWN_CUSTOM_RANKING_APPLICATION_IDS = [
161 | 0,
162 | 2400,
163 | 3600,
164 | 200000000,
165 | 200002400,
166 | 200003600,
167 | 300000000,
168 | 300002400,
169 | 300003600,
170 | ]
171 |
172 | KNOWN_COURSE_RECORD_SLOTS = [ 0 ]
173 |
174 | if os.path.isfile('last-checked-timestamp.txt') and os.access('last-checked-timestamp.txt', os.R_OK):
175 | last_checked_timestamp_file = open('last-checked-timestamp.txt', 'r+')
176 | last_checked_timestamp = int(last_checked_timestamp_file.read())
177 | else:
178 | last_checked_timestamp_file = open('last-checked-timestamp.txt', 'x+')
179 | last_checked_timestamp_file.write("135271087238")
180 | last_checked_timestamp = 135271087238 # * 4-11-2015 15:50:06, date of first objects upload
181 |
182 | os.makedirs('./objects', exist_ok=True)
183 | os.makedirs('./metadata', exist_ok=True)
184 | os.makedirs('./custom-rankings', exist_ok=True)
185 | os.makedirs('./buffer-queues', exist_ok=True)
186 | os.makedirs('./course-records', exist_ok=True)
187 |
188 | def should_download_object(data_id: int, expected_object_size: int, expected_object_version: int) -> bool:
189 | object_path = './objects/%d_v%d.bin' % (data_id, expected_object_version)
190 | metadata_path = './metadata/%d_v%d.json.gz' % (data_id, expected_object_version)
191 | custom_rankings_path = './custom-rankings/%d_v%d.json.gz' % (data_id, expected_object_version)
192 | buffer_queues_path = './buffer-queues/%d_v%d.json.gz' % (data_id, expected_object_version)
193 | course_records_path = './course-records/%d_v%d.json.gz' % (data_id, expected_object_version)
194 |
195 | if not os.path.exists(object_path):
196 | return True
197 |
198 | if not os.path.exists(metadata_path):
199 | return True
200 |
201 | if not os.path.exists(custom_rankings_path):
202 | return True
203 |
204 | if not os.path.exists(buffer_queues_path):
205 | return True
206 |
207 | if not os.path.exists(course_records_path):
208 | return True
209 |
210 | if os.path.getsize(object_path) != expected_object_size:
211 | return True
212 |
213 | return False # * If nothing bails early, assume the object does not need to be redownloaded
214 |
215 | async def download_object_buffer_queues(buffer_queues: list[dict], data_id: int, slot: int):
216 | try:
217 | param = BufferQueueParam()
218 | param.data_id = data_id
219 | param.slot = slot
220 |
221 | response = await get_buffer_queue(param)
222 |
223 | buffer_queues.append({
224 | "slot": slot,
225 | "buffers": [buffer.hex() for buffer in response]
226 | })
227 |     except Exception:
228 | # * Eat errors
229 | # * SMM will throw errors if an object has no buffers in the slot
230 | return
231 |
232 | async def download_object_custom_ranking(custom_rankings: list[dict], data_id: int, application_id: int):
233 | try:
234 | param = DataStoreGetCustomRankingByDataIdParam()
235 | param.application_id = application_id
236 | param.data_id_list = [data_id]
237 | param.result_option = 0
238 |
239 | response = await get_custom_ranking_by_data_id(param)
240 |
241 | custom_rankings.append({
242 | "application_id": application_id,
243 | "score": response.ranking_result[0].score
244 | })
245 |     except Exception:
246 | # * Eat errors
247 | # * SMM will throw errors if an object has no ranking in the application ID
248 | return
249 |
250 | async def download_course_record(course_records: list[dict], data_id: int, slot: int):
251 | # * This is expected to fail OFTEN
252 | # * Only course objects have records
253 | try:
254 | param = DataStoreGetCourseRecordParam()
255 | param.data_id = data_id
256 | param.slot = slot
257 |
258 | response = await get_course_record(param)
259 |
260 | course_records.append({
261 | "slot": response.slot,
262 | "first_pid": response.first_pid,
263 | "best_pid": response.best_pid,
264 | "best_score": response.best_score,
265 | "created_time": {
266 | 'original_value': response.created_time.value(),
267 | 'standard': response.created_time.standard_datetime().strftime("%Y-%m-%d %H:%M:%S")
268 | },
269 | "updated_time": {
270 | 'original_value': response.updated_time.value(),
271 | 'standard': response.updated_time.standard_datetime().strftime("%Y-%m-%d %H:%M:%S")
272 | }
273 | })
274 |     except Exception:
275 | # * Eat errors
276 | # * SMM will throw errors if an object has no record in the slot
277 | return
278 |
279 | async def write_compressed_json(path: str, data: dict):
280 | with gzip.open(path, 'wb', compresslevel=6) as metadata_file:
281 | metadata_file.write(json.dumps(data).encode('utf-8'))
282 |
283 | async def process_datastore_object(obj: datastore_smm.DataStoreMetaInfo):
284 | param = datastore_smm.DataStorePrepareGetParam()
285 | param.data_id = obj.data_id
286 |
287 | get_object_response = await datastore_smm_client.prepare_get_object(param)
288 |
289 | s3_headers = {header.key: header.value for header in get_object_response.headers}
290 | s3_url = get_object_response.url
291 | data_id = get_object_response.data_id
292 | object_version = int(s3_url.split('/')[-1].split('-')[1].split('?')[0])
293 |
294 | if not should_download_object(data_id, get_object_response.size, object_version):
295 | # * Object data already downloaded
296 | print("Skipping %d" % data_id)
297 | return
298 |
299 | buffer_queues = []
300 |
301 | async with anyio.create_task_group() as tg:
302 | for slot in KNOWN_BUFFER_QUEUE_SLOTS:
303 | tg.start_soon(download_object_buffer_queues, buffer_queues, data_id, slot)
304 |
305 | custom_rankings = []
306 |
307 | async with anyio.create_task_group() as tg:
308 | for application_id in KNOWN_CUSTOM_RANKING_APPLICATION_IDS:
309 | tg.start_soon(download_object_custom_ranking, custom_rankings, data_id, application_id)
310 |
311 | course_records = []
312 |
313 | async with anyio.create_task_group() as tg:
314 | for slot in KNOWN_COURSE_RECORD_SLOTS:
315 | tg.start_soon(download_course_record, course_records, data_id, slot)
316 |
317 | s3_response = await http.get(s3_url, headers=s3_headers)
318 |
319 | object_file = open('./objects/%d_v%d.bin' % (data_id, object_version), 'wb')
320 | object_file.write(s3_response.body)
321 | object_file.close()
322 |
323 | metadata = {
324 | 'data_id': obj.data_id,
325 | 'owner_id': obj.owner_id,
326 | 'size': obj.size,
327 | 'name': obj.name,
328 | 'data_type': obj.data_type,
329 | 'meta_binary': obj.meta_binary.hex(),
330 | 'permission': {
331 | 'permission': obj.permission.permission,
332 | 'recipients': obj.permission.recipients
333 | },
334 | 'delete_permission': {
335 | 'permission': obj.delete_permission.permission,
336 | 'recipients': obj.delete_permission.recipients
337 | },
338 | 'create_time': {
339 | 'original_value': obj.create_time.value(),
340 | 'standard': obj.create_time.standard_datetime().strftime("%Y-%m-%d %H:%M:%S")
341 | },
342 | 'update_time': {
343 | 'original_value': obj.update_time.value(),
344 | 'standard': obj.update_time.standard_datetime().strftime("%Y-%m-%d %H:%M:%S")
345 | },
346 | 'period': obj.period,
347 | 'status': obj.status,
348 | 'referred_count': obj.referred_count,
349 | 'refer_data_id': obj.refer_data_id,
350 | 'flag': obj.flag,
351 | 'referred_time': {
352 | 'original_value': obj.referred_time.value(),
353 | 'standard': obj.referred_time.standard_datetime().strftime("%Y-%m-%d %H:%M:%S")
354 | },
355 | 'expire_time': {
356 | 'original_value': obj.expire_time.value(),
357 | 'standard': obj.expire_time.standard_datetime().strftime("%Y-%m-%d %H:%M:%S")
358 | },
359 | 'tags': obj.tags,
360 | 'ratings': [
361 | {
362 | 'slot': rating.slot,
363 | 'info': {
364 | 'total_value': rating.info.total_value,
365 | 'count': rating.info.count,
366 | 'initial_value': rating.info.initial_value
367 | }
368 | }
369 | for rating in obj.ratings
370 | ]
371 | }
372 |
373 | files = [
374 | ('./metadata/%d_v%d.json.gz' % (data_id, object_version), metadata),
375 | ('./custom-rankings/%d_v%d.json.gz' % (data_id, object_version), custom_rankings),
376 | ('./buffer-queues/%d_v%d.json.gz' % (data_id, object_version), buffer_queues),
377 | ('./course-records/%d_v%d.json.gz' % (data_id, object_version), course_records),
378 | ]
379 |
380 | # * Write all files at once
381 | async with anyio.create_task_group() as tg:
382 | for f in files:
383 | path, data = f
384 | tg.start_soon(write_compressed_json, path, data)
385 |
386 | async def main():
387 | s = settings.default()
388 | s.configure("9f2b4678", 30810)
389 |
390 | async with backend.connect(s, "52.40.192.64", "59900") as be: # * Skip NNID API
391 | async with be.login(NEX_USERNAME, NEX_PASSWORD) as client:
392 | global datastore_smm_client
393 | datastore_smm_client = datastore_smm.DataStoreClientSMM(client)
394 |
395 | current_timestamp = last_checked_timestamp
396 | twelve_hours = 43200 # * Grab objects in 12 hour chunks
397 |             max_timestamp = common.DateTime.make(2024, 4, 1).value() # * Stop searching after April 1st, 2024 (the official shutdown date)
398 | keep_searching = True
399 |
400 | while keep_searching:
401 | start_datetime = common.DateTime(current_timestamp)
402 | end_datetime = common.DateTime.fromtimestamp(common.DateTime(current_timestamp).timestamp() + twelve_hours)
403 |
404 | print("Downloading next 100 objects between %s to %s" % (start_datetime, end_datetime))
405 |
406 | param = datastore_smm.DataStoreSearchParam()
407 | param.created_after = start_datetime
408 | param.created_before = end_datetime
409 | param.result_range.size = 100 # * Throws DataStore::InvalidArgument for anything higher than 100
410 | param.result_option = 0xFF
411 |
412 | search_object_response = await datastore_smm_client.search_object(param)
413 | objects = search_object_response.result
414 |
415 | print("Found %d objects" % len(objects))
416 |
417 | # * Process all objects at once
418 | async with anyio.create_task_group() as tg:
419 | for obj in objects:
420 | tg.start_soon(process_datastore_object, obj)
421 |
422 | last_checked_timestamp_file.seek(0)
423 | last_checked_timestamp_file.write(str(current_timestamp))
424 |
425 |                 last_object_upload_timestamp = objects[-1].create_time.value() if len(objects) > 0 else 0 # * Guard against an empty result window
426 |
427 | if last_object_upload_timestamp >= max_timestamp:
428 | print("Max timestamp reached. Stop searching")
429 | keep_searching = False
430 | elif len(objects) > 0:
431 | print("More objects may be available, trying new offset!")
432 | # * Set new timestamp to the upload date of the last
433 | # * returned object, so we don't skip any
434 | current_timestamp = last_object_upload_timestamp
435 | else:
436 | print("No more objects available!")
437 | keep_searching = False
438 |
439 | anyio.run(main)
--------------------------------------------------------------------------------
/mario-sonic-rio-2016-wiiu/archive.py:
--------------------------------------------------------------------------------
1 | '''
2 | Pretendo Network 2023
3 |
4 | This will download rankings from M&S Rio 2016 (WiiU) using NEX to automate the process
5 |
6 | Use at your own risk, we are not responsible for any bans
7 |
8 | Requires Python 3 and https://github.com/Kinnay/NintendoClients
9 | '''
10 |
11 | from nintendo.nex import backend, ranking, settings
12 | from nintendo import nnas
13 | import anyio
14 | import os
15 | import json
16 | import gzip
17 | import base64
18 | import logging
19 | import struct
20 |
21 | logging.basicConfig(level=logging.ERROR)
22 |
23 | json_file = open('config.json')
24 | config = json.load(json_file)
25 |
26 | # * Unique device info
27 | DEVICE_ID = config["DEVICE_ID"]
28 | SERIAL_NUMBER = config["SERIAL_NUMBER"]
29 | SYSTEM_VERSION = config["SYSTEM_VERSION"]
30 | REGION_ID = config["REGION_ID"]
31 | COUNTRY_NAME = config["COUNTRY_NAME"]
32 | LANGUAGE = config["LANGUAGE"]
33 |
34 | USERNAME = config["USERNAME"] # * Nintendo Network ID username
35 | PASSWORD = config["PASSWORD"] # * Nintendo Network ID password
36 |
37 | '''
38 | Globals, set later
39 | '''
40 | nex_token = None
41 | ranking_client = None
42 |
43 | TITLE_ID_US = 0x00050000101E5300
44 | TITLE_VERSION_US = 0x10
45 | GAME_SERVER_ID = 0x10190300
46 | NEX_VERSION = 30901 # * 3.9.1
47 | ACCESS_KEY = "63fecb0f"
48 |
49 | '''
50 | NintendoClients does not implement this properly
51 | '''
52 | def new_RankingRankData_load(self, stream, version):
53 | self.pid = stream.pid()
54 | self.unique_id = stream.u64()
55 | self.rank = stream.u32()
56 | self.category = stream.u32()
57 | self.score = stream.u32()
58 | self.groups = stream.list(stream.u8)
59 | self.param = stream.u64()
60 | self.common_data = stream.buffer()
61 | if version >= 1:
62 | self.update_time = stream.datetime()
63 |
64 | '''
65 | Gets rid of the "unexpected version" warning
66 | '''
67 | def new_RankingRankData_max_version(self, settings):
68 | return 1
69 |
70 | ranking.RankingRankData.load = new_RankingRankData_load
71 | ranking.RankingRankData.max_version = new_RankingRankData_max_version
72 |
73 | async def main():
74 | os.makedirs("./data", exist_ok=True)
75 |
76 | await nas_login() # * login with NNID
77 | await backend_setup() # * setup the backend NEX client and start scraping
78 |
79 | async def nas_login():
80 | global nex_token
81 |
82 | nas = nnas.NNASClient()
83 | nas.set_device(DEVICE_ID, SERIAL_NUMBER, SYSTEM_VERSION)
84 | nas.set_title(TITLE_ID_US, TITLE_VERSION_US)
85 | nas.set_locale(REGION_ID, COUNTRY_NAME, LANGUAGE)
86 |
87 | access_token = await nas.login(USERNAME, PASSWORD)
88 | nex_token = await nas.get_nex_token(access_token.token, GAME_SERVER_ID)
89 |
90 | async def backend_setup():
91 | global ranking_client
92 |
93 | s = settings.default()
94 | s.configure(ACCESS_KEY, NEX_VERSION)
95 |
96 | async with backend.connect(s, nex_token.host, nex_token.port) as be:
97 | async with be.login(str(nex_token.pid), nex_token.password) as client:
98 | ranking_client = ranking.RankingClient(client)
99 |
100 | await scrape() # * start ripping courses
101 |
102 | async def scrape():
103 | events = {
104 | 0x01: "BMX",
105 | 0x02: "Unknown",
106 | 0x03: "Unknown",
107 | 0x04: "Unknown",
108 | 0x05: "100m",
109 | 		0x06: "Rhythmic Gymnastics",
110 | 0x07: "Boxing",
111 | 0x08: "Unknown",
112 | 0x09: "4 x 100m Relay",
113 | 0x0A: "Javelin Throw",
114 | 0x0B: "Triple Jump",
115 | 0x0C: "Swimming",
116 | 0x0D: "Equestrian",
117 | 0x0E: "Archery",
118 | 0x0F: "Unknown",
119 | 0x10: "Unknown",
120 | 0x11: "Unknown",
121 | }
122 |
123 | characters = {
124 | 0x00: "Mario",
125 | 0x01: "Unknown",
126 | 0x02: "Peach",
127 | 0x03: "Daisy",
128 | 0x04: "Bowser",
129 | 0x05: "Unknown",
130 | 0x06: "Unknown",
131 | 0x07: "Yoshi",
132 | 0x08: "DK",
133 | 0x09: "Bowser Jr.",
134 | 0x0A: "Sonic",
135 | 0x0B: "Tails",
136 | 0x0C: "Knuckles",
137 | 0x0D: "Amy",
138 | 0x0E: "Unknown",
139 | 0x0F: "Shadow",
140 | 0x10: "Silver",
141 | 0x11: "Unknown",
142 | 0x12: "Blaze",
143 | 0x13: "Vector",
144 | 0x14: "Mii",
145 | 0x15: "Unknown",
146 | 0x17: "Rosalina",
147 | 0x18: "Unknown",
148 | 0x1A: "Unknown",
149 | 0x1B: "Unknown",
150 | 0x1C: "Unknown",
151 | 0x1D: "Wave",
152 | 0x1F: "Unknown",
153 | 0x20: "Unknown",
154 | 0x22: "Unknown",
155 | }
156 |
157 | countries = {
158 | 0x01: "Algeria", # * This is a guess based on the flag order in Sochi 2014
159 | 0x02: "Angola", # * This is a guess based on the flag order in Sochi 2014
160 | 0x03: "Ivory Coast", # * This is a guess based on the flag order in Sochi 2014
161 | 0x04: "Egypt", # * This is a guess based on the flag order in Sochi 2014
162 | 0x05: "Ethiopia", # * This is a guess based on the flag order in Sochi 2014
163 | 0x06: "Gambia", # * This is a guess based on the flag order in Sochi 2014
164 | 0x07: "Ghana", # * This is a guess based on the flag order in Sochi 2014
165 | 0x08: "Guinea", # * This is a guess based on the flag order in Sochi 2014
166 | 0x09: "Kenya", # * This is a guess based on the flag order in Sochi 2014
167 | 0x0A: "Morocco",
168 | 0x0B: "Nigeria",
169 | 0x0C: "South Africa", # * This is a guess based on the flag order in Sochi 2014
170 | 0x0D: "Senegal", # * This is a guess based on the flag order in Sochi 2014
171 | 0x0E: "Togo", # * This is a guess based on the flag order in Sochi 2014
172 | 0x0F: "Tunisia", # * This is a guess based on the flag order in Sochi 2014
173 | 0x10: "Argentina", # * This is a guess based on the flag order in Sochi 2014
174 | 0x11: "Bahamas", # * This is a guess based on the flag order in Sochi 2014
175 | 0x12: "Bolivia", # * This is a guess based on the flag order in Sochi 2014
176 | 0x13: "Brazil",
177 | 0x14: "Canada", # * This is a guess based on the flag order in Sochi 2014
178 | 0x15: "Chile",
179 | 0x16: "Colombia",
180 | 0x17: "Costa Rica", # * This is a guess based on the flag order in Sochi 2014
181 | 0x18: "Cuba", # * This is a guess based on the flag order in Sochi 2014
182 | 0x19: "Ecuador", # * This is a guess based on the flag order in Sochi 2014
183 | 0x1A: "Honduras", # * This is a guess based on the flag order in Sochi 2014
184 | 0x1B: "Jamaica", # * This is a guess based on the flag order in Sochi 2014
185 | 0x1C: "Mexico",
186 | 0x1D: "Paraguay", # * This is a guess based on the flag order in Sochi 2014
187 | 0x1E: "Peru", # * This is a guess based on the flag order in Sochi 2014
188 | 0x1F: "Trinidad", # * This is a guess based on the flag order in Sochi 2014
189 | 0x20: "Uruguay", # * This is a guess based on the flag order in Sochi 2014
190 | 0x21: "USA",
191 | 0x22: "China", # * This is a guess based on the flag order in Sochi 2014
192 | 0x23: "Hong Kong", # * This is a guess based on the flag order in Sochi 2014
193 | 0x24: "Indonesia", # * This is a guess based on the flag order in Sochi 2014
194 | 0x25: "India", # * This is a guess based on the flag order in Sochi 2014
195 | 0x26: "Iran", # * This is a guess based on the flag order in Sochi 2014
196 | 0x27: "Japan",
197 | 0x28: "Korea", # * This is a guess based on the flag order in Sochi 2014
198 | 0x29: "Saudi Arabia", # * This is a guess based on the flag order in Sochi 2014
199 | 0x2A: "Malaysia", # * This is a guess based on the flag order in Sochi 2014
200 | 0x2B: "Pakistan", # * This is a guess based on the flag order in Sochi 2014
201 | 0x2C: "Philippines", # * This is a guess based on the flag order in Sochi 2014
202 | 0x2D: "Singapore", # * This is a guess based on the flag order in Sochi 2014
203 | 0x2E: "Thailand", # * This is a guess based on the flag order in Sochi 2014
204 | 0x2F: "United Arab Emirates",
205 | 0x30: "Uzbekistan", # * This is a guess based on the flag order in Sochi 2014
206 | 0x31: "Austria", # * This is a guess based on the flag order in Sochi 2014
207 | 0x32: "Belgium", # * This is a guess based on the flag order in Sochi 2014
208 | 0x33: "Bulgaria", # * This is a guess based on the flag order in Sochi 2014
209 | 0x34: "Croatia", # * This is a guess based on the flag order in Sochi 2014
210 | 0x35: "Czechia", # * This is a guess based on the flag order in Sochi 2014
211 | 0x36: "Denmark", # * This is a guess based on the flag order in Sochi 2014
212 | 0x37: "Spain", # * This is a guess based on the flag order in Sochi 2014
213 | 0x38: "Finland", # * This is a guess based on the flag order in Sochi 2014
214 | 0x39: "France",
215 | 0x3A: "Great Britain",
216 | 0x3B: "Germany",
217 | 0x3C: "Greece",
218 | 0x3D: "Hungary", # * This is a guess based on the flag order in Sochi 2014
219 | 0x3E: "Ireland", # * This is a guess based on the flag order in Sochi 2014
220 | 0x3F: "Israel", # * This is a guess based on the flag order in Sochi 2014
221 | 0x40: "Italy",
222 | 0x41: "Netherlands",
223 | 0x42: "Norway", # * This is a guess based on the flag order in Sochi 2014
224 | 0x43: "Poland", # * This is a guess based on the flag order in Sochi 2014
225 | 0x44: "Portugal", # * This is a guess based on the flag order in Sochi 2014
226 | 0x45: "Romania", # * This is a guess based on the flag order in Sochi 2014
227 | 0x46: "Russia", # * This is a guess based on the flag order in Sochi 2014
228 | 0x47: "Slovenia", # * This is a guess based on the flag order in Sochi 2014
229 | 0x48: "Switzerland", # * This is a guess based on the flag order in Sochi 2014
230 | 0x49: "Slovakia", # * This is a guess based on the flag order in Sochi 2014
231 | 0x4A: "Sweden", # * This is a guess based on the flag order in Sochi 2014
232 | 0x4B: "Turkey", # * This is a guess based on the flag order in Sochi 2014
233 | 0x4C: "Ukraine", # * This is a guess based on the flag order in Sochi 2014
234 | 0x4D: "Australia",
235 | 0x4E: "Fiji", # * This is a guess based on the flag order in Sochi 2014
236 | 0x4F: "New Zealand", # * This is a guess based on the flag order in Sochi 2014
237 | }
238 |
239 | '''
240 | Subset of events. Not all events
241 | have a leaderboard
242 | '''
243 | categories = [
244 | 0x06, # * Rhythmic Gymnastics
245 | 0x01, # * BMX
246 | 0x0D, # * Equestrian
247 | 0x05, # * 100m
248 | 0x0E, # * Archery
249 | 0x0B, # * Triple Jump
250 | 0x0C, # * Swimming
251 | 0x0A, # * Javelin Throw
252 | 0x09, # * 4 x 100m Relay
253 | ]
254 |
255 | for category in categories:
256 | '''
257 | Make one request first to get the total number of entries.
258 | Mode 0 returns the latest results
259 | '''
260 | mode = 0
261 | order_param = ranking.RankingOrderParam()
262 | unique_id = 0
263 | principal_id = 0
264 |
265 | order_param.offset = 0
266 | order_param.count = 1
267 |
268 | result = await ranking_client.get_ranking(mode, category, order_param, unique_id, principal_id)
269 |
270 | offset = 0
271 | total = result.total
272 | remaining = result.total
273 |
274 | leaderboard = []
275 | leaderboard_name = events[category].replace(" ", "")
276 | seen_rankings = []
277 |
278 | principal_id = result.data[0].pid
279 |
280 | while remaining > 0:
281 | print("{0} on offset {1}. {2}/{3} remaining".format(events[category], offset, remaining, total))
282 |
283 | '''
284 | Using mode 1 as a hack to get around the 1000 offset limit.
285 | Mode 1 selects entries around "your" entry, but the server
286 | does not verify that the currently logged-in user is the same
287 | as the user being used in this mode. Thus we can pretend to
288 | be the last user and continue past the offset limit
289 | '''
290 | mode = 1
291 | order_param = ranking.RankingOrderParam()
292 | unique_id = 0
293 |
294 | order_param.offset = 0
295 | order_param.count = 0xFF # * Max we can do in one go
296 | order_param.order_calc = 1 # * Ordinal (1234) rankings. Prevents duplicate ranking positions (no ties)
297 |
298 | result = await ranking_client.get_ranking(mode, category, order_param, unique_id, principal_id)
299 | rankings = result.data
300 |
301 | for user in rankings:
302 | ranking_entry = {
303 | "pid": user.pid,
304 | "unique_id": user.unique_id,
305 | "rank": user.rank,
306 | "category": user.category,
307 | "score": user.score,
308 | "groups": user.groups,
309 | "param": user.param,
310 | "common_data": base64.b64encode(user.common_data).decode("utf-8"),
311 | "update_time": user.update_time.standard_datetime().isoformat(),
312 | }
313 |
314 | if ranking_entry in seen_rankings:
315 | # * Ignore duplicates
316 | continue
317 |
318 | '''
319 | The player can change their character and country at will.
320 | As such, the character and country the player was using at
321 | the time the ranking was uploaded may be different from
322 | their favorite character/country. The ranking "groups" is
323 | used in this game to determine which country/character was
324 | used at the time the ranking was uploaded
325 | '''
326 | [completed_country, completed_character] = user.groups
327 |
328 | common_data = user.common_data
329 |
330 | '''
331 | common_data offsets
332 | All numbers are BE
333 | 0x00-0x2F: 0x2D character null-terminated name (not the same as NNID username)
334 | 0x30-0x8F: Mii data
335 | 0x90: Country ID
336 | 0x91: Tournaments Cleared
337 | 0x92: Tournament Gold Medals
338 | 0x93: League events Cleared. 0xFF if disabled
339 | 0x94: League event Gold Medals. Only displayed if league events enabled
340 | 0x95: Special Prizes
341 | 0x96-0x97: Carnival Challenges
342 | 0x98: Favorite Event ID
343 | 0x99: Favorite character ID
344 | 0x9A: Flags
345 | 0x9B: Tips
346 | 0x9C-0x9D: Ghost Match Victories
347 | 0x9E-0xA1: Mii Wear
348 | 0xA2-0xA3: Music Tracks
349 | 0xA4: Stamps
350 | 0xA5: Guests Unlocked
351 | 0xA8-0xAB: Total Coins Earned
352 | 0xAC-0xAF: Total Rings Earned
353 | '''
354 | name_block = common_data[0x0:0x30]
355 | mii_data = common_data[0x30:0x90]
356 | metadata = common_data[0x90:0xB0] # * Unknown what the data after this point is used for
357 | unknown_common_data = common_data[0xB0:]
358 |
359 | name = name_block.split(b'\x00')[0].decode("utf-8", "replace")
360 |
361 | (
362 | country,
363 | tournaments_cleared,
364 | tournaments_gold_medals,
365 | leagues_cleared,
366 | leagues_gold_medals,
367 | special_prizes,
368 | carnival_challenges,
369 | favorite_event,
370 | favorite_character,
371 | flags,
372 | tips,
373 | ghost_match_victories,
374 | mii_wear,
375 | music_tracks,
376 | stamps,
377 | guests_unlocked,
378 | total_coins_earned,
379 | total_rings_earned
380 | ) = struct.unpack(">BBBBBBHBBBBHIHBBxxII", metadata) # * Big-endian, 32 bytes (0x90-0xB0). "xx" skips the 2 unused bytes at 0xA6-0xA7
381 |
382 | user_data = {
383 | "event": category,
384 | "name": name,
385 | "pid": user.pid,
386 | "score": user.score,
387 | "place": user.rank,
388 | "update_time": user.update_time.standard_datetime().isoformat(),
389 | "mii_data": base64.b64encode(mii_data).decode("utf-8"),
390 | "completed_country": {
391 | "id": completed_country,
392 | "name": countries.get(completed_country)
393 | },
394 | "completed_character": {
395 | "id": completed_character,
396 | "name": characters.get(completed_character, "Unknown")
397 | },
398 | "user_country": {
399 | "id": country,
400 | "name": countries.get(country)
401 | },
402 | "tournaments": {
403 | "cleared": tournaments_cleared,
404 | "gold_medals": tournaments_gold_medals,
405 | },
406 | "leagues": {
407 | "cleared": leagues_cleared if leagues_cleared != 0xFF else 0,
408 | "gold_medals": leagues_gold_medals,
409 | },
410 | "favorite_event": {
411 | "id": favorite_event,
412 | "name": events.get(favorite_event, "Unknown")
413 | },
414 | "favorite_character": {
415 | "id": favorite_character,
416 | "name": characters.get(favorite_character, "Unknown")
417 | },
418 | "total_coins_earned": total_coins_earned,
419 | "total_rings_earned": total_rings_earned,
420 | "clear_counts": {
421 | "special_prizes": special_prizes,
422 | "ghost_match_victories": ghost_match_victories,
423 | "carnival_challenges": carnival_challenges,
424 | "guests": guests_unlocked
425 | },
426 | "collectables": {
427 | "flags": flags,
428 | "tips": tips,
429 | "mii_wear": mii_wear,
430 | "music_tracks": music_tracks,
431 | "stamps": stamps
432 | },
433 | "unknown_common_data": unknown_common_data.hex(),
434 | "ranking_raw": ranking_entry
435 | }
436 |
437 | leaderboard.append(user_data)
438 | principal_id = user.pid
439 | offset += 1
440 | remaining -= 1
441 | seen_rankings.append(ranking_entry)
442 |
443 | print("Writing ./data/{0}/rankings.json.gz".format(category))
444 | leaderboard_data = json.dumps(leaderboard)
445 | os.makedirs("./data/{0}".format(category), exist_ok=True)
446 | await write_to_file("./data/{0}/rankings.json.gz".format(category), leaderboard_data.encode("utf-8"))
447 |
448 | async def write_to_file(path, data):
449 | with gzip.open(path, "w", compresslevel=9) as f: # * 4. fewer bytes (i.e. gzip)
450 | f.write(data)
451 |
452 | anyio.run(main)
453 |
--------------------------------------------------------------------------------
/idbe/package-lock.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "idbe",
3 | "version": "1.0.0",
4 | "lockfileVersion": 3,
5 | "requires": true,
6 | "packages": {
7 | "": {
8 | "name": "idbe",
9 | "version": "1.0.0",
10 | "license": "ISC",
11 | "dependencies": {
12 | "axios": "^1.6.7",
13 | "cheerio": "^1.0.0-rc.12",
14 | "fs-extra": "^11.2.0",
15 | "xmlbuilder2": "^3.1.1"
16 | }
17 | },
18 | "node_modules/@oozcitak/dom": {
19 | "version": "1.15.10",
20 | "resolved": "https://registry.npmjs.org/@oozcitak/dom/-/dom-1.15.10.tgz",
21 | "integrity": "sha512-0JT29/LaxVgRcGKvHmSrUTEvZ8BXvZhGl2LASRUgHqDTC1M5g1pLmVv56IYNyt3bG2CUjDkc67wnyZC14pbQrQ==",
22 | "dependencies": {
23 | "@oozcitak/infra": "1.0.8",
24 | "@oozcitak/url": "1.0.4",
25 | "@oozcitak/util": "8.3.8"
26 | },
27 | "engines": {
28 | "node": ">=8.0"
29 | }
30 | },
31 | "node_modules/@oozcitak/infra": {
32 | "version": "1.0.8",
33 | "resolved": "https://registry.npmjs.org/@oozcitak/infra/-/infra-1.0.8.tgz",
34 | "integrity": "sha512-JRAUc9VR6IGHOL7OGF+yrvs0LO8SlqGnPAMqyzOuFZPSZSXI7Xf2O9+awQPSMXgIWGtgUf/dA6Hs6X6ySEaWTg==",
35 | "dependencies": {
36 | "@oozcitak/util": "8.3.8"
37 | },
38 | "engines": {
39 | "node": ">=6.0"
40 | }
41 | },
42 | "node_modules/@oozcitak/url": {
43 | "version": "1.0.4",
44 | "resolved": "https://registry.npmjs.org/@oozcitak/url/-/url-1.0.4.tgz",
45 | "integrity": "sha512-kDcD8y+y3FCSOvnBI6HJgl00viO/nGbQoCINmQ0h98OhnGITrWR3bOGfwYCthgcrV8AnTJz8MzslTQbC3SOAmw==",
46 | "dependencies": {
47 | "@oozcitak/infra": "1.0.8",
48 | "@oozcitak/util": "8.3.8"
49 | },
50 | "engines": {
51 | "node": ">=8.0"
52 | }
53 | },
54 | "node_modules/@oozcitak/util": {
55 | "version": "8.3.8",
56 | "resolved": "https://registry.npmjs.org/@oozcitak/util/-/util-8.3.8.tgz",
57 | "integrity": "sha512-T8TbSnGsxo6TDBJx/Sgv/BlVJL3tshxZP7Aq5R1mSnM5OcHY2dQaxLMu2+E8u3gN0MLOzdjurqN4ZRVuzQycOQ==",
58 | "engines": {
59 | "node": ">=8.0"
60 | }
61 | },
62 | "node_modules/argparse": {
63 | "version": "1.0.10",
64 | "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz",
65 | "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==",
66 | "dependencies": {
67 | "sprintf-js": "~1.0.2"
68 | }
69 | },
70 | "node_modules/asynckit": {
71 | "version": "0.4.0",
72 | "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz",
73 | "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q=="
74 | },
75 | "node_modules/axios": {
76 | "version": "1.6.7",
77 | "resolved": "https://registry.npmjs.org/axios/-/axios-1.6.7.tgz",
78 | "integrity": "sha512-/hDJGff6/c7u0hDkvkGxR/oy6CbCs8ziCsC7SqmhjfozqiJGc8Z11wrv9z9lYfY4K8l+H9TpjcMDX0xOZmx+RA==",
79 | "dependencies": {
80 | "follow-redirects": "^1.15.4",
81 | "form-data": "^4.0.0",
82 | "proxy-from-env": "^1.1.0"
83 | }
84 | },
85 | "node_modules/boolbase": {
86 | "version": "1.0.0",
87 | "resolved": "https://registry.npmjs.org/boolbase/-/boolbase-1.0.0.tgz",
88 | "integrity": "sha512-JZOSA7Mo9sNGB8+UjSgzdLtokWAky1zbztM3WRLCbZ70/3cTANmQmOdR7y2g+J0e2WXywy1yS468tY+IruqEww=="
89 | },
90 | "node_modules/cheerio": {
91 | "version": "1.0.0-rc.12",
92 | "resolved": "https://registry.npmjs.org/cheerio/-/cheerio-1.0.0-rc.12.tgz",
93 | "integrity": "sha512-VqR8m68vM46BNnuZ5NtnGBKIE/DfN0cRIzg9n40EIq9NOv90ayxLBXA8fXC5gquFRGJSTRqBq25Jt2ECLR431Q==",
94 | "dependencies": {
95 | "cheerio-select": "^2.1.0",
96 | "dom-serializer": "^2.0.0",
97 | "domhandler": "^5.0.3",
98 | "domutils": "^3.0.1",
99 | "htmlparser2": "^8.0.1",
100 | "parse5": "^7.0.0",
101 | "parse5-htmlparser2-tree-adapter": "^7.0.0"
102 | },
103 | "engines": {
104 | "node": ">= 6"
105 | },
106 | "funding": {
107 | "url": "https://github.com/cheeriojs/cheerio?sponsor=1"
108 | }
109 | },
110 | "node_modules/cheerio-select": {
111 | "version": "2.1.0",
112 | "resolved": "https://registry.npmjs.org/cheerio-select/-/cheerio-select-2.1.0.tgz",
113 | "integrity": "sha512-9v9kG0LvzrlcungtnJtpGNxY+fzECQKhK4EGJX2vByejiMX84MFNQw4UxPJl3bFbTMw+Dfs37XaIkCwTZfLh4g==",
114 | "dependencies": {
115 | "boolbase": "^1.0.0",
116 | "css-select": "^5.1.0",
117 | "css-what": "^6.1.0",
118 | "domelementtype": "^2.3.0",
119 | "domhandler": "^5.0.3",
120 | "domutils": "^3.0.1"
121 | },
122 | "funding": {
123 | "url": "https://github.com/sponsors/fb55"
124 | }
125 | },
126 | "node_modules/combined-stream": {
127 | "version": "1.0.8",
128 | "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz",
129 | "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==",
130 | "dependencies": {
131 | "delayed-stream": "~1.0.0"
132 | },
133 | "engines": {
134 | "node": ">= 0.8"
135 | }
136 | },
137 | "node_modules/css-select": {
138 | "version": "5.1.0",
139 | "resolved": "https://registry.npmjs.org/css-select/-/css-select-5.1.0.tgz",
140 | "integrity": "sha512-nwoRF1rvRRnnCqqY7updORDsuqKzqYJ28+oSMaJMMgOauh3fvwHqMS7EZpIPqK8GL+g9mKxF1vP/ZjSeNjEVHg==",
141 | "dependencies": {
142 | "boolbase": "^1.0.0",
143 | "css-what": "^6.1.0",
144 | "domhandler": "^5.0.2",
145 | "domutils": "^3.0.1",
146 | "nth-check": "^2.0.1"
147 | },
148 | "funding": {
149 | "url": "https://github.com/sponsors/fb55"
150 | }
151 | },
152 | "node_modules/css-what": {
153 | "version": "6.1.0",
154 | "resolved": "https://registry.npmjs.org/css-what/-/css-what-6.1.0.tgz",
155 | "integrity": "sha512-HTUrgRJ7r4dsZKU6GjmpfRK1O76h97Z8MfS1G0FozR+oF2kG6Vfe8JE6zwrkbxigziPHinCJ+gCPjA9EaBDtRw==",
156 | "engines": {
157 | "node": ">= 6"
158 | },
159 | "funding": {
160 | "url": "https://github.com/sponsors/fb55"
161 | }
162 | },
163 | "node_modules/delayed-stream": {
164 | "version": "1.0.0",
165 | "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz",
166 | "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==",
167 | "engines": {
168 | "node": ">=0.4.0"
169 | }
170 | },
171 | "node_modules/dom-serializer": {
172 | "version": "2.0.0",
173 | "resolved": "https://registry.npmjs.org/dom-serializer/-/dom-serializer-2.0.0.tgz",
174 | "integrity": "sha512-wIkAryiqt/nV5EQKqQpo3SToSOV9J0DnbJqwK7Wv/Trc92zIAYZ4FlMu+JPFW1DfGFt81ZTCGgDEabffXeLyJg==",
175 | "dependencies": {
176 | "domelementtype": "^2.3.0",
177 | "domhandler": "^5.0.2",
178 | "entities": "^4.2.0"
179 | },
180 | "funding": {
181 | "url": "https://github.com/cheeriojs/dom-serializer?sponsor=1"
182 | }
183 | },
184 | "node_modules/domelementtype": {
185 | "version": "2.3.0",
186 | "resolved": "https://registry.npmjs.org/domelementtype/-/domelementtype-2.3.0.tgz",
187 | "integrity": "sha512-OLETBj6w0OsagBwdXnPdN0cnMfF9opN69co+7ZrbfPGrdpPVNBUj02spi6B1N7wChLQiPn4CSH/zJvXw56gmHw==",
188 | "funding": [
189 | {
190 | "type": "github",
191 | "url": "https://github.com/sponsors/fb55"
192 | }
193 | ]
194 | },
195 | "node_modules/domhandler": {
196 | "version": "5.0.3",
197 | "resolved": "https://registry.npmjs.org/domhandler/-/domhandler-5.0.3.tgz",
198 | "integrity": "sha512-cgwlv/1iFQiFnU96XXgROh8xTeetsnJiDsTc7TYCLFd9+/WNkIqPTxiM/8pSd8VIrhXGTf1Ny1q1hquVqDJB5w==",
199 | "dependencies": {
200 | "domelementtype": "^2.3.0"
201 | },
202 | "engines": {
203 | "node": ">= 4"
204 | },
205 | "funding": {
206 | "url": "https://github.com/fb55/domhandler?sponsor=1"
207 | }
208 | },
209 | "node_modules/domutils": {
210 | "version": "3.1.0",
211 | "resolved": "https://registry.npmjs.org/domutils/-/domutils-3.1.0.tgz",
212 | "integrity": "sha512-H78uMmQtI2AhgDJjWeQmHwJJ2bLPD3GMmO7Zja/ZZh84wkm+4ut+IUnUdRa8uCGX88DiVx1j6FRe1XfxEgjEZA==",
213 | "dependencies": {
214 | "dom-serializer": "^2.0.0",
215 | "domelementtype": "^2.3.0",
216 | "domhandler": "^5.0.3"
217 | },
218 | "funding": {
219 | "url": "https://github.com/fb55/domutils?sponsor=1"
220 | }
221 | },
222 | "node_modules/entities": {
223 | "version": "4.5.0",
224 | "resolved": "https://registry.npmjs.org/entities/-/entities-4.5.0.tgz",
225 | "integrity": "sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw==",
226 | "engines": {
227 | "node": ">=0.12"
228 | },
229 | "funding": {
230 | "url": "https://github.com/fb55/entities?sponsor=1"
231 | }
232 | },
233 | "node_modules/esprima": {
234 | "version": "4.0.1",
235 | "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz",
236 | "integrity": "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==",
237 | "bin": {
238 | "esparse": "bin/esparse.js",
239 | "esvalidate": "bin/esvalidate.js"
240 | },
241 | "engines": {
242 | "node": ">=4"
243 | }
244 | },
245 | "node_modules/follow-redirects": {
246 | "version": "1.15.5",
247 | "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.5.tgz",
248 | "integrity": "sha512-vSFWUON1B+yAw1VN4xMfxgn5fTUiaOzAJCKBwIIgT/+7CuGy9+r+5gITvP62j3RmaD5Ph65UaERdOSRGUzZtgw==",
249 | "funding": [
250 | {
251 | "type": "individual",
252 | "url": "https://github.com/sponsors/RubenVerborgh"
253 | }
254 | ],
255 | "engines": {
256 | "node": ">=4.0"
257 | },
258 | "peerDependenciesMeta": {
259 | "debug": {
260 | "optional": true
261 | }
262 | }
263 | },
264 | "node_modules/form-data": {
265 | "version": "4.0.0",
266 | "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.0.tgz",
267 | "integrity": "sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==",
268 | "dependencies": {
269 | "asynckit": "^0.4.0",
270 | "combined-stream": "^1.0.8",
271 | "mime-types": "^2.1.12"
272 | },
273 | "engines": {
274 | "node": ">= 6"
275 | }
276 | },
277 | "node_modules/fs-extra": {
278 | "version": "11.2.0",
279 | "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-11.2.0.tgz",
280 | "integrity": "sha512-PmDi3uwK5nFuXh7XDTlVnS17xJS7vW36is2+w3xcv8SVxiB4NyATf4ctkVY5bkSjX0Y4nbvZCq1/EjtEyr9ktw==",
281 | "dependencies": {
282 | "graceful-fs": "^4.2.0",
283 | "jsonfile": "^6.0.1",
284 | "universalify": "^2.0.0"
285 | },
286 | "engines": {
287 | "node": ">=14.14"
288 | }
289 | },
290 | "node_modules/graceful-fs": {
291 | "version": "4.2.11",
292 | "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz",
293 | "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ=="
294 | },
295 | "node_modules/htmlparser2": {
296 | "version": "8.0.2",
297 | "resolved": "https://registry.npmjs.org/htmlparser2/-/htmlparser2-8.0.2.tgz",
298 | "integrity": "sha512-GYdjWKDkbRLkZ5geuHs5NY1puJ+PXwP7+fHPRz06Eirsb9ugf6d8kkXav6ADhcODhFFPMIXyxkxSuMf3D6NCFA==",
299 | "funding": [
300 | "https://github.com/fb55/htmlparser2?sponsor=1",
301 | {
302 | "type": "github",
303 | "url": "https://github.com/sponsors/fb55"
304 | }
305 | ],
306 | "dependencies": {
307 | "domelementtype": "^2.3.0",
308 | "domhandler": "^5.0.3",
309 | "domutils": "^3.0.1",
310 | "entities": "^4.4.0"
311 | }
312 | },
313 | "node_modules/js-yaml": {
314 | "version": "3.14.1",
315 | "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.1.tgz",
316 | "integrity": "sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==",
317 | "dependencies": {
318 | "argparse": "^1.0.7",
319 | "esprima": "^4.0.0"
320 | },
321 | "bin": {
322 | "js-yaml": "bin/js-yaml.js"
323 | }
324 | },
325 | "node_modules/jsonfile": {
326 | "version": "6.1.0",
327 | "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-6.1.0.tgz",
328 | "integrity": "sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ==",
329 | "dependencies": {
330 | "universalify": "^2.0.0"
331 | },
332 | "optionalDependencies": {
333 | "graceful-fs": "^4.1.6"
334 | }
335 | },
336 | "node_modules/mime-db": {
337 | "version": "1.52.0",
338 | "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz",
339 | "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==",
340 | "engines": {
341 | "node": ">= 0.6"
342 | }
343 | },
344 | "node_modules/mime-types": {
345 | "version": "2.1.35",
346 | "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz",
347 | "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==",
348 | "dependencies": {
349 | "mime-db": "1.52.0"
350 | },
351 | "engines": {
352 | "node": ">= 0.6"
353 | }
354 | },
355 | "node_modules/nth-check": {
356 | "version": "2.1.1",
357 | "resolved": "https://registry.npmjs.org/nth-check/-/nth-check-2.1.1.tgz",
358 | "integrity": "sha512-lqjrjmaOoAnWfMmBPL+XNnynZh2+swxiX3WUE0s4yEHI6m+AwrK2UZOimIRl3X/4QctVqS8AiZjFqyOGrMXb/w==",
359 | "dependencies": {
360 | "boolbase": "^1.0.0"
361 | },
362 | "funding": {
363 | "url": "https://github.com/fb55/nth-check?sponsor=1"
364 | }
365 | },
366 | "node_modules/parse5": {
367 | "version": "7.1.2",
368 | "resolved": "https://registry.npmjs.org/parse5/-/parse5-7.1.2.tgz",
369 | "integrity": "sha512-Czj1WaSVpaoj0wbhMzLmWD69anp2WH7FXMB9n1Sy8/ZFF9jolSQVMu1Ij5WIyGmcBmhk7EOndpO4mIpihVqAXw==",
370 | "dependencies": {
371 | "entities": "^4.4.0"
372 | },
373 | "funding": {
374 | "url": "https://github.com/inikulin/parse5?sponsor=1"
375 | }
376 | },
377 | "node_modules/parse5-htmlparser2-tree-adapter": {
378 | "version": "7.0.0",
379 | "resolved": "https://registry.npmjs.org/parse5-htmlparser2-tree-adapter/-/parse5-htmlparser2-tree-adapter-7.0.0.tgz",
380 | "integrity": "sha512-B77tOZrqqfUfnVcOrUvfdLbz4pu4RopLD/4vmu3HUPswwTA8OH0EMW9BlWR2B0RCoiZRAHEUu7IxeP1Pd1UU+g==",
381 | "dependencies": {
382 | "domhandler": "^5.0.2",
383 | "parse5": "^7.0.0"
384 | },
385 | "funding": {
386 | "url": "https://github.com/inikulin/parse5?sponsor=1"
387 | }
388 | },
389 | "node_modules/proxy-from-env": {
390 | "version": "1.1.0",
391 | "resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz",
392 | "integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg=="
393 | },
394 | "node_modules/sprintf-js": {
395 | "version": "1.0.3",
396 | "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz",
397 | "integrity": "sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g=="
398 | },
399 | "node_modules/universalify": {
400 | "version": "2.0.1",
401 | "resolved": "https://registry.npmjs.org/universalify/-/universalify-2.0.1.tgz",
402 | "integrity": "sha512-gptHNQghINnc/vTGIk0SOFGFNXw7JVrlRUtConJRlvaw6DuX0wO5Jeko9sWrMBhh+PsYAZ7oXAiOnf/UKogyiw==",
403 | "engines": {
404 | "node": ">= 10.0.0"
405 | }
406 | },
407 | "node_modules/xmlbuilder2": {
408 | "version": "3.1.1",
409 | "resolved": "https://registry.npmjs.org/xmlbuilder2/-/xmlbuilder2-3.1.1.tgz",
410 | "integrity": "sha512-WCSfbfZnQDdLQLiMdGUQpMxxckeQ4oZNMNhLVkcekTu7xhD4tuUDyAPoY8CwXvBYE6LwBHd6QW2WZXlOWr1vCw==",
411 | "dependencies": {
412 | "@oozcitak/dom": "1.15.10",
413 | "@oozcitak/infra": "1.0.8",
414 | "@oozcitak/util": "8.3.8",
415 | "js-yaml": "3.14.1"
416 | },
417 | "engines": {
418 | "node": ">=12.0"
419 | }
420 | }
421 | }
422 | }
423 |
--------------------------------------------------------------------------------
/idbe/LICENSE:
--------------------------------------------------------------------------------
1 | GNU AFFERO GENERAL PUBLIC LICENSE
2 | Version 3, 19 November 2007
3 |
4 | Copyright (C) 2007 Free Software Foundation, Inc.
5 | Everyone is permitted to copy and distribute verbatim copies
6 | of this license document, but changing it is not allowed.
7 |
8 | Preamble
9 |
10 | The GNU Affero General Public License is a free, copyleft license for
11 | software and other kinds of works, specifically designed to ensure
12 | cooperation with the community in the case of network server software.
13 |
14 | The licenses for most software and other practical works are designed
15 | to take away your freedom to share and change the works. By contrast,
16 | our General Public Licenses are intended to guarantee your freedom to
17 | share and change all versions of a program--to make sure it remains free
18 | software for all its users.
19 |
20 | When we speak of free software, we are referring to freedom, not
21 | price. Our General Public Licenses are designed to make sure that you
22 | have the freedom to distribute copies of free software (and charge for
23 | them if you wish), that you receive source code or can get it if you
24 | want it, that you can change the software or use pieces of it in new
25 | free programs, and that you know you can do these things.
26 |
27 | Developers that use our General Public Licenses protect your rights
28 | with two steps: (1) assert copyright on the software, and (2) offer
29 | you this License which gives you legal permission to copy, distribute
30 | and/or modify the software.
31 |
32 | A secondary benefit of defending all users' freedom is that
33 | improvements made in alternate versions of the program, if they
34 | receive widespread use, become available for other developers to
35 | incorporate. Many developers of free software are heartened and
36 | encouraged by the resulting cooperation. However, in the case of
37 | software used on network servers, this result may fail to come about.
38 | The GNU General Public License permits making a modified version and
39 | letting the public access it on a server without ever releasing its
40 | source code to the public.
41 |
42 | The GNU Affero General Public License is designed specifically to
43 | ensure that, in such cases, the modified source code becomes available
44 | to the community. It requires the operator of a network server to
45 | provide the source code of the modified version running there to the
46 | users of that server. Therefore, public use of a modified version, on
47 | a publicly accessible server, gives the public access to the source
48 | code of the modified version.
49 |
50 | An older license, called the Affero General Public License and
51 | published by Affero, was designed to accomplish similar goals. This is
52 | a different license, not a version of the Affero GPL, but Affero has
53 | released a new version of the Affero GPL which permits relicensing under
54 | this license.
55 |
56 | The precise terms and conditions for copying, distribution and
57 | modification follow.
58 |
59 | TERMS AND CONDITIONS
60 |
61 | 0. Definitions.
62 |
63 | "This License" refers to version 3 of the GNU Affero General Public License.
64 |
65 | "Copyright" also means copyright-like laws that apply to other kinds of
66 | works, such as semiconductor masks.
67 |
68 | "The Program" refers to any copyrightable work licensed under this
69 | License. Each licensee is addressed as "you". "Licensees" and
70 | "recipients" may be individuals or organizations.
71 |
72 | To "modify" a work means to copy from or adapt all or part of the work
73 | in a fashion requiring copyright permission, other than the making of an
74 | exact copy. The resulting work is called a "modified version" of the
75 | earlier work or a work "based on" the earlier work.
76 |
77 | A "covered work" means either the unmodified Program or a work based
78 | on the Program.
79 |
80 | To "propagate" a work means to do anything with it that, without
81 | permission, would make you directly or secondarily liable for
82 | infringement under applicable copyright law, except executing it on a
83 | computer or modifying a private copy. Propagation includes copying,
84 | distribution (with or without modification), making available to the
85 | public, and in some countries other activities as well.
86 |
87 | To "convey" a work means any kind of propagation that enables other
88 | parties to make or receive copies. Mere interaction with a user through
89 | a computer network, with no transfer of a copy, is not conveying.
90 |
91 | An interactive user interface displays "Appropriate Legal Notices"
92 | to the extent that it includes a convenient and prominently visible
93 | feature that (1) displays an appropriate copyright notice, and (2)
94 | tells the user that there is no warranty for the work (except to the
95 | extent that warranties are provided), that licensees may convey the
96 | work under this License, and how to view a copy of this License. If
97 | the interface presents a list of user commands or options, such as a
98 | menu, a prominent item in the list meets this criterion.
99 |
100 | 1. Source Code.
101 |
102 | The "source code" for a work means the preferred form of the work
103 | for making modifications to it. "Object code" means any non-source
104 | form of a work.
105 |
106 | A "Standard Interface" means an interface that either is an official
107 | standard defined by a recognized standards body, or, in the case of
108 | interfaces specified for a particular programming language, one that
109 | is widely used among developers working in that language.
110 |
111 | The "System Libraries" of an executable work include anything, other
112 | than the work as a whole, that (a) is included in the normal form of
113 | packaging a Major Component, but which is not part of that Major
114 | Component, and (b) serves only to enable use of the work with that
115 | Major Component, or to implement a Standard Interface for which an
116 | implementation is available to the public in source code form. A
117 | "Major Component", in this context, means a major essential component
118 | (kernel, window system, and so on) of the specific operating system
119 | (if any) on which the executable work runs, or a compiler used to
120 | produce the work, or an object code interpreter used to run it.
121 |
122 | The "Corresponding Source" for a work in object code form means all
123 | the source code needed to generate, install, and (for an executable
124 | work) run the object code and to modify the work, including scripts to
125 | control those activities. However, it does not include the work's
126 | System Libraries, or general-purpose tools or generally available free
127 | programs which are used unmodified in performing those activities but
128 | which are not part of the work. For example, Corresponding Source
129 | includes interface definition files associated with source files for
130 | the work, and the source code for shared libraries and dynamically
131 | linked subprograms that the work is specifically designed to require,
132 | such as by intimate data communication or control flow between those
133 | subprograms and other parts of the work.
134 |
135 | The Corresponding Source need not include anything that users
136 | can regenerate automatically from other parts of the Corresponding
137 | Source.
138 |
139 | The Corresponding Source for a work in source code form is that
140 | same work.
141 |
142 | 2. Basic Permissions.
143 |
144 | All rights granted under this License are granted for the term of
145 | copyright on the Program, and are irrevocable provided the stated
146 | conditions are met. This License explicitly affirms your unlimited
147 | permission to run the unmodified Program. The output from running a
148 | covered work is covered by this License only if the output, given its
149 | content, constitutes a covered work. This License acknowledges your
150 | rights of fair use or other equivalent, as provided by copyright law.
151 |
152 | You may make, run and propagate covered works that you do not
153 | convey, without conditions so long as your license otherwise remains
154 | in force. You may convey covered works to others for the sole purpose
155 | of having them make modifications exclusively for you, or provide you
156 | with facilities for running those works, provided that you comply with
157 | the terms of this License in conveying all material for which you do
158 | not control copyright. Those thus making or running the covered works
159 | for you must do so exclusively on your behalf, under your direction
160 | and control, on terms that prohibit them from making any copies of
161 | your copyrighted material outside their relationship with you.
162 |
163 | Conveying under any other circumstances is permitted solely under
164 | the conditions stated below. Sublicensing is not allowed; section 10
165 | makes it unnecessary.
166 |
167 | 3. Protecting Users' Legal Rights From Anti-Circumvention Law.
168 |
169 | No covered work shall be deemed part of an effective technological
170 | measure under any applicable law fulfilling obligations under article
171 | 11 of the WIPO copyright treaty adopted on 20 December 1996, or
172 | similar laws prohibiting or restricting circumvention of such
173 | measures.
174 |
175 | When you convey a covered work, you waive any legal power to forbid
176 | circumvention of technological measures to the extent such circumvention
177 | is effected by exercising rights under this License with respect to
178 | the covered work, and you disclaim any intention to limit operation or
179 | modification of the work as a means of enforcing, against the work's
180 | users, your or third parties' legal rights to forbid circumvention of
181 | technological measures.
182 |
183 | 4. Conveying Verbatim Copies.
184 |
185 | You may convey verbatim copies of the Program's source code as you
186 | receive it, in any medium, provided that you conspicuously and
187 | appropriately publish on each copy an appropriate copyright notice;
188 | keep intact all notices stating that this License and any
189 | non-permissive terms added in accord with section 7 apply to the code;
190 | keep intact all notices of the absence of any warranty; and give all
191 | recipients a copy of this License along with the Program.
192 |
193 | You may charge any price or no price for each copy that you convey,
194 | and you may offer support or warranty protection for a fee.
195 |
196 | 5. Conveying Modified Source Versions.
197 |
198 | You may convey a work based on the Program, or the modifications to
199 | produce it from the Program, in the form of source code under the
200 | terms of section 4, provided that you also meet all of these conditions:
201 |
202 | a) The work must carry prominent notices stating that you modified
203 | it, and giving a relevant date.
204 |
205 | b) The work must carry prominent notices stating that it is
206 | released under this License and any conditions added under section
207 | 7. This requirement modifies the requirement in section 4 to
208 | "keep intact all notices".
209 |
210 | c) You must license the entire work, as a whole, under this
211 | License to anyone who comes into possession of a copy. This
212 | License will therefore apply, along with any applicable section 7
213 | additional terms, to the whole of the work, and all its parts,
214 | regardless of how they are packaged. This License gives no
215 | permission to license the work in any other way, but it does not
216 | invalidate such permission if you have separately received it.
217 |
218 | d) If the work has interactive user interfaces, each must display
219 | Appropriate Legal Notices; however, if the Program has interactive
220 | interfaces that do not display Appropriate Legal Notices, your
221 | work need not make them do so.
222 |
223 | A compilation of a covered work with other separate and independent
224 | works, which are not by their nature extensions of the covered work,
225 | and which are not combined with it such as to form a larger program,
226 | in or on a volume of a storage or distribution medium, is called an
227 | "aggregate" if the compilation and its resulting copyright are not
228 | used to limit the access or legal rights of the compilation's users
229 | beyond what the individual works permit. Inclusion of a covered work
230 | in an aggregate does not cause this License to apply to the other
231 | parts of the aggregate.
232 |
233 | 6. Conveying Non-Source Forms.
234 |
235 | You may convey a covered work in object code form under the terms
236 | of sections 4 and 5, provided that you also convey the
237 | machine-readable Corresponding Source under the terms of this License,
238 | in one of these ways:
239 |
240 | a) Convey the object code in, or embodied in, a physical product
241 | (including a physical distribution medium), accompanied by the
242 | Corresponding Source fixed on a durable physical medium
243 | customarily used for software interchange.
244 |
245 | b) Convey the object code in, or embodied in, a physical product
246 | (including a physical distribution medium), accompanied by a
247 | written offer, valid for at least three years and valid for as
248 | long as you offer spare parts or customer support for that product
249 | model, to give anyone who possesses the object code either (1) a
250 | copy of the Corresponding Source for all the software in the
251 | product that is covered by this License, on a durable physical
252 | medium customarily used for software interchange, for a price no
253 | more than your reasonable cost of physically performing this
254 | conveying of source, or (2) access to copy the
255 | Corresponding Source from a network server at no charge.
256 |
257 | c) Convey individual copies of the object code with a copy of the
258 | written offer to provide the Corresponding Source. This
259 | alternative is allowed only occasionally and noncommercially, and
260 | only if you received the object code with such an offer, in accord
261 | with subsection 6b.
262 |
263 | d) Convey the object code by offering access from a designated
264 | place (gratis or for a charge), and offer equivalent access to the
265 | Corresponding Source in the same way through the same place at no
266 | further charge. You need not require recipients to copy the
267 | Corresponding Source along with the object code. If the place to
268 | copy the object code is a network server, the Corresponding Source
269 | may be on a different server (operated by you or a third party)
270 | that supports equivalent copying facilities, provided you maintain
271 | clear directions next to the object code saying where to find the
272 | Corresponding Source. Regardless of what server hosts the
273 | Corresponding Source, you remain obligated to ensure that it is
274 | available for as long as needed to satisfy these requirements.
275 |
276 | e) Convey the object code using peer-to-peer transmission, provided
277 | you inform other peers where the object code and Corresponding
278 | Source of the work are being offered to the general public at no
279 | charge under subsection 6d.
280 |
281 | A separable portion of the object code, whose source code is excluded
282 | from the Corresponding Source as a System Library, need not be
283 | included in conveying the object code work.
284 |
285 | A "User Product" is either (1) a "consumer product", which means any
286 | tangible personal property which is normally used for personal, family,
287 | or household purposes, or (2) anything designed or sold for incorporation
288 | into a dwelling. In determining whether a product is a consumer product,
289 | doubtful cases shall be resolved in favor of coverage. For a particular
290 | product received by a particular user, "normally used" refers to a
291 | typical or common use of that class of product, regardless of the status
292 | of the particular user or of the way in which the particular user
293 | actually uses, or expects or is expected to use, the product. A product
294 | is a consumer product regardless of whether the product has substantial
295 | commercial, industrial or non-consumer uses, unless such uses represent
296 | the only significant mode of use of the product.
297 |
298 | "Installation Information" for a User Product means any methods,
299 | procedures, authorization keys, or other information required to install
300 | and execute modified versions of a covered work in that User Product from
301 | a modified version of its Corresponding Source. The information must
302 | suffice to ensure that the continued functioning of the modified object
303 | code is in no case prevented or interfered with solely because
304 | modification has been made.
305 |
306 | If you convey an object code work under this section in, or with, or
307 | specifically for use in, a User Product, and the conveying occurs as
308 | part of a transaction in which the right of possession and use of the
309 | User Product is transferred to the recipient in perpetuity or for a
310 | fixed term (regardless of how the transaction is characterized), the
311 | Corresponding Source conveyed under this section must be accompanied
312 | by the Installation Information. But this requirement does not apply
313 | if neither you nor any third party retains the ability to install
314 | modified object code on the User Product (for example, the work has
315 | been installed in ROM).
316 |
317 | The requirement to provide Installation Information does not include a
318 | requirement to continue to provide support service, warranty, or updates
319 | for a work that has been modified or installed by the recipient, or for
320 | the User Product in which it has been modified or installed. Access to a
321 | network may be denied when the modification itself materially and
322 | adversely affects the operation of the network or violates the rules and
323 | protocols for communication across the network.
324 |
325 | Corresponding Source conveyed, and Installation Information provided,
326 | in accord with this section must be in a format that is publicly
327 | documented (and with an implementation available to the public in
328 | source code form), and must require no special password or key for
329 | unpacking, reading or copying.
330 |
331 | 7. Additional Terms.
332 |
333 | "Additional permissions" are terms that supplement the terms of this
334 | License by making exceptions from one or more of its conditions.
335 | Additional permissions that are applicable to the entire Program shall
336 | be treated as though they were included in this License, to the extent
337 | that they are valid under applicable law. If additional permissions
338 | apply only to part of the Program, that part may be used separately
339 | under those permissions, but the entire Program remains governed by
340 | this License without regard to the additional permissions.
341 |
342 | When you convey a copy of a covered work, you may at your option
343 | remove any additional permissions from that copy, or from any part of
344 | it. (Additional permissions may be written to require their own
345 | removal in certain cases when you modify the work.) You may place
346 | additional permissions on material, added by you to a covered work,
347 | for which you have or can give appropriate copyright permission.
348 |
349 | Notwithstanding any other provision of this License, for material you
350 | add to a covered work, you may (if authorized by the copyright holders of
351 | that material) supplement the terms of this License with terms:
352 |
353 | a) Disclaiming warranty or limiting liability differently from the
354 | terms of sections 15 and 16 of this License; or
355 |
356 | b) Requiring preservation of specified reasonable legal notices or
357 | author attributions in that material or in the Appropriate Legal
358 | Notices displayed by works containing it; or
359 |
360 | c) Prohibiting misrepresentation of the origin of that material, or
361 | requiring that modified versions of such material be marked in
362 | reasonable ways as different from the original version; or
363 |
364 | d) Limiting the use for publicity purposes of names of licensors or
365 | authors of the material; or
366 |
367 | e) Declining to grant rights under trademark law for use of some
368 | trade names, trademarks, or service marks; or
369 |
370 | f) Requiring indemnification of licensors and authors of that
371 | material by anyone who conveys the material (or modified versions of
372 | it) with contractual assumptions of liability to the recipient, for
373 | any liability that these contractual assumptions directly impose on
374 | those licensors and authors.
375 |
376 | All other non-permissive additional terms are considered "further
377 | restrictions" within the meaning of section 10. If the Program as you
378 | received it, or any part of it, contains a notice stating that it is
379 | governed by this License along with a term that is a further
380 | restriction, you may remove that term. If a license document contains
381 | a further restriction but permits relicensing or conveying under this
382 | License, you may add to a covered work material governed by the terms
383 | of that license document, provided that the further restriction does
384 | not survive such relicensing or conveying.
385 |
386 | If you add terms to a covered work in accord with this section, you
387 | must place, in the relevant source files, a statement of the
388 | additional terms that apply to those files, or a notice indicating
389 | where to find the applicable terms.
390 |
391 | Additional terms, permissive or non-permissive, may be stated in the
392 | form of a separately written license, or stated as exceptions;
393 | the above requirements apply either way.
394 |
395 | 8. Termination.
396 |
397 | You may not propagate or modify a covered work except as expressly
398 | provided under this License. Any attempt otherwise to propagate or
399 | modify it is void, and will automatically terminate your rights under
400 | this License (including any patent licenses granted under the third
401 | paragraph of section 11).
402 |
403 | However, if you cease all violation of this License, then your
404 | license from a particular copyright holder is reinstated (a)
405 | provisionally, unless and until the copyright holder explicitly and
406 | finally terminates your license, and (b) permanently, if the copyright
407 | holder fails to notify you of the violation by some reasonable means
408 | prior to 60 days after the cessation.
409 |
410 | Moreover, your license from a particular copyright holder is
411 | reinstated permanently if the copyright holder notifies you of the
412 | violation by some reasonable means, this is the first time you have
413 | received notice of violation of this License (for any work) from that
414 | copyright holder, and you cure the violation prior to 30 days after
415 | your receipt of the notice.
416 |
417 | Termination of your rights under this section does not terminate the
418 | licenses of parties who have received copies or rights from you under
419 | this License. If your rights have been terminated and not permanently
420 | reinstated, you do not qualify to receive new licenses for the same
421 | material under section 10.
422 |
423 | 9. Acceptance Not Required for Having Copies.
424 |
425 | You are not required to accept this License in order to receive or
426 | run a copy of the Program. Ancillary propagation of a covered work
427 | occurring solely as a consequence of using peer-to-peer transmission
428 | to receive a copy likewise does not require acceptance. However,
429 | nothing other than this License grants you permission to propagate or
430 | modify any covered work. These actions infringe copyright if you do
431 | not accept this License. Therefore, by modifying or propagating a
432 | covered work, you indicate your acceptance of this License to do so.
433 |
434 | 10. Automatic Licensing of Downstream Recipients.
435 |
436 | Each time you convey a covered work, the recipient automatically
437 | receives a license from the original licensors, to run, modify and
438 | propagate that work, subject to this License. You are not responsible
439 | for enforcing compliance by third parties with this License.
440 |
441 | An "entity transaction" is a transaction transferring control of an
442 | organization, or substantially all assets of one, or subdividing an
443 | organization, or merging organizations. If propagation of a covered
444 | work results from an entity transaction, each party to that
445 | transaction who receives a copy of the work also receives whatever
446 | licenses to the work the party's predecessor in interest had or could
447 | give under the previous paragraph, plus a right to possession of the
448 | Corresponding Source of the work from the predecessor in interest, if
449 | the predecessor has it or can get it with reasonable efforts.
450 |
451 | You may not impose any further restrictions on the exercise of the
452 | rights granted or affirmed under this License. For example, you may
453 | not impose a license fee, royalty, or other charge for exercise of
454 | rights granted under this License, and you may not initiate litigation
455 | (including a cross-claim or counterclaim in a lawsuit) alleging that
456 | any patent claim is infringed by making, using, selling, offering for
457 | sale, or importing the Program or any portion of it.
458 |
459 | 11. Patents.
460 |
461 | A "contributor" is a copyright holder who authorizes use under this
462 | License of the Program or a work on which the Program is based. The
463 | work thus licensed is called the contributor's "contributor version".
464 |
465 | A contributor's "essential patent claims" are all patent claims
466 | owned or controlled by the contributor, whether already acquired or
467 | hereafter acquired, that would be infringed by some manner, permitted
468 | by this License, of making, using, or selling its contributor version,
469 | but do not include claims that would be infringed only as a
470 | consequence of further modification of the contributor version. For
471 | purposes of this definition, "control" includes the right to grant
472 | patent sublicenses in a manner consistent with the requirements of
473 | this License.
474 |
475 | Each contributor grants you a non-exclusive, worldwide, royalty-free
476 | patent license under the contributor's essential patent claims, to
477 | make, use, sell, offer for sale, import and otherwise run, modify and
478 | propagate the contents of its contributor version.
479 |
480 | In the following three paragraphs, a "patent license" is any express
481 | agreement or commitment, however denominated, not to enforce a patent
482 | (such as an express permission to practice a patent or covenant not to
483 | sue for patent infringement). To "grant" such a patent license to a
484 | party means to make such an agreement or commitment not to enforce a
485 | patent against the party.
486 |
487 | If you convey a covered work, knowingly relying on a patent license,
488 | and the Corresponding Source of the work is not available for anyone
489 | to copy, free of charge and under the terms of this License, through a
490 | publicly available network server or other readily accessible means,
491 | then you must either (1) cause the Corresponding Source to be so
492 | available, or (2) arrange to deprive yourself of the benefit of the
493 | patent license for this particular work, or (3) arrange, in a manner
494 | consistent with the requirements of this License, to extend the patent
495 | license to downstream recipients. "Knowingly relying" means you have
496 | actual knowledge that, but for the patent license, your conveying the
497 | covered work in a country, or your recipient's use of the covered work
498 | in a country, would infringe one or more identifiable patents in that
499 | country that you have reason to believe are valid.
500 |
501 | If, pursuant to or in connection with a single transaction or
502 | arrangement, you convey, or propagate by procuring conveyance of, a
503 | covered work, and grant a patent license to some of the parties
504 | receiving the covered work authorizing them to use, propagate, modify
505 | or convey a specific copy of the covered work, then the patent license
506 | you grant is automatically extended to all recipients of the covered
507 | work and works based on it.
508 |
509 | A patent license is "discriminatory" if it does not include within
510 | the scope of its coverage, prohibits the exercise of, or is
511 | conditioned on the non-exercise of one or more of the rights that are
512 | specifically granted under this License. You may not convey a covered
513 | work if you are a party to an arrangement with a third party that is
514 | in the business of distributing software, under which you make payment
515 | to the third party based on the extent of your activity of conveying
516 | the work, and under which the third party grants, to any of the
517 | parties who would receive the covered work from you, a discriminatory
518 | patent license (a) in connection with copies of the covered work
519 | conveyed by you (or copies made from those copies), or (b) primarily
520 | for and in connection with specific products or compilations that
521 | contain the covered work, unless you entered into that arrangement,
522 | or that patent license was granted, prior to 28 March 2007.
523 |
524 | Nothing in this License shall be construed as excluding or limiting
525 | any implied license or other defenses to infringement that may
526 | otherwise be available to you under applicable patent law.
527 |
528 | 12. No Surrender of Others' Freedom.
529 |
530 | If conditions are imposed on you (whether by court order, agreement or
531 | otherwise) that contradict the conditions of this License, they do not
532 | excuse you from the conditions of this License. If you cannot convey a
533 | covered work so as to satisfy simultaneously your obligations under this
534 | License and any other pertinent obligations, then as a consequence you may
535 | not convey it at all. For example, if you agree to terms that obligate you
536 | to collect a royalty for further conveying from those to whom you convey
537 | the Program, the only way you could satisfy both those terms and this
538 | License would be to refrain entirely from conveying the Program.
539 |
540 | 13. Remote Network Interaction; Use with the GNU General Public License.
541 |
542 | Notwithstanding any other provision of this License, if you modify the
543 | Program, your modified version must prominently offer all users
544 | interacting with it remotely through a computer network (if your version
545 | supports such interaction) an opportunity to receive the Corresponding
546 | Source of your version by providing access to the Corresponding Source
547 | from a network server at no charge, through some standard or customary
548 | means of facilitating copying of software. This Corresponding Source
549 | shall include the Corresponding Source for any work covered by version 3
550 | of the GNU General Public License that is incorporated pursuant to the
551 | following paragraph.
552 |
553 | Notwithstanding any other provision of this License, you have
554 | permission to link or combine any covered work with a work licensed
555 | under version 3 of the GNU General Public License into a single
556 | combined work, and to convey the resulting work. The terms of this
557 | License will continue to apply to the part which is the covered work,
558 | but the work with which it is combined will remain governed by version
559 | 3 of the GNU General Public License.
560 |
561 | 14. Revised Versions of this License.
562 |
563 | The Free Software Foundation may publish revised and/or new versions of
564 | the GNU Affero General Public License from time to time. Such new versions
565 | will be similar in spirit to the present version, but may differ in detail to
566 | address new problems or concerns.
567 |
568 | Each version is given a distinguishing version number. If the
569 | Program specifies that a certain numbered version of the GNU Affero General
570 | Public License "or any later version" applies to it, you have the
571 | option of following the terms and conditions either of that numbered
572 | version or of any later version published by the Free Software
573 | Foundation. If the Program does not specify a version number of the
574 | GNU Affero General Public License, you may choose any version ever published
575 | by the Free Software Foundation.
576 |
577 | If the Program specifies that a proxy can decide which future
578 | versions of the GNU Affero General Public License can be used, that proxy's
579 | public statement of acceptance of a version permanently authorizes you
580 | to choose that version for the Program.
581 |
582 | Later license versions may give you additional or different
583 | permissions. However, no additional obligations are imposed on any
584 | author or copyright holder as a result of your choosing to follow a
585 | later version.
586 |
587 | 15. Disclaimer of Warranty.
588 |
589 | THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
590 | APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
591 | HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
592 | OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
593 | THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
594 | PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
595 | IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
596 | ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
597 |
598 | 16. Limitation of Liability.
599 |
600 | IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
601 | WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
602 | THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
603 | GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
604 | USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
605 | DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
606 | PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
607 | EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
608 | SUCH DAMAGES.
609 |
610 | 17. Interpretation of Sections 15 and 16.
611 |
612 | If the disclaimer of warranty and limitation of liability provided
613 | above cannot be given local legal effect according to their terms,
614 | reviewing courts shall apply local law that most closely approximates
615 | an absolute waiver of all civil liability in connection with the
616 | Program, unless a warranty or assumption of liability accompanies a
617 | copy of the Program in return for a fee.
618 |
619 | END OF TERMS AND CONDITIONS
620 |
621 | How to Apply These Terms to Your New Programs
622 |
623 | If you develop a new program, and you want it to be of the greatest
624 | possible use to the public, the best way to achieve this is to make it
625 | free software which everyone can redistribute and change under these terms.
626 |
627 | To do so, attach the following notices to the program. It is safest
628 | to attach them to the start of each source file to most effectively
629 | state the exclusion of warranty; and each file should have at least
630 | the "copyright" line and a pointer to where the full notice is found.
631 |
632 | <one line to give the program's name and a brief idea of what it does.>
633 | Copyright (C) <year> <name of author>
634 |
635 | This program is free software: you can redistribute it and/or modify
636 | it under the terms of the GNU Affero General Public License as published
637 | by the Free Software Foundation, either version 3 of the License, or
638 | (at your option) any later version.
639 |
640 | This program is distributed in the hope that it will be useful,
641 | but WITHOUT ANY WARRANTY; without even the implied warranty of
642 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
643 | GNU Affero General Public License for more details.
644 |
645 | You should have received a copy of the GNU Affero General Public License
646 | along with this program. If not, see <https://www.gnu.org/licenses/>.
647 |
648 | Also add information on how to contact you by electronic and paper mail.
649 |
650 | If your software can interact with users remotely through a computer
651 | network, you should also make sure that it provides a way for users to
652 | get its source. For example, if your program is a web application, its
653 | interface could display a "Source" link that leads users to an archive
654 | of the code. There are many ways you could offer source, and different
655 | solutions will be better for different programs; see section 13 for the
656 | specific requirements.
657 |
658 | You should also get your employer (if you work as a programmer) or school,
659 | if any, to sign a "copyright disclaimer" for the program, if necessary.
660 | For more information on this, and how to apply and follow the GNU AGPL, see
661 | <https://www.gnu.org/licenses/>.
662 |
--------------------------------------------------------------------------------