├── .yarnrc.yml ├── .github ├── CODEOWNERS ├── release-drafter.yml └── workflows │ ├── build.yml │ ├── release-drafter.yml │ └── publish-docker-image.yml ├── .dockerignore ├── nodemon.json ├── app ├── model │ └── SchematicRecord.d.ts ├── launch.ts ├── http │ ├── routes │ │ ├── IndexRouter.ts │ │ ├── DownloadRouter.ts │ │ ├── DeleteRouter.ts │ │ └── UploadRouter.ts │ ├── Response.ts │ └── ArkitektonikaServer.ts ├── config │ └── Config.ts ├── storage │ ├── IDataStorage.ts │ └── Database.ts ├── Logger.ts └── Arkitektonika.ts ├── renovate.json ├── LICENSE ├── Dockerfile ├── package.json ├── .gitignore ├── endpoints.yml ├── README.md └── tsconfig.json /.yarnrc.yml: -------------------------------------------------------------------------------- 1 | nodeLinker: node-modules 2 | -------------------------------------------------------------------------------- /.github/CODEOWNERS: -------------------------------------------------------------------------------- 1 | * @IntellectualSites/core-team 2 | -------------------------------------------------------------------------------- /.github/release-drafter.yml: -------------------------------------------------------------------------------- 1 | _extends: .github 2 | -------------------------------------------------------------------------------- /.dockerignore: -------------------------------------------------------------------------------- 1 | data/ 2 | node_modules/ 3 | tmp/ 4 | .github/ 5 | .gitignore 6 | nodemon.json 7 | renovate.json 8 | -------------------------------------------------------------------------------- /nodemon.json: -------------------------------------------------------------------------------- 1 | { 2 | "ignore": [".git", "node_modules", "dist"], 3 | "watch": ["./app"], 4 | "exec": "npm start", 5 | "ext": "ts" 6 | } -------------------------------------------------------------------------------- /app/model/SchematicRecord.d.ts: 
-------------------------------------------------------------------------------- 1 | export declare type SchematicRecord = { 2 | id?: number, 3 | downloadKey: string, 4 | deleteKey: string, 5 | fileName: string, 6 | last_accessed?: Date, 7 | expired?: Date 8 | } -------------------------------------------------------------------------------- /renovate.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "https://docs.renovatebot.com/renovate-schema.json", 3 | "extends": [ 4 | "config:base", 5 | ":semanticCommitsDisabled" 6 | ], 7 | "automerge": true, 8 | "labels": ["dependencies"], 9 | "rebaseWhen": "conflicted", 10 | "packageRules": [ 11 | { 12 | "matchDatasources": ["npm"], 13 | "stabilityDays": 3 14 | } 15 | ], 16 | "schedule": ["on the first day of the month"] 17 | } 18 | -------------------------------------------------------------------------------- /app/launch.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * Entrypoint for the application 3 | */ 4 | 5 | import minimist from 'minimist'; 6 | import Arkitektonika from "./Arkitektonika.js"; 7 | 8 | const app = new Arkitektonika(); 9 | const params = minimist(process.argv.slice(2)); 10 | 11 | (() => { 12 | if (params.prune) { 13 | app.prune().then(() => process.exit(0)) 14 | .catch(reason => console.error(reason)); 15 | return; 16 | } 17 | 18 | app.run(); 19 | })(); 20 | -------------------------------------------------------------------------------- /.github/workflows/build.yml: -------------------------------------------------------------------------------- 1 | name: build 2 | on: [pull_request, push] 3 | jobs: 4 | build: 5 | if: ${{ github.event_name != 'pull_request' || github.repository != github.event.pull_request.head.repo.full_name }} 6 | runs-on: ubuntu-latest 7 | steps: 8 | - uses: actions/checkout@v6 9 | - name: Set up Docker Buildx 10 | uses: docker/setup-buildx-action@v3 11 | - name: Build 12 | 
uses: docker/build-push-action@v6 13 | with: 14 | context: . 15 | file: ./Dockerfile 16 | push: false 17 | -------------------------------------------------------------------------------- /app/http/routes/IndexRouter.ts: -------------------------------------------------------------------------------- 1 | import express from "express"; 2 | import Arkitektonika from "../../Arkitektonika.js"; 3 | 4 | export const INDEX_ROUTER = (app: Arkitektonika, router: express.Application) => { 5 | 6 | router.get('/', (async (req, res) => { 7 | res.status(200).send({ 8 | name: "arkitektonika", 9 | version: process.env.ARK_VERSION, 10 | made: { 11 | with: 'love', 12 | by: 'IntellectualSites' 13 | } 14 | }); 15 | })); 16 | 17 | return router; 18 | } -------------------------------------------------------------------------------- /.github/workflows/release-drafter.yml: -------------------------------------------------------------------------------- 1 | name: draft release 2 | on: 3 | push: 4 | branches: 5 | - main 6 | pull_request: 7 | types: [opened, reopened, synchronize] 8 | pull_request_target: 9 | types: [opened, reopened, synchronize] 10 | jobs: 11 | update_release_draft: 12 | if: ${{ github.event_name != 'pull_request' || github.repository != github.event.pull_request.head.repo.full_name }} 13 | runs-on: ubuntu-latest 14 | steps: 15 | - uses: release-drafter/release-drafter@v6 16 | env: 17 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 18 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright 2022- IntellectualSites 2 | 3 | Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above copyright notice and this permission notice appear in all copies. 
4 | 5 | THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 6 | -------------------------------------------------------------------------------- /app/http/Response.ts: -------------------------------------------------------------------------------- 1 | import {Response} from "express"; 2 | 3 | const HTTP_EXPIRED = 420; 4 | const HTTP_NOT_FOUND = 404; 5 | const HTTP_INTERNAL_SERVER_ERROR = 500; 6 | 7 | export const ExpiredRecord = (response: Response) => { 8 | response.status(HTTP_EXPIRED).send({ 9 | error: "This schematic file already expired" 10 | }); 11 | }; 12 | 13 | export const CorruptMetadata = (response: Response, reason: string) => { 14 | response.status(HTTP_INTERNAL_SERVER_ERROR).send({ 15 | error: `Corrupt metadata: ${reason}` 16 | }); 17 | }; 18 | 19 | export const SchematicNotFound = (response: Response) => { 20 | response.status(HTTP_NOT_FOUND).send({ 21 | error: `Schematic file was not found` 22 | }); 23 | } 24 | 25 | export const MissingFileSystemEntry = (response: Response) => { 26 | response.status(HTTP_EXPIRED).send({ 27 | error: "Missing file in file system for schematic record - Expiring this record" 28 | }) 29 | } -------------------------------------------------------------------------------- /app/http/ArkitektonikaServer.ts: -------------------------------------------------------------------------------- 1 | import express from 'express'; 2 | import {Express} from 'express'; 3 | import Logger from "../Logger.js"; 4 | import Arkitektonika from "../Arkitektonika.js"; 5 | import {DOWNLOAD_ROUTER} from 
"./routes/DownloadRouter.js"; 6 | import {UPLOAD_ROUTER} from "./routes/UploadRouter.js"; 7 | import {INDEX_ROUTER} from "./routes/IndexRouter.js"; 8 | import {DELETE_ROUTER} from "./routes/DeleteRouter.js"; 9 | 10 | export default class ArkitektonikaServer { 11 | 12 | private readonly logger: Logger; 13 | private readonly app: Express; 14 | 15 | constructor(app: Arkitektonika) { 16 | this.logger = app.logger; 17 | this.app = express(); 18 | 19 | this.app.use((req, res, next) => { 20 | res.setHeader('Access-Control-Allow-Origin', app.config.allowedOrigin); 21 | next(); 22 | }); 23 | this.app.use(this.logger.getExpressLogger()); 24 | 25 | INDEX_ROUTER(app, this.app) 26 | UPLOAD_ROUTER(app, this.app) 27 | DOWNLOAD_ROUTER(app, this.app) 28 | DELETE_ROUTER(app, this.app) 29 | } 30 | 31 | public start(port: number = 3000): void { 32 | this.app.listen(port, () => { 33 | this.logger.info(`Arkitektonika server up and running @ 0.0.0.0:${port}`) 34 | }); 35 | } 36 | 37 | } -------------------------------------------------------------------------------- /app/config/Config.ts: -------------------------------------------------------------------------------- 1 | import * as fs from "fs"; 2 | import {PathLike} from "fs"; 3 | 4 | export declare type Config = { 5 | port: number, 6 | prune: number, 7 | maxIterations: number, 8 | maxSchematicSize: number, 9 | allowedOrigin: string, 10 | limiter: { 11 | windowMs: number, 12 | delayAfter: number, 13 | delayMs: number, 14 | } 15 | }; 16 | 17 | const DEFAULT_CONFIG: Config = { 18 | port: 3000, 19 | prune: 1000 * 60 * 30, 20 | maxIterations: 20, 21 | maxSchematicSize: 1000 * 1000, // 1 MB 22 | allowedOrigin: '*', 23 | limiter: { 24 | windowMs: 1000 * 60, 25 | delayAfter: 30, 26 | delayMs: 500 27 | } 28 | }; 29 | 30 | const parseConfigContent = (content: string): Config => { 31 | const json = JSON.parse(content); 32 | try { 33 | return json as Config; 34 | } catch (error) { 35 | return Object.assign(DEFAULT_CONFIG, json); 36 | } 37 | } 38 
| 39 | export const loadConfig = (file: PathLike): Config => { 40 | if (!fs.existsSync(file)) { 41 | fs.writeFileSync(file, JSON.stringify(DEFAULT_CONFIG, null, 2)); 42 | return DEFAULT_CONFIG; 43 | } 44 | try { 45 | return parseConfigContent(fs.readFileSync(file).toString()); 46 | } catch (error) { 47 | throw error 48 | } 49 | } -------------------------------------------------------------------------------- /.github/workflows/publish-docker-image.yml: -------------------------------------------------------------------------------- 1 | name: Publish Docker image 2 | on: 3 | release: 4 | types: [published] 5 | jobs: 6 | push_to_registries: 7 | name: Push Docker image to multiple registries 8 | runs-on: ubuntu-latest 9 | permissions: 10 | packages: write 11 | contents: read 12 | steps: 13 | - name: Check out the repo 14 | uses: actions/checkout@v6 15 | - name: Log in to Docker Hub 16 | uses: docker/login-action@v3 17 | with: 18 | username: ${{ secrets.DOCKER_USERNAME }} 19 | password: ${{ secrets.DOCKER_PASSWORD }} 20 | - name: Log in to the Container registry 21 | uses: docker/login-action@v3 22 | with: 23 | registry: ghcr.io 24 | username: ${{ github.actor }} 25 | password: ${{ secrets.GITHUB_TOKEN }} 26 | - name: Extract metadata (tags, labels) for Docker 27 | id: meta 28 | uses: docker/metadata-action@v5 29 | with: 30 | images: | 31 | intellectualsites/arkitektonika 32 | ghcr.io/${{ github.repository }} 33 | - name: Build and push Docker images 34 | uses: docker/build-push-action@v6 35 | with: 36 | context: . 
37 | push: true 38 | tags: ${{ steps.meta.outputs.tags }} 39 | labels: | 40 | org.opencontainers.image.revision=${{ github.event.pull_request.head.sha || github.event.after || github.event.release.tag_name }} 41 | ${{ steps.meta.outputs.labels }} 42 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | # Build the application 2 | # -> transpile typescript to javascript 3 | FROM node:lts AS builder 4 | 5 | WORKDIR /usr/src/app 6 | 7 | COPY package.json ./ 8 | COPY tsconfig.json ./ 9 | COPY yarn.lock ./ 10 | COPY .yarnrc.yml ./ 11 | COPY ./app ./app 12 | RUN corepack enable ; yarn set version latest ; yarn install ; yarn build 13 | 14 | # Application runner 15 | # -> runs the transpiled code itself 16 | # seperated from builder context to keep image as slim as possible 17 | FROM node:lts-alpine 18 | 19 | WORKDIR /app 20 | ENV NODE_ENV=production 21 | 22 | COPY package.json ./ 23 | COPY yarn.lock ./ 24 | COPY .yarnrc.yml ./ 25 | COPY --from=builder /usr/src/app/dist/app ./app 26 | RUN corepack enable ; yarn set version latest; \ 27 | RUN yarn workspaces focus --all --production && rm -rf "$(yarn cache clean)" ; yarn install 28 | # "temporary" fix to allow directory traversal in both docker and non-docker environments 29 | # Can't just change the app directory, as that might break existing directory mounts - so it'll do 30 | RUN cp package.json ./../package.json 31 | EXPOSE 3000 32 | CMD [ "node", "app/launch.js" ] 33 | 34 | LABEL \ 35 | org.opencontainers.image.vendor="IntellectualSites" \ 36 | org.opencontainers.image.title="Arkitektonika" \ 37 | org.opencontainers.image.description="A REST repository for NBT data for Minecraft" \ 38 | org.opencontainers.image.url="https://github.com/IntellectualSites" \ 39 | org.opencontainers.image.source="https://github.com/IntellectualSites/Arkitektonika" \ 40 | org.opencontainers.image.licenses="ISC" \ 
41 | com.docker.image.source.entrypoint=Dockerfile 42 | -------------------------------------------------------------------------------- /app/storage/IDataStorage.ts: -------------------------------------------------------------------------------- 1 | import {SchematicRecord} from "../model/SchematicRecord.js"; 2 | 3 | export default interface IDataStorage { 4 | 5 | getAllRecords(): Promise; 6 | 7 | getAllUnexpiredRecords(): Promise; 8 | 9 | /** 10 | * Retrieve a {@link SchematicRecord} from the current data storage implementation by its download key. 11 | * 12 | * @param downloadKey The download key to search for 13 | * @return Promise either containing the found {@link SchematicRecord} or 14 | * a failed promise of none found matching the download key. 15 | */ 16 | getSchematicRecordByDownloadKey(downloadKey: string): Promise; 17 | 18 | /** 19 | * Retrieve a {@link SchematicRecord} from the current data storage implementation by its delete key. 20 | * 21 | * @param deleteKey The delete key to search for 22 | * @return Promise either containing the found {@link SchematicRecord} or 23 | * a failed promise of none found matching the delete key. 24 | */ 25 | getSchematicRecordByDeleteKey(deleteKey: string): Promise; 26 | 27 | /** 28 | * Let a schematic record expire 29 | * @param recordId 30 | */ 31 | expireSchematicRecord(recordId: number): Promise; 32 | 33 | /** 34 | * 35 | * @param record 36 | */ 37 | storeSchematicRecord(record: SchematicRecord): Promise; 38 | 39 | /** 40 | * Let schematics expired if {@link SchematicRecord#last_accessed} is further than x milliseconds ago. 41 | * @param milliseconds The amount of milliseconds to check last_accessed against. 42 | * @return Promise either containing the expired rows or 43 | * a failed promise if something went wrong. 
44 | */ 45 | expireSchematicRecords(milliseconds: number): Promise; 46 | 47 | generateDownloadKey(maxIterations: number): Promise; 48 | 49 | generateDeletionKey(maxIterations: number): Promise; 50 | 51 | } -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "arkitektonika", 3 | "version": "2.1.1-SNAPSHOT", 4 | "description": "", 5 | "private": true, 6 | "scripts": { 7 | "start": "ts-node-esm ./app/launch.ts", 8 | "start:prod": "yarn build && node ./dist/app/launch.js", 9 | "build": "tsc", 10 | "start:nodemon": "node ./node_modules/nodemon/bin/nodemon.js" 11 | }, 12 | "repository": { 13 | "type": "git", 14 | "url": "git+https://github.com/IntellectualSites/Arkitektonika.git" 15 | }, 16 | "packageManager": "yarn@4.12.0", 17 | "type": "module", 18 | "author": "IntellectualSites", 19 | "license": "ISC", 20 | "bugs": { 21 | "url": "https://github.com/IntellectualSites/Arkitektonika/issues" 22 | }, 23 | "exports": "./dist/app/launch.js", 24 | "engines": { 25 | "node": ">=16.16.0" 26 | }, 27 | "homepage": "https://github.com/IntellectualSites/Arkitektonika#readme", 28 | "babel": { 29 | "presets": [ 30 | "@babel/preset-env" 31 | ], 32 | "plugins": [ 33 | [ 34 | "@babel/plugin-transform-runtime", 35 | { 36 | "regenerator": true 37 | } 38 | ] 39 | ] 40 | }, 41 | "dependencies": { 42 | "@babel/cli": "7.28.3", 43 | "@babel/core": "7.28.5", 44 | "@babel/preset-env": "7.28.5", 45 | "@babel/runtime": "7.28.4", 46 | "better-sqlite3": "12.4.6", 47 | "chalk": "5.6.2", 48 | "express": "4.21.2", 49 | "express-fileupload": "1.5.2", 50 | "express-slow-down": "3.0.1", 51 | "express-winston": "4.2.0", 52 | "minimist": "1.2.8", 53 | "multer": "2.0.2", 54 | "nanoid": "5.1.6", 55 | "nbt-ts": "1.3.6", 56 | "pako": "2.1.0", 57 | "ts-node": "10.9.2", 58 | "typescript": "5.9.3", 59 | "winston": "3.18.3", 60 | "winston-transport": "4.9.0" 61 | }, 62 | 
"devDependencies": { 63 | "@babel/plugin-transform-runtime": "7.28.5", 64 | "@types/better-sqlite3": "7.6.13", 65 | "@types/express": "4.17.25", 66 | "@types/express-fileupload": "1.5.1", 67 | "@types/express-slow-down": "2.0.0", 68 | "@types/gzip-js": "0.3.5", 69 | "@types/minimist": "1.2.5", 70 | "@types/node": "24.10.1", 71 | "@types/pako": "2.0.4", 72 | "nodemon": "3.1.11" 73 | } 74 | } 75 | -------------------------------------------------------------------------------- /app/http/routes/DownloadRouter.ts: -------------------------------------------------------------------------------- 1 | import express, {Request, Response} from "express"; 2 | import Arkitektonika, {SCHEMATIC_DIR} from "../../Arkitektonika.js"; 3 | import path from "path"; 4 | import * as fs from "fs"; 5 | import {SchematicRecord} from "../../model/SchematicRecord.js"; 6 | import {CorruptMetadata, ExpiredRecord, MissingFileSystemEntry} from "../Response.js"; 7 | 8 | export const DOWNLOAD_ROUTER = (app: Arkitektonika, router: express.Application) => { 9 | 10 | const fetchRecord = async (request: Request, response: Response): Promise => { 11 | let record; 12 | // search for record by download key 13 | try { 14 | record = await app.dataStorage.getSchematicRecordByDownloadKey(request.params.key); 15 | } catch (error) { 16 | response.status(404).send({ 17 | error: 'No record found for download key' 18 | }); 19 | return undefined; 20 | } 21 | if (!record.id) { 22 | CorruptMetadata(response, "Missing schematic id"); 23 | return undefined; 24 | } 25 | if (record.expired && record.expired.getMilliseconds() <= new Date().getMilliseconds()) { 26 | ExpiredRecord(response); 27 | return undefined; 28 | } 29 | if (!(fs.existsSync(path.join(SCHEMATIC_DIR, record.downloadKey)))) { 30 | await app.dataStorage.expireSchematicRecord(record.id); 31 | MissingFileSystemEntry(response); 32 | return undefined; 33 | } 34 | return record; 35 | } 36 | 37 | router.options('/download/:key', (req, res) => { 38 | 
res.setHeader('Access-Control-Allow-Methods', 'HEAD, GET'); 39 | res.sendStatus(204); 40 | }) 41 | 42 | router.head('/download/:key', (async (req, res) => { 43 | if (await fetchRecord(req, res) != undefined) { 44 | res.sendStatus(200); 45 | } 46 | })); 47 | 48 | router.get('/download/:key', (async (req, res) => { 49 | let record = await fetchRecord(req, res); 50 | if (!record) { 51 | return; 52 | } 53 | res.setHeader('Content-Disposition', `attachment; filename="${record.fileName}"`) 54 | fs.createReadStream(path.join(SCHEMATIC_DIR, record.downloadKey)).pipe(res) 55 | })); 56 | 57 | return router; 58 | } -------------------------------------------------------------------------------- /app/Logger.ts: -------------------------------------------------------------------------------- 1 | import winston from 'winston'; 2 | import * as Transport from 'winston-transport'; 3 | import expressWinston from 'express-winston'; 4 | import {Handler, Request, Response} from "express"; 5 | import chalk from 'chalk'; 6 | 7 | export default class Logger { 8 | 9 | private readonly logger: winston.Logger; 10 | private readonly transport: Transport = new winston.transports.Console(); 11 | private readonly format = winston.format.combine( 12 | winston.format.timestamp({ 13 | format: 'YYYY-MM-DD HH:mm:ss' 14 | }), 15 | winston.format((info) => { 16 | info.level = info.level.toUpperCase(); 17 | return info; 18 | })(), 19 | winston.format.colorize(), 20 | winston.format.printf((info) => { 21 | return "[" + info.level + " | " + info.timestamp + "] " + info.message; 22 | }), 23 | ); 24 | 25 | constructor() { 26 | this.logger = winston.createLogger({ 27 | transports: this.transport, 28 | format: this.format, 29 | level: Logger.getLogLevel() 30 | }); 31 | } 32 | 33 | public info(message: string, ...meta: any[]): void { 34 | this.logger.info(message, meta); 35 | } 36 | 37 | public error(message: string, ...meta: any[]): void { 38 | this.logger.error(message, meta); 39 | } 40 | 41 | public 
debug(message: string, ...meta: any[]): void { 42 | this.logger.debug(message, meta); 43 | } 44 | 45 | public getExpressLogger(): Handler { 46 | return expressWinston.logger({ 47 | transports: [this.transport], 48 | colorize: true, 49 | format: this.format, 50 | level: Logger.getLogLevel(), 51 | msg: (req: Request, res: Response) => { 52 | const substrTo = Math.min(req.url.length, 50); 53 | const shortened = req.url.length > 50; 54 | const url = req.url.substr(0, substrTo) + (shortened ? '...' : ''); 55 | return chalk.grey(`${req.method} ${url}`) + (shortened ? chalk.blueBright(' (URL Shortened)') : '') + 56 | ` ${res.statusCode} ` + 57 | chalk.grey(`{{res.responseTime}}ms`) 58 | } 59 | }); 60 | } 61 | 62 | public static getLogLevel(): string { 63 | let level = (process.env.LOG_LEVEL || 'info').toLowerCase(); 64 | if (['error', 'warn', 'info', 'verbose', 'debug', 'silly'].indexOf(level) === -1) { 65 | level = 'info'; 66 | } 67 | return level; 68 | } 69 | 70 | } -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | ### Node ### 2 | # Logs 3 | logs 4 | *.log 5 | npm-debug.log* 6 | yarn-debug.log* 7 | yarn-error.log* 8 | lerna-debug.log* 9 | 10 | # Diagnostic reports (https://nodejs.org/api/report.html) 11 | report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json 12 | 13 | # Runtime data 14 | pids 15 | *.pid 16 | *.seed 17 | *.pid.lock 18 | 19 | # Directory for instrumented libs generated by jscoverage/JSCover 20 | lib-cov 21 | 22 | # Coverage directory used by tools like istanbul 23 | coverage 24 | *.lcov 25 | 26 | # nyc test coverage 27 | .nyc_output 28 | 29 | # Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files) 30 | .grunt 31 | 32 | # Bower dependency directory (https://bower.io/) 33 | bower_components 34 | 35 | # node-waf configuration 36 | .lock-wscript 37 | 38 | # Compiled binary addons 
(https://nodejs.org/api/addons.html) 39 | build/Release 40 | 41 | # Dependency directories 42 | node_modules/ 43 | jspm_packages/ 44 | 45 | # TypeScript v1 declaration files 46 | typings/ 47 | 48 | # TypeScript cache 49 | *.tsbuildinfo 50 | 51 | # Optional npm cache directory 52 | .npm 53 | 54 | # Optional eslint cache 55 | .eslintcache 56 | 57 | # Microbundle cache 58 | .rpt2_cache/ 59 | .rts2_cache_cjs/ 60 | .rts2_cache_es/ 61 | .rts2_cache_umd/ 62 | 63 | # Optional REPL history 64 | .node_repl_history 65 | 66 | # Output of 'npm pack' 67 | *.tgz 68 | 69 | # Yarn Integrity file 70 | .yarn-integrity 71 | 72 | # dotenv environment variables file 73 | .env 74 | .env.test 75 | 76 | # parcel-bundler cache (https://parceljs.org/) 77 | .cache 78 | 79 | # Next.js build output 80 | .next 81 | 82 | # Nuxt.js build / generate output 83 | .nuxt 84 | dist 85 | 86 | # Gatsby files 87 | .cache/ 88 | # Comment in the public line in if your project uses Gatsby and not Next.js 89 | # https://nextjs.org/blog/next-9-1#public-directory-support 90 | # public 91 | 92 | # vuepress build output 93 | .vuepress/dist 94 | 95 | # Serverless directories 96 | .serverless/ 97 | 98 | # FuseBox cache 99 | .fusebox/ 100 | 101 | # DynamoDB Local files 102 | .dynamodb/ 103 | 104 | # TernJS port file 105 | .tern-port 106 | 107 | # Stores VSCode versions used for testing VSCode extensions 108 | .vscode-test 109 | 110 | ### yarn ### 111 | # https://yarnpkg.com/advanced/qa#which-files-should-be-gitignored 112 | .yarn/* 113 | 114 | # .yarn/cache and .pnp.* may be safely ignored, but you'll need to run yarn install 115 | # to regenerate them between each branch switch. 
116 | # Uncomment the following lines if you're not using Zero-Installs: 117 | # .yarn/cache 118 | # .pnp.* 119 | 120 | ### project ### 121 | data/ 122 | .idea 123 | -------------------------------------------------------------------------------- /app/http/routes/DeleteRouter.ts: -------------------------------------------------------------------------------- 1 | import express, {Request, Response} from "express"; 2 | import slowDown from 'express-slow-down'; 3 | import Arkitektonika, {SCHEMATIC_DIR} from "../../Arkitektonika.js"; 4 | import path from "path"; 5 | import * as fs from "fs"; 6 | import {CorruptMetadata, ExpiredRecord} from "../Response.js"; 7 | import {SchematicRecord} from "../../model/SchematicRecord.js"; 8 | 9 | export const DELETE_ROUTER = (app: Arkitektonika, router: express.Application) => { 10 | 11 | /** 12 | * Configures the rate limiter for the delete-route because brute force is kind of not cool 13 | * 14 | * Default: 15 | * Allows 30 requests per minute = 1 request every 2 seconds. 16 | * Each request above that limit will load for 500ms extra. 17 | * The 31st request will take additional 500ms to fulfill, 32nd additional 1000ms, ... 
18 | */ 19 | 20 | const LIMITER = slowDown({ 21 | windowMs: app.config.limiter.windowMs || 1000 * 60, 22 | delayAfter: app.config.limiter.delayAfter || 30, 23 | delayMs: () => app.config.limiter.delayMs || 500 24 | }); 25 | 26 | const fetchRecord = async (request: Request, response: Response): Promise => { 27 | let record; 28 | // search for record by deletion key 29 | try { 30 | record = await app.dataStorage.getSchematicRecordByDeleteKey(request.params.key); 31 | } catch (error) { 32 | response.status(404).send({ 33 | error: 'No record found for deletion key' 34 | }); 35 | return undefined; 36 | } 37 | if (!record.id) { 38 | CorruptMetadata(response, "Missing schematic id"); 39 | return undefined; 40 | } 41 | if (record.expired && record.expired.getMilliseconds() <= new Date().getMilliseconds()) { 42 | ExpiredRecord(response); 43 | return undefined; 44 | } 45 | return record; 46 | } 47 | 48 | router.options('/delete/:key', (req, res) => { 49 | res.setHeader('Access-Control-Allow-Methods', 'HEAD, DELETE'); 50 | res.sendStatus(204); 51 | }) 52 | 53 | router.head('/delete/:key', LIMITER, (async (req, res) => { 54 | if (await fetchRecord(req, res)) { 55 | return res.sendStatus(200); 56 | } 57 | })); 58 | 59 | router.delete('/delete/:key', LIMITER, (async (req, res) => { 60 | let record = await fetchRecord(req, res); 61 | if (!record) { 62 | return; 63 | } 64 | await app.dataStorage.expireSchematicRecord(record.id!); 65 | const filePath = path.join(SCHEMATIC_DIR, record.downloadKey); 66 | if (fs.existsSync(filePath)) { 67 | fs.rmSync(filePath); 68 | } 69 | res.status(200).send({}); 70 | })); 71 | 72 | return router; 73 | } -------------------------------------------------------------------------------- /app/http/routes/UploadRouter.ts: -------------------------------------------------------------------------------- 1 | import express from "express"; 2 | import fileUpload, {UploadedFile} from 'express-fileupload'; 3 | import Arkitektonika, {SCHEMATIC_DIR} from 
"../../Arkitektonika.js"; 4 | import * as fs from "fs"; 5 | import path from "path"; 6 | import {decode} from "nbt-ts"; 7 | import Pako from "pako"; 8 | 9 | const UPLOAD_OPTIONS: fileUpload.Options = { 10 | abortOnLimit: true, 11 | useTempFiles: true, 12 | preserveExtension: ("schematic".length), 13 | createParentPath: true, 14 | safeFileNames: true, 15 | limits: {}, 16 | uploadTimeout: 1000 * 15 17 | }; 18 | export const UPLOAD_ROUTER = (app: Arkitektonika, router: express.Application) => { 19 | router.options('/upload', (req, res) => { 20 | res.setHeader('Access-Control-Allow-Methods', 'POST'); 21 | res.sendStatus(204); 22 | }) 23 | router.post('/upload', fileUpload(UPLOAD_OPTIONS), (async (req, res) => { 24 | const file = req.files?.schematic as UploadedFile; 25 | 26 | // check if request contains file 27 | if (!file) { 28 | return res.status(400).send({ 29 | error: 'Missing file' 30 | }); 31 | } 32 | 33 | // Validate nbt file 34 | try { 35 | const content = fs.readFileSync(file.tempFilePath) 36 | const deflated = Buffer.from(Pako.ungzip(content)) 37 | const result = decode(deflated, { 38 | unnamed: false 39 | }) 40 | if (result.value == null) { 41 | throw new Error("decoded value is null"); 42 | } 43 | if (result.length > app.config.maxSchematicSize) { 44 | fs.unlinkSync(file.tempFilePath); 45 | return res.status(413).send({ 46 | error: `Submitted NBT file exceeds max size of ${app.config.maxSchematicSize} bytes` 47 | }) 48 | } 49 | } catch (error) { 50 | app.logger.debug('Invalid request due to invalid nbt content: ' + error); 51 | fs.unlinkSync(file.tempFilePath); 52 | return res.status(400).send({ 53 | error: 'File is not valid NBT' 54 | }); 55 | } 56 | 57 | // Generate keys 58 | let downloadKey, deleteKey; 59 | try { 60 | downloadKey = await app.dataStorage.generateDownloadKey(app.config.maxIterations); 61 | deleteKey = await app.dataStorage.generateDeletionKey(app.config.maxIterations); 62 | } catch (error) { 63 | fs.unlinkSync(file.tempFilePath); 64 | 
return res.status(500).send({ 65 | error: 'Failed to generate download and / or deletion key' 66 | }); 67 | } 68 | 69 | // Insert record into accounting table 70 | try { 71 | const record = await app.dataStorage.storeSchematicRecord({ 72 | downloadKey, deleteKey, 73 | fileName: file.name 74 | }); 75 | await file.mv(path.join(SCHEMATIC_DIR, downloadKey)) 76 | res.status(200).send({ 77 | download_key: record.downloadKey, 78 | delete_key: record.deleteKey 79 | }); 80 | } catch (error) { 81 | fs.unlinkSync(file.tempFilePath); 82 | return res.status(500).send({ 83 | error: 'Failed to persist data in table' 84 | }); 85 | } 86 | })); 87 | 88 | return router; 89 | } 90 | -------------------------------------------------------------------------------- /app/Arkitektonika.ts: -------------------------------------------------------------------------------- 1 | import ArkitektonikaServer from "./http/ArkitektonikaServer.js"; 2 | import Logger from "./Logger.js"; 3 | import IDataStorage from "./storage/IDataStorage.js"; 4 | import Database from "./storage/Database.js"; 5 | import path from "path"; 6 | import * as fs from "fs"; 7 | import {Config, loadConfig} from "./config/Config.js"; 8 | import {fileURLToPath} from "url"; 9 | 10 | const ROOT_DIR = path.join(path.dirname(fileURLToPath(import.meta.url)), '..', '..') 11 | const PACKAGE_JSON = path.join(ROOT_DIR, "package.json") 12 | 13 | export const DATA_DIR: string = path.join(ROOT_DIR, 'data'); 14 | export const SCHEMATIC_DIR: string = path.join(DATA_DIR, 'schemata'); 15 | 16 | export default class Arkitektonika { 17 | 18 | private readonly _logger: Logger; 19 | private readonly _config: Config; 20 | private readonly httpServer: ArkitektonikaServer; 21 | private readonly _dataStorage: IDataStorage; 22 | 23 | constructor() { 24 | this._logger = new Logger(); 25 | 26 | if (!fs.existsSync(DATA_DIR)) { 27 | fs.mkdirSync(DATA_DIR) 28 | } 29 | if (!fs.existsSync(SCHEMATIC_DIR)) { 30 | fs.mkdirSync(SCHEMATIC_DIR) 31 | } 32 | 33 | 
this._config = loadConfig(path.join(DATA_DIR, 'config.json')); 34 | this.httpServer = new ArkitektonikaServer(this); 35 | this._dataStorage = new Database(this._logger); 36 | } 37 | 38 | /** 39 | * Starts the http server for production usage. 40 | */ 41 | public run(): void { 42 | process.env.ARK_VERSION = JSON.parse(fs.readFileSync(PACKAGE_JSON).toString()).version 43 | this.httpServer.start(this._config.port || 3000); 44 | } 45 | 46 | /** 47 | * Prune all old or expired schematics. 48 | * Expired schematics are defined by {@link SchematicRecord#last_accessed}. Old or "broken" schematics are either: 49 | * - Schematic database entries with no matching file on the file system 50 | * - Dangling files on the file system without a database entry 51 | */ 52 | public async prune() { 53 | this.logger.info("Starting prune of old or expired schematics... ") 54 | const deleted = await this._dataStorage.expireSchematicRecords(this._config.prune); 55 | for (let record of deleted) { 56 | fs.rmSync(path.join(SCHEMATIC_DIR, record.downloadKey)) 57 | } 58 | this.logger.info(`Expired ${deleted.length} schematic records from the database and deleted file system entries`) 59 | let deletionCounter = 0; 60 | for (let file of fs.readdirSync(SCHEMATIC_DIR)) { 61 | try { 62 | await this._dataStorage.getSchematicRecordByDownloadKey(path.basename(file)) 63 | } catch (error) { 64 | // file has no matching database entry -> delete the dangling file 65 | this.logger.debug(`Deleting dangling file ${file}`) 66 | fs.rmSync(path.join(SCHEMATIC_DIR, file)); 67 | deletionCounter++; 68 | } 69 | } 70 | this.logger.info(`Deleted ${deletionCounter} dangling files`); 71 | deletionCounter = 0; 72 | for (let record of (await this._dataStorage.getAllUnexpiredRecords())) { 73 | if (fs.existsSync(path.join(SCHEMATIC_DIR, record.downloadKey))) { 74 | continue; 75 | } 76 | if (!record.id) { 77 | continue; 78 | } 79 | await this._dataStorage.expireSchematicRecord(record.id); 80 | this.logger.debug(`Expired 
schematic with id ${record.id} because no file system entry was present`) 81 | deletionCounter++; 82 | } 83 | this.logger.info(`Pruned ${deletionCounter} schematic records`); 84 | } 85 | 86 | get logger(): Logger { 87 | return this._logger; 88 | } 89 | 90 | get dataStorage(): IDataStorage { 91 | return this._dataStorage; 92 | } 93 | 94 | get config(): Config { 95 | return this._config; 96 | } 97 | } -------------------------------------------------------------------------------- /endpoints.yml: -------------------------------------------------------------------------------- 1 | swagger: '2.0' 2 | info: 3 | title: Arkitektonika Endpoint Definition 4 | description: Collection of all available endpoints and their responses 5 | version: 1.0.0 6 | basePath: '/' 7 | host: 'api.schematic.cloud' 8 | paths: 9 | /upload: 10 | post: 11 | summary: Upload an NBT file 12 | consumes: 13 | - 'multipart/form-data' 14 | responses: 15 | 200: 16 | description: 'The NBT file was persisted' 17 | schema: 18 | properties: 19 | download_key: 20 | type: string 21 | delete_key: 22 | type: string 23 | required: 24 | - download_key 25 | - delete_key 26 | 400: 27 | description: 'A user error occurred (No file attached / invalid NBT)' 28 | schema: 29 | properties: 30 | error: 31 | description: A more precise description of the error 32 | type: string 33 | required: 34 | - error 35 | 413: 36 | description: 'The uploaded schematic file is too big' 37 | schema: 38 | properties: 39 | error: 40 | description: Containing the error message including the max upload size 41 | type: string 42 | required: 43 | - error 44 | 500: 45 | $ref: '#/responses/500' 46 | /download/{key}: 47 | head: 48 | summary: Simulate a download request to get the matching headers before retrieving the actual file 49 | parameters: 50 | - name: key 51 | description: The generated download key for that file 52 | type: string 53 | in: path 54 | required: true 55 | maxLength: 32 56 | minLength: 32 57 | responses: 58 | 200:
description: The NBT file would be downloaded on a GET request 60 | 404: 61 | description: 'No schematic was found for the passed download key' 62 | schema: 63 | properties: 64 | error: 65 | description: Informational 66 | type: string 67 | required: 68 | - error 69 | 420: 70 | description: 'The schematic file has already expired' 71 | schema: 72 | properties: 73 | error: 74 | description: Informational 75 | type: string 76 | required: 77 | - error 78 | 500: 79 | $ref: '#/responses/500' 80 | get: 81 | summary: Download an uploaded NBT file 82 | parameters: 83 | - name: key 84 | description: The generated download key for that file 85 | type: string 86 | in: path 87 | required: true 88 | maxLength: 32 89 | minLength: 32 90 | produces: 91 | - 'application/json' 92 | - 'application/octet-stream' 93 | responses: 94 | 200: 95 | description: The NBT file is present and sent in the response body 96 | 404: 97 | description: 'No schematic was found for the passed download key' 98 | schema: 99 | properties: 100 | error: 101 | description: Informational 102 | type: string 103 | required: 104 | - error 105 | 420: 106 | description: 'The schematic file has already expired' 107 | schema: 108 | properties: 109 | error: 110 | description: Informational 111 | type: string 112 | required: 113 | - error 114 | 500: 115 | $ref: '#/responses/500' 116 | /delete/{key}: 117 | head: 118 | summary: Simulate a delete request to get the matching headers before deleting the actual file 119 | parameters: 120 | - name: key 121 | description: The generated deletion key for that file 122 | type: string 123 | in: path 124 | required: true 125 | maxLength: 32 126 | minLength: 32 127 | responses: 128 | 200: 129 | description: The NBT file would be deleted on a DELETE request 130 | 404: 131 | description: 'No schematic was found for the passed deletion key' 132 | schema: 133 | properties: 134 | error: 135 | description: Informational 136 | type: string 137 | required: 138 | - error 139 | 420:
description: 'The schematic file has already expired' 141 | schema: 142 | properties: 143 | error: 144 | description: Informational 145 | type: string 146 | required: 147 | - error 148 | 429: 149 | description: 'Too many requests were sent' 150 | 500: 151 | $ref: '#/responses/500' 152 | delete: 153 | summary: Delete an uploaded NBT file 154 | parameters: 155 | - name: key 156 | description: The generated deletion key for that file 157 | type: string 158 | in: path 159 | required: true 160 | maxLength: 32 161 | minLength: 32 162 | responses: 163 | 200: 164 | description: The NBT file was deleted 165 | 404: 166 | description: 'No schematic was found for the passed deletion key' 167 | schema: 168 | properties: 169 | error: 170 | description: Informational 171 | type: string 172 | required: 173 | - error 174 | 420: 175 | description: 'The schematic file has already expired' 176 | schema: 177 | properties: 178 | error: 179 | description: Informational 180 | type: string 181 | required: 182 | - error 183 | 429: 184 | description: 'Too many requests were sent' 185 | 500: 186 | $ref: '#/responses/500' 187 | 188 | responses: 189 | 500: 190 | description: The request could not be handled, e.g.
due to a database error 191 | schema: 192 | description: Basic structure of a 500 error 193 | properties: 194 | error: 195 | description: Contains the component of Arkitektonika which is in charge of the error (Key-Generator / Database) 196 | type: string 197 | required: 198 | - error -------------------------------------------------------------------------------- /app/storage/Database.ts: -------------------------------------------------------------------------------- 1 | import sqlite from 'better-sqlite3'; 2 | import Logger from "../Logger.js"; 3 | import IDataStorage from "./IDataStorage.js"; 4 | import {SchematicRecord} from "../model/SchematicRecord.js"; 5 | import path from "path"; 6 | import {DATA_DIR} from "../Arkitektonika.js"; 7 | import {customAlphabet} from "nanoid"; 8 | 9 | const ID_GENERATOR = customAlphabet("0123456789abcdef", 32); 10 | 11 | export default class Database implements IDataStorage { 12 | 13 | private readonly database: sqlite.Database; 14 | 15 | constructor(logger: Logger) { 16 | this.database = sqlite(path.join(DATA_DIR, 'database.db'), { 17 | verbose: (message, additionalArgs) => logger.debug(String(message), additionalArgs) 18 | }); 19 | this.migrate(); 20 | } 21 | 22 | getAllRecords(): Promise { 23 | return new Promise((resolve) => { 24 | const rows = this.database.prepare('SELECT * FROM accounting').all(); 25 | resolve(rows.map(value => Database.transformRowToRecord(value))); 26 | }); 27 | } 28 | 29 | getAllUnexpiredRecords(): Promise { 30 | return new Promise((resolve) => { 31 | const rows = this.database.prepare('SELECT * FROM accounting WHERE expired IS NULL').all(); 32 | resolve(rows.map(value => Database.transformRowToRecord(value))); 33 | }); 34 | } 35 | 36 | /** 37 | * @inheritDoc 38 | */ 39 | getSchematicRecordByDeleteKey(deleteKey: string): Promise { 40 | return new Promise(async (resolve, reject) => { 41 | const result = this.database.prepare('SELECT * FROM accounting WHERE delete_key = ? 
LIMIT 1') 42 | .get(deleteKey); 43 | if (!result) { 44 | return reject("No data found for passed delete key"); 45 | } 46 | resolve(Database.transformRowToRecord(result)); 47 | }); 48 | } 49 | 50 | /** 51 | * @inheritDoc 52 | */ 53 | getSchematicRecordByDownloadKey(downloadKey: string): Promise { 54 | return new Promise(async (resolve, reject) => { 55 | const result = this.database.prepare('SELECT * FROM accounting WHERE download_key = ? LIMIT 1') 56 | .get(downloadKey); 57 | if (!result) { 58 | return reject("No data found for passed download key"); 59 | } 60 | resolve(Database.transformRowToRecord(result)); 61 | }); 62 | } 63 | 64 | /** 65 | * @inheritDoc 66 | */ 67 | expireSchematicRecord(recordId: number): Promise { 68 | return new Promise(async (resolve, reject) => { 69 | try { 70 | const changes = this.database.prepare('UPDATE accounting SET expired = ? WHERE id = ?') 71 | .run(Date.now(), recordId).changes; 72 | if (changes < 1) { 73 | return reject(new Error("Failed to expire schematic - No schematic exists with passed id")); 74 | } 75 | resolve(null); 76 | } catch (error) { 77 | reject(error); 78 | } 79 | }); 80 | } 81 | 82 | /** 83 | * @inheritDoc 84 | */ 85 | storeSchematicRecord(record: SchematicRecord): Promise { 86 | return new Promise(async (resolve, reject) => { 87 | try { 88 | this.database.prepare('INSERT INTO accounting (filename, download_key, delete_key, last_accessed) VALUES (?, ?, ?, ?)') 89 | .bind([record.fileName, record.downloadKey, record.deleteKey, Date.now()]).run(); 90 | resolve(record); 91 | } catch (error) { 92 | reject(error); 93 | } 94 | }); 95 | } 96 | 97 | /** 98 | * @inheritDoc 99 | */ 100 | expireSchematicRecords(milliseconds: number): Promise { 101 | return new Promise(async (resolve, reject) => { 102 | try { 103 | // retrieve rows to delete 104 | const rows = this.database.prepare('SELECT * FROM accounting WHERE last_accessed <= ? 
AND expired IS NULL') 105 | .all((Date.now() - milliseconds)); 106 | if (rows.length == 0) { 107 | return resolve([]); 108 | } 109 | const records: SchematicRecord[] = rows.map(entry => Database.transformRowToRecord(entry)); 110 | 111 | const stmt = this.database.prepare('UPDATE accounting SET expired = ? WHERE id = ?'); 112 | 113 | for (let record of records) { 114 | stmt.run(Date.now(), record.id); 115 | } 116 | 117 | resolve(records); 118 | } catch (error) { 119 | reject(error); 120 | } 121 | }); 122 | } 123 | 124 | generateDeletionKey(maxIterations: number): Promise { 125 | return new Promise((resolve, reject) => { 126 | let iterations = 0; 127 | let key: string | null; 128 | do { 129 | key = ID_GENERATOR(); 130 | const dbResult = this.database.prepare('SELECT id FROM accounting where delete_key = ? LIMIT 1') 131 | .get(key) 132 | if (dbResult) { 133 | key = null; 134 | continue 135 | } 136 | if (key) 137 | resolve(key); 138 | } while (key == null && (iterations++ < maxIterations)) 139 | reject(); 140 | }); 141 | } 142 | 143 | generateDownloadKey(maxIterations: number): Promise { 144 | return new Promise((resolve, reject) => { 145 | let iterations = 0; 146 | let key: string | null; 147 | do { 148 | key = ID_GENERATOR(); 149 | const dbResult = this.database.prepare('SELECT id FROM accounting where download_key = ? 
LIMIT 1') 150 | .get(key) 151 | if (dbResult) { 152 | key = null; 153 | continue 154 | } 155 | if (key) 156 | resolve(key); 157 | } while (key == null && (iterations++ < maxIterations)) 158 | reject(); 159 | }); 160 | } 161 | 162 | 163 | /** 164 | * Set up the database 165 | * @private 166 | */ 167 | private migrate(): void { 168 | this.database.prepare(`create table if not exists accounting ( 169 | id integer not null, 170 | download_key char(32) not null, 171 | delete_key char(32) not null, 172 | filename char(33) not null, 173 | last_accessed integer not null, 174 | expired date, 175 | constraint accounting_pk primary key (id autoincrement))`).run(); 176 | [ 177 | 'create unique index if not exists accounting_id_uindex on accounting (id);', 178 | 'create unique index if not exists accounting_download_key_uindex on accounting (download_key);', 179 | 'create unique index if not exists accounting_delete_key_uindex on accounting (delete_key);', 180 | ].forEach(qry => this.database.prepare(qry).run()); 181 | } 182 | 183 | /** 184 | * Internal helper method to map a database row to a {@link SchematicRecord} instance. 185 | * @param row The database row to convert. 186 | * @private 187 | */ 188 | private static transformRowToRecord(row: any): SchematicRecord { 189 | return { 190 | id: row.id, 191 | downloadKey: row.download_key, 192 | deleteKey: row.delete_key, 193 | fileName: row.filename, 194 | expired: row.expired ? new Date(row.expired) : undefined, 195 | last_accessed: row.last_accessed ? new Date(row.last_accessed) : undefined 196 | } 197 | } 198 | } -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Arkitektonika 2 | 3 |

4 | 5 |

6 | 7 | --- 8 | 9 | Arkitektonika is a REST repository for NBT data. It accepts uploads of valid NBT data and stores them in a local folder 10 | while accounting for its metadata in a local sqlite database. Optionally, uploaded files can be expired based on the 11 | configurable age by running the prune script. Files can always be deleted via their deletion key. 12 | 13 | Example Instances: 14 | 15 | | Address | Expiry | 16 | |-----------------------------------|------------| 17 | | https://api.schematic.cloud/ | 30 days | 18 | 19 | ## To Run 20 | 21 | ### With Docker 22 | 23 | ```sh 24 | docker pull intellectualsites/arkitektonika 25 | ``` 26 | 27 | Available on https://hub.docker.com/r/intellectualsites/arkitektonika 28 | 29 | ### From scratch 30 | 31 | ```sh 32 | git clone https://github.com/IntellectualSites/Arkitektonika.git && 33 | cd Arkitektonika && 34 | yarn install 35 | ``` 36 | 37 | #### With Typescript transpiling (recommended) 38 | 39 | ```sh 40 | yarn start:prod 41 | ``` 42 | 43 | #### Without Typescript transpiling 44 | 45 | ```sh 46 | yarn start 47 | ``` 48 | 49 | ### Build image locally 50 | 51 | Clone the entire repository and run the following commands: 52 | 53 | ```sh 54 | docker build -t intellectualsites/arkitektonika: . 55 | ``` 56 | 57 | --- 58 | 59 | Example docker compose: 60 | 61 | ```yaml 62 | version: '3.8' 63 | 64 | services: 65 | arkitektonika: 66 | container_name: Arkitektonika 67 | image: intellectualsites/arkitektonika:dev 68 | restart: unless-stopped 69 | volumes: 70 | - /home/ark/data:/data # Mount the data folder (containing config file, database and schematic storage) 71 | environment: 72 | - LOG_LEVEL=DEBUG # if debug logs should be printed to the console 73 | ``` 74 | 75 | `/data` is mounted to the host at `/home/ark/data` as that folder contains persistent data. 
76 | 77 | ## Prune data 78 | 79 | Execute the start command with the prune flag to execute the prune routine: 80 | ```sh 81 | yarn start:prod --prune 82 | ``` 83 | 84 | ## Set up Expiration 85 | 86 | Create a cron job that runs at whatever frequency you desire. As an example, this will run the pruning script every 12 87 | hours: 88 | 89 | ```sh 90 | 0 */12 * * * cd /srv/arkitektonika && /usr/bin/yarn start:prod --prune 91 | ``` 92 | 93 | Or with a docker-compose configuration: 94 | 95 | ```sh 96 | 0 */12 * * * cd /srv/arkitektonika && docker-compose run --rm arkitektonika node app/launch.js --prune 97 | ``` 98 | 99 | ## Configuration 100 | 101 | ```json 102 | { 103 | "port": 3000, 104 | "prune": 1800000, 105 | "maxIterations": 20, 106 | "maxSchematicSize": 1000000, 107 | "limiter": { 108 | "windowMs": 60000, 109 | "delayAfter": 30, 110 | "delayMs": 500 111 | } 112 | } 113 | ``` 114 | 115 | | Config Key | Description | 116 | |--------------------|----------------------------------------------------------------------------------------------------------------------------| 117 | | port | on which port should the application bind | 118 | | prune | defines how old records must be to be deleted by the prune script (in ms) | 119 | | maxIterations | maximum amount of iterations to obtain a unique download and deletion token | 120 | | maxSchematicSize | maximum size of schematic files to be accepted (in bytes) | 121 | | limiter.windowMs | the frame of the limiter (after what duration should the limit gets reset) | 122 | | limiter.delayAfter | After how many requests during windowMs should delayMs be applied | 123 | | limiter.delayMs | How many ms should the request take longer. 
Formula: `currentRequestDelay = (currentRequestAmount - delayAfter) * delayMs` | 124 | 125 | ## File structure: 126 | 127 | ``` 128 | data 129 | ├── config.json 130 | ├── database.db 131 | └── schemata 132 | ├── fe65d7edc37149c47171962dc26a039b 133 | └── a98f299c5cf294e6555617e83226bcdd 134 | ``` 135 | 136 | `config.json` holds the user configuration data
137 | `database.db` holds the required data for each schematic
 138 | `schemata` holds all schematic file data 139 | 140 | ### Routes 141 | 142 | All routes will be available at the exposed port (e.g. `localhost:3000`). 143 | 144 | ### Upload a file 145 | 146 | **POST `INSTANCE_URL/upload`**: send your file as multipart/form-data; example: 147 | 148 | ```sh 149 | curl --location --request POST 'http://localhost:3000/upload' \ 150 | --form 'schematic=@/path/to/plot.schem' 151 | ``` 152 | 153 | response: 154 | 155 | | code | meaning | 156 | |------|----------------------------------------------------------------------| 157 | | 200 | file was of valid NBT format and was accepted | 158 | | 400 | file was not of valid NBT format | 159 | | 413 | file payload was too large and rejected | 160 | | 500 | file could not be found on disk after being uploaded (upload failed) | 161 | 162 | success body: 163 | 164 | ```json 165 | { 166 | "download_key": "db6186c8795740379d26fc61ecba1a24", 167 | "delete_key": "11561161dffe4a1298992ce063be5ff9" 168 | } 169 | ``` 170 | 171 | The download key allows you to download the file, and the delete key lets you delete it. Share the `download_key`, but 172 | not the `delete_key`. 173 | 174 | ### Check download headers 175 | 176 | **HEAD `INSTANCE_URL/download/:download_key`**: check what headers you'd get if you sent a GET request for a file with 177 | the given download_key; example: 178 | 179 | ```sh 180 | curl --location --head 'http://localhost:3000/download/db6186c8795740379d26fc61ecba1a24' 181 | ``` 182 | 183 | The response for this is in the form of status codes only. 
184 | 185 | | Status-Code | Meaning | 186 | |-------------|----------------------------------------------------------------------------------------| 187 | | 200 | File was found, prospective download would succeed | 188 | | 404 | File was not found in the database | 189 | | 410 | File metadata is in accounting table, but file is not on disk or already expired | 190 | | 500 | An internal server error occurred due to corrupted metadata (missing data in database) | 191 | 192 | ### Download a file 193 | 194 | **GET `INSTANCE_URL/download/:download_key`**: download a file with the given `download_key`; example: 195 | 196 | ```sh 197 | curl --location --request GET 'http://localhost:3000/download/db6186c8795740379d26fc61ecba1a24' 198 | ``` 199 | 200 | response: 201 | see **Check download headers** above. 202 | 203 | On success, the file is sent as an attachment for download to the browser / requester. 204 | 205 | ### Check deletion headers 206 | 207 | **HEAD `INSTANCE_URL/delete/:delete_key`**: check what headers you'd get if you sent a DELETE request for a file with 208 | the given delete_key; example: 209 | 210 | ```sh 211 | curl --location --head 'http://localhost:3000/delete/11561161dffe4a1298992ce063be5ff9' 212 | ``` 213 | 214 | The response for this is in the form of status codes only. 
 215 | 216 | | Status-Code | Meaning | 217 | |-------------|----------------------------------------------------------------------------------------| 218 | | 200 | File was found, prospective deletion would succeed | 219 | | 404 | File was not found in the database | 220 | | 410 | File metadata is in accounting table, but file is not on disk or already expired | 221 | | 500 | An internal server error occurred due to corrupted metadata (missing data in database) | 222 | 223 | ### Delete a file 224 | 225 | **DELETE `INSTANCE_URL/delete/:delete_key`**: delete a file with the given `delete_key`; example: 226 | 227 | ```sh 228 | curl --location --request DELETE 'http://localhost:3000/delete/11561161dffe4a1298992ce063be5ff9' 229 | ``` 230 | 231 | response: 232 | see **Check deletion headers** above. 233 | 234 | On success, the file is deleted and the record is marked as expired in the database. 235 | -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | /* Visit https://aka.ms/tsconfig.json to read more about this file */ 4 | 5 | /* Projects */ 6 | // "incremental": true, /* Enable incremental compilation */ 7 | // "composite": true, /* Enable constraints that allow a TypeScript project to be used with project references. */ 8 | // "tsBuildInfoFile": "./", /* Specify the folder for .tsbuildinfo incremental compilation files. */ 9 | // "disableSourceOfProjectReferenceRedirect": true, /* Disable preferring source files instead of declaration files when referencing composite projects */ 10 | // "disableSolutionSearching": true, /* Opt a project out of multi-project reference checking when editing. */ 11 | // "disableReferencedProjectLoad": true, /* Reduce the number of projects loaded automatically by TypeScript.
*/ 12 | 13 | /* Language and Environment */ 14 | "target": "es6", /* Set the JavaScript language version for emitted JavaScript and include compatible library declarations. */ 15 | // "lib": [], /* Specify a set of bundled library declaration files that describe the target runtime environment. */ 16 | // "jsx": "preserve", /* Specify what JSX code is generated. */ 17 | // "experimentalDecorators": true, /* Enable experimental support for TC39 stage 2 draft decorators. */ 18 | // "emitDecoratorMetadata": true, /* Emit design-type metadata for decorated declarations in source files. */ 19 | // "jsxFactory": "", /* Specify the JSX factory function used when targeting React JSX emit, e.g. 'React.createElement' or 'h' */ 20 | // "jsxFragmentFactory": "", /* Specify the JSX Fragment reference used for fragments when targeting React JSX emit e.g. 'React.Fragment' or 'Fragment'. */ 21 | // "jsxImportSource": "", /* Specify module specifier used to import the JSX factory functions when using `jsx: react-jsx*`.` */ 22 | // "reactNamespace": "", /* Specify the object invoked for `createElement`. This only applies when targeting `react` JSX emit. */ 23 | // "noLib": true, /* Disable including any library files, including the default lib.d.ts. */ 24 | // "useDefineForClassFields": true, /* Emit ECMAScript-standard-compliant class fields. */ 25 | 26 | /* Modules */ 27 | "module": "Node16", /* Specify what module code is generated. */ 28 | "rootDir": "./", /* Specify the root folder within your source files. */ 29 | "moduleResolution": "Node16", /* Specify how TypeScript looks up a file from a given module specifier. */ 30 | "baseUrl": "./", /* Specify the base directory to resolve non-relative module names. */ 31 | // "paths": {}, /* Specify a set of entries that re-map imports to additional lookup locations. */ 32 | // "rootDirs": [], /* Allow multiple folders to be treated as one when resolving modules. 
*/ 33 | // "typeRoots": [], /* Specify multiple folders that act like `./node_modules/@types`. */ 34 | // "types": [], /* Specify type package names to be included without being referenced in a source file. */ 35 | // "allowUmdGlobalAccess": true, /* Allow accessing UMD globals from modules. */ 36 | // "resolveJsonModule": true, /* Enable importing .json files */ 37 | // "noResolve": true, /* Disallow `import`s, `require`s or ``s from expanding the number of files TypeScript should add to a project. */ 38 | 39 | /* JavaScript Support */ 40 | // "allowJs": true, /* Allow JavaScript files to be a part of your program. Use the `checkJS` option to get errors from these files. */ 41 | // "checkJs": true, /* Enable error reporting in type-checked JavaScript files. */ 42 | // "maxNodeModuleJsDepth": 1, /* Specify the maximum folder depth used for checking JavaScript files from `node_modules`. Only applicable with `allowJs`. */ 43 | 44 | /* Emit */ 45 | // "declaration": true, /* Generate .d.ts files from TypeScript and JavaScript files in your project. */ 46 | // "declarationMap": true, /* Create sourcemaps for d.ts files. */ 47 | // "emitDeclarationOnly": true, /* Only output d.ts files and not JavaScript files. */ 48 | // "sourceMap": true, /* Create source map files for emitted JavaScript files. */ 49 | // "outFile": "./", /* Specify a file that bundles all outputs into one JavaScript file. If `declaration` is true, also designates a file that bundles all .d.ts output. */ 50 | "outDir": "./dist", /* Specify an output folder for all emitted files. */ 51 | // "removeComments": true, /* Disable emitting comments. */ 52 | // "noEmit": true, /* Disable emitting files from a compilation. */ 53 | // "importHelpers": true, /* Allow importing helper functions from tslib once per project, instead of including them per-file. 
*/ 54 | // "importsNotUsedAsValues": "remove", /* Specify emit/checking behavior for imports that are only used for types */ 55 | // "downlevelIteration": true, /* Emit more compliant, but verbose and less performant JavaScript for iteration. */ 56 | // "sourceRoot": "", /* Specify the root path for debuggers to find the reference source code. */ 57 | // "mapRoot": "", /* Specify the location where debugger should locate map files instead of generated locations. */ 58 | // "inlineSourceMap": true, /* Include sourcemap files inside the emitted JavaScript. */ 59 | // "inlineSources": true, /* Include source code in the sourcemaps inside the emitted JavaScript. */ 60 | // "emitBOM": true, /* Emit a UTF-8 Byte Order Mark (BOM) in the beginning of output files. */ 61 | // "newLine": "crlf", /* Set the newline character for emitting files. */ 62 | // "stripInternal": true, /* Disable emitting declarations that have `@internal` in their JSDoc comments. */ 63 | // "noEmitHelpers": true, /* Disable generating custom helper functions like `__extends` in compiled output. */ 64 | // "noEmitOnError": true, /* Disable emitting files if any type checking errors are reported. */ 65 | // "preserveConstEnums": true, /* Disable erasing `const enum` declarations in generated code. */ 66 | // "declarationDir": "./", /* Specify the output directory for generated declaration files. */ 67 | 68 | /* Interop Constraints */ 69 | // "isolatedModules": true, /* Ensure that each file can be safely transpiled without relying on other imports. */ 70 | // "allowSyntheticDefaultImports": true, /* Allow 'import x from y' when a module doesn't have a default export. */ 71 | "esModuleInterop": true, /* Emit additional JavaScript to ease support for importing CommonJS modules. This enables `allowSyntheticDefaultImports` for type compatibility. */ 72 | // "preserveSymlinks": true, /* Disable resolving symlinks to their realpath. This correlates to the same flag in node. 
*/ 73 | "forceConsistentCasingInFileNames": true, /* Ensure that casing is correct in imports. */ 74 | 75 | /* Type Checking */ 76 | "strict": true, /* Enable all strict type-checking options. */ 77 | // "noImplicitAny": true, /* Enable error reporting for expressions and declarations with an implied `any` type.. */ 78 | // "strictNullChecks": true, /* When type checking, take into account `null` and `undefined`. */ 79 | // "strictFunctionTypes": true, /* When assigning functions, check to ensure parameters and the return values are subtype-compatible. */ 80 | // "strictBindCallApply": true, /* Check that the arguments for `bind`, `call`, and `apply` methods match the original function. */ 81 | // "strictPropertyInitialization": true, /* Check for class properties that are declared but not set in the constructor. */ 82 | // "noImplicitThis": true, /* Enable error reporting when `this` is given the type `any`. */ 83 | // "useUnknownInCatchVariables": true, /* Type catch clause variables as 'unknown' instead of 'any'. */ 84 | // "alwaysStrict": true, /* Ensure 'use strict' is always emitted. */ 85 | // "noUnusedLocals": true, /* Enable error reporting when a local variables aren't read. */ 86 | // "noUnusedParameters": true, /* Raise an error when a function parameter isn't read */ 87 | // "exactOptionalPropertyTypes": true, /* Interpret optional property types as written, rather than adding 'undefined'. */ 88 | // "noImplicitReturns": true, /* Enable error reporting for codepaths that do not explicitly return in a function. */ 89 | // "noFallthroughCasesInSwitch": true, /* Enable error reporting for fallthrough cases in switch statements. */ 90 | // "noUncheckedIndexedAccess": true, /* Include 'undefined' in index signature results */ 91 | // "noImplicitOverride": true, /* Ensure overriding members in derived classes are marked with an override modifier. 
*/ 92 | // "noPropertyAccessFromIndexSignature": true, /* Enforces using indexed accessors for keys declared using an indexed type */ 93 | // "allowUnusedLabels": true, /* Disable error reporting for unused labels. */ 94 | // "allowUnreachableCode": true, /* Disable error reporting for unreachable code. */ 95 | 96 | /* Completeness */ 97 | // "skipDefaultLibCheck": true, /* Skip type checking .d.ts files that are included with TypeScript. */ 98 | "skipLibCheck": true /* Skip type checking all .d.ts files. */ 99 | } 100 | } 101 | --------------------------------------------------------------------------------