├── db └── .gitkeep ├── mise.toml ├── .github ├── FUNDING.yml ├── renovate.json5 ├── ISSUE_TEMPLATE │ └── manual-rule-request.md └── workflows │ └── cicd.yml ├── .dockerignore ├── bin └── update.ts ├── vitest.setup.ts ├── .editorconfig ├── .gitignore ├── src ├── types.ts ├── routes │ ├── v2 │ │ ├── ids │ │ │ ├── schemas │ │ │ │ ├── common.ts │ │ │ │ ├── query-params.ts │ │ │ │ ├── json-body.ts │ │ │ │ ├── query-params.test.ts │ │ │ │ └── json-body.test.ts │ │ │ ├── __snapshots__ │ │ │ │ └── handler.test.ts.snap │ │ │ ├── handler.ts │ │ │ └── handler.test.ts │ │ ├── special │ │ │ ├── schemas │ │ │ │ └── special.ts │ │ │ ├── handler.ts │ │ │ └── handler.test.ts │ │ ├── include.ts │ │ ├── include.test.ts │ │ └── include.test-utils.ts │ └── v1 │ │ └── ids │ │ ├── schemas │ │ ├── query-params.ts │ │ ├── json-body.ts │ │ ├── query-params.test.ts │ │ └── json-body.test.ts │ │ ├── __snapshots__ │ │ └── handler.test.ts.snap │ │ ├── handler.ts │ │ └── handler.test.ts ├── lib │ └── logger.ts ├── shared-schemas.ts ├── index.ts ├── config.ts ├── docs.ts ├── migrations │ └── 20190611171759_initial.ts ├── manual-rules.ts ├── db │ └── file-provider.ts ├── db.ts ├── app.ts ├── utils.ts ├── update.test.ts └── update.ts ├── vitest.config.ts ├── biome.json ├── tsconfig.json ├── tsdown.config.ts ├── pnpm-workspace.yaml ├── Dockerfile ├── eslint.config.js ├── package.json ├── README.md ├── docs └── openapi.yaml └── LICENSE.md /db/.gitkeep: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /mise.toml: -------------------------------------------------------------------------------- 1 | [tools] 2 | node = "24" 3 | -------------------------------------------------------------------------------- /.github/FUNDING.yml: -------------------------------------------------------------------------------- 1 | ko_fi: beequeue 2 | 
-------------------------------------------------------------------------------- /.dockerignore: -------------------------------------------------------------------------------- 1 | .idea 2 | .git 3 | .github 4 | dist 5 | node_modules 6 | redoc-static.html 7 | -------------------------------------------------------------------------------- /bin/update.ts: -------------------------------------------------------------------------------- 1 | import { updateRelations } from "../src/update.ts" 2 | 3 | await updateRelations() 4 | -------------------------------------------------------------------------------- /vitest.setup.ts: -------------------------------------------------------------------------------- 1 | import { migrator } from "./src/db.ts" 2 | 3 | await migrator.migrateToLatest() 4 | -------------------------------------------------------------------------------- /.github/renovate.json5: -------------------------------------------------------------------------------- 1 | { 2 | $schema: "https://docs.renovatebot.com/renovate-schema.json", 3 | extends: ["github>BeeeQueue/renovate-config:js-lib.json"], 4 | } 5 | -------------------------------------------------------------------------------- /.editorconfig: -------------------------------------------------------------------------------- 1 | root = true 2 | 3 | [*] # all files 4 | indent_style = tab 5 | indent_size = 2 6 | end_of_line = lf 7 | insert_final_newline = true 8 | charset = utf-8 9 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .idea/ 2 | .env 3 | coverage/ 4 | db/*.sqlite3* 5 | dist/ 6 | redoc-static.html 7 | 8 | # Logs 9 | logs 10 | *.log 11 | 12 | # Dependency directories 13 | node_modules/ 14 | -------------------------------------------------------------------------------- /src/types.ts: -------------------------------------------------------------------------------- 1 
| export type JsonObject = { [Key in string]: JsonValue } & { 2 | [Key in string]?: JsonValue | undefined 3 | } 4 | 5 | export type JsonArray = JsonValue[] | readonly JsonValue[] 6 | 7 | export type JsonPrimitive = string | number | boolean | null 8 | 9 | export type JsonValue = JsonPrimitive | JsonObject | JsonArray 10 | -------------------------------------------------------------------------------- /src/routes/v2/ids/schemas/common.ts: -------------------------------------------------------------------------------- 1 | import * as v from "valibot" 2 | 3 | // Does not include `thetvdb` due to the one-to-many issue 4 | export const numberIdSourceSchema = v.picklist([ 5 | "anilist", 6 | "anidb", 7 | "anisearch", 8 | "kitsu", 9 | "livechart", 10 | "myanimelist", 11 | ]) 12 | 13 | export const stringIdSourceSchema = v.picklist(["anime-planet", "notify-moe"]) 14 | -------------------------------------------------------------------------------- /src/lib/logger.ts: -------------------------------------------------------------------------------- 1 | import { pino } from "pino" 2 | 3 | import { config } from "../config.ts" 4 | 5 | const stream = 6 | process.env.NODE_ENV !== "production" 7 | ? 
(await import("pino-pretty")).PinoPretty() 8 | : undefined 9 | 10 | export const logger = pino( 11 | { 12 | level: config.LOG_LEVEL, 13 | redact: ["headers.authorization", "headers.cookie", "*.token"], 14 | }, 15 | stream, 16 | ) 17 | -------------------------------------------------------------------------------- /src/routes/v1/ids/schemas/query-params.ts: -------------------------------------------------------------------------------- 1 | import * as v from "valibot" 2 | 3 | import { numberIdSchema, oldSourceSchema } from "../../../../shared-schemas.ts" 4 | 5 | export const queryInputSchema = v.pipe( 6 | v.object({ 7 | source: oldSourceSchema, 8 | id: numberIdSchema, 9 | }), 10 | v.check((data) => Object.keys(data).length > 0, "At least one source is required."), 11 | ) 12 | 13 | export type QueryParamQuery = v.InferOutput 14 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/manual-rule-request.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Manual Rule Request 3 | about: ... 
4 | 5 | --- 6 | 7 | 12 | 13 | **Which entry is missing or duplicated?** 14 | 15 | `source:id` 16 | 17 | **What should it be mapped to, if you know?** 18 | 19 | `source:id` 20 | -------------------------------------------------------------------------------- /src/routes/v2/special/schemas/special.ts: -------------------------------------------------------------------------------- 1 | import * as v from "valibot" 2 | 3 | import { imdbIdSchema, numberIdSchema } from "../../../../shared-schemas.ts" 4 | import { includeSchema } from "../../include.ts" 5 | 6 | export const specialInputSchema = v.intersect([ 7 | v.object({ id: numberIdSchema }), 8 | includeSchema, 9 | ]) 10 | 11 | export const specialImdbInputSchema = v.intersect([ 12 | v.object({ id: imdbIdSchema }), 13 | includeSchema, 14 | ]) 15 | 16 | export type SpecialQuery = v.InferOutput 17 | -------------------------------------------------------------------------------- /vitest.config.ts: -------------------------------------------------------------------------------- 1 | import { defineConfig } from "vitest/config" 2 | 3 | export default defineConfig(async ({ command }) => ({ 4 | test: { 5 | reporters: ["verbose"], 6 | 7 | setupFiles: ["./vitest.setup.ts"], 8 | poolOptions: { 9 | forks: { singleFork: true, minForks: 1, maxForks: 1 }, 10 | }, 11 | 12 | env: { 13 | NODE_ENV: "test", 14 | }, 15 | 16 | coverage: { 17 | enabled: command === "build", 18 | exclude: ["config.ts"], 19 | 20 | lines: 90, 21 | functions: 85, 22 | branches: 85, 23 | statements: 90, 24 | }, 25 | }, 26 | })) 27 | -------------------------------------------------------------------------------- /biome.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "https://biomejs.dev/schemas/2.0.5/schema.json", 3 | "vcs": { 4 | "enabled": true, 5 | "clientKind": "git", 6 | "defaultBranch": "main", 7 | "useIgnoreFile": true 8 | }, 9 | "formatter": { 10 | "lineEnding": "lf", 11 | "indentStyle": "tab", 
12 | "lineWidth": 90, 13 | "formatWithErrors": true 14 | }, 15 | "javascript": { 16 | "formatter": { 17 | "quoteStyle": "double", 18 | "semicolons": "asNeeded" 19 | } 20 | }, 21 | 22 | "assist": { "actions": { "source": { "organizeImports": "off" } } }, 23 | "linter": { "enabled": false } 24 | } 25 | -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": ["@tsconfig/node24/tsconfig.json", "@tsconfig/strictest/tsconfig.json"], 3 | "compilerOptions": { 4 | "noEmit": true, 5 | "sourceMap": true, 6 | 7 | "module": "esnext", 8 | "moduleResolution": "bundler", 9 | "resolveJsonModule": true, 10 | "allowImportingTsExtensions": true, 11 | 12 | "lib": ["esnext"], 13 | "noImplicitReturns": false, 14 | "noPropertyAccessFromIndexSignature": false, 15 | "noUncheckedIndexedAccess": false, 16 | "exactOptionalPropertyTypes": false, 17 | 18 | "types": ["node"] 19 | }, 20 | "include": ["**/*.ts"], 21 | "exclude": ["node_modules"] 22 | } 23 | -------------------------------------------------------------------------------- /tsdown.config.ts: -------------------------------------------------------------------------------- 1 | import { defineConfig } from "tsdown" 2 | 3 | import pkgJson from "./package.json" with { type: "json" } 4 | 5 | export default defineConfig({ 6 | entry: ["src/index.ts", "src/migrations/*.ts"], 7 | outDir: "dist", 8 | 9 | sourcemap: true, 10 | minify: true, 11 | 12 | env: { 13 | NODE_ENV: process.env.NODE_ENV ?? 
"production", 14 | DEV: process.env.NODE_ENV === "development", 15 | PROD: process.env.NODE_ENV === "production", 16 | TEST: false, 17 | HOMEPAGE: pkgJson.homepage, 18 | }, 19 | 20 | shims: true, 21 | platform: "node", 22 | target: ["node24"], 23 | format: ["esm"], 24 | fixedExtension: true, 25 | }) 26 | -------------------------------------------------------------------------------- /src/routes/v2/ids/schemas/query-params.ts: -------------------------------------------------------------------------------- 1 | import * as v from "valibot" 2 | 3 | import { numberIdSchema, stringIdSchema } from "../../../../shared-schemas.ts" 4 | import { includeSchema } from "../../include.ts" 5 | 6 | import { numberIdSourceSchema, stringIdSourceSchema } from "./common.ts" 7 | 8 | export const queryInputSchema = v.intersect([ 9 | v.union([ 10 | v.object({ 11 | source: numberIdSourceSchema, 12 | id: numberIdSchema, 13 | }), 14 | v.object({ 15 | source: stringIdSourceSchema, 16 | id: stringIdSchema, 17 | }), 18 | ]), 19 | includeSchema, 20 | ]) 21 | 22 | export type QueryParamQuery = v.InferOutput 23 | -------------------------------------------------------------------------------- /src/shared-schemas.ts: -------------------------------------------------------------------------------- 1 | import * as v from "valibot" 2 | 3 | export const oldSourceSchema = v.picklist( 4 | ["anilist", "anidb", "myanimelist", "kitsu"], 5 | "Invalid source", 6 | ) 7 | 8 | export const numberIdSchema = v.pipe( 9 | v.unknown(), 10 | v.transform(Number), 11 | v.integer("Invalid ID"), 12 | v.minValue(1), 13 | v.maxValue(50_000_000), 14 | ) 15 | 16 | export const stringIdSchema = v.pipe( 17 | v.string("Invalid ID"), 18 | v.minLength(1), 19 | v.maxLength(150), 20 | ) 21 | 22 | export const imdbIdSchema = v.pipe( 23 | v.string("Invalid IMDB ID"), 24 | v.startsWith("tt"), 25 | v.minLength(3), 26 | v.maxLength(50), 27 | v.transform((input): `tt${string}` => input as `tt${string}`), 28 | ) 29 | 
-------------------------------------------------------------------------------- /src/index.ts: -------------------------------------------------------------------------------- 1 | import { serve } from "h3" 2 | 3 | import { createApp } from "./app.ts" 4 | import { config } from "./config.ts" 5 | import { migrator } from "./db.ts" 6 | import { updateRelations } from "./update.ts" 7 | 8 | const { NODE_ENV, PORT } = config 9 | 10 | // Note: Migrations are handled separately and not through Kysely 11 | 12 | const runUpdateScript = async () => updateRelations() 13 | 14 | if (NODE_ENV === "production") { 15 | void runUpdateScript() 16 | 17 | // eslint-disable-next-line ts/no-misused-promises 18 | setInterval(runUpdateScript, 1000 * 60 * 60 * 24) 19 | } 20 | 21 | const app = createApp() 22 | 23 | await migrator.migrateToLatest() 24 | 25 | serve(app, { hostname: "0.0.0.0", port: PORT }) 26 | -------------------------------------------------------------------------------- /src/routes/v1/ids/schemas/json-body.ts: -------------------------------------------------------------------------------- 1 | import * as v from "valibot" 2 | 3 | import type { SourceValue } from "../../../../db.ts" 4 | import { numberIdSchema, oldSourceSchema } from "../../../../shared-schemas.ts" 5 | 6 | export const singularItemInputSchema = v.pipe( 7 | v.record(oldSourceSchema, numberIdSchema), 8 | v.check((data) => Object.keys(data).length > 0, "At least one source is required."), 9 | ) 10 | 11 | const arrayInputSchema = v.pipe( 12 | v.array(singularItemInputSchema), 13 | v.check((data) => data.length > 0, "At least one source is required."), 14 | ) 15 | 16 | export const bodyInputSchema = v.union([singularItemInputSchema, arrayInputSchema]) 17 | 18 | type BodyItem = { 19 | [key in SourceValue]?: number 20 | } 21 | 22 | export type BodyQuery = BodyItem | BodyItem[] 23 | -------------------------------------------------------------------------------- /pnpm-workspace.yaml: 
-------------------------------------------------------------------------------- 1 | packages: [] 2 | 3 | ignoredBuiltDependencies: ["@biomejs/biome", esbuild, simple-git-hooks] 4 | onlyBuiltDependencies: [] 5 | 6 | overrides: 7 | "@antfu/eslint-config>@eslint/markdown": "-" 8 | "@antfu/eslint-config>@stylistic/eslint-plugin": "-" 9 | "@antfu/eslint-config>eslint-plugin-jsonc": "-" 10 | "@antfu/eslint-config>eslint-plugin-toml": "-" 11 | "@antfu/eslint-config>eslint-plugin-vue": "-" 12 | "@antfu/eslint-config>eslint-processor-vue-blocks": "-" 13 | "@antfu/eslint-config>jsonc-eslint-parser": "-" 14 | "@antfu/eslint-config>toml-eslint-parser": "-" 15 | "@antfu/eslint-config>vitest": "-" 16 | "better-sqlite3": "-" 17 | eslint-plugin-pnpm>jsonc-eslint-parser: "-" 18 | vite: npm:rolldown-vite@latest 19 | 20 | savePrefix: "" 21 | shellEmulator: true 22 | -------------------------------------------------------------------------------- /src/config.ts: -------------------------------------------------------------------------------- 1 | import * as v from "valibot" 2 | 3 | export const Environment = { 4 | Dev: "development", 5 | Test: "test", 6 | Prod: "production", 7 | } as const 8 | 9 | const schema = v.object({ 10 | NODE_ENV: v.optional(v.enum(Environment), Environment.Dev), 11 | PORT: v.optional(v.pipe(v.string(), v.transform(Number), v.integer()), "3000"), 12 | LOG_LEVEL: v.optional( 13 | v.picklist(["fatal", "error", "warn", "info", "debug", "trace"]), 14 | process.env.NODE_ENV === "development" ? 
"debug" : "info", 15 | ), 16 | USER_AGENT: v.optional(v.string(), "arm-server"), 17 | }) 18 | 19 | const result = v.safeParse(schema, process.env) 20 | 21 | if (!result.success) { 22 | console.error( 23 | "❌ Invalid environment variables:", 24 | JSON.stringify(result.issues, null, 4), 25 | ) 26 | 27 | process.exit(1) 28 | } 29 | 30 | export const config = result.output 31 | -------------------------------------------------------------------------------- /src/docs.ts: -------------------------------------------------------------------------------- 1 | import { existsSync } from "node:fs" 2 | import { readFile } from "node:fs/promises" 3 | import path from "node:path" 4 | 5 | import { H3, handleCacheHeaders, html } from "h3" 6 | 7 | import { CacheTimes } from "./utils.ts" 8 | 9 | const filePath = path.resolve(import.meta.dirname, "../redoc-static.html") 10 | let docsHtml: string | null = null 11 | 12 | export const docsRoutes = new H3() 13 | 14 | docsRoutes.get("/", async (event) => { 15 | if (docsHtml != null) { 16 | handleCacheHeaders(event, { maxAge: CacheTimes.DAY }) 17 | 18 | return html(docsHtml) 19 | } 20 | 21 | docsHtml = existsSync(filePath) ? 
await readFile(filePath, "utf8") : null 22 | 23 | if (docsHtml == null) { 24 | throw new Error("docs.html not found") 25 | } else { 26 | handleCacheHeaders(event, { maxAge: CacheTimes.DAY }) 27 | 28 | return html(docsHtml) 29 | } 30 | }) 31 | -------------------------------------------------------------------------------- /src/routes/v2/include.ts: -------------------------------------------------------------------------------- 1 | import * as v from "valibot" 2 | 3 | import { db, Source, type SourceValue } from "../../db.ts" 4 | 5 | export const includeSchema = v.object({ 6 | include: v.optional( 7 | v.pipe( 8 | v.string(), 9 | v.regex(/^[\-a-z,]+$/, "Invalid `include` query"), 10 | v.minLength(1), 11 | v.maxLength(100), 12 | ), 13 | ), 14 | }) 15 | 16 | export type IncludeQuery = v.InferOutput 17 | 18 | const sources = Object.values(Source) 19 | const selectAll = sources.map((column) => db.dynamic.ref(column)) 20 | export const buildSelectFromInclude = (include: string | null | undefined) => { 21 | if (include == null) { 22 | return selectAll 23 | } 24 | 25 | return include 26 | .split(",") 27 | .filter((inclusion) => sources.includes(inclusion as SourceValue)) 28 | .map((column) => db.dynamic.ref(column)) 29 | } 30 | -------------------------------------------------------------------------------- /src/migrations/20190611171759_initial.ts: -------------------------------------------------------------------------------- 1 | import { type Kysely, sql } from "kysely" 2 | 3 | export async function up(db: Kysely): Promise { 4 | await sql`PRAGMA journal_mode=WAL`.execute(db) 5 | 6 | await db.schema 7 | .createTable("relations") 8 | .ifNotExists() 9 | 10 | // Original columns 11 | .addColumn("anilist", "integer", (col) => col.unique()) 12 | .addColumn("anidb", "integer", (col) => col.unique()) 13 | .addColumn("myanimelist", "integer", (col) => col.unique()) 14 | .addColumn("kitsu", "integer", (col) => col.unique()) 15 | 16 | // v2 columns 17 | .addColumn("anime-planet", 
"text", (col) => col.unique()) 18 | .addColumn("anisearch", "integer", (col) => col.unique()) 19 | .addColumn("imdb", "text") 20 | .addColumn("livechart", "integer", (col) => col.unique()) 21 | .addColumn("notify-moe", "text", (col) => col.unique()) 22 | .addColumn("themoviedb", "integer") 23 | .addColumn("thetvdb", "integer") 24 | 25 | .execute() 26 | } 27 | -------------------------------------------------------------------------------- /src/routes/v2/ids/schemas/json-body.ts: -------------------------------------------------------------------------------- 1 | import * as v from "valibot" 2 | 3 | import type { Relation } from "../../../../db.ts" 4 | import { numberIdSchema, stringIdSchema } from "../../../../shared-schemas.ts" 5 | 6 | // Does not include `thetvdb` due to the one-to-many issue 7 | type BodyItem = Omit 8 | export const singularItemInputSchema = v.pipe( 9 | v.partial( 10 | v.strictObject({ 11 | anidb: numberIdSchema, 12 | anilist: numberIdSchema, 13 | "anime-planet": stringIdSchema, 14 | anisearch: numberIdSchema, 15 | kitsu: numberIdSchema, 16 | livechart: numberIdSchema, 17 | "notify-moe": stringIdSchema, 18 | myanimelist: numberIdSchema, 19 | }), 20 | ), 21 | v.check( 22 | (value) => Object.values(value).some((id) => id != null), 23 | "At least one ID must be provided", 24 | ), 25 | ) 26 | 27 | export type BodyQuery = BodyItem | BodyItem[] 28 | 29 | const arrayInputSchema = v.pipe( 30 | v.array(singularItemInputSchema), 31 | v.minLength(1), 32 | v.maxLength(100), 33 | ) 34 | 35 | export const bodyInputSchema = v.union([singularItemInputSchema, arrayInputSchema]) 36 | -------------------------------------------------------------------------------- /src/routes/v2/ids/__snapshots__/handler.test.ts.snap: -------------------------------------------------------------------------------- 1 | // Vitest Snapshot v1, https://vitest.dev/guide/snapshot.html 2 | 3 | exports[`json body > array input > requires at least one source 1`] = ` 4 | { 5 | "code": 
"FST_ERR_VALIDATION", 6 | "details": { 7 | "nested": { 8 | "0": [ 9 | "At least one ID must be provided", 10 | ], 11 | }, 12 | }, 13 | "error": "Bad Request", 14 | "message": "Validation error", 15 | "statusCode": 400, 16 | } 17 | `; 18 | 19 | exports[`json body > object input > errors correctly on an empty object 1`] = ` 20 | { 21 | "code": "FST_ERR_VALIDATION", 22 | "details": { 23 | "root": [ 24 | "At least one ID must be provided", 25 | ], 26 | }, 27 | "error": "Bad Request", 28 | "message": "Validation error", 29 | "statusCode": 400, 30 | } 31 | `; 32 | 33 | exports[`json body > object input > gET fails with json body 1`] = ` 34 | { 35 | "code": "FST_ERR_VALIDATION", 36 | "details": { 37 | "$": [ 38 | "Invalid type: Expected Object but received Object", 39 | ], 40 | }, 41 | "error": "Bad Request", 42 | "message": "Validation error", 43 | "statusCode": 400, 44 | } 45 | `; 46 | -------------------------------------------------------------------------------- /src/routes/v1/ids/__snapshots__/handler.test.ts.snap: -------------------------------------------------------------------------------- 1 | // Vitest Snapshot v1, https://vitest.dev/guide/snapshot.html 2 | 3 | exports[`json body > array input > requires at least one source 1`] = ` 4 | { 5 | "code": "FST_ERR_VALIDATION", 6 | "details": { 7 | "nested": { 8 | "0": [ 9 | "At least one source is required.", 10 | ], 11 | }, 12 | }, 13 | "error": "Bad Request", 14 | "message": "Validation error", 15 | "statusCode": 400, 16 | } 17 | `; 18 | 19 | exports[`json body > object input > errors correctly on an empty object 1`] = ` 20 | { 21 | "code": "FST_ERR_VALIDATION", 22 | "details": { 23 | "root": [ 24 | "At least one source is required.", 25 | ], 26 | }, 27 | "error": "Bad Request", 28 | "message": "Validation error", 29 | "statusCode": 400, 30 | } 31 | `; 32 | 33 | exports[`json body > object input > gET fails with json body 1`] = ` 34 | { 35 | "code": "FST_ERR_VALIDATION", 36 | "details": { 37 | "id": [ 38 | 
"Invalid key: Expected "id" but received undefined", 39 | ], 40 | "source": [ 41 | "Invalid key: Expected "source" but received undefined", 42 | ], 43 | }, 44 | "error": "Bad Request", 45 | "message": "Validation error", 46 | "statusCode": 400, 47 | } 48 | `; 49 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM node:24-alpine as base 2 | 3 | WORKDIR /app 4 | 5 | ENV PNPM_HOME=/pnpm 6 | ENV CI=1 7 | # Use production in case any dependencies use it in any way 8 | ENV NODE_ENV=production 9 | 10 | # Enable node compile cache 11 | ENV NODE_COMPILE_CACHE=/node-cc 12 | RUN mkdir -p $NODE_COMPILE_CACHE 13 | 14 | FROM base as base_deps 15 | 16 | ENV CI=1 17 | 18 | COPY package.json pnpm-lock.yaml pnpm-workspace.yaml ./ 19 | 20 | RUN corepack enable 21 | RUN corepack prepare --activate 22 | 23 | # Install dependencies 24 | RUN pnpm install --frozen-lockfile --ignore-scripts 25 | 26 | FROM base_deps as build 27 | 28 | COPY tsconfig.json tsdown.config.ts ./ 29 | COPY src/ src/ 30 | 31 | RUN node --run build 32 | 33 | FROM base_deps AS docs 34 | 35 | COPY docs/openapi.yaml docs/openapi.yaml 36 | 37 | RUN node --run docs 38 | 39 | FROM base 40 | 41 | COPY src/ src/ 42 | COPY package.json pnpm-workspace.yaml ./ 43 | 44 | COPY --from=build /app/dist dist/ 45 | COPY --from=docs /app/redoc-static.html . 46 | 47 | # Run with... 
48 | # Source maps enabled, since it does not affect performance from what I found 49 | ENV NODE_OPTIONS="--enable-source-maps" 50 | # Warnings disabled, we know what we're doing and they're annoying 51 | ENV NODE_NO_WARNINGS=1 52 | 53 | CMD ["node", "--run", "start"] 54 | -------------------------------------------------------------------------------- /src/routes/v1/ids/schemas/query-params.test.ts: -------------------------------------------------------------------------------- 1 | import { safeParse } from "valibot" 2 | import { describe, expect, it } from "vitest" 3 | 4 | import { Source } from "../../../../db.ts" 5 | import type { JsonValue } from "../../../../types.ts" 6 | 7 | import { queryInputSchema } from "./query-params.ts" 8 | 9 | type Case = [JsonValue, boolean] 10 | type Cases = Case[] 11 | 12 | const okCases: Cases = [ 13 | [{ source: Source.AniList, id: 1337 }, true], 14 | [{ source: Source.AniDB, id: 1337 }, true], 15 | [{ source: Source.MAL, id: 1337 }, true], 16 | [{ source: Source.Kitsu, id: 1337 }, true], 17 | [{ source: Source.Kitsu, id: 133_700 }, true], 18 | ] 19 | 20 | const badCases: Cases = [ 21 | [{}, false], 22 | [{ id: 1337 }, false], 23 | [{ source: Source.AniList }, false], 24 | [{ source: Source.AniList, id: null }, false], 25 | [{ source: Source.AniList, id: -1234 }, false], 26 | [{ source: Source.AniList, id: 50_000_001 }, false], 27 | ] 28 | 29 | describe("schema", () => { 30 | const inputs: Cases = [...okCases, ...badCases] 31 | 32 | it.each(inputs)("%o = %s", (input, expected) => { 33 | const result = safeParse(queryInputSchema, input) 34 | 35 | if (expected) { 36 | expect(result.issues?.[0]).not.toBeDefined() 37 | } else { 38 | expect(result.issues?.length ?? 
0).toBeGreaterThanOrEqual(1) 39 | } 40 | }) 41 | }) 42 | -------------------------------------------------------------------------------- /eslint.config.js: -------------------------------------------------------------------------------- 1 | import antfu from "@antfu/eslint-config" 2 | 3 | const sortImports = { 4 | "perfectionist/sort-imports": [ 5 | "error", 6 | { 7 | type: "natural", 8 | internalPattern: ["^@/", "^~/", "^#[a-zA-Z0-9-]+/"], 9 | newlinesBetween: "always", 10 | groups: [ 11 | ["builtin", "builtin-type"], 12 | ["external", "external-type"], 13 | ["internal", "internal-type"], 14 | ["parent", "parent-type"], 15 | ["sibling", "sibling-type"], 16 | ["index", "index-type"], 17 | "object", 18 | "unknown", 19 | ], 20 | }, 21 | ], 22 | } 23 | 24 | export default antfu({ 25 | ignores: ["**/*.json"], 26 | markdown: false, 27 | stylistic: false, 28 | jsonc: false, 29 | jsx: false, 30 | toml: false, 31 | typescript: { 32 | tsconfigPath: "tsconfig.json", 33 | 34 | overrides: { 35 | "no-console": "off", 36 | "antfu/no-top-level-await": "off", 37 | "import/consistent-type-specifier-style": ["error", "prefer-top-level"], 38 | "ts/consistent-type-imports": [ 39 | "error", 40 | { fixStyle: "inline-type-imports", disallowTypeAnnotations: false }, 41 | ], 42 | "node/prefer-global/process": "off", 43 | "ts/consistent-type-definitions": "off", 44 | "ts/no-use-before-define": "off", 45 | "unused-imports/no-unused-vars": "off", 46 | 47 | ...sortImports, 48 | }, 49 | }, 50 | }) 51 | -------------------------------------------------------------------------------- /src/routes/v2/ids/schemas/query-params.test.ts: -------------------------------------------------------------------------------- 1 | import { safeParse } from "valibot" 2 | import { describe, expect, it } from "vitest" 3 | 4 | import { Source } from "../../../../db.ts" 5 | import type { JsonValue } from "../../../../types.ts" 6 | 7 | import { queryInputSchema, type QueryParamQuery } from "./query-params.ts" 8 | 9 | 
type Case = [V, boolean] 10 | type Cases = Array> 11 | 12 | const okCases = [ 13 | [{ source: Source.AniList, id: 1337 }, true], 14 | [{ source: Source.AniDB, id: 1337 }, true], 15 | [{ source: Source.MAL, id: 1337 }, true], 16 | [{ source: Source.Kitsu, id: 1337 }, true], 17 | [{ source: Source.Kitsu, id: 133_700 }, true], 18 | [{ source: Source.AnimePlanet, id: "1337" }, true], 19 | ] satisfies Cases 20 | 21 | const badCases: Cases = [ 22 | [{}, false], 23 | [{ id: 1337 }, false], 24 | [{ source: Source.AniList }, false], 25 | [{ source: Source.AniList, id: null }, false], 26 | [{ source: Source.AniList, id: -1234 }, false], 27 | [{ source: Source.AniList, id: 50_000_001 }, false], 28 | [{ source: Source.IMDB, id: "tt1337" }, false], 29 | [{ source: Source.TheTVDB, id: 1337 }, false], 30 | ] 31 | 32 | describe("schema", () => { 33 | const inputs = [...okCases, ...badCases] satisfies Cases 34 | 35 | it.each(inputs)("%o = %s", (input, expected) => { 36 | const result = safeParse(queryInputSchema, input) 37 | 38 | if (expected) { 39 | expect(result.issues?.[0]).not.toBeDefined() 40 | } else { 41 | expect(result.issues?.length ?? 
0).toBeGreaterThanOrEqual(1) 42 | } 43 | }) 44 | }) 45 | -------------------------------------------------------------------------------- /src/routes/v1/ids/schemas/json-body.test.ts: -------------------------------------------------------------------------------- 1 | import { safeParse } from "valibot" 2 | import { describe, expect, it } from "vitest" 3 | 4 | import type { Relation } from "../../../../db.ts" 5 | import type { JsonValue } from "../../../../types.ts" 6 | 7 | import { bodyInputSchema } from "./json-body.ts" 8 | 9 | type Case = [V, boolean] 10 | type Cases = Array> 11 | 12 | const okCases = [ 13 | [{ anilist: 1337 }, true], 14 | [{ anidb: 1337 }, true], 15 | [{ anidb: 1337, anilist: 1337 }, true], 16 | [{ anidb: 1337, anilist: 1337, myanimelist: 1337, kitsu: 1337 }, true], 17 | ] satisfies Cases 18 | 19 | const badCases = [ 20 | // No source 21 | [{}, false], 22 | // Invalid ID (negative) 23 | [{ anilist: -1 }, false], 24 | // Invalid ID (not integer) 25 | [{ anilist: 1.5 }, false], 26 | [{ anidb: 1.5 }, false], 27 | // Invalid source 28 | [{ aniDb: 1337 }, false], 29 | [{ aniList: 1337 }, false], 30 | [{ anidb: 1337, test: 123 }, false], 31 | ] satisfies Cases 32 | 33 | const mapToSingularArrayInput = (cases: Cases): Cases => 34 | cases.map(([input, expected]) => [[input], expected]) 35 | 36 | describe("schema", () => { 37 | const inputs = [ 38 | ...okCases, 39 | ...badCases, 40 | [[], false], 41 | ...mapToSingularArrayInput(okCases), 42 | ...mapToSingularArrayInput(badCases), 43 | ] satisfies Cases 44 | 45 | it.each(inputs)("%o = %s", (input, expected) => { 46 | const result = safeParse(bodyInputSchema, input) 47 | 48 | if (expected) { 49 | expect(result.issues?.[0]).not.toBeDefined() 50 | } else { 51 | expect(result.issues?.length ?? 
0).toBeGreaterThanOrEqual(1)
		}
	})
})
--------------------------------------------------------------------------------
/src/manual-rules.ts:
--------------------------------------------------------------------------------
import { db, type Relation } from "./db.ts"

// A rule endpoint encoded as "<source column>:<id>", e.g. "anilist:1337".
type Rule = `${keyof Relation}:${number}`

// Manual override mappings, keyed by the wrong `source:id` and pointing at
// the correct one. NOTE(review): the type arguments were stripped in this
// copy — `Record<Rule, Rule>` matches how `to` is used below (string,
// split on ":"); confirm against upstream. Currently empty; entries come
// from accepted manual-rule requests (see ISSUE_TEMPLATE).
const rules: Record<Rule, Rule> = {}

/**
 * Applies the manual override rules to the relations table.
 *
 * For each `from -> to` rule: deletes the row matched by the `from` column/id
 * and copies the `from` id onto the row matched by `to`, inside a single
 * transaction. A rule whose target row already carries the `from` id is
 * reported via console.warn as obsolete. Rejects (via Promise.all) if a
 * `from` row cannot be found at all; per-rule transaction failures are
 * logged but do not abort the other rules.
 */
export const updateBasedOnManualRules = async () => {
	const promises = Object.entries(rules).map(async ([from, to]) => {
		const [fromSource, fromId] = from.split(":")
		const fromSourceKey = fromSource as keyof Relation
		const fromIdNum = Number(fromId)

		const [toSource, toId] = (to as string).split(":")
		const toSourceKey = toSource as keyof Relation
		const toIdNum = Number(toId)

		// Find the relation that needs to be fixed
		const badRelation = await db
			.selectFrom("relations")
			.selectAll()
			.where(fromSourceKey, "=", fromIdNum)
			.executeTakeFirst()

		if (!badRelation) {
			throw new Error(`Could not find rule source for ${from}->${to as string}!!!!!`)
		}

		if (badRelation[toSourceKey] === toIdNum) {
			return console.warn(
				`${from}:${to as string} has been fixed, can be removed from manual rules.`,
			)
		}

		try {
			await db.transaction().execute(async (trx) => {
				// Delete the relation with the "from" condition
				await trx.deleteFrom("relations").where(fromSourceKey, "=", fromIdNum).execute()

				// Update the relation with the "to" condition to include the "from" data
				await trx
					.updateTable("relations")
					.set({ [fromSourceKey]: fromIdNum })
					.where(toSourceKey, "=", toIdNum)
					.execute()
			})
		} catch (error) {
			// Best-effort: a failed rule is logged, remaining rules still run.
			console.error(error)
		}
	})

	await Promise.all(promises)
}
--------------------------------------------------------------------------------
/src/routes/v2/special/handler.ts:
--------------------------------------------------------------------------------
import { getValidatedQuery, H3, handleCacheHeaders } from "h3"

import { db, Source } from "../../../db.ts"
import { CacheTimes } from "../../../utils.ts"
import { buildSelectFromInclude } from "../include.ts"

import { specialImdbInputSchema, specialInputSchema } from "./schemas/special.ts"

/**
 * Lookup routes for the "special" (non-anime-native) sources: IMDB,
 * TheMovieDB, and TheTVDB.
 *
 * Unlike the /ids endpoints these use `.execute()` and therefore return an
 * ARRAY of relations — presumably one movie/TV id can map to several anime
 * entries. All three handlers share the same shape: validate the query,
 * build the column list from `?include`, filter on the source column, and
 * set a six-hour cache header.
 */
export const specialRoutes = new H3()
	.get("/imdb", async (event) => {
		// IMDB ids are "tt"-prefixed strings, hence the dedicated schema.
		const query = await getValidatedQuery(event, specialImdbInputSchema)
		const selectFields = buildSelectFromInclude(query.include)

		const data = await db
			.selectFrom("relations")
			.select(selectFields)
			.where(Source.IMDB, "=", query.id)
			.execute()

		handleCacheHeaders(event, { maxAge: CacheTimes.SIX_HOURS })

		return data
	})
	.get("/themoviedb", async (event) => {
		const query = await getValidatedQuery(event, specialInputSchema)
		const selectFields = buildSelectFromInclude(query.include)

		const data = await db
			.selectFrom("relations")
			.select(selectFields)
			.where(Source.TheMovieDB, "=", query.id)
			.execute()

		handleCacheHeaders(event, { maxAge: CacheTimes.SIX_HOURS })

		return data
	})
	.get("/thetvdb", async (event) => {
		const query = await getValidatedQuery(event, specialInputSchema)
		const selectFields = buildSelectFromInclude(query.include)

		const data = await db
			.selectFrom("relations")
			.select(selectFields)
			.where(Source.TheTVDB, "=", query.id)
			.execute()

		handleCacheHeaders(event, { maxAge: CacheTimes.SIX_HOURS })

		return data
	})
--------------------------------------------------------------------------------
/src/db/file-provider.ts:
--------------------------------------------------------------------------------
import { readdirSync } from "node:fs"
import path from "node:path"
import {
pathToFileURL } from "node:url" 4 | 5 | import type { Migration, MigrationProvider } from "kysely" 6 | 7 | const isMigration = (obj: unknown): obj is Migration => 8 | typeof (obj as { up?: unknown })?.up === "function" 9 | 10 | /** 11 | * Reads all migrations from a folder in node.js. 12 | * 13 | * ```ts 14 | * new FileMigrationProvider("path/to/migrations/folder") 15 | * ``` 16 | */ 17 | export class ActuallyWorkingMigrationProvider implements MigrationProvider { 18 | readonly #migrationDirPath: string 19 | 20 | constructor(migrationDirPath: string) { 21 | this.#migrationDirPath = migrationDirPath 22 | } 23 | 24 | async getMigrations(): Promise> { 25 | const migrations: Record = {} 26 | const files = readdirSync(this.#migrationDirPath) 27 | 28 | for (const fileName of files) { 29 | if ( 30 | fileName.endsWith(".js") || 31 | (fileName.endsWith(".ts") && !fileName.endsWith(".d.ts")) || 32 | fileName.endsWith(".mjs") || 33 | (fileName.endsWith(".mts") && !fileName.endsWith(".d.mts")) 34 | ) { 35 | const filePath = pathToFileURL( 36 | path.join(this.#migrationDirPath, fileName), 37 | ).toString() 38 | const migration = (await import(filePath)) as Migration | { default?: Migration } 39 | const migrationKey = fileName.substring(0, fileName.lastIndexOf(".")) 40 | 41 | // Handle esModuleInterop export's `default` prop... 42 | if (isMigration((migration as { default?: Migration })?.default)) { 43 | migrations[migrationKey] = (migration as { default?: Migration }).default! 
44 | } else if (isMigration(migration)) { 45 | migrations[migrationKey] = migration 46 | } 47 | } 48 | } 49 | 50 | return migrations 51 | } 52 | } 53 | -------------------------------------------------------------------------------- /src/db.ts: -------------------------------------------------------------------------------- 1 | import { mkdirSync } from "node:fs" 2 | 3 | import { createDatabase } from "db0" 4 | import sqlite from "db0/connectors/node-sqlite" 5 | import { Kysely, Migrator } from "kysely" 6 | import { Db0SqliteDialect } from "kysely-db0/sqlite" 7 | 8 | import { ActuallyWorkingMigrationProvider } from "./db/file-provider.ts" 9 | 10 | export const Source = { 11 | AniDB: "anidb", 12 | AniList: "anilist", 13 | AnimePlanet: "anime-planet", 14 | AniSearch: "anisearch", 15 | IMDB: "imdb", 16 | Kitsu: "kitsu", 17 | LiveChart: "livechart", 18 | NotifyMoe: "notify-moe", 19 | TheMovieDB: "themoviedb", 20 | TheTVDB: "thetvdb", 21 | MAL: "myanimelist", 22 | } as const 23 | export type SourceValue = (typeof Source)[keyof typeof Source] 24 | 25 | export type Relation = { 26 | [Source.AniDB]?: number 27 | [Source.AniList]?: number 28 | [Source.AnimePlanet]?: string 29 | [Source.AniSearch]?: number 30 | [Source.IMDB]?: `tt${string}` 31 | [Source.Kitsu]?: number 32 | [Source.LiveChart]?: number 33 | [Source.NotifyMoe]?: string 34 | [Source.TheMovieDB]?: number 35 | [Source.TheTVDB]?: number 36 | [Source.MAL]?: number 37 | } 38 | 39 | export type OldRelation = Pick 40 | 41 | // Define database schema for Kysely 42 | export interface Database { 43 | relations: Relation 44 | } 45 | 46 | // Ensure SQLite directory exists 47 | mkdirSync("./dir", { recursive: true }) 48 | 49 | const db0 = createDatabase( 50 | sqlite({ path: `./db/${process.env.NODE_ENV ?? 
"development"}.sqlite3` }), 51 | ) 52 | // Create Kysely instance 53 | export const db = new Kysely({ 54 | dialect: new Db0SqliteDialect(db0), 55 | }) 56 | 57 | export const migrator = new Migrator({ 58 | db, 59 | provider: new ActuallyWorkingMigrationProvider( 60 | process.env.NODE_ENV !== "test" ? "dist/migrations" : "src/migrations", 61 | ), 62 | }) 63 | -------------------------------------------------------------------------------- /src/routes/v2/include.test.ts: -------------------------------------------------------------------------------- 1 | import { getValidatedQuery, H3, type H3EventContext } from "h3" 2 | import { afterAll, beforeEach, describe, expect, it, vi } from "vitest" 3 | 4 | import { db, Source } from "../../db.ts" 5 | 6 | import { includeSchema } from "./include.ts" 7 | 8 | const handlerFn = vi.fn((_event: H3EventContext) => ({ message: "ok" })) 9 | const app = new H3().get("/test", async (event) => { 10 | await getValidatedQuery(event, includeSchema) 11 | 12 | return handlerFn(event.context) 13 | }) 14 | 15 | beforeEach(async () => { 16 | await db.deleteFrom("relations").execute() 17 | }) 18 | 19 | afterAll(async () => { 20 | await db.destroy() 21 | }) 22 | 23 | describe("schema", () => { 24 | it("single source (anilist)", async () => { 25 | const response = await app.request(`/test?include=${Source.AniList}`) 26 | 27 | await expect(response.json()).resolves.toStrictEqual({ message: "ok" }) 28 | expect(response.status).toBe(200) 29 | expect(response.headers.get("content-type")).toContain("application/json") 30 | }) 31 | 32 | it("multiple sources (anilist,thetvdb)", async () => { 33 | const params = new URLSearchParams({ 34 | include: [Source.AniList, Source.TheTVDB].join(","), 35 | }) 36 | const response = await app.request(`/test?${params.toString()}`) 37 | 38 | await expect(response.json()).resolves.toStrictEqual({ message: "ok" }) 39 | expect(response.status).toBe(200) 40 | 
expect(response.headers.get("content-type")).toContain("application/json") 41 | }) 42 | 43 | it("all the sources", async () => { 44 | const params = new URLSearchParams({ 45 | include: Object.values(Source).join(","), 46 | }) 47 | const response = await app.request(`/test?${params.toString()}`) 48 | 49 | await expect(response.json()).resolves.toStrictEqual({ message: "ok" }) 50 | expect(response.status).toBe(200) 51 | expect(response.headers.get("content-type")).toContain("application/json") 52 | }) 53 | }) 54 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "arm-server", 3 | "type": "module", 4 | "author": "BeeeQueue ", 5 | "version": "1.0.0", 6 | "private": true, 7 | "license": "AGPL-3.0-only", 8 | "homepage": "https://arm.haglund.dev/docs", 9 | "packageManager": "pnpm@10.18.1", 10 | "engines": { 11 | "node": "^24" 12 | }, 13 | "simple-git-hooks": { 14 | "pre-commit": "node_modules/.bin/nano-staged" 15 | }, 16 | "nano-staged": { 17 | "*.{js,cjs,mjs,ts,cts,mts,json}": [ 18 | "biome check --fix" 19 | ] 20 | }, 21 | "scripts": { 22 | "build": "tsdown", 23 | "dev": "node --env-file-if-exists=.env --watch src/index.ts", 24 | "docs": "pnpm --package=@redocly/cli dlx redocly build-docs docs/openapi.yaml", 25 | "docs:dev": "onchange --initial --kill docs/openapi.yaml -- pnpm --silent run docs", 26 | "docker:build": "docker build . 
--tag arm-server", 27 | "docker:start": "pnpm --silent docker:build; pnpm --silent docker:run", 28 | "docker:run": "docker run -it --rm --name arm -p 3000:3000 arm-server", 29 | "fetch-data": "node bin/update.ts --exit", 30 | "lint": "eslint src", 31 | "start": "node dist/index.mjs", 32 | "test": "vitest", 33 | "typecheck": "tsgo", 34 | "prepare": "simple-git-hooks" 35 | }, 36 | "devDependencies": { 37 | "@antfu/eslint-config": "5.4.1", 38 | "@biomejs/biome": "2.2.5", 39 | "@tsconfig/node24": "24.0.1", 40 | "@tsconfig/strictest": "2.0.6", 41 | "@types/json-schema": "7.0.15", 42 | "@types/node": "24.7.0", 43 | "@typescript/native-preview": "7.0.0-dev.20251008.1", 44 | "@vitest/coverage-v8": "4.0.0-beta.17", 45 | "db0": "0.3.4", 46 | "eslint": "9.37.0", 47 | "h3": "2.0.1-rc.2", 48 | "kysely": "0.28.7", 49 | "kysely-db0": "0.1.0-beta.0", 50 | "mentoss": "0.11.0", 51 | "nano-staged": "0.8.0", 52 | "onchange": "7.1.0", 53 | "pino": "10.0.0", 54 | "pino-pretty": "13.1.1", 55 | "simple-git-hooks": "2.13.1", 56 | "tsdown": "0.15.6", 57 | "valibot": "1.1.0", 58 | "vitest": "4.0.0-beta.17", 59 | "xior": "0.7.8" 60 | } 61 | } 62 | -------------------------------------------------------------------------------- /src/routes/v2/ids/schemas/json-body.test.ts: -------------------------------------------------------------------------------- 1 | import { safeParse } from "valibot" 2 | import { describe, expect, it } from "vitest" 3 | 4 | import type { Relation } from "../../../../db.ts" 5 | import type { JsonValue } from "../../../../types.ts" 6 | 7 | import { bodyInputSchema } from "./json-body.ts" 8 | 9 | type Case = [V, boolean] 10 | type Cases = Array> 11 | 12 | const okCases = [ 13 | [{ anilist: 1337 }, true], 14 | [{ anidb: 1337 }, true], 15 | [{ anidb: 1337, anilist: 1337 }, true], 16 | [{ anidb: 1337, anilist: 1337, myanimelist: 1337, kitsu: 1337 }, true], 17 | [ 18 | { 19 | anidb: 1337, 20 | anilist: 1337, 21 | "anime-planet": "1337", 22 | anisearch: 1337, 23 | kitsu: 1337, 
24 | livechart: 1337, 25 | "notify-moe": "1337", 26 | myanimelist: 1337, 27 | }, 28 | true, 29 | ], 30 | ] satisfies Cases 31 | 32 | const badCases = [ 33 | // No source 34 | [{}, false], 35 | // Invalid ID (negative) 36 | [{ anilist: -1 }, false], 37 | // Invalid ID (not integer) 38 | [{ anilist: 1.5 }, false], 39 | [{ anidb: 1.5 }, false], 40 | // Invalid source 41 | [{ aniDb: 1337 }, false], 42 | [{ aniList: 1337 }, false], 43 | [{ anidb: 1337, test: 123 }, false], 44 | // Invalid IMDB IDs 45 | [{ imdb: "1337" }, false], 46 | // No filtering by special dbs in this endpoint 47 | [{ imdb: 1337 }, false], 48 | [{ themoviedb: 1337 }, false], 49 | [{ thetvdb: 1337 }, false], 50 | ] satisfies Cases 51 | 52 | const mapToSingularArrayInput = (cases: Cases): Cases => 53 | cases.map(([input, expected]) => [[input], expected]) 54 | 55 | describe("schema", () => { 56 | const inputs = [ 57 | [[], false], 58 | ...okCases, 59 | ...badCases, 60 | ...mapToSingularArrayInput(okCases), 61 | ...mapToSingularArrayInput(badCases), 62 | ] satisfies Cases 63 | 64 | it.each(inputs)("%o = %s", (input, expected) => { 65 | const result = safeParse(bodyInputSchema, input) 66 | 67 | if (expected) { 68 | expect(result.issues?.[0]).not.toBeDefined() 69 | } else { 70 | expect(result.issues?.length ?? 
0).toBeGreaterThanOrEqual(1) 71 | } 72 | }) 73 | }) 74 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # arm-server 2 | 3 | [![](https://img.shields.io/github/actions/workflow/status/BeeeQueue/arm-server/cicd.yml?branch=main)](https://github.com/BeeeQueue/arm-server/actions?query=branch%3Amain+workflow%3ACI) 4 | [![](https://uptime.h.haglund.dev/api/badge/2/uptime/168?label=Uptime%207d)](https://uptime.h.haglund.dev/status/arm-server) 5 | 6 | This app uses data from [`Fribb/anime-lists`](https://github.com/Fribb/anime-lists) - fetching 7 | and updating itself every 24 hours. 8 | 9 | [`Fribb/anime-lists`](https://github.com/Fribb/anime-lists) is an automatic merged copy of 10 | [`anime-offline-database`](https://github.com/manami-project/anime-offline-database) 11 | and 12 | [`Anime-Lists/anime-lists`](https://github.com/Anime-Lists/anime-lists). 13 | 14 | 15 | #### Get notifications on important API changes 16 | 17 | Subscribe to new releases in this repo: 18 | 19 | ![image](https://user-images.githubusercontent.com/472500/121041611-c116fc00-c767-11eb-9aaa-64a894a1598a.png) 20 | 21 | ### Missing or duplicate entries 22 | 23 | Some entries in the database are not mapped correctly due to inconsistent naming - the owner of `anime-offline-database` 24 | cannot fix them due to complexity. Therefore this service has manual rules that combines known failures. 25 | 26 | You can help add rules by submitting 27 | a [manual rule request](https://github.com/BeeeQueue/arm-server/issues/new?template=manual-rule-request.md). 28 | 29 | ## [API Docs](https://arm.haglund.dev/docs) 30 | 31 | ## Self-hosting 32 | 33 | Docker images are built and provided for each commit on main! 
34 | 35 | The minimum configuration needed can be found in the following command: 36 | 37 | ``` 38 | docker run -it --name arm-server -p 3000:3000 ghcr.io/beeequeue/arm-server:latest 39 | ``` 40 | 41 | ## Development 42 | 43 | ### Server 44 | 45 | 1. Clone the project 46 | 1. Install dependencies - `pnpm` 47 | 1. Run database migrations - `pnpm migrate` 48 | 1. Download data (optional) - `pnpm fetch-data` 49 | 1. Start the server - `pnpm dev` 50 | 51 | If the database connection fails double check that your `NODE_ENV` is set to `development`. 52 | 53 | ### Docs 54 | 55 | 1. Clone the project 56 | 1. Install dependencies - `pnpm` 57 | 1. Start the build - `pnpm docs:dev` 58 | 1. Open the file in a browser - `redoc-static.html` 59 | 1. Edit `docs/openapi.yaml` file 60 | -------------------------------------------------------------------------------- /src/routes/v1/ids/handler.ts: -------------------------------------------------------------------------------- 1 | import { getValidatedQuery, H3, handleCacheHeaders, readValidatedBody } from "h3" 2 | 3 | import { db } from "../../../db.ts" 4 | import type { OldRelation, Relation, SourceValue } from "../../../db.ts" 5 | import { CacheTimes } from "../../../utils.ts" 6 | 7 | import { bodyInputSchema } from "./schemas/json-body.ts" 8 | import { queryInputSchema } from "./schemas/query-params.ts" 9 | 10 | // Fields to select for v1 API 11 | const V1_FIELDS = [ 12 | "relations.anidb", 13 | "relations.anilist", 14 | "relations.myanimelist", 15 | "relations.kitsu", 16 | ] as const 17 | 18 | export const v1Routes = new H3() 19 | .get("/ids", async (event) => { 20 | const query = await getValidatedQuery(event, queryInputSchema) 21 | 22 | const row = await db 23 | .selectFrom("relations") 24 | .select(V1_FIELDS) 25 | .where(query.source as keyof Relation, "=", query.id) 26 | .executeTakeFirst() 27 | 28 | handleCacheHeaders(event, { maxAge: CacheTimes.SIX_HOURS }) 29 | 30 | return (row as OldRelation) ?? 
null 31 | }) 32 | .post("/ids", async (event) => { 33 | const input = await readValidatedBody(event, bodyInputSchema) 34 | 35 | if (!Array.isArray(input)) { 36 | // Single item query 37 | const [key, value] = Object.entries(input)[0] 38 | 39 | const relation = await db 40 | .selectFrom("relations") 41 | .select(V1_FIELDS) 42 | .where(key as keyof Relation, "=", value) 43 | .executeTakeFirst() 44 | 45 | return relation ?? null 46 | } 47 | 48 | let relations: Array = [] 49 | 50 | // Get relations with multiple OR conditions 51 | if (input.length > 0) { 52 | let query = db.selectFrom("relations").select(V1_FIELDS) 53 | 54 | // Build OR conditions 55 | query = query.where((eb) => 56 | eb.or( 57 | input.map((item) => { 58 | const [key, value] = Object.entries(item)[0] 59 | return eb(key as keyof Relation, "=", value) 60 | }), 61 | ), 62 | ) 63 | 64 | relations = await query.execute() 65 | } 66 | 67 | // Map them against the input, so we get results like [{item}, null, {item}] 68 | relations = input.map((item) => { 69 | const realItem = Object.entries(item)[0] as [SourceValue, number] 70 | 71 | return relations.find((relation) => relation![realItem[0]] === realItem[1]) ?? 
null 72 | }) 73 | 74 | return relations 75 | }) 76 | -------------------------------------------------------------------------------- /src/routes/v2/ids/handler.ts: -------------------------------------------------------------------------------- 1 | import { getValidatedQuery, H3, handleCacheHeaders, readValidatedBody } from "h3" 2 | 3 | import { db, type Relation, type SourceValue } from "../../../db.ts" 4 | import { CacheTimes } from "../../../utils.ts" 5 | import { buildSelectFromInclude, includeSchema } from "../include.ts" 6 | 7 | import { bodyInputSchema } from "./schemas/json-body.ts" 8 | import { queryInputSchema } from "./schemas/query-params.ts" 9 | 10 | export const v2Routes = new H3() 11 | .get("/ids", async (event) => { 12 | const query = await getValidatedQuery(event, queryInputSchema) 13 | const selectFields = buildSelectFromInclude(query.include) 14 | 15 | const data = await db 16 | .selectFrom("relations") 17 | .select(selectFields) 18 | .where(query.source as keyof Relation, "=", query.id) 19 | .executeTakeFirst() 20 | 21 | handleCacheHeaders(event, { maxAge: CacheTimes.SIX_HOURS }) 22 | 23 | return data ?? null 24 | }) 25 | .post("/ids", async (event) => { 26 | const input = await readValidatedBody(event, bodyInputSchema) 27 | const query = await getValidatedQuery(event, includeSchema) 28 | 29 | const selectFields = buildSelectFromInclude(query.include) 30 | 31 | if (!Array.isArray(input)) { 32 | // Single item query 33 | const [key, value] = Object.entries(input)[0] 34 | 35 | const relation = await db 36 | .selectFrom("relations") 37 | .select(selectFields) 38 | .where(key as keyof Relation, "=", value) 39 | .executeTakeFirst() 40 | 41 | return relation ?? 
null 42 | } 43 | 44 | let relations: Array = [] 45 | 46 | // Get relations with multiple OR conditions 47 | if (input.length > 0) { 48 | let query = db.selectFrom("relations").select(selectFields) 49 | 50 | // Build OR conditions 51 | query = query.where((eb) => 52 | eb.or( 53 | input.map((item) => { 54 | const [key, value] = Object.entries(item)[0] 55 | return eb(key as keyof Relation, "=", value) 56 | }), 57 | ), 58 | ) 59 | 60 | relations = await query.execute() 61 | } 62 | 63 | // Map them against the input, so we get results like [{item}, null, {item}] 64 | relations = input.map((item) => { 65 | const realItem = Object.entries(item)[0] as [SourceValue, number] 66 | 67 | return relations.find((relation) => relation![realItem[0]] === realItem[1]) ?? null 68 | }) 69 | 70 | return relations 71 | }) 72 | -------------------------------------------------------------------------------- /src/app.ts: -------------------------------------------------------------------------------- 1 | import { H3, handleCacheHeaders, handleCors, redirect } from "h3" 2 | 3 | import { docsRoutes } from "./docs.ts" 4 | import { logger } from "./lib/logger.ts" 5 | import { v1Routes } from "./routes/v1/ids/handler.ts" 6 | import { v2Routes } from "./routes/v2/ids/handler.ts" 7 | import { specialRoutes } from "./routes/v2/special/handler.ts" 8 | import { CacheTimes, createErrorJson } from "./utils.ts" 9 | 10 | export const createApp = () => 11 | new H3({ 12 | onError: (error, event) => { 13 | /* c8 ignore next 4 */ 14 | if (!error.unhandled) { 15 | if (event.req.method === "GET") { 16 | handleCacheHeaders(event, { maxAge: CacheTimes.WEEK }) 17 | } 18 | 19 | return createErrorJson(event, error) 20 | } 21 | 22 | logger.error(error, "unhandled error") 23 | 24 | return createErrorJson(event, error) 25 | }, 26 | }) 27 | 28 | .use(async (event, next) => { 29 | const start = performance.now() 30 | logger.info( 31 | { 32 | method: event.req.method, 33 | path: event.url.pathname, 34 | headers: 
event.req.headers, 35 | }, 36 | "req", 37 | ) 38 | 39 | await next() 40 | 41 | logger.info( 42 | { 43 | status: event.res.status, 44 | ms: Math.round((performance.now() - start + Number.EPSILON) * 10000) / 10000, 45 | }, 46 | "res", 47 | ) 48 | }) 49 | 50 | .use(async (event, next) => { 51 | const response = handleCors(event, { 52 | origin: () => true, 53 | methods: "*", 54 | preflight: { statusCode: 204 }, 55 | }) 56 | if (response !== false) return response 57 | 58 | return next() 59 | }) 60 | 61 | .mount("/api", v1Routes) 62 | .mount("/api/v2", v2Routes) 63 | .mount("/api/v2", specialRoutes) 64 | .mount("/docs", docsRoutes) 65 | 66 | .get("/", (event) => { 67 | handleCacheHeaders(event, { maxAge: CacheTimes.WEEK * 4 }) 68 | 69 | return redirect(process.env.HOMEPAGE!, 301) 70 | }) 71 | 72 | // This makes sure we return "null" instead of an empty response when trying to return a null body 73 | .use( 74 | async (event, next) => { 75 | const body = await next() 76 | 77 | if (body === null) { 78 | event.res.headers.set("Content-Type", "application/json") 79 | return "null" 80 | } 81 | }, 82 | { 83 | match: (e) => 84 | (e.req.method === "GET" || e.req.method === "POST") && 85 | e.url.pathname.startsWith("/api"), 86 | }, 87 | ) 88 | -------------------------------------------------------------------------------- /src/utils.ts: -------------------------------------------------------------------------------- 1 | import type { H3Event, HTTPError } from "h3" 2 | import { type FlatErrors, flatten, type ValiError } from "valibot" 3 | 4 | type ErrorJson = { 5 | code?: string 6 | statusCode: number 7 | error: string 8 | message?: string 9 | } 10 | 11 | const getErrorText = (code: number) => { 12 | switch (code) { 13 | case 400: 14 | return "Bad Request" 15 | case 401: 16 | return "Unauthorized" 17 | case 403: 18 | return "Forbidden" 19 | case 404: 20 | return "Not Found" 21 | case 405: 22 | return "Method Not Allowed" 23 | case 406: 24 | return "Not Acceptable" 25 | case 408: 
26 | return "Request Timeout" 27 | case 409: 28 | return "Conflict" 29 | case 410: 30 | return "Gone" 31 | case 411: 32 | return "Length Required" 33 | case 412: 34 | return "Precondition Failed" 35 | case 413: 36 | return "Payload Too Large" 37 | case 414: 38 | return "URI Too Long" 39 | case 415: 40 | return "Unsupported Media Type" 41 | case 416: 42 | return "Range Not Satisfiable" 43 | case 417: 44 | return "Expectation Failed" 45 | case 418: 46 | return "I'm a teapot" 47 | case 421: 48 | return "Misdirected Request" 49 | case 422: 50 | return "Unprocessable Entity" 51 | case 423: 52 | return "Locked" 53 | case 424: 54 | return "Failed Dependency" 55 | case 425: 56 | return "Too Early" 57 | case 426: 58 | return "Upgrade Required" 59 | case 428: 60 | return "Precondition Required" 61 | case 429: 62 | return "Too Many Requests" 63 | case 431: 64 | return "Request Header Fields Too Large" 65 | case 451: 66 | return "Unavailable For Legal Reasons" 67 | case 500: 68 | return "Internal Server Error" 69 | case 501: 70 | return "Not Implemented" 71 | case 502: 72 | return "Bad Gateway" 73 | case 503: 74 | return "Service Unavailable" 75 | case 504: 76 | return "Gateway Timeout" 77 | case 505: 78 | return "HTTP Version Not Supported" 79 | case 506: 80 | return "Variant Also Negotiates" 81 | case 507: 82 | return "Insufficient Storage" 83 | case 508: 84 | return "Loop Detected" 85 | case 510: 86 | return "Not Extended" 87 | case 511: 88 | return "Network Authentication Required" 89 | default: 90 | return "Error" 91 | } 92 | } 93 | 94 | export const createErrorJson = (event: H3Event, input: HTTPError) => { 95 | const body: Omit & { 96 | code?: "FST_ERR_VALIDATION" 97 | details?: FlatErrors 98 | } = { 99 | message: input.message ?? 
"An error occurred.",
	}

	// valibot validation failures arrive as a 400 HTTPError whose `data`
	// carries the issues; surface them in the fastify-compatible shape the
	// old API used. BUG FIX: guard against `input.data` being
	// null/undefined — the `in` operator throws a TypeError on non-objects,
	// so a plain 400 without data would previously have crashed here.
	if (
		input.status === 400 &&
		input.data != null &&
		"issues" in (input.data as ValiError)
	) {
		body.code = "FST_ERR_VALIDATION"
		body.message = "Validation error"
		body.details = flatten((input.data as ValiError).issues)
	}

	event.res.status = input.status
	return {
		...body,
		statusCode: input.status,
		error: getErrorText(input.status),
	}
}

// Cache durations in seconds, used for Cache-Control max-age.
export const CacheTimes = {
	HOUR: 3600,
	SIX_HOURS: 21_600,
	DAY: 86_400,
	// NOTE(review): 1_209_600 s is 14 days, not 7 — confirm whether "WEEK"
	// intentionally means a two-week cache window or should be 604_800.
	WEEK: 1_209_600,
} as const
--------------------------------------------------------------------------------
/src/routes/v2/include.test-utils.ts:
--------------------------------------------------------------------------------
import type { H3 } from "h3"
import { describe, expect, test } from "vitest"

import { db, Source, type SourceValue } from "../../db.ts"

export const testIncludeQueryParam = (
	app: H3,
	path: string,
	source: SourceValue = Source.AniList,
) => {
	// NOTE(review): the generic parameter lists of `arrayify`/`prefixify`
	// appear to have been stripped in this copy (T and S are referenced but
	// not declared) — restore them from upstream before compiling.
	const arrayify = (data: T) => (source !== Source.AniList ? [data] : data)
	const prefixify = (
		source: S,
		input: T,
	) => (source === "imdb" ?
(`tt${input}` as const) : input) 16 | 17 | describe("?include", () => { 18 | test("single source", async () => { 19 | await db 20 | .insertInto("relations") 21 | .values({ anilist: 1337, thetvdb: 1337, themoviedb: 1337, imdb: "tt1337" }) 22 | .execute() 23 | 24 | const query = new URLSearchParams({ 25 | source, 26 | id: prefixify(source, "1337"), 27 | include: source, 28 | }) 29 | const response = await app.fetch( 30 | new Request(`http://localhost${path}?${query.toString()}`), 31 | ) 32 | 33 | await expect(response.json()).resolves.toStrictEqual( 34 | arrayify({ [source]: prefixify(source, 1337) }), 35 | ) 36 | expect(response.status).toBe(200) 37 | expect(response.headers.get("content-type")).toContain("application/json") 38 | }) 39 | 40 | test("multiple sources (anilist,thetvdb,themoviedb)", async () => { 41 | await db 42 | .insertInto("relations") 43 | .values({ anilist: 1337, thetvdb: 1337, themoviedb: 1337, imdb: "tt1337" }) 44 | .execute() 45 | 46 | const query = new URLSearchParams({ 47 | source, 48 | id: prefixify(source, "1337"), 49 | include: [Source.AniList, Source.TheTVDB, Source.TheMovieDB, Source.IMDB].join( 50 | ",", 51 | ), 52 | }) 53 | const response = await app.fetch( 54 | new Request(`http://localhost${path}?${query.toString()}`), 55 | ) 56 | 57 | await expect(response.json()).resolves.toStrictEqual( 58 | arrayify({ anilist: 1337, thetvdb: 1337, themoviedb: 1337, imdb: "tt1337" }), 59 | ) 60 | expect(response.status).toBe(200) 61 | expect(response.headers.get("content-type")).toContain("application/json") 62 | }) 63 | 64 | test("all the sources", async () => { 65 | await db 66 | .insertInto("relations") 67 | .values({ anilist: 1337, [source]: prefixify(source, 1337) }) 68 | .execute() 69 | 70 | const query = new URLSearchParams({ 71 | source, 72 | id: prefixify(source, "1337"), 73 | include: Object.values(Source).join(","), 74 | }) 75 | const response = await app.fetch( 76 | new Request(`http://localhost${path}?${query.toString()}`), 77 | ) 78 | 
79 | const expectedResult: Record = { 80 | anidb: null, 81 | anilist: 1337, 82 | "anime-planet": null, 83 | anisearch: null, 84 | imdb: null, 85 | kitsu: null, 86 | livechart: null, 87 | "notify-moe": null, 88 | themoviedb: null, 89 | thetvdb: null, 90 | myanimelist: null, 91 | } 92 | expectedResult[source] = prefixify(source, 1337) as never 93 | 94 | await expect(response.json()).resolves.toStrictEqual(arrayify(expectedResult)) 95 | expect(response.status).toBe(200) 96 | expect(response.headers.get("content-type")).toContain("application/json") 97 | }) 98 | }) 99 | } 100 | -------------------------------------------------------------------------------- /src/update.test.ts: -------------------------------------------------------------------------------- 1 | import { FetchMocker, MockServer } from "mentoss" 2 | import { afterAll, afterEach, beforeEach, expect, it, vi } from "vitest" 3 | 4 | import { db, type Relation, Source, type SourceValue } from "./db.ts" 5 | import { 6 | type AnimeListsSchema, 7 | formatEntry, 8 | removeDuplicates, 9 | updateRelations, 10 | } from "./update.ts" 11 | 12 | // create a new server with the given base URL 13 | const server = new MockServer("https://raw.githubusercontent.com") 14 | const mocker = new FetchMocker({ servers: [server] }) 15 | 16 | beforeEach(() => { 17 | mocker.mockGlobal() 18 | }) 19 | 20 | afterEach(async () => { 21 | mocker.clearAll() 22 | vi.resetAllMocks() 23 | await db.deleteFrom("relations").execute() 24 | }) 25 | 26 | afterAll(async () => { 27 | mocker.unmockGlobal() 28 | await db.destroy() 29 | }) 30 | 31 | it("handles bad values", async () => { 32 | server.get("/Fribb/anime-lists/master/anime-list-full.json", { 33 | status: 200, 34 | body: [ 35 | { anidb_id: 1337, themoviedb_id: "unknown" }, 36 | { anidb_id: 1338, thetvdb_id: "unknown" as never }, 37 | { anidb_id: 1339, imdb_id: "tt1337,tt1338,tt1339" }, 38 | { anidb_id: 1340, themoviedb_id: "unknown" }, 39 | { anidb_id: 1341, themoviedb_id: 1341 }, 40 | ] 
satisfies AnimeListsSchema, 41 | }) 42 | 43 | await updateRelations() 44 | 45 | await expect( 46 | db 47 | .selectFrom("relations") 48 | .select([ 49 | "relations.anidb", 50 | "relations.imdb", 51 | "relations.themoviedb", 52 | "relations.thetvdb", 53 | ]) 54 | .execute(), 55 | ).resolves.toMatchInlineSnapshot(` 56 | [ 57 | { 58 | "anidb": 1337, 59 | "imdb": null, 60 | "themoviedb": null, 61 | "thetvdb": null, 62 | }, 63 | { 64 | "anidb": 1338, 65 | "imdb": null, 66 | "themoviedb": null, 67 | "thetvdb": null, 68 | }, 69 | { 70 | "anidb": 1339, 71 | "imdb": null, 72 | "themoviedb": null, 73 | "thetvdb": null, 74 | }, 75 | { 76 | "anidb": 1340, 77 | "imdb": null, 78 | "themoviedb": null, 79 | "thetvdb": null, 80 | }, 81 | { 82 | "anidb": 1341, 83 | "imdb": null, 84 | "themoviedb": 1341, 85 | "thetvdb": null, 86 | }, 87 | ] 88 | `) 89 | }) 90 | 91 | it("handles duplicates", async () => { 92 | mocker.unmockGlobal() 93 | 94 | const entries: Relation[] = await fetch( 95 | "https://raw.githubusercontent.com/Fribb/anime-lists/master/anime-list-full.json", 96 | ) 97 | .then(async (r) => r.json()) 98 | .then((e) => (e as any[]).map(formatEntry)) 99 | 100 | // There should be >=5 Konosuba entries 101 | const konosubaEntries = entries.filter(({ themoviedb }) => themoviedb === 65844) 102 | expect(konosubaEntries.length).toBeGreaterThanOrEqual(5) 103 | 104 | const results = removeDuplicates(entries) 105 | 106 | // There should still be 5 Konosuba entries 107 | expect(results.filter(({ themoviedb }) => themoviedb === 65844).length).toBe( 108 | konosubaEntries.length, 109 | ) 110 | 111 | const goodSources = [ 112 | Source.AniDB, 113 | Source.AniList, 114 | Source.AnimePlanet, 115 | Source.AniSearch, 116 | Source.Kitsu, 117 | Source.LiveChart, 118 | Source.NotifyMoe, 119 | Source.MAL, 120 | ] 121 | 122 | // Check if any sources have duplicate ids 123 | const duplicates = Object.fromEntries( 124 | goodSources.map((source) => { 125 | const groups = Object.groupBy(results, (e) => 
e[source]?.toString() ?? "undefined") 126 | 127 | return [ 128 | source, 129 | Object.fromEntries( 130 | Object.entries(groups) 131 | .filter(([id, g]) => id !== "undefined" && id !== "null" && g!.length > 1) 132 | .map(([id, g]) => [id, g!.length]), 133 | ), 134 | ] 135 | }), 136 | ) 137 | for (const goodSource of goodSources) { 138 | expect(duplicates[goodSource], `${goodSource} has duplicates`).toStrictEqual({}) 139 | } 140 | 141 | const findEntry = (source: SourceValue, id: number | string) => 142 | results.find((entry) => entry[source] === id) 143 | expect(findEntry(Source.AniDB, 11261)).toBeDefined() 144 | expect(findEntry(Source.AniDB, 11992)).toBeDefined() 145 | }) 146 | -------------------------------------------------------------------------------- /.github/workflows/cicd.yml: -------------------------------------------------------------------------------- 1 | name: ci&cd 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | pull_request: 8 | 9 | jobs: 10 | lint: 11 | runs-on: ubuntu-latest 12 | 13 | steps: 14 | - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5 15 | 16 | - uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4 17 | with: 18 | node-version: 24 19 | 20 | - run: corepack enable 21 | 22 | - name: find pnpm cache path 23 | id: cache 24 | run: echo "path=$(pnpm store path)" >> $GITHUB_OUTPUT 25 | 26 | - uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4 27 | with: 28 | path: ${{ steps.cache.outputs.path }} 29 | key: v1-pnpm-${{ hashFiles('**/pnpm-lock.yaml') }} 30 | restore-keys: | 31 | v1-pnpm- 32 | 33 | - name: Install dependencies 34 | run: pnpm install --frozen-lockfile 35 | 36 | - run: node --run lint 37 | 38 | fmt: 39 | runs-on: ubuntu-latest 40 | 41 | steps: 42 | - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5 43 | 44 | - uses: biomejs/setup-biome@454fa0d884737805f48d7dc236c1761a0ac3cc13 # v2 45 | with: 46 | version: latest 47 | 48 | - run: biome check 49 | 50 | 
typecheck: 51 | runs-on: ubuntu-latest 52 | 53 | steps: 54 | - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5 55 | 56 | - uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4 57 | with: 58 | node-version: 24 59 | 60 | - run: corepack enable 61 | 62 | - name: find pnpm cache path 63 | id: cache 64 | run: echo "path=$(pnpm store path)" >> $GITHUB_OUTPUT 65 | 66 | - uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4 67 | with: 68 | path: ${{ steps.cache.outputs.path }} 69 | key: v1-pnpm-${{ hashFiles('**/pnpm-lock.yaml') }} 70 | restore-keys: | 71 | v1-pnpm- 72 | 73 | - name: Install dependencies 74 | run: pnpm install --frozen-lockfile 75 | 76 | - run: node --run typecheck 77 | 78 | test: 79 | runs-on: ubuntu-latest 80 | 81 | steps: 82 | - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5 83 | 84 | - uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4 85 | with: 86 | node-version: 24 87 | 88 | - run: corepack enable 89 | 90 | - name: find pnpm cache path 91 | id: cache 92 | run: echo "path=$(pnpm store path)" >> $GITHUB_OUTPUT 93 | 94 | - uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4 95 | with: 96 | path: ${{ steps.cache.outputs.path }} 97 | key: v1-pnpm-${{ hashFiles('**/pnpm-lock.yaml') }} 98 | restore-keys: | 99 | v1-pnpm- 100 | 101 | - name: Install dependencies 102 | run: pnpm install --frozen-lockfile 103 | 104 | - run: node --run test 105 | env: 106 | NODE_ENV: test 107 | 108 | build: 109 | runs-on: ubuntu-latest 110 | permissions: 111 | contents: read 112 | packages: write 113 | id-token: write 114 | 115 | steps: 116 | - name: Docker meta 117 | id: meta 118 | uses: docker/metadata-action@c1e51972afc2121e065aed6d45c65596fe445f3f # v5 119 | with: 120 | images: | 121 | ghcr.io/${{ github.repository }} 122 | tags: | 123 | type=raw,value={{sha}} 124 | type=raw,value=latest 125 | 126 | - name: Set up depot 127 | uses: 
depot/setup-action@b0b1ea4f69e92ebf5dea3f8713a1b0c37b2126a5 # v1 128 | 129 | - name: Login to GHCR 130 | if: github.ref == 'refs/heads/main' 131 | uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3 132 | with: 133 | registry: ghcr.io 134 | username: ${{ github.actor }} 135 | password: ${{ secrets.GITHUB_TOKEN }} 136 | 137 | - name: Build and maybe Push Docker image 138 | uses: depot/build-push-action@9785b135c3c76c33db102e45be96a25ab55cd507 # v1 139 | with: 140 | project: ks849krng9 141 | push: ${{ github.ref == 'refs/heads/main' }} 142 | tags: ${{ steps.meta.outputs.tags }} 143 | labels: ${{ steps.meta.outputs.labels }} 144 | 145 | deploy: 146 | if: github.ref == 'refs/heads/main' 147 | needs: [lint, test, typecheck, build] 148 | runs-on: ubuntu-latest 149 | environment: 150 | name: prod 151 | url: https://arm.haglund.dev/api 152 | 153 | steps: 154 | - run: echo 'Deployed! :)' 155 | -------------------------------------------------------------------------------- /src/update.ts: -------------------------------------------------------------------------------- 1 | import xior, { type XiorError } from "xior" 2 | import errorRetryPlugin from "xior/plugins/error-retry" 3 | 4 | import { db, type Relation, Source, type SourceValue } from "./db.ts" 5 | import { logger } from "./lib/logger.ts" 6 | import { updateBasedOnManualRules } from "./manual-rules.ts" 7 | 8 | const http = xior.create({ responseType: "json" }) 9 | http.plugins.use(errorRetryPlugin({ retryTimes: 5 })) 10 | 11 | const isXiorError = (response: T | XiorError): response is XiorError => 12 | "stack" in (response as XiorError) 13 | 14 | export type AnimeListsSchema = Array<{ 15 | anidb_id?: number 16 | anilist_id?: number 17 | "anime-planet_id"?: string 18 | anisearch_id?: number 19 | imdb_id?: `tt${string}` | "" 20 | kitsu_id?: number 21 | livechart_id?: number 22 | mal_id?: number 23 | "notify.moe_id"?: string 24 | themoviedb_id?: number | "unknown" 25 | thetvdb_id?: number 26 | }> 27 | 
/**
 * Fetches the full anime-lists dataset from the Fribb/anime-lists repository.
 *
 * Retries are handled by the `errorRetryPlugin` registered on `http`.
 *
 * @returns the parsed entry list, or `null` when the request ultimately failed.
 */
const fetchDatabase = async (): Promise<AnimeListsSchema | null> => {
  const response = await http
    .get<AnimeListsSchema>(
      "https://raw.githubusercontent.com/Fribb/anime-lists/master/anime-list-full.json",
    )
    .catch((error: XiorError) => error)

  if (isXiorError(response)) {
    const error = new Error("Could not fetch updated database!!", {
      cause: response,
    })

    console.error(error)

    return null
  }

  return response.data
}

// Placeholder values the upstream dataset uses instead of omitting an id.
const badValues = ["", "unknown", "tv special"] as const

/**
 * Normalizes known-bad upstream values to `undefined`.
 *
 * Comma-separated values (e.g. `imdb_id: "tt1,tt2"`) are also rejected, since a
 * relation column can only hold a single id.
 */
const handleBadValues = <T>(
  value: T | (typeof badValues)[number],
): T | undefined => {
  if (
    typeof value === "string" &&
    (badValues.includes(value as never) || value.includes(","))
  ) {
    return undefined
  }

  return value as T
}

// Removes duplicate source-id pairs from the list, except for thetvdb and themoviedb ids
export const removeDuplicates = (entries: Relation[]): Relation[] => {
  const sources = (Object.values(Source) as SourceValue[]).filter(
    (source) =>
      source !== Source.TheTVDB && source !== Source.TheMovieDB && source !== Source.IMDB,
  )
  // One "seen ids" set per one-to-one source.
  // NOTE(review): type arguments reconstructed — they were lost in this dump;
  // confirm against the repository.
  const existing = new Map<SourceValue, Set<string | number>>(
    sources.map((name) => [name, new Set<string | number>()]),
  )

  const goodEntries = entries.filter((entry) => {
    for (const source of Object.keys(entry) as (keyof typeof entry)[]) {
      const id = entry[source]

      // Ignore nulls
      if (id == null) continue
      // Ignore sources with one-to-many relations
      if (
        source === Source.TheTVDB ||
        source === Source.TheMovieDB ||
        source === Source.IMDB
      ) {
        continue
      }

      // Drop the whole entry the second time any one-to-one id shows up.
      if (existing.get(source)!.has(id)) return false

      existing.get(source)!.add(id)
    }

    return true
  })

  return goodEntries
}

/** Maps an upstream entry to the local `relations` row shape, cleaning bad values. */
export const formatEntry = (entry: AnimeListsSchema[number]): Relation => ({
  anidb: handleBadValues(entry.anidb_id),
  anilist: handleBadValues(entry.anilist_id),
  "anime-planet": handleBadValues(entry["anime-planet_id"]),
  anisearch: handleBadValues(entry.anisearch_id),
  imdb: handleBadValues(entry.imdb_id),
  kitsu: handleBadValues(entry.kitsu_id),
  livechart: handleBadValues(entry.livechart_id),
  myanimelist: handleBadValues(entry.mal_id),
  "notify-moe": handleBadValues(entry["notify.moe_id"]),
  themoviedb: handleBadValues(entry.themoviedb_id),
  thetvdb: handleBadValues(entry.thetvdb_id),
})

/**
 * Replaces the contents of the `relations` table with a freshly fetched,
 * formatted, and de-duplicated copy of the upstream dataset, then applies the
 * manual override rules.
 *
 * Runs the delete + re-insert inside a single transaction so readers never see
 * a partially updated table.
 */
export const updateRelations = async () => {
  logger.debug(`Using ${process.env.NODE_ENV!} database configuration...`)

  logger.info("Fetching updated Database...")
  const data = await fetchDatabase()
  logger.info("Fetched updated Database.")

  if (data == null) {
    logger.error("got no data")
    return
  }

  logger.info("Formatting entries...")
  const formattedEntries = data
    .map(formatEntry)
    .filter((entry) => Object.values(entry).some((value) => value != null))
  logger.info({ remaining: formattedEntries.length }, `Formatted entries.`)

  logger.info(`Removing duplicates.`)
  const goodEntries = removeDuplicates(formattedEntries)
  logger.info({ remaining: goodEntries.length }, `Removed duplicates.`)

  logger.info("Updating database...")
  await db.transaction().execute(async (trx) => {
    // Delete all existing relations
    await trx.deleteFrom("relations").execute()

    // Insert new relations in chunks of 100.
    // Fix: the previous version looped over every entry inside the chunk and
    // issued one INSERT statement per row, defeating the point of chunking —
    // insert each chunk as a single multi-row statement instead. 100 rows x 11
    // columns = 1100 bound parameters, well under SQLite's modern 32766 limit.
    const chunkSize = 100
    for (let i = 0; i < goodEntries.length; i += chunkSize) {
      const chunk = goodEntries.slice(i, i + chunkSize)
      await trx.insertInto("relations").values(chunk).execute()
    }
  })
  logger.info("Updated database.")

  logger.info("Executing manual rules...")
  await updateBasedOnManualRules()

  logger.info("Done.")

  if (process.argv.includes("--exit")) {
    await db.destroy()
  }
}
--------------------------------------------------------------------------------
/src/routes/v2/special/handler.test.ts:
import { afterAll, beforeEach, describe, expect, it } from "vitest"

import { createApp } from "../../../app.ts"
import { db, type Relation, Source } from "../../../db.ts"
import { testIncludeQueryParam } from "../include.test-utils.ts"

let id = 1
// Inserts `amount` relations; `specialId` pins the one-to-many columns
// (imdb/themoviedb/thetvdb) so several rows share the same id.
// NOTE(review): type arguments reconstructed — they were lost in this dump;
// confirm against the repository.
const createRelations = async <N extends number>(
  amount: N,
  specialId?: number,
): Promise<N extends 1 ? Relation : Relation[]> => {
  const relations = Array.from({ length: amount }).map(() => ({
    anidb: id++,
    anilist: id++,
    "anime-planet": `${id++}`,
    anisearch: id++,
    imdb: `tt${specialId ?? id++}`,
    kitsu: id++,
    livechart: id++,
    "notify-moe": `${id++}`,
    themoviedb: specialId ?? id++,
    thetvdb: specialId ??
id++, 23 | myanimelist: id++, 24 | })) 25 | 26 | await db.insertInto("relations").values(relations).execute() 27 | 28 | if (amount === 1) { 29 | return relations[0] as never 30 | } 31 | 32 | return relations as never 33 | } 34 | 35 | const app = createApp() 36 | 37 | beforeEach(async () => { 38 | await db.deleteFrom("relations").execute() 39 | }) 40 | 41 | afterAll(async () => { 42 | await db.destroy() 43 | }) 44 | 45 | describe("imdb", () => { 46 | it("fetches relations correctly", async () => { 47 | await createRelations(4, 1336) 48 | const relations = await createRelations(3, 1337) 49 | 50 | const response = await app.request(`/api/v2/imdb?id=${relations[0].imdb!}`) 51 | 52 | await expect(response.json()).resolves.toStrictEqual(relations) 53 | expect(response.status).toBe(200) 54 | expect(response.headers.get("content-type")).toContain("application/json") 55 | }) 56 | 57 | it("returns empty array when id doesn't exist", async () => { 58 | const response = await app.request("/api/v2/imdb?id=tt404") 59 | 60 | await expect(response.json()).resolves.toStrictEqual([]) 61 | expect(response.status).toBe(200) 62 | expect(response.headers.get("content-type")).toContain("application/json") 63 | }) 64 | 65 | it("can return a partial response", async () => { 66 | const relation: Relation = { 67 | anidb: 1337, 68 | anilist: 1337, 69 | "anime-planet": null!, 70 | anisearch: null!, 71 | imdb: "tt1337", 72 | kitsu: null!, 73 | livechart: null!, 74 | "notify-moe": null!, 75 | themoviedb: null!, 76 | thetvdb: null!, 77 | myanimelist: null!, 78 | } 79 | await db.insertInto("relations").values(relation).execute() 80 | 81 | const response = await app.request(`/api/v2/imdb?id=${relation.imdb!}`) 82 | 83 | await expect(response.json()).resolves.toStrictEqual([relation]) 84 | expect(response.status).toBe(200) 85 | expect(response.headers.get("content-type")).toContain("application/json") 86 | }) 87 | 88 | testIncludeQueryParam(app, "/api/v2/imdb", Source.IMDB) 89 | }) 90 | 91 | 
describe("thetvdb", () => { 92 | it("fetches relations correctly", async () => { 93 | await createRelations(4, 1336) 94 | const relations = await createRelations(3, 1337) 95 | 96 | const response = await app.request( 97 | `/api/v2/thetvdb?id=${relations[0].thetvdb!.toString()}`, 98 | ) 99 | 100 | await expect(response.json()).resolves.toStrictEqual(relations) 101 | expect(response.status).toBe(200) 102 | expect(response.headers.get("content-type")).toContain("application/json") 103 | }) 104 | 105 | it("returns empty array when id doesn't exist", async () => { 106 | const response = await app.request("/api/v2/thetvdb?id=404") 107 | 108 | await expect(response.json()).resolves.toStrictEqual([]) 109 | expect(response.status).toBe(200) 110 | expect(response.headers.get("content-type")).toContain("application/json") 111 | }) 112 | 113 | it("can return a partial response", async () => { 114 | const relation: Relation = { 115 | anidb: 1337, 116 | anilist: 1337, 117 | "anime-planet": null!, 118 | anisearch: null!, 119 | imdb: null!, 120 | kitsu: null!, 121 | livechart: null!, 122 | "notify-moe": null!, 123 | themoviedb: null!, 124 | thetvdb: 1337, 125 | myanimelist: null!, 126 | } 127 | await db.insertInto("relations").values(relation).execute() 128 | 129 | const response = await app.request( 130 | `/api/v2/thetvdb?id=${relation.thetvdb!.toString()}`, 131 | ) 132 | 133 | await expect(response.json()).resolves.toStrictEqual([relation]) 134 | expect(response.status).toBe(200) 135 | expect(response.headers.get("content-type")).toContain("application/json") 136 | }) 137 | 138 | testIncludeQueryParam(app, "/api/v2/thetvdb", Source.TheTVDB) 139 | }) 140 | 141 | describe("themoviedb", () => { 142 | it("fetches relations correctly", async () => { 143 | await createRelations(4, 1336) 144 | const relations = await createRelations(3, 1337) 145 | 146 | const response = await app.request( 147 | `/api/v2/themoviedb?id=${relations[0].themoviedb!.toString()}`, 148 | ) 149 | 150 | await 
expect(response.json()).resolves.toStrictEqual(relations) 151 | expect(response.status).toBe(200) 152 | expect(response.headers.get("content-type")).toContain("application/json") 153 | }) 154 | 155 | it("returns empty array when id doesn't exist", async () => { 156 | const response = await app.request(`/api/v2/themoviedb?id=${(404).toString()}`) 157 | 158 | await expect(response.json()).resolves.toStrictEqual([]) 159 | expect(response.status).toBe(200) 160 | expect(response.headers.get("content-type")).toContain("application/json") 161 | }) 162 | 163 | it("can return a partial response", async () => { 164 | const relation: Relation = { 165 | anidb: 1337, 166 | anilist: 1337, 167 | "anime-planet": null!, 168 | anisearch: null!, 169 | imdb: null!, 170 | kitsu: null!, 171 | livechart: null!, 172 | "notify-moe": null!, 173 | themoviedb: 1337, 174 | thetvdb: null!, 175 | myanimelist: null!, 176 | } 177 | await db.insertInto("relations").values(relation).execute() 178 | 179 | const response = await app.request( 180 | `/api/v2/themoviedb?id=${relation.themoviedb!.toString()}`, 181 | ) 182 | 183 | await expect(response.json()).resolves.toStrictEqual([relation]) 184 | expect(response.status).toBe(200) 185 | expect(response.headers.get("content-type")).toContain("application/json") 186 | }) 187 | 188 | testIncludeQueryParam(app, "/api/v2/themoviedb", Source.TheMovieDB) 189 | }) 190 | -------------------------------------------------------------------------------- /src/routes/v1/ids/handler.test.ts: -------------------------------------------------------------------------------- 1 | import { afterAll, afterEach, describe, expect, it } from "vitest" 2 | 3 | import { createApp } from "../../../app.ts" 4 | import { db, type Relation, Source } from "../../../db.ts" 5 | 6 | let id = 1 7 | const createRelations = async ( 8 | amount: N, 9 | ): Promise => { 10 | const relations = Array.from({ length: amount }).map(() => ({ 11 | anilist: id++, 12 | anidb: id++, 13 | kitsu: id++, 14 | 
myanimelist: id++, 15 | })) 16 | 17 | await db.insertInto("relations").values(relations).execute() 18 | 19 | if (amount === 1) { 20 | return relations[0] as never 21 | } 22 | 23 | return relations as never 24 | } 25 | 26 | const app = createApp() 27 | 28 | afterEach(async () => db.deleteFrom("relations").execute()) 29 | 30 | afterAll(async () => { 31 | await db.destroy() 32 | }) 33 | 34 | describe("query params", () => { 35 | it("fetches relation correctly", async () => { 36 | const relation = await createRelations(1) 37 | 38 | const params = new URLSearchParams({ 39 | source: Source.AniList, 40 | id: relation.anilist!.toString(), 41 | }) 42 | const response = await app.request(`/api/ids?${params.toString()}`) 43 | 44 | expect(await response.json()).toStrictEqual(relation) 45 | expect(response.status).toBe(200) 46 | expect(response.headers.get("content-type")).toContain("application/json") 47 | }) 48 | 49 | it("returns null when id doesn't exist", async () => { 50 | const params = new URLSearchParams({ 51 | source: Source.Kitsu, 52 | id: "404", 53 | }) 54 | const response = await app.request(`/api/ids?${params.toString()}`) 55 | 56 | await expect(response.json()).resolves.toBe(null) 57 | expect(response.status).toBe(200) 58 | expect(response.headers.get("content-type")).toContain("application/json") 59 | }) 60 | 61 | it("can return a partial response", async () => { 62 | const relation: Relation = { 63 | anidb: 1337, 64 | anilist: 1337, 65 | // TODO 66 | myanimelist: null!, 67 | kitsu: null!, 68 | } 69 | await db.insertInto("relations").values(relation).execute() 70 | 71 | const params = new URLSearchParams({ 72 | source: Source.AniList, 73 | id: relation.anilist!.toString(), 74 | }) 75 | const response = await app.request(`/api/ids?${params.toString()}`) 76 | 77 | await expect(response.json()).resolves.toStrictEqual(relation) 78 | expect(response.status).toBe(200) 79 | expect(response.headers.get("content-type")).toContain("application/json") 80 | }) 81 | }) 82 | 
83 | describe("json body", () => { 84 | describe("object input", () => { 85 | it.skip("gET fails with json body", async () => { 86 | const relations = await createRelations(4) 87 | 88 | const response = await app.request("/api/ids", { 89 | body: JSON.stringify({ 90 | [Source.AniDB]: relations[0].anidb, 91 | }), 92 | }) 93 | 94 | await expect(response.json()).resolves.toMatchSnapshot() 95 | expect(response.status).toBe(400) 96 | expect(response.headers.get("content-type")).toContain("application/json") 97 | }) 98 | 99 | it("fetches a single relation", async () => { 100 | const relations = await createRelations(4) 101 | 102 | const response = await app.request("/api/ids", { 103 | method: "POST", 104 | body: JSON.stringify({ 105 | [Source.AniDB]: relations[0].anidb, 106 | }), 107 | }) 108 | 109 | await expect(response.json()).resolves.toStrictEqual(relations[0]) 110 | expect(response.status).toBe(200) 111 | expect(response.headers.get("content-type")).toContain("application/json") 112 | }) 113 | 114 | it("errors correctly on an empty object", async () => { 115 | await createRelations(4) 116 | 117 | const response = await app.request("/api/ids", { 118 | method: "POST", 119 | body: JSON.stringify({}), 120 | }) 121 | 122 | await expect(response.json()).resolves.toMatchSnapshot() 123 | expect(response.status).toBe(400) 124 | expect(response.headers.get("content-type")).toContain("application/json") 125 | }) 126 | 127 | it("returns null if not found", async () => { 128 | await createRelations(4) 129 | 130 | const response = await app.request("/api/ids", { 131 | method: "POST", 132 | body: JSON.stringify({ anidb: 100_000 }), 133 | }) 134 | 135 | await expect(response.json()).resolves.toBe(null) 136 | expect(response.status).toBe(200) 137 | expect(response.headers.get("content-type")).toContain("application/json") 138 | }) 139 | 140 | it("can return a partial response", async () => { 141 | const relation: Relation = { 142 | anidb: 1337, 143 | anilist: 1337, 144 | 
myanimelist: null as never, 145 | kitsu: null as never, 146 | } 147 | await db.insertInto("relations").values(relation).execute() 148 | 149 | const response = await app.request("/api/ids", { 150 | method: "POST", 151 | body: JSON.stringify({ anilist: 1337 }), 152 | }) 153 | 154 | await expect(response.json()).resolves.toStrictEqual(relation) 155 | expect(response.status).toBe(200) 156 | expect(response.headers.get("content-type")).toContain("application/json") 157 | }) 158 | }) 159 | 160 | describe("array input", () => { 161 | it("fetches relations correctly", async () => { 162 | const relations = await createRelations(4) 163 | 164 | const body = [ 165 | { [Source.AniDB]: relations[0].anidb }, 166 | { [Source.AniList]: 1000 }, 167 | { [Source.Kitsu]: relations[2].kitsu }, 168 | ] 169 | 170 | const result = [relations[0], null, relations[2]] 171 | 172 | const response = await app.request("/api/ids", { 173 | method: "POST", 174 | body: JSON.stringify(body), 175 | }) 176 | 177 | await expect(response.json()).resolves.toStrictEqual(result) 178 | expect(response.status).toBe(200) 179 | expect(response.headers.get("content-type")).toContain("application/json") 180 | }) 181 | 182 | it("responds correctly on no finds", async () => { 183 | const body = [{ [Source.AniList]: 1000 }, { [Source.Kitsu]: 1000 }] 184 | 185 | const result = [null, null] 186 | 187 | const response = await app.request("/api/ids", { 188 | method: "POST", 189 | body: JSON.stringify(body), 190 | }) 191 | 192 | await expect(response.json()).resolves.toStrictEqual(result) 193 | expect(response.status).toBe(200) 194 | expect(response.headers.get("content-type")).toContain("application/json") 195 | }) 196 | 197 | it("requires at least one source", async () => { 198 | const body = [{}] 199 | 200 | const response = await app.request("/api/ids", { 201 | method: "POST", 202 | body: JSON.stringify(body), 203 | }) 204 | 205 | await expect(response.json()).resolves.toMatchSnapshot() 206 | 
expect(response.status).toBe(400) 207 | expect(response.headers.get("content-type")).toContain("application/json") 208 | }) 209 | }) 210 | }) 211 | -------------------------------------------------------------------------------- /src/routes/v2/ids/handler.test.ts: -------------------------------------------------------------------------------- 1 | import { afterAll, afterEach, describe, expect, it } from "vitest" 2 | 3 | import { createApp } from "../../../app.ts" 4 | import { db, type Relation, Source } from "../../../db.ts" 5 | import { testIncludeQueryParam } from "../include.test-utils.ts" 6 | 7 | let id = 1 8 | const createRelations = async ( 9 | amount: N, 10 | ): Promise => { 11 | const relations = Array.from({ length: amount }).map(() => ({ 12 | anidb: id++, 13 | anilist: id++, 14 | "anime-planet": `${id++}`, 15 | anisearch: id++, 16 | imdb: `tt${id++}`, 17 | kitsu: id++, 18 | livechart: id++, 19 | "notify-moe": `${id++}`, 20 | themoviedb: id++, 21 | thetvdb: id++, 22 | myanimelist: id++, 23 | })) 24 | 25 | // Insert each relation 26 | for (const relation of relations) { 27 | await db.insertInto("relations").values(relation).execute() 28 | } 29 | 30 | if (amount === 1) { 31 | return relations[0] as never 32 | } 33 | 34 | return relations as never 35 | } 36 | 37 | const app = createApp() 38 | 39 | afterEach(async () => { 40 | await db.deleteFrom("relations").execute() 41 | }) 42 | 43 | afterAll(async () => { 44 | await db.destroy() 45 | }) 46 | 47 | describe("query params", () => { 48 | it("fetches relation correctly", async () => { 49 | const relation = await createRelations(1) 50 | 51 | const params = new URLSearchParams({ 52 | source: Source.AniList, 53 | id: relation.anilist!.toString(), 54 | }) 55 | const response = await app.request(`/api/v2/ids?${params.toString()}`) 56 | 57 | expect(await response.json()).toStrictEqual(relation) 58 | expect(response.status).toBe(200) 59 | expect(response.headers.get("content-type")).toContain("application/json") 60 | 
}) 61 | 62 | it("returns null when id doesn't exist", async () => { 63 | const params = new URLSearchParams({ 64 | source: Source.Kitsu, 65 | id: "404" as never, 66 | }) 67 | const response = await app.request(`/api/v2/ids?${params.toString()}`) 68 | 69 | expect(await response.json()).toStrictEqual(null) 70 | expect(response.status).toBe(200) 71 | expect(response.headers.get("content-type")).toContain("application/json") 72 | }) 73 | 74 | it("can return a partial response", async () => { 75 | const relation: Relation = { 76 | anidb: 1337, 77 | anilist: 1337, 78 | "anime-planet": null!, 79 | anisearch: null!, 80 | imdb: null!, 81 | kitsu: null!, 82 | livechart: null!, 83 | "notify-moe": null!, 84 | themoviedb: null!, 85 | thetvdb: null!, 86 | myanimelist: null!, 87 | } 88 | await db.insertInto("relations").values(relation).execute() 89 | 90 | const params = new URLSearchParams({ 91 | source: Source.AniList, 92 | id: relation.anilist!.toString(), 93 | }) 94 | const response = await app.request(`/api/v2/ids?${params.toString()}`) 95 | 96 | expect(await response.json()).toStrictEqual(relation) 97 | expect(response.status).toBe(200) 98 | expect(response.headers.get("content-type")).toContain("application/json") 99 | }) 100 | }) 101 | 102 | describe("json body", () => { 103 | describe("object input", () => { 104 | it.skip("gET fails with json body", async () => { 105 | const relations = await createRelations(4) 106 | 107 | const response = await app.request("/api/v2/ids", { 108 | body: JSON.stringify({ 109 | [Source.AniDB]: relations[0].anidb, 110 | }), 111 | }) 112 | 113 | await expect(response.json()).resolves.toMatchSnapshot() 114 | expect(response.status).toBe(400) 115 | expect(response.headers.get("content-type")).toContain("application/json") 116 | }) 117 | 118 | it("fetches a single relation", async () => { 119 | const relations = await createRelations(4) 120 | 121 | const response = await app.request("/api/v2/ids", { 122 | method: "POST", 123 | body: 
JSON.stringify({ 124 | [Source.AniDB]: relations[0].anidb, 125 | }), 126 | }) 127 | 128 | await expect(response.json()).resolves.toStrictEqual(relations[0]) 129 | expect(response.status).toBe(200) 130 | expect(response.headers.get("content-type")).toContain("application/json") 131 | }) 132 | 133 | it("errors correctly on an empty object", async () => { 134 | await createRelations(4) 135 | 136 | const response = await app.request("/api/v2/ids", { 137 | method: "POST", 138 | body: JSON.stringify({}), 139 | }) 140 | 141 | await expect(response.json()).resolves.toMatchSnapshot() 142 | expect(response.status).toBe(400) 143 | expect(response.headers.get("content-type")).toContain("application/json") 144 | }) 145 | 146 | it("returns null if not found", async () => { 147 | await createRelations(4) 148 | 149 | const response = await app.request("/api/v2/ids", { 150 | method: "POST", 151 | body: JSON.stringify({ anidb: 100_000 }), 152 | }) 153 | 154 | await expect(response.json()).resolves.toBe(null) 155 | expect(response.status).toBe(200) 156 | expect(response.headers.get("content-type")).toContain("application/json") 157 | }) 158 | 159 | it("can return a partial response", async () => { 160 | const relation: Relation = { 161 | anidb: 1337, 162 | anilist: 1337, 163 | "anime-planet": null!, 164 | anisearch: null!, 165 | imdb: null!, 166 | kitsu: null!, 167 | livechart: null!, 168 | "notify-moe": null!, 169 | themoviedb: null!, 170 | thetvdb: null!, 171 | myanimelist: null!, 172 | } 173 | await db.insertInto("relations").values(relation).execute() 174 | 175 | const response = await app.request("/api/v2/ids", { 176 | method: "POST", 177 | body: JSON.stringify({ anilist: 1337 }), 178 | }) 179 | 180 | await expect(response.json()).resolves.toStrictEqual(relation) 181 | expect(response.status).toBe(200) 182 | expect(response.headers.get("content-type")).toContain("application/json") 183 | }) 184 | }) 185 | 186 | describe("array input", () => { 187 | it("fetches relations 
correctly", async () => { 188 | const relations = await createRelations(4) 189 | 190 | const body = [ 191 | { [Source.AniDB]: relations[0].anidb }, 192 | { [Source.AniList]: 1000 }, 193 | { [Source.Kitsu]: relations[2].kitsu }, 194 | ] 195 | 196 | const result = [relations[0], null, relations[2]] 197 | 198 | const response = await app.request("/api/v2/ids", { 199 | method: "POST", 200 | body: JSON.stringify(body), 201 | }) 202 | 203 | await expect(response.json()).resolves.toStrictEqual(result) 204 | expect(response.status).toBe(200) 205 | expect(response.headers.get("content-type")).toContain("application/json") 206 | }) 207 | 208 | it("responds correctly on no finds", async () => { 209 | const body = [{ [Source.AniList]: 1000 }, { [Source.Kitsu]: 1000 }] 210 | 211 | const result = [null, null] 212 | 213 | const response = await app.request("/api/v2/ids", { 214 | method: "POST", 215 | body: JSON.stringify(body), 216 | }) 217 | 218 | await expect(response.json()).resolves.toStrictEqual(result) 219 | expect(response.status).toBe(200) 220 | expect(response.headers.get("content-type")).toContain("application/json") 221 | }) 222 | 223 | it("requires at least one source", async () => { 224 | const body = [{}] 225 | 226 | const response = await app.request("/api/v2/ids", { 227 | method: "POST", 228 | body: JSON.stringify(body), 229 | }) 230 | 231 | await expect(response.json()).resolves.toMatchSnapshot() 232 | expect(response.status).toBe(400) 233 | expect(response.headers.get("content-type")).toContain("application/json") 234 | }) 235 | }) 236 | }) 237 | 238 | testIncludeQueryParam(app, "/api/v2/ids") 239 | -------------------------------------------------------------------------------- /docs/openapi.yaml: -------------------------------------------------------------------------------- 1 | openapi: 3.1.0 2 | 3 | info: 4 | version: 2.1.0 5 | title: API Docs - arm-server 6 | license: 7 | name: GNU Affero General Public License v3.0 only 8 | identifier: AGPL-3.0-only 9 | 
contact: 10 | name: BeeeQueue 11 | url: https://github.com/BeeeQueue/arm-server 12 | description: | 13 | A service for mapping Anime IDs. 14 | 15 | **Important:** 16 | 17 | Every endpoint returns `null` and not `404` when it does not find any match based on a query. 18 | 19 | servers: 20 | - url: https://arm.haglund.dev 21 | description: Live service 22 | - url: http://localhost:3000 23 | description: Local dev server 24 | 25 | tags: 26 | - name: v2 27 | description: | 28 | `v2` adds more Sources thanks to [Fribb/anime-lists](https://github.com/Fribb/anime-lists). 29 | 30 | Unfortunately IMDB, TheMovieDB, and TheTVDB use one entry per **show** instead of **season** meaning their IDs become one-to-many mappings. 31 | 32 | This means it cannot be queried for in `/api/v2/ids` since it's impossible for the API to look the same in that case. 33 | 34 | Instead I added `/api/v2/imdb`, `/api/v2/themoviedb`, and `/api/v2/thetvdb` if you want to query by their IDs. 35 | - name: v1 36 | 37 | $defs: 38 | include_param: 39 | name: include 40 | in: query 41 | required: false 42 | example: anilist,anidb 43 | description: "Comma-separated list of sources to return in response objects." 
44 | schema: 45 | type: string 46 | 47 | '400': 48 | description: Invalid request 49 | content: 50 | application/json: 51 | schema: 52 | type: object 53 | properties: 54 | statusCode: 55 | type: integer 56 | enum: 57 | - 400 58 | error: 59 | type: string 60 | example: Bad request 61 | message: 62 | type: string 63 | example: "1: Number must be greater than or equal to 1, Number must be greater than 0" 64 | 65 | relation: 66 | type: object 67 | properties: 68 | anidb: 69 | oneOf: 70 | - type: 'null' 71 | - type: integer 72 | minimum: 0 73 | maximum: 50000000 74 | example: 1337 75 | anilist: 76 | oneOf: 77 | - type: 'null' 78 | - type: integer 79 | minimum: 0 80 | maximum: 50000000 81 | example: 1337 82 | anime-planet: 83 | oneOf: 84 | - type: 'null' 85 | - type: string 86 | minLength: 1 87 | maxLength: 50 88 | example: dororon-enma-kun 89 | anisearch: 90 | oneOf: 91 | - type: 'null' 92 | - type: integer 93 | minimum: 0 94 | maximum: 50000000 95 | example: 1337 96 | imdb: 97 | oneOf: 98 | - type: 'null' 99 | - type: string 100 | pattern: tt\d+ 101 | minLength: 1 102 | maxLength: 50 103 | example: tt0164917 104 | kitsu: 105 | oneOf: 106 | - type: 'null' 107 | - type: integer 108 | minimum: 0 109 | maximum: 50000000 110 | example: 1337 111 | livechart: 112 | oneOf: 113 | - type: 'null' 114 | - type: integer 115 | minimum: 0 116 | maximum: 50000000 117 | example: 1337 118 | notify-moe: 119 | oneOf: 120 | - type: 'null' 121 | - type: string 122 | minLength: 1 123 | maxLength: 50 124 | example: "-cQb5Fmmg" 125 | themoviedb: 126 | oneOf: 127 | - type: 'null' 128 | - type: integer 129 | minimum: 0 130 | maximum: 50000000 131 | example: 1337 132 | thetvdb: 133 | oneOf: 134 | - type: 'null' 135 | - type: integer 136 | minimum: 0 137 | maximum: 50000000 138 | example: 1337 139 | myanimelist: 140 | oneOf: 141 | - type: 'null' 142 | - type: integer 143 | minimum: 0 144 | maximum: 50000000 145 | example: 1337 146 | 147 | nullable_relation: 148 | oneOf: 149 | - $ref: 
'#/$defs/relation' 150 | - type: 'null' 151 | 152 | response: 153 | example: 154 | anidb: 1337 155 | anilist: 1337 156 | anime-planet: spriggan 157 | anisearch: null 158 | imdb: tt0164917 159 | kitsu: null 160 | livechart: null 161 | notify-moe: "-cQb5Fmmg" 162 | themoviedb: null 163 | thetvdb: null 164 | myanimelist: null 165 | oneOf: 166 | - $ref: '#/$defs/nullable_relation' 167 | - type: array 168 | items: 169 | $ref: '#/$defs/nullable_relation' 170 | 171 | v1_relation: 172 | type: object 173 | properties: 174 | anidb: 175 | oneOf: 176 | - type: 'null' 177 | - type: integer 178 | example: 1337 179 | anilist: 180 | oneOf: 181 | - type: 'null' 182 | - type: integer 183 | example: 1337 184 | myanimelist: 185 | oneOf: 186 | - type: 'null' 187 | - type: integer 188 | example: 1337 189 | kitsu: 190 | oneOf: 191 | - type: 'null' 192 | - type: integer 193 | example: 1337 194 | 195 | nullable_v1_relation: 196 | oneOf: 197 | - $ref: '#/$defs/v1_relation' 198 | - type: 'null' 199 | 200 | v1_response: 201 | example: 202 | anidb: 1337 203 | anilist: 1337 204 | kitsu: null 205 | myanimelist: null 206 | oneOf: 207 | - $ref: '#/$defs/nullable_v1_relation' 208 | - type: array 209 | items: 210 | $ref: '#/$defs/nullable_v1_relation' 211 | 212 | paths: 213 | /api/ids: 214 | get: 215 | operationId: getIds 216 | summary: Fetch IDs via query parameters 217 | security: [{}] 218 | tags: 219 | - v1 220 | 221 | parameters: 222 | - name: source 223 | in: query 224 | required: true 225 | example: anilist 226 | schema: 227 | type: string 228 | enum: 229 | - anidb 230 | - anilist 231 | - kitsu 232 | - myanimelist 233 | - name: id 234 | in: query 235 | required: true 236 | example: 1337 237 | schema: 238 | type: integer 239 | minimum: 1 240 | 241 | responses: 242 | '200': 243 | description: OK 244 | content: 245 | application/json: 246 | schema: 247 | $ref: '#/$defs/v1_response' 248 | '400': 249 | $ref: '#/$defs/400' 250 | 251 | post: 252 | operationId: postIds 253 | summary: Fetch IDs via a 
JSON body
      description: |
        The JSON body can either be an object containing the query, or an array containing multiple queries.

        If using array queries, the resulting array will map to the corresponding input!

        e.g. `body[1]` will be the result of `query[1]`.
      security: [{}]
      tags:
        - v1

      requestBody:
        required: true
        content:
          application/json:
            schema:
              example:
                - anilist: 1337
                - anidb: 1337
              oneOf:
                - $ref: '#/$defs/v1_relation'
                - type: array
                  items:
                    $ref: '#/$defs/v1_relation'

      responses:
        '200':
          description: OK
          content:
            application/json:
              schema:
                $ref: '#/$defs/v1_response'
        '400':
          $ref: '#/$defs/400'

  /api/v2/ids:
    get:
      operationId: v2-getIds
      summary: Fetch IDs via query parameters
      description: ' '
      security: [{}]
      tags:
        - v2

      parameters:
        - name: source
          in: query
          required: true
          example: anilist
          schema:
            type: string
            enum:
              - anilist
              - anidb
              - anime-planet
              - anisearch
              - kitsu
              - livechart
              - notify-moe
              - myanimelist
        - name: id
          in: query
          required: true
          example: 1337
          schema:
            oneOf:
              - type: integer
                minimum: 1
              - type: string
                minLength: 1
        # include_param is a Parameter Object, not a Schema Object; it belongs
        # in the parameters list (as in every other operation), not inside the
        # id parameter's schema oneOf.
        - $ref: "#/$defs/include_param"

      responses:
        '200':
          description: OK
          content:
            application/json:
              schema:
                $ref: '#/$defs/response'
        '400':
          $ref: '#/$defs/400'

    post:
      operationId: v2-postIds
      summary: Fetch IDs via a JSON body
      description: |
        The JSON body can either be an object containing the query, or an array containing multiple queries.

        If using array queries, the resulting array will map to the corresponding input!

        e.g. 
`body[1]` will be the result of `query[1]`. 344 | security: [{}] 345 | tags: 346 | - v2 347 | 348 | parameters: 349 | - $ref: "#/$defs/include_param" 350 | 351 | requestBody: 352 | required: true 353 | content: 354 | application/json: 355 | schema: 356 | example: 357 | - anilist: 1337 358 | - anidb: 1337 359 | - notify-moe: -cQb5Fmmg 360 | oneOf: 361 | - type: object 362 | minProperties: 1 363 | additionalProperties: false 364 | properties: 365 | anidb: 366 | oneOf: 367 | - type: 'null' 368 | - type: integer 369 | minimum: 0 370 | maximum: 50000000 371 | anilist: 372 | oneOf: 373 | - type: 'null' 374 | - type: integer 375 | minimum: 0 376 | maximum: 50000000 377 | anime-planet: 378 | oneOf: 379 | - type: 'null' 380 | - type: string 381 | minLength: 1 382 | maxLength: 50 383 | anisearch: 384 | oneOf: 385 | - type: 'null' 386 | - type: integer 387 | minimum: 0 388 | maximum: 50000000 389 | kitsu: 390 | oneOf: 391 | - type: 'null' 392 | - type: integer 393 | minimum: 0 394 | maximum: 50000000 395 | livechart: 396 | oneOf: 397 | - type: 'null' 398 | - type: integer 399 | minimum: 0 400 | maximum: 50000000 401 | notify-moe: 402 | oneOf: 403 | - type: 'null' 404 | - type: string 405 | minLength: 1 406 | maxLength: 50 407 | myanimelist: 408 | oneOf: 409 | - type: 'null' 410 | - type: integer 411 | minimum: 0 412 | maximum: 50000000 413 | - type: array 414 | minItems: 1 415 | maxItems: 100 416 | items: 417 | type: object 418 | minProperties: 1 419 | additionalProperties: false 420 | properties: 421 | anidb: 422 | oneOf: 423 | - type: 'null' 424 | - type: integer 425 | minimum: 0 426 | maximum: 50000000 427 | anilist: 428 | oneOf: 429 | - type: 'null' 430 | - type: integer 431 | minimum: 0 432 | maximum: 50000000 433 | anime-planet: 434 | oneOf: 435 | - type: 'null' 436 | - type: string 437 | minLength: 1 438 | maxLength: 50 439 | anisearch: 440 | oneOf: 441 | - type: 'null' 442 | - type: integer 443 | minimum: 0 444 | maximum: 50000000 445 | kitsu: 446 | oneOf: 447 | - 
type: 'null'
                          - type: integer
                            minimum: 0
                            maximum: 50000000
                        livechart:
                          oneOf:
                            - type: 'null'
                            - type: integer
                              minimum: 0
                              maximum: 50000000
                        notify-moe:
                          oneOf:
                            - type: 'null'
                            - type: string
                              minLength: 1
                              maxLength: 50
                        myanimelist:
                          oneOf:
                            - type: 'null'
                            - type: integer
                              minimum: 0
                              maximum: 50000000

      responses:
        '200':
          description: OK
          content:
            application/json:
              schema:
                $ref: '#/$defs/response'
        '400':
          $ref: '#/$defs/400'

  /api/v2/imdb:
    get:
      operationId: v2-imdb
      summary: Fetch IDs by IMDB ID
      description: ' '
      security: [{}]
      tags:
        - v2

      parameters:
        - name: id
          in: query
          required: true
          example: tt5370118
          schema:
            # IMDB IDs are strings like "tt5370118" (see $defs/relation.imdb),
            # not integers.
            type: string
            pattern: tt\d+
            minLength: 1
            maxLength: 50
        - $ref: "#/$defs/include_param"

      responses:
        '200':
          description: OK
          content:
            application/json:
              schema:
                type: array
                items:
                  $ref: '#/$defs/nullable_relation'
        '400':
          $ref: '#/$defs/400'

  /api/v2/themoviedb:
    get:
      operationId: v2-themoviedb
      summary: Fetch IDs by TheMovieDB ID
      description: ' '
      security: [{}]
      tags:
        - v2

      parameters:
        - name: id
          in: query
          required: true
          example: 1337
          schema:
            type: integer
            minimum: 1
        - $ref: "#/$defs/include_param"

      responses:
        '200':
          description: OK
          content:
            application/json:
              schema:
                type: array
                items:
                  $ref: '#/$defs/nullable_relation'
        '400':
          $ref: '#/$defs/400'

  /api/v2/thetvdb:
    get:
      operationId: v2-thetvdb
      summary: Fetch IDs by TheTVDB ID
      description: ' '
      security: [{}]
      tags:
        - v2

      parameters:
        - name: id
          in: 
query 554 | required: true 555 | example: 1337 556 | schema: 557 | type: integer 558 | minimum: 1 559 | - $ref: "#/$defs/include_param" 560 | 561 | responses: 562 | '200': 563 | description: OK 564 | content: 565 | application/json: 566 | schema: 567 | type: array 568 | items: 569 | $ref: '#/$defs/nullable_relation' 570 | '400': 571 | $ref: '#/$defs/400' 572 | -------------------------------------------------------------------------------- /LICENSE.md: -------------------------------------------------------------------------------- 1 | GNU AFFERO GENERAL PUBLIC LICENSE 2 | Version 3, 19 November 2007 3 | 4 | Copyright (C) 2007 Free Software Foundation, Inc. 5 | Everyone is permitted to copy and distribute verbatim copies 6 | of this license document, but changing it is not allowed. 7 | 8 | Preamble 9 | 10 | The GNU Affero General Public License is a free, copyleft license for 11 | software and other kinds of works, specifically designed to ensure 12 | cooperation with the community in the case of network server software. 13 | 14 | The licenses for most software and other practical works are designed 15 | to take away your freedom to share and change the works. By contrast, 16 | our General Public Licenses are intended to guarantee your freedom to 17 | share and change all versions of a program--to make sure it remains free 18 | software for all its users. 19 | 20 | When we speak of free software, we are referring to freedom, not 21 | price. Our General Public Licenses are designed to make sure that you 22 | have the freedom to distribute copies of free software (and charge for 23 | them if you wish), that you receive source code or can get it if you 24 | want it, that you can change the software or use pieces of it in new 25 | free programs, and that you know you can do these things. 
26 | 27 | Developers that use our General Public Licenses protect your rights 28 | with two steps: (1) assert copyright on the software, and (2) offer 29 | you this License which gives you legal permission to copy, distribute 30 | and/or modify the software. 31 | 32 | A secondary benefit of defending all users' freedom is that 33 | improvements made in alternate versions of the program, if they 34 | receive widespread use, become available for other developers to 35 | incorporate. Many developers of free software are heartened and 36 | encouraged by the resulting cooperation. However, in the case of 37 | software used on network servers, this result may fail to come about. 38 | The GNU General Public License permits making a modified version and 39 | letting the public access it on a server without ever releasing its 40 | source code to the public. 41 | 42 | The GNU Affero General Public License is designed specifically to 43 | ensure that, in such cases, the modified source code becomes available 44 | to the community. It requires the operator of a network server to 45 | provide the source code of the modified version running there to the 46 | users of that server. Therefore, public use of a modified version, on 47 | a publicly accessible server, gives the public access to the source 48 | code of the modified version. 49 | 50 | An older license, called the Affero General Public License and 51 | published by Affero, was designed to accomplish similar goals. This is 52 | a different license, not a version of the Affero GPL, but Affero has 53 | released a new version of the Affero GPL which permits relicensing under 54 | this license. 55 | 56 | The precise terms and conditions for copying, distribution and 57 | modification follow. 58 | 59 | TERMS AND CONDITIONS 60 | 61 | 0. Definitions. 62 | 63 | "This License" refers to version 3 of the GNU Affero General Public License. 
64 | 65 | "Copyright" also means copyright-like laws that apply to other kinds of 66 | works, such as semiconductor masks. 67 | 68 | "The Program" refers to any copyrightable work licensed under this 69 | License. Each licensee is addressed as "you". "Licensees" and 70 | "recipients" may be individuals or organizations. 71 | 72 | To "modify" a work means to copy from or adapt all or part of the work 73 | in a fashion requiring copyright permission, other than the making of an 74 | exact copy. The resulting work is called a "modified version" of the 75 | earlier work or a work "based on" the earlier work. 76 | 77 | A "covered work" means either the unmodified Program or a work based 78 | on the Program. 79 | 80 | To "propagate" a work means to do anything with it that, without 81 | permission, would make you directly or secondarily liable for 82 | infringement under applicable copyright law, except executing it on a 83 | computer or modifying a private copy. Propagation includes copying, 84 | distribution (with or without modification), making available to the 85 | public, and in some countries other activities as well. 86 | 87 | To "convey" a work means any kind of propagation that enables other 88 | parties to make or receive copies. Mere interaction with a user through 89 | a computer network, with no transfer of a copy, is not conveying. 90 | 91 | An interactive user interface displays "Appropriate Legal Notices" 92 | to the extent that it includes a convenient and prominently visible 93 | feature that (1) displays an appropriate copyright notice, and (2) 94 | tells the user that there is no warranty for the work (except to the 95 | extent that warranties are provided), that licensees may convey the 96 | work under this License, and how to view a copy of this License. If 97 | the interface presents a list of user commands or options, such as a 98 | menu, a prominent item in the list meets this criterion. 99 | 100 | 1. Source Code. 
101 | 102 | The "source code" for a work means the preferred form of the work 103 | for making modifications to it. "Object code" means any non-source 104 | form of a work. 105 | 106 | A "Standard Interface" means an interface that either is an official 107 | standard defined by a recognized standards body, or, in the case of 108 | interfaces specified for a particular programming language, one that 109 | is widely used among developers working in that language. 110 | 111 | The "System Libraries" of an executable work include anything, other 112 | than the work as a whole, that (a) is included in the normal form of 113 | packaging a Major Component, but which is not part of that Major 114 | Component, and (b) serves only to enable use of the work with that 115 | Major Component, or to implement a Standard Interface for which an 116 | implementation is available to the public in source code form. A 117 | "Major Component", in this context, means a major essential component 118 | (kernel, window system, and so on) of the specific operating system 119 | (if any) on which the executable work runs, or a compiler used to 120 | produce the work, or an object code interpreter used to run it. 121 | 122 | The "Corresponding Source" for a work in object code form means all 123 | the source code needed to generate, install, and (for an executable 124 | work) run the object code and to modify the work, including scripts to 125 | control those activities. However, it does not include the work's 126 | System Libraries, or general-purpose tools or generally available free 127 | programs which are used unmodified in performing those activities but 128 | which are not part of the work. 
For example, Corresponding Source 129 | includes interface definition files associated with source files for 130 | the work, and the source code for shared libraries and dynamically 131 | linked subprograms that the work is specifically designed to require, 132 | such as by intimate data communication or control flow between those 133 | subprograms and other parts of the work. 134 | 135 | The Corresponding Source need not include anything that users 136 | can regenerate automatically from other parts of the Corresponding 137 | Source. 138 | 139 | The Corresponding Source for a work in source code form is that 140 | same work. 141 | 142 | 2. Basic Permissions. 143 | 144 | All rights granted under this License are granted for the term of 145 | copyright on the Program, and are irrevocable provided the stated 146 | conditions are met. This License explicitly affirms your unlimited 147 | permission to run the unmodified Program. The output from running a 148 | covered work is covered by this License only if the output, given its 149 | content, constitutes a covered work. This License acknowledges your 150 | rights of fair use or other equivalent, as provided by copyright law. 151 | 152 | You may make, run and propagate covered works that you do not 153 | convey, without conditions so long as your license otherwise remains 154 | in force. You may convey covered works to others for the sole purpose 155 | of having them make modifications exclusively for you, or provide you 156 | with facilities for running those works, provided that you comply with 157 | the terms of this License in conveying all material for which you do 158 | not control copyright. Those thus making or running the covered works 159 | for you must do so exclusively on your behalf, under your direction 160 | and control, on terms that prohibit them from making any copies of 161 | your copyrighted material outside their relationship with you. 
162 | 163 | Conveying under any other circumstances is permitted solely under 164 | the conditions stated below. Sublicensing is not allowed; section 10 165 | makes it unnecessary. 166 | 167 | 3. Protecting Users' Legal Rights From Anti-Circumvention Law. 168 | 169 | No covered work shall be deemed part of an effective technological 170 | measure under any applicable law fulfilling obligations under article 171 | 11 of the WIPO copyright treaty adopted on 20 December 1996, or 172 | similar laws prohibiting or restricting circumvention of such 173 | measures. 174 | 175 | When you convey a covered work, you waive any legal power to forbid 176 | circumvention of technological measures to the extent such circumvention 177 | is effected by exercising rights under this License with respect to 178 | the covered work, and you disclaim any intention to limit operation or 179 | modification of the work as a means of enforcing, against the work's 180 | users, your or third parties' legal rights to forbid circumvention of 181 | technological measures. 182 | 183 | 4. Conveying Verbatim Copies. 184 | 185 | You may convey verbatim copies of the Program's source code as you 186 | receive it, in any medium, provided that you conspicuously and 187 | appropriately publish on each copy an appropriate copyright notice; 188 | keep intact all notices stating that this License and any 189 | non-permissive terms added in accord with section 7 apply to the code; 190 | keep intact all notices of the absence of any warranty; and give all 191 | recipients a copy of this License along with the Program. 192 | 193 | You may charge any price or no price for each copy that you convey, 194 | and you may offer support or warranty protection for a fee. 195 | 196 | 5. Conveying Modified Source Versions. 
197 | 198 | You may convey a work based on the Program, or the modifications to 199 | produce it from the Program, in the form of source code under the 200 | terms of section 4, provided that you also meet all of these conditions: 201 | 202 | a) The work must carry prominent notices stating that you modified 203 | it, and giving a relevant date. 204 | 205 | b) The work must carry prominent notices stating that it is 206 | released under this License and any conditions added under section 207 | 7. This requirement modifies the requirement in section 4 to 208 | "keep intact all notices". 209 | 210 | c) You must license the entire work, as a whole, under this 211 | License to anyone who comes into possession of a copy. This 212 | License will therefore apply, along with any applicable section 7 213 | additional terms, to the whole of the work, and all its parts, 214 | regardless of how they are packaged. This License gives no 215 | permission to license the work in any other way, but it does not 216 | invalidate such permission if you have separately received it. 217 | 218 | d) If the work has interactive user interfaces, each must display 219 | Appropriate Legal Notices; however, if the Program has interactive 220 | interfaces that do not display Appropriate Legal Notices, your 221 | work need not make them do so. 222 | 223 | A compilation of a covered work with other separate and independent 224 | works, which are not by their nature extensions of the covered work, 225 | and which are not combined with it such as to form a larger program, 226 | in or on a volume of a storage or distribution medium, is called an 227 | "aggregate" if the compilation and its resulting copyright are not 228 | used to limit the access or legal rights of the compilation's users 229 | beyond what the individual works permit. Inclusion of a covered work 230 | in an aggregate does not cause this License to apply to the other 231 | parts of the aggregate. 232 | 233 | 6. 
Conveying Non-Source Forms. 234 | 235 | You may convey a covered work in object code form under the terms 236 | of sections 4 and 5, provided that you also convey the 237 | machine-readable Corresponding Source under the terms of this License, 238 | in one of these ways: 239 | 240 | a) Convey the object code in, or embodied in, a physical product 241 | (including a physical distribution medium), accompanied by the 242 | Corresponding Source fixed on a durable physical medium 243 | customarily used for software interchange. 244 | 245 | b) Convey the object code in, or embodied in, a physical product 246 | (including a physical distribution medium), accompanied by a 247 | written offer, valid for at least three years and valid for as 248 | long as you offer spare parts or customer support for that product 249 | model, to give anyone who possesses the object code either (1) a 250 | copy of the Corresponding Source for all the software in the 251 | product that is covered by this License, on a durable physical 252 | medium customarily used for software interchange, for a price no 253 | more than your reasonable cost of physically performing this 254 | conveying of source, or (2) access to copy the 255 | Corresponding Source from a network server at no charge. 256 | 257 | c) Convey individual copies of the object code with a copy of the 258 | written offer to provide the Corresponding Source. This 259 | alternative is allowed only occasionally and noncommercially, and 260 | only if you received the object code with such an offer, in accord 261 | with subsection 6b. 262 | 263 | d) Convey the object code by offering access from a designated 264 | place (gratis or for a charge), and offer equivalent access to the 265 | Corresponding Source in the same way through the same place at no 266 | further charge. You need not require recipients to copy the 267 | Corresponding Source along with the object code. 
If the place to 268 | copy the object code is a network server, the Corresponding Source 269 | may be on a different server (operated by you or a third party) 270 | that supports equivalent copying facilities, provided you maintain 271 | clear directions next to the object code saying where to find the 272 | Corresponding Source. Regardless of what server hosts the 273 | Corresponding Source, you remain obligated to ensure that it is 274 | available for as long as needed to satisfy these requirements. 275 | 276 | e) Convey the object code using peer-to-peer transmission, provided 277 | you inform other peers where the object code and Corresponding 278 | Source of the work are being offered to the general public at no 279 | charge under subsection 6d. 280 | 281 | A separable portion of the object code, whose source code is excluded 282 | from the Corresponding Source as a System Library, need not be 283 | included in conveying the object code work. 284 | 285 | A "User Product" is either (1) a "consumer product", which means any 286 | tangible personal property which is normally used for personal, family, 287 | or household purposes, or (2) anything designed or sold for incorporation 288 | into a dwelling. In determining whether a product is a consumer product, 289 | doubtful cases shall be resolved in favor of coverage. For a particular 290 | product received by a particular user, "normally used" refers to a 291 | typical or common use of that class of product, regardless of the status 292 | of the particular user or of the way in which the particular user 293 | actually uses, or expects or is expected to use, the product. A product 294 | is a consumer product regardless of whether the product has substantial 295 | commercial, industrial or non-consumer uses, unless such uses represent 296 | the only significant mode of use of the product. 
297 | 298 | "Installation Information" for a User Product means any methods, 299 | procedures, authorization keys, or other information required to install 300 | and execute modified versions of a covered work in that User Product from 301 | a modified version of its Corresponding Source. The information must 302 | suffice to ensure that the continued functioning of the modified object 303 | code is in no case prevented or interfered with solely because 304 | modification has been made. 305 | 306 | If you convey an object code work under this section in, or with, or 307 | specifically for use in, a User Product, and the conveying occurs as 308 | part of a transaction in which the right of possession and use of the 309 | User Product is transferred to the recipient in perpetuity or for a 310 | fixed term (regardless of how the transaction is characterized), the 311 | Corresponding Source conveyed under this section must be accompanied 312 | by the Installation Information. But this requirement does not apply 313 | if neither you nor any third party retains the ability to install 314 | modified object code on the User Product (for example, the work has 315 | been installed in ROM). 316 | 317 | The requirement to provide Installation Information does not include a 318 | requirement to continue to provide support service, warranty, or updates 319 | for a work that has been modified or installed by the recipient, or for 320 | the User Product in which it has been modified or installed. Access to a 321 | network may be denied when the modification itself materially and 322 | adversely affects the operation of the network or violates the rules and 323 | protocols for communication across the network. 
324 | 325 | Corresponding Source conveyed, and Installation Information provided, 326 | in accord with this section must be in a format that is publicly 327 | documented (and with an implementation available to the public in 328 | source code form), and must require no special password or key for 329 | unpacking, reading or copying. 330 | 331 | 7. Additional Terms. 332 | 333 | "Additional permissions" are terms that supplement the terms of this 334 | License by making exceptions from one or more of its conditions. 335 | Additional permissions that are applicable to the entire Program shall 336 | be treated as though they were included in this License, to the extent 337 | that they are valid under applicable law. If additional permissions 338 | apply only to part of the Program, that part may be used separately 339 | under those permissions, but the entire Program remains governed by 340 | this License without regard to the additional permissions. 341 | 342 | When you convey a copy of a covered work, you may at your option 343 | remove any additional permissions from that copy, or from any part of 344 | it. (Additional permissions may be written to require their own 345 | removal in certain cases when you modify the work.) You may place 346 | additional permissions on material, added by you to a covered work, 347 | for which you have or can give appropriate copyright permission. 
348 | 349 | Notwithstanding any other provision of this License, for material you 350 | add to a covered work, you may (if authorized by the copyright holders of 351 | that material) supplement the terms of this License with terms: 352 | 353 | a) Disclaiming warranty or limiting liability differently from the 354 | terms of sections 15 and 16 of this License; or 355 | 356 | b) Requiring preservation of specified reasonable legal notices or 357 | author attributions in that material or in the Appropriate Legal 358 | Notices displayed by works containing it; or 359 | 360 | c) Prohibiting misrepresentation of the origin of that material, or 361 | requiring that modified versions of such material be marked in 362 | reasonable ways as different from the original version; or 363 | 364 | d) Limiting the use for publicity purposes of names of licensors or 365 | authors of the material; or 366 | 367 | e) Declining to grant rights under trademark law for use of some 368 | trade names, trademarks, or service marks; or 369 | 370 | f) Requiring indemnification of licensors and authors of that 371 | material by anyone who conveys the material (or modified versions of 372 | it) with contractual assumptions of liability to the recipient, for 373 | any liability that these contractual assumptions directly impose on 374 | those licensors and authors. 375 | 376 | All other non-permissive additional terms are considered "further 377 | restrictions" within the meaning of section 10. If the Program as you 378 | received it, or any part of it, contains a notice stating that it is 379 | governed by this License along with a term that is a further 380 | restriction, you may remove that term. 
If a license document contains 381 | a further restriction but permits relicensing or conveying under this 382 | License, you may add to a covered work material governed by the terms 383 | of that license document, provided that the further restriction does 384 | not survive such relicensing or conveying. 385 | 386 | If you add terms to a covered work in accord with this section, you 387 | must place, in the relevant source files, a statement of the 388 | additional terms that apply to those files, or a notice indicating 389 | where to find the applicable terms. 390 | 391 | Additional terms, permissive or non-permissive, may be stated in the 392 | form of a separately written license, or stated as exceptions; 393 | the above requirements apply either way. 394 | 395 | 8. Termination. 396 | 397 | You may not propagate or modify a covered work except as expressly 398 | provided under this License. Any attempt otherwise to propagate or 399 | modify it is void, and will automatically terminate your rights under 400 | this License (including any patent licenses granted under the third 401 | paragraph of section 11). 402 | 403 | However, if you cease all violation of this License, then your 404 | license from a particular copyright holder is reinstated (a) 405 | provisionally, unless and until the copyright holder explicitly and 406 | finally terminates your license, and (b) permanently, if the copyright 407 | holder fails to notify you of the violation by some reasonable means 408 | prior to 60 days after the cessation. 409 | 410 | Moreover, your license from a particular copyright holder is 411 | reinstated permanently if the copyright holder notifies you of the 412 | violation by some reasonable means, this is the first time you have 413 | received notice of violation of this License (for any work) from that 414 | copyright holder, and you cure the violation prior to 30 days after 415 | your receipt of the notice. 
416 | 417 | Termination of your rights under this section does not terminate the 418 | licenses of parties who have received copies or rights from you under 419 | this License. If your rights have been terminated and not permanently 420 | reinstated, you do not qualify to receive new licenses for the same 421 | material under section 10. 422 | 423 | 9. Acceptance Not Required for Having Copies. 424 | 425 | You are not required to accept this License in order to receive or 426 | run a copy of the Program. Ancillary propagation of a covered work 427 | occurring solely as a consequence of using peer-to-peer transmission 428 | to receive a copy likewise does not require acceptance. However, 429 | nothing other than this License grants you permission to propagate or 430 | modify any covered work. These actions infringe copyright if you do 431 | not accept this License. Therefore, by modifying or propagating a 432 | covered work, you indicate your acceptance of this License to do so. 433 | 434 | 10. Automatic Licensing of Downstream Recipients. 435 | 436 | Each time you convey a covered work, the recipient automatically 437 | receives a license from the original licensors, to run, modify and 438 | propagate that work, subject to this License. You are not responsible 439 | for enforcing compliance by third parties with this License. 440 | 441 | An "entity transaction" is a transaction transferring control of an 442 | organization, or substantially all assets of one, or subdividing an 443 | organization, or merging organizations. 
If propagation of a covered 444 | work results from an entity transaction, each party to that 445 | transaction who receives a copy of the work also receives whatever 446 | licenses to the work the party's predecessor in interest had or could 447 | give under the previous paragraph, plus a right to possession of the 448 | Corresponding Source of the work from the predecessor in interest, if 449 | the predecessor has it or can get it with reasonable efforts. 450 | 451 | You may not impose any further restrictions on the exercise of the 452 | rights granted or affirmed under this License. For example, you may 453 | not impose a license fee, royalty, or other charge for exercise of 454 | rights granted under this License, and you may not initiate litigation 455 | (including a cross-claim or counterclaim in a lawsuit) alleging that 456 | any patent claim is infringed by making, using, selling, offering for 457 | sale, or importing the Program or any portion of it. 458 | 459 | 11. Patents. 460 | 461 | A "contributor" is a copyright holder who authorizes use under this 462 | License of the Program or a work on which the Program is based. The 463 | work thus licensed is called the contributor's "contributor version". 464 | 465 | A contributor's "essential patent claims" are all patent claims 466 | owned or controlled by the contributor, whether already acquired or 467 | hereafter acquired, that would be infringed by some manner, permitted 468 | by this License, of making, using, or selling its contributor version, 469 | but do not include claims that would be infringed only as a 470 | consequence of further modification of the contributor version. For 471 | purposes of this definition, "control" includes the right to grant 472 | patent sublicenses in a manner consistent with the requirements of 473 | this License. 
474 | 475 | Each contributor grants you a non-exclusive, worldwide, royalty-free 476 | patent license under the contributor's essential patent claims, to 477 | make, use, sell, offer for sale, import and otherwise run, modify and 478 | propagate the contents of its contributor version. 479 | 480 | In the following three paragraphs, a "patent license" is any express 481 | agreement or commitment, however denominated, not to enforce a patent 482 | (such as an express permission to practice a patent or covenant not to 483 | sue for patent infringement). To "grant" such a patent license to a 484 | party means to make such an agreement or commitment not to enforce a 485 | patent against the party. 486 | 487 | If you convey a covered work, knowingly relying on a patent license, 488 | and the Corresponding Source of the work is not available for anyone 489 | to copy, free of charge and under the terms of this License, through a 490 | publicly available network server or other readily accessible means, 491 | then you must either (1) cause the Corresponding Source to be so 492 | available, or (2) arrange to deprive yourself of the benefit of the 493 | patent license for this particular work, or (3) arrange, in a manner 494 | consistent with the requirements of this License, to extend the patent 495 | license to downstream recipients. "Knowingly relying" means you have 496 | actual knowledge that, but for the patent license, your conveying the 497 | covered work in a country, or your recipient's use of the covered work 498 | in a country, would infringe one or more identifiable patents in that 499 | country that you have reason to believe are valid. 
500 | 501 | If, pursuant to or in connection with a single transaction or 502 | arrangement, you convey, or propagate by procuring conveyance of, a 503 | covered work, and grant a patent license to some of the parties 504 | receiving the covered work authorizing them to use, propagate, modify 505 | or convey a specific copy of the covered work, then the patent license 506 | you grant is automatically extended to all recipients of the covered 507 | work and works based on it. 508 | 509 | A patent license is "discriminatory" if it does not include within 510 | the scope of its coverage, prohibits the exercise of, or is 511 | conditioned on the non-exercise of one or more of the rights that are 512 | specifically granted under this License. You may not convey a covered 513 | work if you are a party to an arrangement with a third party that is 514 | in the business of distributing software, under which you make payment 515 | to the third party based on the extent of your activity of conveying 516 | the work, and under which the third party grants, to any of the 517 | parties who would receive the covered work from you, a discriminatory 518 | patent license (a) in connection with copies of the covered work 519 | conveyed by you (or copies made from those copies), or (b) primarily 520 | for and in connection with specific products or compilations that 521 | contain the covered work, unless you entered into that arrangement, 522 | or that patent license was granted, prior to 28 March 2007. 523 | 524 | Nothing in this License shall be construed as excluding or limiting 525 | any implied license or other defenses to infringement that may 526 | otherwise be available to you under applicable patent law. 527 | 528 | 12. No Surrender of Others' Freedom. 529 | 530 | If conditions are imposed on you (whether by court order, agreement or 531 | otherwise) that contradict the conditions of this License, they do not 532 | excuse you from the conditions of this License. 
If you cannot convey a 533 | covered work so as to satisfy simultaneously your obligations under this 534 | License and any other pertinent obligations, then as a consequence you may 535 | not convey it at all. For example, if you agree to terms that obligate you 536 | to collect a royalty for further conveying from those to whom you convey 537 | the Program, the only way you could satisfy both those terms and this 538 | License would be to refrain entirely from conveying the Program. 539 | 540 | 13. Remote Network Interaction; Use with the GNU General Public License. 541 | 542 | Notwithstanding any other provision of this License, if you modify the 543 | Program, your modified version must prominently offer all users 544 | interacting with it remotely through a computer network (if your version 545 | supports such interaction) an opportunity to receive the Corresponding 546 | Source of your version by providing access to the Corresponding Source 547 | from a network server at no charge, through some standard or customary 548 | means of facilitating copying of software. This Corresponding Source 549 | shall include the Corresponding Source for any work covered by version 3 550 | of the GNU General Public License that is incorporated pursuant to the 551 | following paragraph. 552 | 553 | Notwithstanding any other provision of this License, you have 554 | permission to link or combine any covered work with a work licensed 555 | under version 3 of the GNU General Public License into a single 556 | combined work, and to convey the resulting work. The terms of this 557 | License will continue to apply to the part which is the covered work, 558 | but the work with which it is combined will remain governed by version 559 | 3 of the GNU General Public License. 560 | 561 | 14. Revised Versions of this License. 562 | 563 | The Free Software Foundation may publish revised and/or new versions of 564 | the GNU Affero General Public License from time to time. 
Such new versions 565 | will be similar in spirit to the present version, but may differ in detail to 566 | address new problems or concerns. 567 | 568 | Each version is given a distinguishing version number. If the 569 | Program specifies that a certain numbered version of the GNU Affero General 570 | Public License "or any later version" applies to it, you have the 571 | option of following the terms and conditions either of that numbered 572 | version or of any later version published by the Free Software 573 | Foundation. If the Program does not specify a version number of the 574 | GNU Affero General Public License, you may choose any version ever published 575 | by the Free Software Foundation. 576 | 577 | If the Program specifies that a proxy can decide which future 578 | versions of the GNU Affero General Public License can be used, that proxy's 579 | public statement of acceptance of a version permanently authorizes you 580 | to choose that version for the Program. 581 | 582 | Later license versions may give you additional or different 583 | permissions. However, no additional obligations are imposed on any 584 | author or copyright holder as a result of your choosing to follow a 585 | later version. 586 | 587 | 15. Disclaimer of Warranty. 588 | 589 | THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY 590 | APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT 591 | HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY 592 | OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, 593 | THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR 594 | PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM 595 | IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF 596 | ALL NECESSARY SERVICING, REPAIR OR CORRECTION. 597 | 598 | 16. Limitation of Liability. 
599 | 600 | IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING 601 | WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS 602 | THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY 603 | GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE 604 | USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF 605 | DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD 606 | PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), 607 | EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF 608 | SUCH DAMAGES. 609 | 610 | 17. Interpretation of Sections 15 and 16. 611 | 612 | If the disclaimer of warranty and limitation of liability provided 613 | above cannot be given local legal effect according to their terms, 614 | reviewing courts shall apply local law that most closely approximates 615 | an absolute waiver of all civil liability in connection with the 616 | Program, unless a warranty or assumption of liability accompanies a 617 | copy of the Program in return for a fee. 618 | 619 | END OF TERMS AND CONDITIONS 620 | 621 | How to Apply These Terms to Your New Programs 622 | 623 | If you develop a new program, and you want it to be of the greatest 624 | possible use to the public, the best way to achieve this is to make it 625 | free software which everyone can redistribute and change under these terms. 626 | 627 | To do so, attach the following notices to the program. It is safest 628 | to attach them to the start of each source file to most effectively 629 | state the exclusion of warranty; and each file should have at least 630 | the "copyright" line and a pointer to where the full notice is found. 
631 | 632 | <one line to give the program's name and a brief idea of what it does.> 633 | Copyright (C) <year>  <name of author> 634 | 635 | This program is free software: you can redistribute it and/or modify 636 | it under the terms of the GNU Affero General Public License as published 637 | by the Free Software Foundation, either version 3 of the License, or 638 | (at your option) any later version. 639 | 640 | This program is distributed in the hope that it will be useful, 641 | but WITHOUT ANY WARRANTY; without even the implied warranty of 642 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 643 | GNU Affero General Public License for more details. 644 | 645 | You should have received a copy of the GNU Affero General Public License 646 | along with this program. If not, see <https://www.gnu.org/licenses/>. 647 | 648 | Also add information on how to contact you by electronic and paper mail. 649 | 650 | If your software can interact with users remotely through a computer 651 | network, you should also make sure that it provides a way for users to 652 | get its source. For example, if your program is a web application, its 653 | interface could display a "Source" link that leads users to an archive 654 | of the code. There are many ways you could offer source, and different 655 | solutions will be better for different programs; see section 13 for the 656 | specific requirements. 657 | 658 | You should also get your employer (if you work as a programmer) or school, 659 | if any, to sign a "copyright disclaimer" for the program, if necessary. 660 | For more information on this, and how to apply and follow the GNU AGPL, see 661 | <https://www.gnu.org/licenses/>. --------------------------------------------------------------------------------