├── .dockerignore
├── .editorconfig
├── .eslintignore
├── .github
├── FUNDING.yml
├── ISSUE_TEMPLATE
│ └── manual-rule-request.md
├── renovate.json5
└── workflows
│ └── cicd.yml
├── .gitignore
├── .npmrc
├── Dockerfile
├── LICENSE.md
├── README.md
├── bin
└── update.ts
├── biome.json
├── docs
└── openapi.yaml
├── eslint.config.js
├── knexfile.js
├── migrations
├── 20190611171759_create_tables.js
├── 20221230042725_anime-lists.js
├── 20240616234100_wal.js
└── 20240617000000_special_dbs.js
├── package.json
├── patches
└── knex.patch
├── pnpm-lock.yaml
├── sqlite
└── .gitkeep
├── src
├── app.ts
├── config.ts
├── db.ts
├── docs.ts
├── index.ts
├── lib
│ └── logger.ts
├── manual-rules.ts
├── routes
│ ├── v1
│ │ └── ids
│ │ │ ├── __snapshots__
│ │ │ └── handler.test.ts.snap
│ │ │ ├── handler.test.ts
│ │ │ ├── handler.ts
│ │ │ └── schemas
│ │ │ ├── json-body.test.ts
│ │ │ ├── json-body.ts
│ │ │ ├── query-params.test.ts
│ │ │ └── query-params.ts
│ └── v2
│ │ ├── ids
│ │ ├── __snapshots__
│ │ │ └── handler.test.ts.snap
│ │ ├── handler.test.ts
│ │ ├── handler.ts
│ │ └── schemas
│ │ │ ├── common.ts
│ │ │ ├── json-body.test.ts
│ │ │ ├── json-body.ts
│ │ │ ├── query-params.test.ts
│ │ │ └── query-params.ts
│ │ ├── include.test-utils.ts
│ │ ├── include.test.ts
│ │ ├── include.ts
│ │ └── special
│ │ ├── handler.test.ts
│ │ ├── handler.ts
│ │ └── schemas
│ │ └── special.ts
├── shared-schemas.ts
├── shims.d.ts
├── update.test.ts
├── update.ts
└── utils.ts
├── tsconfig.json
├── tsup.config.ts
├── vitest.config.ts
└── vitest.setup.ts
/.dockerignore:
--------------------------------------------------------------------------------
1 | .idea
2 | .git
3 | .github
4 | dist
5 | node_modules
6 | redoc-static.html
7 |
--------------------------------------------------------------------------------
/.editorconfig:
--------------------------------------------------------------------------------
1 | root = true
2 |
3 | [*] # all files
4 | indent_style = tab
5 | indent_size = 2
6 | end_of_line = lf
7 | insert_final_newline = true
8 | charset = utf-8
9 |
--------------------------------------------------------------------------------
/.eslintignore:
--------------------------------------------------------------------------------
1 | *.snap
2 |
--------------------------------------------------------------------------------
/.github/FUNDING.yml:
--------------------------------------------------------------------------------
1 | ko_fi: beequeue
2 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/manual-rule-request.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Manual Rule Request
3 | about: Request a manual mapping rule for a missing or duplicated entry
4 |
5 | ---
6 |
7 |
12 |
13 | **Which entry is missing or duplicated?**
14 |
15 | `source:id`
16 |
17 | **What should it be mapped to, if you know?**
18 |
19 | `source:id`
20 |
--------------------------------------------------------------------------------
/.github/renovate.json5:
--------------------------------------------------------------------------------
1 | {
2 | extends: ["config:js-app", "helpers:disableTypesNodeMajor", "schedule:earlyMondays", "group:allNonMajor"],
3 | prConcurrentLimit: 5,
4 | branchConcurrentLimit: 5,
5 | labels: ["dependencies"],
6 | baseBranches: ["master"],
7 | automerge: false,
8 | packageRules: [
9 | {
10 | matchPackageNames: ["node"],
11 | allowedVersions: "<=22",
12 | rangeStrategy: "replace",
13 | },
14 | {
15 | groupName: "linters",
16 | matchPackagePatterns: ["lint"],
17 | automerge: true,
18 | },
19 | {
20 | groupName: "testing packages",
21 | matchPackagePatterns: ["jest", "test", "vitest"],
22 | automerge: true,
23 | },
24 | {
25 | groupName: "`@types` package patches",
26 | matchPackagePrefixes: ["@types/"],
27 | patch: true,
28 | automerge: true,
29 | },
30 | ],
31 | }
32 |
--------------------------------------------------------------------------------
/.github/workflows/cicd.yml:
--------------------------------------------------------------------------------
1 | name: ci&cd
2 |
3 | on:
4 | push:
5 | branches:
6 | - master
7 | pull_request:
8 |
9 | jobs:
10 | lint:
11 | runs-on: ubuntu-latest
12 |
13 | steps:
14 | - uses: actions/checkout@v4
15 |
16 | - uses: actions/setup-node@v4
17 | with:
18 | node-version: 22
19 |
20 | - run: corepack enable
21 |
22 | - name: find pnpm cache path
23 | id: cache
24 | run: echo "path=$(pnpm store path)" >> $GITHUB_OUTPUT
25 |
26 | - uses: actions/cache@v4
27 | with:
28 | path: ${{ steps.cache.outputs.path }}
29 | key: v1-pnpm-${{ hashFiles('**/pnpm-lock.yaml') }}
30 | restore-keys: |
31 | v1-pnpm-
32 |
33 | - name: Install dependencies
34 | run: pnpm install --frozen-lockfile
35 |
36 | - run: pnpm lint
37 |
38 | fmt:
39 | runs-on: ubuntu-latest
40 |
41 | steps:
42 | - uses: actions/checkout@v4
43 |
44 | - uses: biomejs/setup-biome@v2
45 | with:
46 | version: latest
47 |
48 | - run: biome check
49 |
50 | typecheck:
51 | runs-on: ubuntu-latest
52 |
53 | steps:
54 | - uses: actions/checkout@v4
55 |
56 | - uses: actions/setup-node@v4
57 | with:
58 | node-version: 22
59 |
60 | - run: corepack enable
61 |
62 | - name: find pnpm cache path
63 | id: cache
64 | run: echo "path=$(pnpm store path)" >> $GITHUB_OUTPUT
65 |
66 | - uses: actions/cache@v4
67 | with:
68 | path: ${{ steps.cache.outputs.path }}
69 | key: v1-pnpm-${{ hashFiles('**/pnpm-lock.yaml') }}
70 | restore-keys: |
71 | v1-pnpm-
72 |
73 | - name: Install dependencies
74 | run: pnpm install --frozen-lockfile
75 |
76 | - run: pnpm typecheck
77 |
78 | test:
79 | runs-on: ubuntu-latest
80 |
81 | steps:
82 | - uses: actions/checkout@v4
83 |
84 | - uses: actions/setup-node@v4
85 | with:
86 | node-version: 22
87 |
88 | - run: corepack enable
89 |
90 | - name: find pnpm cache path
91 | id: cache
92 | run: echo "path=$(pnpm store path)" >> $GITHUB_OUTPUT
93 |
94 | - uses: actions/cache@v4
95 | with:
96 | path: ${{ steps.cache.outputs.path }}
97 | key: v1-pnpm-${{ hashFiles('**/pnpm-lock.yaml') }}
98 | restore-keys: |
99 | v1-pnpm-
100 |
101 | - name: Install dependencies
102 | run: pnpm install --frozen-lockfile
103 |
104 | - run: pnpm test
105 | env:
106 | NODE_ENV: test
107 |
108 | build:
109 | runs-on: ubuntu-latest
110 | permissions:
111 | contents: read
112 | packages: write
113 | id-token: write
114 |
115 | steps:
116 | - name: Docker meta
117 | id: meta
118 | uses: docker/metadata-action@v5
119 | with:
120 | images: |
121 | ghcr.io/${{ github.repository }}
122 | tags: |
123 | type=raw,value={{sha}}
124 | type=raw,value=latest
125 |
126 | - name: Set up depot
127 | uses: depot/setup-action@v1
128 |
129 | - name: Login to GHCR
130 | if: github.ref == 'refs/heads/master'
131 | uses: docker/login-action@v3
132 | with:
133 | registry: ghcr.io
134 | username: ${{ github.actor }}
135 | password: ${{ secrets.GITHUB_TOKEN }}
136 |
137 | - name: Build and maybe Push Docker image
138 | uses: depot/build-push-action@v1
139 | with:
140 | project: ks849krng9
141 | push: ${{ github.ref == 'refs/heads/master' }}
142 | tags: ${{ steps.meta.outputs.tags }}
143 | labels: ${{ steps.meta.outputs.labels }}
144 |
145 | deploy:
146 | if: github.ref == 'refs/heads/master'
147 | needs: [lint, test, typecheck, build]
148 | runs-on: ubuntu-latest
149 | environment:
150 | name: prod
151 | url: https://arm.haglund.dev/api
152 |
153 | steps:
154 | - run: echo 'Deployed! :)'
155 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | .idea/
2 | .env
3 | coverage/
4 | dist/
5 | sqlite/
6 | redoc-static.html
7 |
8 | # Logs
9 | logs
10 | *.log
11 |
12 | # Dependency directories
13 | node_modules/
14 |
--------------------------------------------------------------------------------
/.npmrc:
--------------------------------------------------------------------------------
1 | save-exact = true
2 | shell-emulator = true
3 | use-lockfile-v6 = true
4 |
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM node:22-alpine AS base
2 |
3 | WORKDIR /app
4 |
5 | ENV PNPM_HOME=/pnpm
6 | ENV CI=1
7 | # Use production in case any dependencies use it in any way
8 | ENV NODE_ENV=production
9 |
10 | # Enable node compile cache
11 | ENV NODE_COMPILE_CACHE=/node-cc
12 | RUN mkdir -p $NODE_COMPILE_CACHE
13 |
14 | FROM base AS base_deps
15 |
16 | ENV CI=1
17 |
18 | COPY .npmrc package.json pnpm-lock.yaml ./
19 | COPY patches/ patches/
20 |
21 | RUN corepack enable
22 | RUN corepack prepare --activate
23 |
24 | # Install dependencies
25 | RUN --mount=type=cache,id=pnpm,target=/pnpm/store \
26 | pnpm install --frozen-lockfile
27 |
28 | FROM base_deps AS build
29 |
30 | COPY knexfile.js tsconfig.json tsup.config.ts ./
31 | COPY src/ src/
32 |
33 | RUN pnpm run build
34 |
35 | FROM base_deps AS docs
36 |
37 | COPY docs/openapi.yaml docs/openapi.yaml
38 |
39 | RUN --mount=type=cache,id=pnpm,target=/pnpm/store \
40 | pnpm run docs
41 |
42 | FROM base
43 |
44 | COPY .npmrc knexfile.js package.json pnpm-lock.yaml ./
45 | COPY src/ src/
46 | COPY migrations/ migrations/
47 |
48 | COPY --from=build /app/dist dist/
49 | COPY --from=docs /app/redoc-static.html .
50 |
51 | # Run with...
52 | # Source maps enabled, since it does not affect performance from what I found
53 | ENV NODE_OPTIONS="--enable-source-maps"
54 | # Warnings disabled, we know what we're doing and they're annoying
55 | ENV NODE_NO_WARNINGS=1
56 |
57 | CMD ["node", "dist/index.js"]
58 |
--------------------------------------------------------------------------------
/LICENSE.md:
--------------------------------------------------------------------------------
1 | GNU AFFERO GENERAL PUBLIC LICENSE
2 | Version 3, 19 November 2007
3 |
4 | Copyright (C) 2007 Free Software Foundation, Inc.
5 | Everyone is permitted to copy and distribute verbatim copies
6 | of this license document, but changing it is not allowed.
7 |
8 | Preamble
9 |
10 | The GNU Affero General Public License is a free, copyleft license for
11 | software and other kinds of works, specifically designed to ensure
12 | cooperation with the community in the case of network server software.
13 |
14 | The licenses for most software and other practical works are designed
15 | to take away your freedom to share and change the works. By contrast,
16 | our General Public Licenses are intended to guarantee your freedom to
17 | share and change all versions of a program--to make sure it remains free
18 | software for all its users.
19 |
20 | When we speak of free software, we are referring to freedom, not
21 | price. Our General Public Licenses are designed to make sure that you
22 | have the freedom to distribute copies of free software (and charge for
23 | them if you wish), that you receive source code or can get it if you
24 | want it, that you can change the software or use pieces of it in new
25 | free programs, and that you know you can do these things.
26 |
27 | Developers that use our General Public Licenses protect your rights
28 | with two steps: (1) assert copyright on the software, and (2) offer
29 | you this License which gives you legal permission to copy, distribute
30 | and/or modify the software.
31 |
32 | A secondary benefit of defending all users' freedom is that
33 | improvements made in alternate versions of the program, if they
34 | receive widespread use, become available for other developers to
35 | incorporate. Many developers of free software are heartened and
36 | encouraged by the resulting cooperation. However, in the case of
37 | software used on network servers, this result may fail to come about.
38 | The GNU General Public License permits making a modified version and
39 | letting the public access it on a server without ever releasing its
40 | source code to the public.
41 |
42 | The GNU Affero General Public License is designed specifically to
43 | ensure that, in such cases, the modified source code becomes available
44 | to the community. It requires the operator of a network server to
45 | provide the source code of the modified version running there to the
46 | users of that server. Therefore, public use of a modified version, on
47 | a publicly accessible server, gives the public access to the source
48 | code of the modified version.
49 |
50 | An older license, called the Affero General Public License and
51 | published by Affero, was designed to accomplish similar goals. This is
52 | a different license, not a version of the Affero GPL, but Affero has
53 | released a new version of the Affero GPL which permits relicensing under
54 | this license.
55 |
56 | The precise terms and conditions for copying, distribution and
57 | modification follow.
58 |
59 | TERMS AND CONDITIONS
60 |
61 | 0. Definitions.
62 |
63 | "This License" refers to version 3 of the GNU Affero General Public License.
64 |
65 | "Copyright" also means copyright-like laws that apply to other kinds of
66 | works, such as semiconductor masks.
67 |
68 | "The Program" refers to any copyrightable work licensed under this
69 | License. Each licensee is addressed as "you". "Licensees" and
70 | "recipients" may be individuals or organizations.
71 |
72 | To "modify" a work means to copy from or adapt all or part of the work
73 | in a fashion requiring copyright permission, other than the making of an
74 | exact copy. The resulting work is called a "modified version" of the
75 | earlier work or a work "based on" the earlier work.
76 |
77 | A "covered work" means either the unmodified Program or a work based
78 | on the Program.
79 |
80 | To "propagate" a work means to do anything with it that, without
81 | permission, would make you directly or secondarily liable for
82 | infringement under applicable copyright law, except executing it on a
83 | computer or modifying a private copy. Propagation includes copying,
84 | distribution (with or without modification), making available to the
85 | public, and in some countries other activities as well.
86 |
87 | To "convey" a work means any kind of propagation that enables other
88 | parties to make or receive copies. Mere interaction with a user through
89 | a computer network, with no transfer of a copy, is not conveying.
90 |
91 | An interactive user interface displays "Appropriate Legal Notices"
92 | to the extent that it includes a convenient and prominently visible
93 | feature that (1) displays an appropriate copyright notice, and (2)
94 | tells the user that there is no warranty for the work (except to the
95 | extent that warranties are provided), that licensees may convey the
96 | work under this License, and how to view a copy of this License. If
97 | the interface presents a list of user commands or options, such as a
98 | menu, a prominent item in the list meets this criterion.
99 |
100 | 1. Source Code.
101 |
102 | The "source code" for a work means the preferred form of the work
103 | for making modifications to it. "Object code" means any non-source
104 | form of a work.
105 |
106 | A "Standard Interface" means an interface that either is an official
107 | standard defined by a recognized standards body, or, in the case of
108 | interfaces specified for a particular programming language, one that
109 | is widely used among developers working in that language.
110 |
111 | The "System Libraries" of an executable work include anything, other
112 | than the work as a whole, that (a) is included in the normal form of
113 | packaging a Major Component, but which is not part of that Major
114 | Component, and (b) serves only to enable use of the work with that
115 | Major Component, or to implement a Standard Interface for which an
116 | implementation is available to the public in source code form. A
117 | "Major Component", in this context, means a major essential component
118 | (kernel, window system, and so on) of the specific operating system
119 | (if any) on which the executable work runs, or a compiler used to
120 | produce the work, or an object code interpreter used to run it.
121 |
122 | The "Corresponding Source" for a work in object code form means all
123 | the source code needed to generate, install, and (for an executable
124 | work) run the object code and to modify the work, including scripts to
125 | control those activities. However, it does not include the work's
126 | System Libraries, or general-purpose tools or generally available free
127 | programs which are used unmodified in performing those activities but
128 | which are not part of the work. For example, Corresponding Source
129 | includes interface definition files associated with source files for
130 | the work, and the source code for shared libraries and dynamically
131 | linked subprograms that the work is specifically designed to require,
132 | such as by intimate data communication or control flow between those
133 | subprograms and other parts of the work.
134 |
135 | The Corresponding Source need not include anything that users
136 | can regenerate automatically from other parts of the Corresponding
137 | Source.
138 |
139 | The Corresponding Source for a work in source code form is that
140 | same work.
141 |
142 | 2. Basic Permissions.
143 |
144 | All rights granted under this License are granted for the term of
145 | copyright on the Program, and are irrevocable provided the stated
146 | conditions are met. This License explicitly affirms your unlimited
147 | permission to run the unmodified Program. The output from running a
148 | covered work is covered by this License only if the output, given its
149 | content, constitutes a covered work. This License acknowledges your
150 | rights of fair use or other equivalent, as provided by copyright law.
151 |
152 | You may make, run and propagate covered works that you do not
153 | convey, without conditions so long as your license otherwise remains
154 | in force. You may convey covered works to others for the sole purpose
155 | of having them make modifications exclusively for you, or provide you
156 | with facilities for running those works, provided that you comply with
157 | the terms of this License in conveying all material for which you do
158 | not control copyright. Those thus making or running the covered works
159 | for you must do so exclusively on your behalf, under your direction
160 | and control, on terms that prohibit them from making any copies of
161 | your copyrighted material outside their relationship with you.
162 |
163 | Conveying under any other circumstances is permitted solely under
164 | the conditions stated below. Sublicensing is not allowed; section 10
165 | makes it unnecessary.
166 |
167 | 3. Protecting Users' Legal Rights From Anti-Circumvention Law.
168 |
169 | No covered work shall be deemed part of an effective technological
170 | measure under any applicable law fulfilling obligations under article
171 | 11 of the WIPO copyright treaty adopted on 20 December 1996, or
172 | similar laws prohibiting or restricting circumvention of such
173 | measures.
174 |
175 | When you convey a covered work, you waive any legal power to forbid
176 | circumvention of technological measures to the extent such circumvention
177 | is effected by exercising rights under this License with respect to
178 | the covered work, and you disclaim any intention to limit operation or
179 | modification of the work as a means of enforcing, against the work's
180 | users, your or third parties' legal rights to forbid circumvention of
181 | technological measures.
182 |
183 | 4. Conveying Verbatim Copies.
184 |
185 | You may convey verbatim copies of the Program's source code as you
186 | receive it, in any medium, provided that you conspicuously and
187 | appropriately publish on each copy an appropriate copyright notice;
188 | keep intact all notices stating that this License and any
189 | non-permissive terms added in accord with section 7 apply to the code;
190 | keep intact all notices of the absence of any warranty; and give all
191 | recipients a copy of this License along with the Program.
192 |
193 | You may charge any price or no price for each copy that you convey,
194 | and you may offer support or warranty protection for a fee.
195 |
196 | 5. Conveying Modified Source Versions.
197 |
198 | You may convey a work based on the Program, or the modifications to
199 | produce it from the Program, in the form of source code under the
200 | terms of section 4, provided that you also meet all of these conditions:
201 |
202 | a) The work must carry prominent notices stating that you modified
203 | it, and giving a relevant date.
204 |
205 | b) The work must carry prominent notices stating that it is
206 | released under this License and any conditions added under section
207 | 7. This requirement modifies the requirement in section 4 to
208 | "keep intact all notices".
209 |
210 | c) You must license the entire work, as a whole, under this
211 | License to anyone who comes into possession of a copy. This
212 | License will therefore apply, along with any applicable section 7
213 | additional terms, to the whole of the work, and all its parts,
214 | regardless of how they are packaged. This License gives no
215 | permission to license the work in any other way, but it does not
216 | invalidate such permission if you have separately received it.
217 |
218 | d) If the work has interactive user interfaces, each must display
219 | Appropriate Legal Notices; however, if the Program has interactive
220 | interfaces that do not display Appropriate Legal Notices, your
221 | work need not make them do so.
222 |
223 | A compilation of a covered work with other separate and independent
224 | works, which are not by their nature extensions of the covered work,
225 | and which are not combined with it such as to form a larger program,
226 | in or on a volume of a storage or distribution medium, is called an
227 | "aggregate" if the compilation and its resulting copyright are not
228 | used to limit the access or legal rights of the compilation's users
229 | beyond what the individual works permit. Inclusion of a covered work
230 | in an aggregate does not cause this License to apply to the other
231 | parts of the aggregate.
232 |
233 | 6. Conveying Non-Source Forms.
234 |
235 | You may convey a covered work in object code form under the terms
236 | of sections 4 and 5, provided that you also convey the
237 | machine-readable Corresponding Source under the terms of this License,
238 | in one of these ways:
239 |
240 | a) Convey the object code in, or embodied in, a physical product
241 | (including a physical distribution medium), accompanied by the
242 | Corresponding Source fixed on a durable physical medium
243 | customarily used for software interchange.
244 |
245 | b) Convey the object code in, or embodied in, a physical product
246 | (including a physical distribution medium), accompanied by a
247 | written offer, valid for at least three years and valid for as
248 | long as you offer spare parts or customer support for that product
249 | model, to give anyone who possesses the object code either (1) a
250 | copy of the Corresponding Source for all the software in the
251 | product that is covered by this License, on a durable physical
252 | medium customarily used for software interchange, for a price no
253 | more than your reasonable cost of physically performing this
254 | conveying of source, or (2) access to copy the
255 | Corresponding Source from a network server at no charge.
256 |
257 | c) Convey individual copies of the object code with a copy of the
258 | written offer to provide the Corresponding Source. This
259 | alternative is allowed only occasionally and noncommercially, and
260 | only if you received the object code with such an offer, in accord
261 | with subsection 6b.
262 |
263 | d) Convey the object code by offering access from a designated
264 | place (gratis or for a charge), and offer equivalent access to the
265 | Corresponding Source in the same way through the same place at no
266 | further charge. You need not require recipients to copy the
267 | Corresponding Source along with the object code. If the place to
268 | copy the object code is a network server, the Corresponding Source
269 | may be on a different server (operated by you or a third party)
270 | that supports equivalent copying facilities, provided you maintain
271 | clear directions next to the object code saying where to find the
272 | Corresponding Source. Regardless of what server hosts the
273 | Corresponding Source, you remain obligated to ensure that it is
274 | available for as long as needed to satisfy these requirements.
275 |
276 | e) Convey the object code using peer-to-peer transmission, provided
277 | you inform other peers where the object code and Corresponding
278 | Source of the work are being offered to the general public at no
279 | charge under subsection 6d.
280 |
281 | A separable portion of the object code, whose source code is excluded
282 | from the Corresponding Source as a System Library, need not be
283 | included in conveying the object code work.
284 |
285 | A "User Product" is either (1) a "consumer product", which means any
286 | tangible personal property which is normally used for personal, family,
287 | or household purposes, or (2) anything designed or sold for incorporation
288 | into a dwelling. In determining whether a product is a consumer product,
289 | doubtful cases shall be resolved in favor of coverage. For a particular
290 | product received by a particular user, "normally used" refers to a
291 | typical or common use of that class of product, regardless of the status
292 | of the particular user or of the way in which the particular user
293 | actually uses, or expects or is expected to use, the product. A product
294 | is a consumer product regardless of whether the product has substantial
295 | commercial, industrial or non-consumer uses, unless such uses represent
296 | the only significant mode of use of the product.
297 |
298 | "Installation Information" for a User Product means any methods,
299 | procedures, authorization keys, or other information required to install
300 | and execute modified versions of a covered work in that User Product from
301 | a modified version of its Corresponding Source. The information must
302 | suffice to ensure that the continued functioning of the modified object
303 | code is in no case prevented or interfered with solely because
304 | modification has been made.
305 |
306 | If you convey an object code work under this section in, or with, or
307 | specifically for use in, a User Product, and the conveying occurs as
308 | part of a transaction in which the right of possession and use of the
309 | User Product is transferred to the recipient in perpetuity or for a
310 | fixed term (regardless of how the transaction is characterized), the
311 | Corresponding Source conveyed under this section must be accompanied
312 | by the Installation Information. But this requirement does not apply
313 | if neither you nor any third party retains the ability to install
314 | modified object code on the User Product (for example, the work has
315 | been installed in ROM).
316 |
317 | The requirement to provide Installation Information does not include a
318 | requirement to continue to provide support service, warranty, or updates
319 | for a work that has been modified or installed by the recipient, or for
320 | the User Product in which it has been modified or installed. Access to a
321 | network may be denied when the modification itself materially and
322 | adversely affects the operation of the network or violates the rules and
323 | protocols for communication across the network.
324 |
325 | Corresponding Source conveyed, and Installation Information provided,
326 | in accord with this section must be in a format that is publicly
327 | documented (and with an implementation available to the public in
328 | source code form), and must require no special password or key for
329 | unpacking, reading or copying.
330 |
331 | 7. Additional Terms.
332 |
333 | "Additional permissions" are terms that supplement the terms of this
334 | License by making exceptions from one or more of its conditions.
335 | Additional permissions that are applicable to the entire Program shall
336 | be treated as though they were included in this License, to the extent
337 | that they are valid under applicable law. If additional permissions
338 | apply only to part of the Program, that part may be used separately
339 | under those permissions, but the entire Program remains governed by
340 | this License without regard to the additional permissions.
341 |
342 | When you convey a copy of a covered work, you may at your option
343 | remove any additional permissions from that copy, or from any part of
344 | it. (Additional permissions may be written to require their own
345 | removal in certain cases when you modify the work.) You may place
346 | additional permissions on material, added by you to a covered work,
347 | for which you have or can give appropriate copyright permission.
348 |
349 | Notwithstanding any other provision of this License, for material you
350 | add to a covered work, you may (if authorized by the copyright holders of
351 | that material) supplement the terms of this License with terms:
352 |
353 | a) Disclaiming warranty or limiting liability differently from the
354 | terms of sections 15 and 16 of this License; or
355 |
356 | b) Requiring preservation of specified reasonable legal notices or
357 | author attributions in that material or in the Appropriate Legal
358 | Notices displayed by works containing it; or
359 |
360 | c) Prohibiting misrepresentation of the origin of that material, or
361 | requiring that modified versions of such material be marked in
362 | reasonable ways as different from the original version; or
363 |
364 | d) Limiting the use for publicity purposes of names of licensors or
365 | authors of the material; or
366 |
367 | e) Declining to grant rights under trademark law for use of some
368 | trade names, trademarks, or service marks; or
369 |
370 | f) Requiring indemnification of licensors and authors of that
371 | material by anyone who conveys the material (or modified versions of
372 | it) with contractual assumptions of liability to the recipient, for
373 | any liability that these contractual assumptions directly impose on
374 | those licensors and authors.
375 |
376 | All other non-permissive additional terms are considered "further
377 | restrictions" within the meaning of section 10. If the Program as you
378 | received it, or any part of it, contains a notice stating that it is
379 | governed by this License along with a term that is a further
380 | restriction, you may remove that term. If a license document contains
381 | a further restriction but permits relicensing or conveying under this
382 | License, you may add to a covered work material governed by the terms
383 | of that license document, provided that the further restriction does
384 | not survive such relicensing or conveying.
385 |
386 | If you add terms to a covered work in accord with this section, you
387 | must place, in the relevant source files, a statement of the
388 | additional terms that apply to those files, or a notice indicating
389 | where to find the applicable terms.
390 |
391 | Additional terms, permissive or non-permissive, may be stated in the
392 | form of a separately written license, or stated as exceptions;
393 | the above requirements apply either way.
394 |
395 | 8. Termination.
396 |
397 | You may not propagate or modify a covered work except as expressly
398 | provided under this License. Any attempt otherwise to propagate or
399 | modify it is void, and will automatically terminate your rights under
400 | this License (including any patent licenses granted under the third
401 | paragraph of section 11).
402 |
403 | However, if you cease all violation of this License, then your
404 | license from a particular copyright holder is reinstated (a)
405 | provisionally, unless and until the copyright holder explicitly and
406 | finally terminates your license, and (b) permanently, if the copyright
407 | holder fails to notify you of the violation by some reasonable means
408 | prior to 60 days after the cessation.
409 |
410 | Moreover, your license from a particular copyright holder is
411 | reinstated permanently if the copyright holder notifies you of the
412 | violation by some reasonable means, this is the first time you have
413 | received notice of violation of this License (for any work) from that
414 | copyright holder, and you cure the violation prior to 30 days after
415 | your receipt of the notice.
416 |
417 | Termination of your rights under this section does not terminate the
418 | licenses of parties who have received copies or rights from you under
419 | this License. If your rights have been terminated and not permanently
420 | reinstated, you do not qualify to receive new licenses for the same
421 | material under section 10.
422 |
423 | 9. Acceptance Not Required for Having Copies.
424 |
425 | You are not required to accept this License in order to receive or
426 | run a copy of the Program. Ancillary propagation of a covered work
427 | occurring solely as a consequence of using peer-to-peer transmission
428 | to receive a copy likewise does not require acceptance. However,
429 | nothing other than this License grants you permission to propagate or
430 | modify any covered work. These actions infringe copyright if you do
431 | not accept this License. Therefore, by modifying or propagating a
432 | covered work, you indicate your acceptance of this License to do so.
433 |
434 | 10. Automatic Licensing of Downstream Recipients.
435 |
436 | Each time you convey a covered work, the recipient automatically
437 | receives a license from the original licensors, to run, modify and
438 | propagate that work, subject to this License. You are not responsible
439 | for enforcing compliance by third parties with this License.
440 |
441 | An "entity transaction" is a transaction transferring control of an
442 | organization, or substantially all assets of one, or subdividing an
443 | organization, or merging organizations. If propagation of a covered
444 | work results from an entity transaction, each party to that
445 | transaction who receives a copy of the work also receives whatever
446 | licenses to the work the party's predecessor in interest had or could
447 | give under the previous paragraph, plus a right to possession of the
448 | Corresponding Source of the work from the predecessor in interest, if
449 | the predecessor has it or can get it with reasonable efforts.
450 |
451 | You may not impose any further restrictions on the exercise of the
452 | rights granted or affirmed under this License. For example, you may
453 | not impose a license fee, royalty, or other charge for exercise of
454 | rights granted under this License, and you may not initiate litigation
455 | (including a cross-claim or counterclaim in a lawsuit) alleging that
456 | any patent claim is infringed by making, using, selling, offering for
457 | sale, or importing the Program or any portion of it.
458 |
459 | 11. Patents.
460 |
461 | A "contributor" is a copyright holder who authorizes use under this
462 | License of the Program or a work on which the Program is based. The
463 | work thus licensed is called the contributor's "contributor version".
464 |
465 | A contributor's "essential patent claims" are all patent claims
466 | owned or controlled by the contributor, whether already acquired or
467 | hereafter acquired, that would be infringed by some manner, permitted
468 | by this License, of making, using, or selling its contributor version,
469 | but do not include claims that would be infringed only as a
470 | consequence of further modification of the contributor version. For
471 | purposes of this definition, "control" includes the right to grant
472 | patent sublicenses in a manner consistent with the requirements of
473 | this License.
474 |
475 | Each contributor grants you a non-exclusive, worldwide, royalty-free
476 | patent license under the contributor's essential patent claims, to
477 | make, use, sell, offer for sale, import and otherwise run, modify and
478 | propagate the contents of its contributor version.
479 |
480 | In the following three paragraphs, a "patent license" is any express
481 | agreement or commitment, however denominated, not to enforce a patent
482 | (such as an express permission to practice a patent or covenant not to
483 | sue for patent infringement). To "grant" such a patent license to a
484 | party means to make such an agreement or commitment not to enforce a
485 | patent against the party.
486 |
487 | If you convey a covered work, knowingly relying on a patent license,
488 | and the Corresponding Source of the work is not available for anyone
489 | to copy, free of charge and under the terms of this License, through a
490 | publicly available network server or other readily accessible means,
491 | then you must either (1) cause the Corresponding Source to be so
492 | available, or (2) arrange to deprive yourself of the benefit of the
493 | patent license for this particular work, or (3) arrange, in a manner
494 | consistent with the requirements of this License, to extend the patent
495 | license to downstream recipients. "Knowingly relying" means you have
496 | actual knowledge that, but for the patent license, your conveying the
497 | covered work in a country, or your recipient's use of the covered work
498 | in a country, would infringe one or more identifiable patents in that
499 | country that you have reason to believe are valid.
500 |
501 | If, pursuant to or in connection with a single transaction or
502 | arrangement, you convey, or propagate by procuring conveyance of, a
503 | covered work, and grant a patent license to some of the parties
504 | receiving the covered work authorizing them to use, propagate, modify
505 | or convey a specific copy of the covered work, then the patent license
506 | you grant is automatically extended to all recipients of the covered
507 | work and works based on it.
508 |
509 | A patent license is "discriminatory" if it does not include within
510 | the scope of its coverage, prohibits the exercise of, or is
511 | conditioned on the non-exercise of one or more of the rights that are
512 | specifically granted under this License. You may not convey a covered
513 | work if you are a party to an arrangement with a third party that is
514 | in the business of distributing software, under which you make payment
515 | to the third party based on the extent of your activity of conveying
516 | the work, and under which the third party grants, to any of the
517 | parties who would receive the covered work from you, a discriminatory
518 | patent license (a) in connection with copies of the covered work
519 | conveyed by you (or copies made from those copies), or (b) primarily
520 | for and in connection with specific products or compilations that
521 | contain the covered work, unless you entered into that arrangement,
522 | or that patent license was granted, prior to 28 March 2007.
523 |
524 | Nothing in this License shall be construed as excluding or limiting
525 | any implied license or other defenses to infringement that may
526 | otherwise be available to you under applicable patent law.
527 |
528 | 12. No Surrender of Others' Freedom.
529 |
530 | If conditions are imposed on you (whether by court order, agreement or
531 | otherwise) that contradict the conditions of this License, they do not
532 | excuse you from the conditions of this License. If you cannot convey a
533 | covered work so as to satisfy simultaneously your obligations under this
534 | License and any other pertinent obligations, then as a consequence you may
535 | not convey it at all. For example, if you agree to terms that obligate you
536 | to collect a royalty for further conveying from those to whom you convey
537 | the Program, the only way you could satisfy both those terms and this
538 | License would be to refrain entirely from conveying the Program.
539 |
540 | 13. Remote Network Interaction; Use with the GNU General Public License.
541 |
542 | Notwithstanding any other provision of this License, if you modify the
543 | Program, your modified version must prominently offer all users
544 | interacting with it remotely through a computer network (if your version
545 | supports such interaction) an opportunity to receive the Corresponding
546 | Source of your version by providing access to the Corresponding Source
547 | from a network server at no charge, through some standard or customary
548 | means of facilitating copying of software. This Corresponding Source
549 | shall include the Corresponding Source for any work covered by version 3
550 | of the GNU General Public License that is incorporated pursuant to the
551 | following paragraph.
552 |
553 | Notwithstanding any other provision of this License, you have
554 | permission to link or combine any covered work with a work licensed
555 | under version 3 of the GNU General Public License into a single
556 | combined work, and to convey the resulting work. The terms of this
557 | License will continue to apply to the part which is the covered work,
558 | but the work with which it is combined will remain governed by version
559 | 3 of the GNU General Public License.
560 |
561 | 14. Revised Versions of this License.
562 |
563 | The Free Software Foundation may publish revised and/or new versions of
564 | the GNU Affero General Public License from time to time. Such new versions
565 | will be similar in spirit to the present version, but may differ in detail to
566 | address new problems or concerns.
567 |
568 | Each version is given a distinguishing version number. If the
569 | Program specifies that a certain numbered version of the GNU Affero General
570 | Public License "or any later version" applies to it, you have the
571 | option of following the terms and conditions either of that numbered
572 | version or of any later version published by the Free Software
573 | Foundation. If the Program does not specify a version number of the
574 | GNU Affero General Public License, you may choose any version ever published
575 | by the Free Software Foundation.
576 |
577 | If the Program specifies that a proxy can decide which future
578 | versions of the GNU Affero General Public License can be used, that proxy's
579 | public statement of acceptance of a version permanently authorizes you
580 | to choose that version for the Program.
581 |
582 | Later license versions may give you additional or different
583 | permissions. However, no additional obligations are imposed on any
584 | author or copyright holder as a result of your choosing to follow a
585 | later version.
586 |
587 | 15. Disclaimer of Warranty.
588 |
589 | THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
590 | APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
591 | HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
592 | OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
593 | THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
594 | PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
595 | IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
596 | ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
597 |
598 | 16. Limitation of Liability.
599 |
600 | IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
601 | WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
602 | THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
603 | GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
604 | USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
605 | DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
606 | PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
607 | EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
608 | SUCH DAMAGES.
609 |
610 | 17. Interpretation of Sections 15 and 16.
611 |
612 | If the disclaimer of warranty and limitation of liability provided
613 | above cannot be given local legal effect according to their terms,
614 | reviewing courts shall apply local law that most closely approximates
615 | an absolute waiver of all civil liability in connection with the
616 | Program, unless a warranty or assumption of liability accompanies a
617 | copy of the Program in return for a fee.
618 |
619 | END OF TERMS AND CONDITIONS
620 |
621 | How to Apply These Terms to Your New Programs
622 |
623 | If you develop a new program, and you want it to be of the greatest
624 | possible use to the public, the best way to achieve this is to make it
625 | free software which everyone can redistribute and change under these terms.
626 |
627 | To do so, attach the following notices to the program. It is safest
628 | to attach them to the start of each source file to most effectively
629 | state the exclusion of warranty; and each file should have at least
630 | the "copyright" line and a pointer to where the full notice is found.
631 |
632 |
633 |     Copyright (C) <year>  <name of author>
634 |
635 | This program is free software: you can redistribute it and/or modify
636 | it under the terms of the GNU Affero General Public License as published
637 | by the Free Software Foundation, either version 3 of the License, or
638 | (at your option) any later version.
639 |
640 | This program is distributed in the hope that it will be useful,
641 | but WITHOUT ANY WARRANTY; without even the implied warranty of
642 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
643 | GNU Affero General Public License for more details.
644 |
645 | You should have received a copy of the GNU Affero General Public License
646 | along with this program. If not, see <https://www.gnu.org/licenses/>.
647 |
648 | Also add information on how to contact you by electronic and paper mail.
649 |
650 | If your software can interact with users remotely through a computer
651 | network, you should also make sure that it provides a way for users to
652 | get its source. For example, if your program is a web application, its
653 | interface could display a "Source" link that leads users to an archive
654 | of the code. There are many ways you could offer source, and different
655 | solutions will be better for different programs; see section 13 for the
656 | specific requirements.
657 |
658 | You should also get your employer (if you work as a programmer) or school,
659 | if any, to sign a "copyright disclaimer" for the program, if necessary.
660 | For more information on this, and how to apply and follow the GNU AGPL, see
661 | <https://www.gnu.org/licenses/why-not-lgpl.html>.
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # arm-server
2 |
3 | [](https://github.com/BeeeQueue/arm-server/actions?query=branch%3Amaster+workflow%3ACI)
4 | [](https://uptime.h.haglund.dev/status/arm-server)
5 |
6 | This app uses data from [`Fribb/anime-lists`](https://github.com/Fribb/anime-lists) - fetching
7 | and updating itself every 24 hours.
8 |
9 | [`Fribb/anime-lists`](https://github.com/Fribb/anime-lists) is an automatic merged copy of
10 | [`anime-offline-database`](https://github.com/manami-project/anime-offline-database)
11 | and
12 | [`Anime-Lists/anime-lists`](https://github.com/Anime-Lists/anime-lists).
13 |
14 |
15 | #### Get notifications on important API changes
16 |
17 | Subscribe to new releases in this repo:
18 |
19 | 
20 |
21 | ### Missing or duplicate entries
22 |
23 | Some entries in the database are not mapped correctly due to inconsistent naming - the owner of `anime-offline-database`
24 | cannot fix them due to complexity. Therefore this service has manual rules that combine known failures.
25 |
26 | You can help add rules by submitting
27 | a [manual rule request](https://github.com/BeeeQueue/arm-server/issues/new?template=manual-rule-request.md).
28 |
29 | ## [API Docs](https://arm.haglund.dev/docs)
30 |
31 | ## Self-hosting
32 |
33 | Docker images are built and provided for each commit on master!
34 |
35 | The minimum configuration needed can be found in the following command:
36 |
37 | ```
38 | docker run -it --name arm-server -p 3000:3000 ghcr.io/beeequeue/arm-server:latest
39 | ```
40 |
41 | ## Development
42 |
43 | ### Server
44 |
45 | 1. Clone the project
46 | 1. Install dependencies - `pnpm`
47 | 1. Run database migrations - `pnpm migrate`
48 | 1. Download data (optional) - `pnpm fetch-data`
49 | 1. Start the server - `pnpm dev`
50 |
51 | If the database connection fails, double-check that your `NODE_ENV` is set to `development`.
52 |
53 | ### Docs
54 |
55 | 1. Clone the project
56 | 1. Install dependencies - `pnpm`
57 | 1. Start the build - `pnpm docs:dev`
58 | 1. Open the file in a browser - `redoc-static.html`
59 | 1. Edit `docs/openapi.yaml` file
60 |
--------------------------------------------------------------------------------
/bin/update.ts:
--------------------------------------------------------------------------------
import { updateRelations } from "../src/update.ts"

// One-shot CLI entrypoint: run a single relations update and exit.
// Top-level await keeps the process alive until the update settles,
// and lets a rejection propagate as a non-zero exit code.
await updateRelations()
--------------------------------------------------------------------------------
/biome.json:
--------------------------------------------------------------------------------
1 | {
2 | "$schema": "https://biomejs.dev/schemas/1.8.0/schema.json",
3 | "formatter": {
4 | "lineEnding": "lf",
5 | "indentStyle": "tab",
6 | "lineWidth": 90,
7 | "formatWithErrors": true
8 | },
9 | "organizeImports": {
10 | "enabled": false
11 | },
12 | "javascript": {
13 | "formatter": {
14 | "quoteStyle": "double",
15 | "semicolons": "asNeeded"
16 | }
17 | },
18 | "linter": {
19 | "enabled": false
20 | },
21 | "vcs": {
22 | "enabled": true,
23 | "clientKind": "git",
24 | "defaultBranch": "main",
25 | "useIgnoreFile": true
26 | }
27 | }
28 |
--------------------------------------------------------------------------------
/docs/openapi.yaml:
--------------------------------------------------------------------------------
1 | openapi: 3.1.0
2 |
3 | info:
4 | version: 2.1.0
5 | title: API Docs - arm-server
6 | license:
7 | name: GNU Affero General Public License v3.0 only
8 | identifier: AGPL-3.0-only
9 | contact:
10 | name: BeeeQueue
11 | url: https://github.com/BeeeQueue/arm-server
12 | description: |
13 | A service for mapping Anime IDs.
14 |
15 | **Important:**
16 |
17 | Every endpoint returns `null` and not `404` when it does not find any match based on a query.
18 |
19 | servers:
20 | - url: https://arm.haglund.dev
21 | description: Live service
22 | - url: http://localhost:3000
23 | description: Local dev server
24 |
25 | tags:
26 | - name: v2
27 | description: |
28 | `v2` adds more Sources thanks to [Fribb/anime-lists](https://github.com/Fribb/anime-lists).
29 |
30 | Unfortunately IMDB, TheMovieDB, and TheTVDB use one entry per **show** instead of **season** meaning their IDs become one-to-many mappings.
31 |
32 | This means it cannot be queried for in `/api/v2/ids` since it's impossible for the API to look the same in that case.
33 |
34 | Instead I added `/api/v2/imdb`, `/api/v2/themoviedb`, and `/api/v2/thetvdb` if you want to query by their IDs.
35 | - name: v1
36 |
37 | $defs:
38 | include_param:
39 | name: include
40 | in: query
41 | required: false
42 | example: anilist,anidb
43 | description: "Comma-separated list of sources to return in response objects."
44 | schema:
45 | type: string
46 |
47 | '400':
48 | description: Invalid request
49 | content:
50 | application/json:
51 | schema:
52 | type: object
53 | properties:
54 | statusCode:
55 | type: integer
56 | enum:
57 | - 400
58 | error:
59 | type: string
60 | example: Bad request
61 | message:
62 | type: string
63 | example: "1: Number must be greater than or equal to 1, Number must be greater than 0"
64 |
65 | relation:
66 | type: object
67 | properties:
68 | anidb:
69 | oneOf:
70 | - type: 'null'
71 | - type: integer
72 | minimum: 0
73 | maximum: 50000000
74 | example: 1337
75 | anilist:
76 | oneOf:
77 | - type: 'null'
78 | - type: integer
79 | minimum: 0
80 | maximum: 50000000
81 | example: 1337
82 | anime-planet:
83 | oneOf:
84 | - type: 'null'
85 | - type: string
86 | minLength: 1
87 | maxLength: 50
88 | example: dororon-enma-kun
89 | anisearch:
90 | oneOf:
91 | - type: 'null'
92 | - type: integer
93 | minimum: 0
94 | maximum: 50000000
95 | example: 1337
96 | imdb:
97 | oneOf:
98 | - type: 'null'
99 | - type: string
100 | pattern: tt\d+
101 | minLength: 1
102 | maxLength: 50
103 | example: tt0164917
104 | kitsu:
105 | oneOf:
106 | - type: 'null'
107 | - type: integer
108 | minimum: 0
109 | maximum: 50000000
110 | example: 1337
111 | livechart:
112 | oneOf:
113 | - type: 'null'
114 | - type: integer
115 | minimum: 0
116 | maximum: 50000000
117 | example: 1337
118 | notify-moe:
119 | oneOf:
120 | - type: 'null'
121 | - type: string
122 | minLength: 1
123 | maxLength: 50
124 | example: "-cQb5Fmmg"
125 | themoviedb:
126 | oneOf:
127 | - type: 'null'
128 | - type: integer
129 | minimum: 0
130 | maximum: 50000000
131 | example: 1337
132 | thetvdb:
133 | oneOf:
134 | - type: 'null'
135 | - type: integer
136 | minimum: 0
137 | maximum: 50000000
138 | example: 1337
139 | myanimelist:
140 | oneOf:
141 | - type: 'null'
142 | - type: integer
143 | minimum: 0
144 | maximum: 50000000
145 | example: 1337
146 |
147 | nullable_relation:
148 | oneOf:
149 | - $ref: '#/$defs/relation'
150 | - type: 'null'
151 |
152 | response:
153 | example:
154 | anidb: 1337
155 | anilist: 1337
156 | anime-planet: spriggan
157 | anisearch: null
158 | imdb: tt0164917
159 | kitsu: null
160 | livechart: null
161 | notify-moe: "-cQb5Fmmg"
162 | themoviedb: null
163 | thetvdb: null
164 | myanimelist: null
165 | oneOf:
166 | - $ref: '#/$defs/nullable_relation'
167 | - type: array
168 | items:
169 | $ref: '#/$defs/nullable_relation'
170 |
171 | v1_relation:
172 | type: object
173 | properties:
174 | anidb:
175 | oneOf:
176 | - type: 'null'
177 | - type: integer
178 | example: 1337
179 | anilist:
180 | oneOf:
181 | - type: 'null'
182 | - type: integer
183 | example: 1337
184 | myanimelist:
185 | oneOf:
186 | - type: 'null'
187 | - type: integer
188 | example: 1337
189 | kitsu:
190 | oneOf:
191 | - type: 'null'
192 | - type: integer
193 | example: 1337
194 |
195 | nullable_v1_relation:
196 | oneOf:
197 | - $ref: '#/$defs/v1_relation'
198 | - type: 'null'
199 |
200 | v1_response:
201 | example:
202 | anidb: 1337
203 | anilist: 1337
204 | kitsu: null
205 | myanimelist: null
206 | oneOf:
207 | - $ref: '#/$defs/nullable_v1_relation'
208 | - type: array
209 | items:
210 | $ref: '#/$defs/nullable_v1_relation'
211 |
212 | paths:
213 | /api/ids:
214 | get:
215 | operationId: getIds
216 | summary: Fetch IDs via query parameters
217 | security: [{}]
218 | tags:
219 | - v1
220 |
221 | parameters:
222 | - name: source
223 | in: query
224 | required: true
225 | example: anilist
226 | schema:
227 | type: string
228 | enum:
229 | - anidb
230 | - anilist
231 | - kitsu
232 | - myanimelist
233 | - name: id
234 | in: query
235 | required: true
236 | example: 1337
237 | schema:
238 | type: integer
239 | minimum: 1
240 |
241 | responses:
242 | '200':
243 | description: OK
244 | content:
245 | application/json:
246 | schema:
247 | $ref: '#/$defs/v1_response'
248 | '400':
249 | $ref: '#/$defs/400'
250 |
251 | post:
252 | operationId: postIds
253 | summary: Fetch IDs via a JSON body
254 | description: |
255 | The JSON body can either be an object containing the query, or an array containing multiple queries.
256 |
257 | If using array queries, the resulting array will map to the corresponding input!
258 |
259 | e.g. `body[1]` will be the result of `query[1]`.
260 | security: [{}]
261 | tags:
262 | - v1
263 |
264 | requestBody:
265 | required: true
266 | content:
267 | application/json:
268 | schema:
269 | example:
270 | - anilist: 1337
271 | - anidb: 1337
272 | oneOf:
273 | - $ref: '#/$defs/v1_relation'
274 | - type: array
275 | items:
276 | $ref: '#/$defs/v1_relation'
277 |
278 | responses:
279 | '200':
280 | description: OK
281 | content:
282 | application/json:
283 | schema:
284 | $ref: '#/$defs/v1_response'
285 | '400':
286 | $ref: '#/$defs/400'
287 |
288 | /api/v2/ids:
289 | get:
290 | operationId: v2-getIds
291 | summary: Fetch IDs via query parameters
292 | description: ' '
293 | security: [{}]
294 | tags:
295 | - v2
296 |
297 | parameters:
298 | - name: source
299 | in: query
300 | required: true
301 | example: anilist
302 | schema:
303 | type: string
304 | enum:
305 | - anilist
306 | - anidb
307 | - anime-planet
308 | - anisearch
309 | - kitsu
310 | - livechart
311 | - notify-moe
312 | - myanimelist
313 | - name: id
314 | in: query
315 | required: true
316 | example: 1337
317 | schema:
318 | oneOf:
319 | - type: integer
320 | minimum: 1
321 | - type: string
322 | minLength: 1
323 | - $ref: "#/$defs/include_param"
324 |
325 | responses:
326 | '200':
327 | description: OK
328 | content:
329 | application/json:
330 | schema:
331 | $ref: '#/$defs/response'
332 | '400':
333 | $ref: '#/$defs/400'
334 |
335 | post:
336 | operationId: v2-postIds
337 | summary: Fetch IDs via a JSON body
338 | description: |
339 | The JSON body can either be an object containing the query, or an array containing multiple queries.
340 |
341 | If using array queries, the resulting array will map to the corresponding input!
342 |
343 | e.g. `body[1]` will be the result of `query[1]`.
344 | security: [{}]
345 | tags:
346 | - v2
347 |
348 | parameters:
349 | - $ref: "#/$defs/include_param"
350 |
351 | requestBody:
352 | required: true
353 | content:
354 | application/json:
355 | schema:
356 | example:
357 | - anilist: 1337
358 | - anidb: 1337
359 | - notify-moe: -cQb5Fmmg
360 | oneOf:
361 | - type: object
362 | minProperties: 1
363 | additionalProperties: false
364 | properties:
365 | anidb:
366 | oneOf:
367 | - type: 'null'
368 | - type: integer
369 | minimum: 0
370 | maximum: 50000000
371 | anilist:
372 | oneOf:
373 | - type: 'null'
374 | - type: integer
375 | minimum: 0
376 | maximum: 50000000
377 | anime-planet:
378 | oneOf:
379 | - type: 'null'
380 | - type: string
381 | minLength: 1
382 | maxLength: 50
383 | anisearch:
384 | oneOf:
385 | - type: 'null'
386 | - type: integer
387 | minimum: 0
388 | maximum: 50000000
389 | kitsu:
390 | oneOf:
391 | - type: 'null'
392 | - type: integer
393 | minimum: 0
394 | maximum: 50000000
395 | livechart:
396 | oneOf:
397 | - type: 'null'
398 | - type: integer
399 | minimum: 0
400 | maximum: 50000000
401 | notify-moe:
402 | oneOf:
403 | - type: 'null'
404 | - type: string
405 | minLength: 1
406 | maxLength: 50
407 | myanimelist:
408 | oneOf:
409 | - type: 'null'
410 | - type: integer
411 | minimum: 0
412 | maximum: 50000000
413 | - type: array
414 | minItems: 1
415 | maxItems: 100
416 | items:
417 | type: object
418 | minProperties: 1
419 | additionalProperties: false
420 | properties:
421 | anidb:
422 | oneOf:
423 | - type: 'null'
424 | - type: integer
425 | minimum: 0
426 | maximum: 50000000
427 | anilist:
428 | oneOf:
429 | - type: 'null'
430 | - type: integer
431 | minimum: 0
432 | maximum: 50000000
433 | anime-planet:
434 | oneOf:
435 | - type: 'null'
436 | - type: string
437 | minLength: 1
438 | maxLength: 50
439 | anisearch:
440 | oneOf:
441 | - type: 'null'
442 | - type: integer
443 | minimum: 0
444 | maximum: 50000000
445 | kitsu:
446 | oneOf:
447 | - type: 'null'
448 | - type: integer
449 | minimum: 0
450 | maximum: 50000000
451 | livechart:
452 | oneOf:
453 | - type: 'null'
454 | - type: integer
455 | minimum: 0
456 | maximum: 50000000
457 | notify-moe:
458 | oneOf:
459 | - type: 'null'
460 | - type: string
461 | minLength: 1
462 | maxLength: 50
463 | myanimelist:
464 | oneOf:
465 | - type: 'null'
466 | - type: integer
467 | minimum: 0
468 | maximum: 50000000
469 |
470 | responses:
471 | '200':
472 | description: OK
473 | content:
474 | application/json:
475 | schema:
476 | $ref: '#/$defs/response'
477 | '400':
478 | $ref: '#/$defs/400'
479 |
480 | /api/v2/imdb:
481 | get:
482 | operationId: v2-imdb
483 | summary: Fetch IDs by IMDB ID
484 | description: ' '
485 | security: [{}]
486 | tags:
487 | - v2
488 |
489 | parameters:
490 | - name: id
491 | in: query
492 | required: true
493 | example: tt5370118
494 | schema:
495 |           type: string
496 |           pattern: tt\d+
497 | - $ref: "#/$defs/include_param"
498 |
499 | responses:
500 | '200':
501 | description: OK
502 | content:
503 | application/json:
504 | schema:
505 | type: array
506 | items:
507 | $ref: '#/$defs/nullable_relation'
508 | '400':
509 | $ref: '#/$defs/400'
510 |
511 | /api/v2/themoviedb:
512 | get:
513 | operationId: v2-themoviedb
514 | summary: Fetch IDs by TheMovieDB ID
515 | description: ' '
516 | security: [{}]
517 | tags:
518 | - v2
519 |
520 | parameters:
521 | - name: id
522 | in: query
523 | required: true
524 | example: 1337
525 | schema:
526 | type: integer
527 | minimum: 1
528 | - $ref: "#/$defs/include_param"
529 |
530 | responses:
531 | '200':
532 | description: OK
533 | content:
534 | application/json:
535 | schema:
536 | type: array
537 | items:
538 | $ref: '#/$defs/nullable_relation'
539 | '400':
540 | $ref: '#/$defs/400'
541 |
542 | /api/v2/thetvdb:
543 | get:
544 | operationId: v2-thetvdb
545 | summary: Fetch IDs by TheTVDB ID
546 | description: ' '
547 | security: [{}]
548 | tags:
549 | - v2
550 |
551 | parameters:
552 | - name: id
553 | in: query
554 | required: true
555 | example: 1337
556 | schema:
557 | type: integer
558 | minimum: 1
559 | - $ref: "#/$defs/include_param"
560 |
561 | responses:
562 | '200':
563 | description: OK
564 | content:
565 | application/json:
566 | schema:
567 | type: array
568 | items:
569 | $ref: '#/$defs/nullable_relation'
570 | '400':
571 | $ref: '#/$defs/400'
572 |
--------------------------------------------------------------------------------
/eslint.config.js:
--------------------------------------------------------------------------------
import antfu from "@antfu/eslint-config"

// Rule overrides layered on top of antfu's TypeScript preset.
const typescriptOverrides = {
	"no-console": "off",
	"ts/no-use-before-define": "off",
	"ts/consistent-type-definitions": "off",
	"ts/consistent-type-imports": ["error", { fixStyle: "inline-type-imports" }],
	"ts/no-unsafe-argument": "off",
	"ts/no-unsafe-assignment": "off",
	"node/prefer-global/process": "off",
	"antfu/no-top-level-await": "off",
	"import/consistent-type-specifier-style": "off",

	// Enforce a deterministic import order with blank lines between groups.
	"perfectionist/sort-imports": [
		"error",
		{
			type: "natural",
			internalPattern: ["^@/", "^~/", "^#[a-zA-Z0-9-]+/"],
			newlinesBetween: "always",
			groups: [
				["builtin", "builtin-type"],
				["external", "external-type"],
				["internal", "internal-type"],
				["parent", "parent-type"],
				["sibling", "sibling-type"],
				["index", "index-type"],
				"object",
				"unknown",
			],
		},
	],
}

// Flat ESLint config: disable the file types this repo doesn't lint,
// and wire the TypeScript overrides into the type-aware preset.
export default antfu({
	ignores: ["**/*.json"],
	markdown: false,
	stylistic: false,
	jsonc: false,
	jsx: false,
	toml: false,
	yaml: false,
	test: { overrides: { "test/no-import-node-test": "off" } },
	typescript: {
		tsconfigPath: "tsconfig.json",
		overrides: typescriptOverrides,
	},
})
46 |
--------------------------------------------------------------------------------
/knexfile.js:
--------------------------------------------------------------------------------
import { mkdirSync } from "node:fs"

// Ensure the SQLite data directory exists before knex opens a connection.
mkdirSync("./sqlite", { recursive: true })

/** @type {import("knex").Knex.Config} */
export default {
	client: "better-sqlite3",
	migrations: {
		tableName: "migrations",
		directory: "migrations",
	},
	// SQLite has no server-side defaults for missing bindings; insert NULL instead.
	useNullAsDefault: true,
	connection: {
		// One database file per environment, e.g. ./sqlite/development.sqlite3
		filename: `./sqlite/${process.env.NODE_ENV ?? "development"}.sqlite3`,
		options: {
			// In production the prebuilt native binding is shipped inside ./dist
			nativeBinding:
				process.env.NODE_ENV === "production" ? "./dist/better_sqlite3.node" : undefined,
		},
	},
}
21 |
--------------------------------------------------------------------------------
/migrations/20190611171759_create_tables.js:
--------------------------------------------------------------------------------
1 | export async function up(knex) {
2 | if (await knex.schema.hasTable("relations")) return
3 |
4 | const promises = []
5 |
6 | promises.push(
7 | knex.schema.createTable("relations", (table) => {
8 | table.integer("anilist").unique()
9 | table.integer("anidb").unique()
10 | table.integer("myanimelist").unique()
11 | table.integer("kitsu").unique()
12 | }),
13 | )
14 |
15 | await Promise.all(promises)
16 | }
17 |
18 | export async function down(knex) {
19 | await knex.schema.dropTableIfExists("relations") // reverse of up(); safe even if the table was never created
20 | }
21 |
--------------------------------------------------------------------------------
/migrations/20221230042725_anime-lists.js:
--------------------------------------------------------------------------------
1 | export async function up(knex) {
2 | await knex.schema.alterTable("relations", (table) => { // add ID columns for the extra sources introduced by the anime-lists dataset
3 | table.text("anime-planet").unique() // text: this site uses string slugs, not numeric IDs
4 | table.integer("anisearch").unique()
5 | table.text("imdb").unique() // text ID (see src/db.ts: `tt`-prefixed)
6 | table.integer("livechart").unique()
7 | table.text("notify-moe").unique() // text: string IDs
8 | table.integer("themoviedb").unique()
9 | table.integer("thetvdb") // note: no .unique(), unlike the other columns
10 | })
11 | }
12 |
13 | export async function down(knex) {
14 | await knex.schema.alterTable("relations", (table) => { // reverse of up(): drop every column added above
15 | table.dropColumns(
16 | "anime-planet",
17 | "anisearch",
18 | "imdb",
19 | "livechart",
20 | "notify-moe",
21 | "themoviedb",
22 | "thetvdb",
23 | )
24 | })
25 | }
26 |
--------------------------------------------------------------------------------
/migrations/20240616234100_wal.js:
--------------------------------------------------------------------------------
1 | /**
2 |  * Switches the SQLite database to write-ahead logging, which allows
3 |  * concurrent readers while a write is in progress.
4 |  * @param knex {import("knex").Knex}
5 |  * @return {Promise<void>}
6 |  */
7 | export async function up(knex) {
8 | await knex.raw("PRAGMA journal_mode=WAL;")
9 | }
10 | 
11 | export async function down() {} // intentionally a no-op: WAL mode is simply left enabled on rollback
11 |
--------------------------------------------------------------------------------
/migrations/20240617000000_special_dbs.js:
--------------------------------------------------------------------------------
1 | /** @param knex {import("knex").Knex} */
2 | export async function up(knex) {
3 | await knex.schema.alterTable("relations", (table) => { // NOTE(review): presumably several entries may share one imdb/themoviedb ID, hence dropping uniqueness — confirm
4 | table.dropUnique("imdb")
5 | table.dropUnique("themoviedb")
6 | })
7 | }
8 |
9 | /** @param knex {import("knex").Knex} */
10 | export async function down(knex) {
11 | await knex.schema.alterTable("relations", (table) => { // restore the unique constraints removed by up()
12 | table.unique("imdb")
13 | table.unique("themoviedb")
14 | })
15 | }
16 |
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "arm-server",
3 | "type": "module",
4 | "author": "BeeeQueue ",
5 | "version": "1.0.0",
6 | "private": true,
7 | "license": "AGPL-3.0-only",
8 | "homepage": "https://arm.haglund.dev/docs",
9 | "packageManager": "pnpm@10.6.2",
10 | "engines": {
11 | "node": ">=22"
12 | },
13 | "simple-git-hooks": {
14 | "pre-commit": "node_modules/.bin/nano-staged"
15 | },
16 | "nano-staged": {
17 | "*.{js,cjs,mjs,ts,cts,mts,json}": ["biome check --fix"]
18 | },
19 | "scripts": {
20 | "build": "tsup",
21 | "dev": "node --experimental-strip-types --experimental-transform-types --watch src/index.ts",
22 | "docs": "pnpm --package=@redocly/cli dlx redocly build-docs docs/openapi.yaml",
23 | "docs:dev": "onchange --initial --kill docs/openapi.yaml -- pnpm --silent run docs",
24 | "docker:build": "docker build . --tag arm-server",
25 | "docker:start": "pnpm --silent docker:build; pnpm --silent docker:run",
26 | "docker:run": "docker run -it --rm --name arm -p 3000:3000 arm-server",
27 | "fetch-data": "node --experimental-strip-types --experimental-transform-types bin/update.ts --exit",
28 | "lint": "eslint src",
29 | "migrate:create": "knex migrate:make --knexfile knexfile.js -x ts",
30 | "start": "node --experimental-strip-types --experimental-transform-types src/index.ts",
31 | "test": "vitest",
32 | "typecheck": "tsc --noEmit",
33 | "prepare": "is-ci || simple-git-hooks"
34 | },
35 | "resolutions": {
36 | "esbuild": "0.25.0"
37 | },
38 | "devDependencies": {
39 | "@antfu/eslint-config": "4.13.2",
40 | "@biomejs/biome": "1.9.4",
41 | "@hono/node-server": "1.14.1",
42 | "@hono/standard-validator": "0.1.2",
43 | "@standard-schema/spec": "1.0.0",
44 | "@tsconfig/node22": "22.0.1",
45 | "@tsconfig/strictest": "2.0.5",
46 | "@types/json-schema": "7.0.15",
47 | "@types/node": "22.15.21",
48 | "@vitest/coverage-v8": "3.1.3",
49 | "better-sqlite3": "11.9.1",
50 | "dotenv": "16.5.0",
51 | "eslint": "9.27.0",
52 | "hono": "4.7.6",
53 | "is-ci": "4.1.0",
54 | "knex": "3.1.0",
55 | "mentoss": "0.9.2",
56 | "nano-staged": "0.8.0",
57 | "onchange": "7.1.0",
58 | "pino": "9.6.0",
59 | "pino-pretty": "13.0.0",
60 | "simple-git-hooks": "2.12.1",
61 | "tsconfig-paths": "4.2.0",
62 | "tsup": "8.4.0",
63 | "type-fest": "4.40.0",
64 | "typescript": "5.8.3",
65 | "valibot": "1.0.0",
66 | "vitest": "3.1.3",
67 | "xior": "0.7.8"
68 | },
69 | "pnpm": {
70 | "overrides": {
71 | "@eslint/markdown": "-",
72 | "@stylistic/eslint-plugin": "-",
73 | "eslint-plugin-jsonc": "-",
74 | "eslint-plugin-toml": "-",
75 | "eslint-plugin-vue": "-",
76 | "eslint-plugin-yml": "-",
77 | "eslint-processor-vue-blocks": "-",
78 | "jsonc-eslint-parser": "-",
79 | "sucrase": "-",
80 | "toml-eslint-parser": "-",
81 | "yaml-eslint-parser": "-",
82 | "is-core-module": "npm:@nolyfill/is-core-module@^1",
83 | "safe-buffer": "npm:@nolyfill/safe-buffer@^1"
84 | },
85 | "onlyBuiltDependencies": ["better-sqlite3"],
86 | "ignoredBuiltDependencies": ["@biomejs/biome", "esbuild", "simple-git-hooks"],
87 | "patchedDependencies": {
88 | "knex": "patches/knex.patch"
89 | }
90 | }
91 | }
92 |
--------------------------------------------------------------------------------
/patches/knex.patch:
--------------------------------------------------------------------------------
1 | diff --git a/lib/dialects/index.js b/lib/dialects/index.js
2 | index 559a366685d6517de819d1925233e972089928e7..476dcc63fd84758a7c65b1d4b493b73460f62764 100644
3 | --- a/lib/dialects/index.js
4 | +++ b/lib/dialects/index.js
5 | @@ -4,16 +4,6 @@ exports.getDialectByNameOrAlias = void 0;
6 | const { resolveClientNameWithAliases } = require('../util/helpers');
7 | const dbNameToDialectLoader = Object.freeze({
8 | 'better-sqlite3': () => require('./better-sqlite3'),
9 | - cockroachdb: () => require('./cockroachdb'),
10 | - mssql: () => require('./mssql'),
11 | - mysql: () => require('./mysql'),
12 | - mysql2: () => require('./mysql2'),
13 | - oracle: () => require('./oracle'),
14 | - oracledb: () => require('./oracledb'),
15 | - pgnative: () => require('./pgnative'),
16 | - postgres: () => require('./postgres'),
17 | - redshift: () => require('./redshift'),
18 | - sqlite3: () => require('./sqlite3'),
19 | });
20 | /**
21 | * Gets the Dialect object with the given client name or throw an
22 |
--------------------------------------------------------------------------------
/sqlite/.gitkeep:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/beeequeue/arm-server/bfae7194787c2be5ed29e615f49b33cf7f196388/sqlite/.gitkeep
--------------------------------------------------------------------------------
/src/app.ts:
--------------------------------------------------------------------------------
1 | import { Hono } from "hono"
2 | import { cors } from "hono/cors"
3 | import { HTTPException } from "hono/http-exception"
4 | import { secureHeaders } from "hono/secure-headers"
5 |
6 | import { docsRoutes } from "./docs.ts"
7 | import { logger } from "./lib/logger.ts"
8 | import { v1Routes } from "./routes/v1/ids/handler.ts"
9 | import { v2Routes } from "./routes/v2/ids/handler.ts"
10 | import { specialRoutes } from "./routes/v2/special/handler.ts"
11 | import { cacheReply, CacheTimes, createErrorJson } from "./utils.ts"
12 |
13 | export const createApp = () => // assembles the Hono app: logging → CORS/secure headers → error shaping → routes
14 | new Hono()
15 | 
16 | .use("*", async (c, next) => { // request/response logging middleware
17 | const start = Date.now()
18 | logger.info(
19 | {
20 | method: c.req.method,
21 | path: c.req.path,
22 | headers: c.req.header(),
23 | },
24 | "req",
25 | )
26 | 
27 | await next()
28 | 
29 | logger.info(
30 | {
31 | status: c.res.status,
32 | ms: Date.now() - start,
33 | },
34 | "res",
35 | )
36 | })
37 | 
38 | .use("*", cors({ origin: (origin) => origin })) // reflects the request Origin, i.e. allows any origin
39 | .use("*", secureHeaders())
40 | 
41 | .notFound((c) => createErrorJson(c, new HTTPException(404))) // 404s share the JSON error shape
42 | 
43 | .onError((error, c) => {
44 | /* c8 ignore next 4 */
45 | if (error instanceof HTTPException) {
46 | const res = error.getResponse()
47 | 
48 | if (c.req.method === "GET") { // cache error responses only for GETs
49 | cacheReply(res, CacheTimes.WEEK)
50 | }
51 | 
52 | return createErrorJson(c, error)
53 | }
54 | 
55 | logger.error(error, "unhandled error")
56 | 
57 | const badImpl = new HTTPException(500, { cause: error }) // wrap unknown errors as a 500
58 | return createErrorJson(c, badImpl)
59 | })
60 | 
61 | .route("/api", v1Routes)
62 | .route("/api/v2", v2Routes)
63 | .route("/api/v2", specialRoutes)
64 | .route("/docs", docsRoutes)
65 | 
66 | .get("/", (c) => {
67 | cacheReply(c.res, CacheTimes.WEEK * 4)
68 | 
69 | return c.redirect(process.env.HOMEPAGE!, 301) // NOTE(review): non-null assertion assumes HOMEPAGE is always set — confirm deployment env
70 | })
71 |
--------------------------------------------------------------------------------
/src/config.ts:
--------------------------------------------------------------------------------
1 | import * as v from "valibot"
2 |
3 | export enum Environment { // valid NODE_ENV values
4 | Development = "development",
5 | Test = "test",
6 | Production = "production",
7 | }
8 | 
9 | const schema = v.object({
10 | NODE_ENV: v.optional(v.enum(Environment), Environment.Development),
11 | PORT: v.optional(v.pipe(v.string(), v.transform(Number), v.integer()), "3000"), // numeric string → number; non-integers rejected
12 | LOG_LEVEL: v.optional(
13 | v.picklist(["fatal", "error", "warn", "info", "debug", "trace"]),
14 | process.env.NODE_ENV === "development" ? "debug" : "info", // default level is decided by NODE_ENV at import time
15 | ),
16 | USER_AGENT: v.optional(v.string(), "arm-server"),
17 | })
18 | 
19 | const result = v.safeParse(schema, process.env) // validate process.env against the schema
20 | 
21 | if (!result.success) {
22 | console.error(
23 | "❌ Invalid environment variables:",
24 | JSON.stringify(result.issues, null, 4),
25 | )
26 | 
27 | process.exit(1) // fail fast on bad configuration
28 | }
29 | 
30 | export const config = result.output // typed, validated configuration
31 |
--------------------------------------------------------------------------------
/src/db.ts:
--------------------------------------------------------------------------------
1 | import Knex from "knex"
2 |
3 | import knexfile from "../knexfile.js"
4 |
5 | export enum Source { // column names in the `relations` table (see migrations)
6 | AniDB = "anidb",
7 | AniList = "anilist",
8 | AnimePlanet = "anime-planet",
9 | AniSearch = "anisearch",
10 | IMDB = "imdb",
11 | Kitsu = "kitsu",
12 | LiveChart = "livechart",
13 | NotifyMoe = "notify-moe",
14 | TheMovieDB = "themoviedb",
15 | TheTVDB = "thetvdb",
16 | MAL = "myanimelist",
17 | }
18 | 
19 | export type Relation = { // one row of the `relations` table; every ID is optional
20 | [Source.AniDB]?: number
21 | [Source.AniList]?: number
22 | [Source.AnimePlanet]?: string
23 | [Source.AniSearch]?: number
24 | [Source.IMDB]?: `tt${string}`
25 | [Source.Kitsu]?: number
26 | [Source.LiveChart]?: number
27 | [Source.NotifyMoe]?: string
28 | [Source.TheMovieDB]?: number
29 | [Source.TheTVDB]?: number
30 | [Source.MAL]?: number
31 | }
32 | 
33 | export type OldRelation = Pick< // v1 API response shape: only the four original sources
34 | Relation,
35 | Source.AniDB | Source.AniList | Source.MAL | Source.Kitsu
36 | >
37 | 
38 | export const knex = Knex(knexfile) // shared knex instance, configured by knexfile.js
39 |
--------------------------------------------------------------------------------
/src/docs.ts:
--------------------------------------------------------------------------------
1 | import { existsSync } from "node:fs"
2 | import { readFile } from "node:fs/promises"
3 | import path from "node:path"
4 |
5 | import { Hono } from "hono"
6 |
7 | import { cacheReply, CacheTimes } from "./utils.ts"
8 |
9 | const filePath = path.resolve(import.meta.dirname, "../redoc-static.html")
10 | let docsHtml: string | null = null
11 |
12 | export const docsRoutes = new Hono()
13 |
14 | docsRoutes.get("/", async (c) => {
15 | // Lazily load the pre-built Redoc page on the first request, then serve the
16 | // in-memory copy. The original duplicated the cache-header + c.html() pair in
17 | // two branches; this collapses them while keeping behavior identical.
18 | if (docsHtml == null) {
19 | docsHtml = existsSync(filePath) ? await readFile(filePath, "utf8") : null
20 | }
21 | 
22 | if (docsHtml == null) {
23 | // redoc-static.html is produced by the `docs` script; missing means the
24 | // build step did not run.
25 | throw new Error("docs.html not found")
26 | }
27 | 
28 | cacheReply(c.res, CacheTimes.DAY)
29 | return c.html(docsHtml)
30 | })
31 |
--------------------------------------------------------------------------------
/src/index.ts:
--------------------------------------------------------------------------------
1 | import { serve } from "@hono/node-server"
2 |
3 | import { createApp } from "./app.ts"
4 | import { config } from "./config.ts"
5 | import { knex } from "./db.ts"
6 | import { updateRelations } from "./update.ts"
7 |
8 | const { NODE_ENV, PORT } = config
9 | 
10 | await knex.migrate.latest() // run pending migrations before serving traffic
11 | 
12 | const runUpdateScript = async () => updateRelations()
13 | 
14 | if (NODE_ENV === "production") {
15 | void runUpdateScript() // refresh the dataset immediately on boot (fire-and-forget)...
16 | 
17 | // eslint-disable-next-line ts/no-misused-promises
18 | setInterval(runUpdateScript, 1000 * 60 * 60 * 24) // ...and then once every 24h
19 | }
20 | 
21 | const app = createApp()
22 | 
23 | serve({ fetch: app.fetch, hostname: "0.0.0.0", port: PORT }, () => {
24 | console.log(`Server running on ${PORT}`)
25 | })
26 |
--------------------------------------------------------------------------------
/src/lib/logger.ts:
--------------------------------------------------------------------------------
1 | import { pino } from "pino"
2 |
3 | import { config } from "../config.ts"
4 |
5 | const stream =
6 | process.env.NODE_ENV !== "production" // pretty-printed logs locally; raw JSON in production
7 | ? (await import("pino-pretty")).PinoPretty()
8 | : undefined
9 | 
10 | export const logger = pino(
11 | {
12 | level: config.LOG_LEVEL,
13 | redact: ["headers.authorization", "headers.cookie", "*.token"], // never log credentials
14 | },
15 | stream,
16 | )
17 |
--------------------------------------------------------------------------------
/src/manual-rules.ts:
--------------------------------------------------------------------------------
1 | import { knex, type Relation } from "./db.ts"
2 |
3 | type Rule = `${keyof Relation}:${number}`
4 | const rules: Record<Rule, Rule> = {}
5 |
6 | export const updateBasedOnManualRules = async () => { // applies hard-coded "source:id" -> "source:id" corrections for bad upstream data
7 | const promises = Object.entries(rules).map(async ([from, to]) => {
8 | const [fromSource, fromId] = from.split(":")
9 | const fromWhere = { [fromSource]: Number(fromId) }
10 | const [toSource, toId] = (to as string).split(":")
11 | const toWhere = { [toSource]: Number(toId) }
12 | 
13 | const badRelation = (await knex("relations")
14 | .where(fromWhere)
15 | .first()) as Relation | null // the row currently holding the wrong mapping
16 | 
17 | if (!badRelation) {
18 | throw new Error(`Could not find rule source for ${from}->${to as string}!!!!!`)
19 | }
20 | 
21 | if (badRelation[toSource as keyof Relation] === Number(toId)) { // upstream data now matches the rule target; the rule is obsolete
22 | return console.warn(
23 | `${from}:${to as string} has been fixed, can be removed from manual rules.`,
24 | )
25 | }
26 | 
27 | await knex
28 | .transaction(async (trx) =>
29 | knex("relations")
30 | .delete() // remove the bad row...
31 | .where(fromWhere)
32 | .transacting(trx)
33 | .then(() =>
34 | knex("relations").update(fromWhere).where(toWhere).transacting(trx), // ...then copy the from-IDs onto the row matching the target
35 | ),
36 | )
37 | .catch(console.error) // log and continue so one bad rule doesn't abort the rest
38 | })
39 | 
40 | await Promise.all(promises)
41 | }
42 |
--------------------------------------------------------------------------------
/src/routes/v1/ids/__snapshots__/handler.test.ts.snap:
--------------------------------------------------------------------------------
1 | // Vitest Snapshot v1, https://vitest.dev/guide/snapshot.html
2 |
3 | exports[`json body > array input > requires at least one source 1`] = `
4 | {
5 | "code": "FST_ERR_VALIDATION",
6 | "details": {
7 | "0": [
8 | "At least one source is required.",
9 | ],
10 | },
11 | "error": "Bad Request",
12 | "message": "Validation error",
13 | "statusCode": 400,
14 | }
15 | `;
16 |
17 | exports[`json body > object input > errors correctly on an empty object 1`] = `
18 | {
19 | "code": "FST_ERR_VALIDATION",
20 | "details": {
21 | "$": [
22 | "At least one source is required.",
23 | ],
24 | },
25 | "error": "Bad Request",
26 | "message": "Validation error",
27 | "statusCode": 400,
28 | }
29 | `;
30 |
31 | exports[`json body > object input > gET fails with json body 1`] = `
32 | {
33 | "code": "FST_ERR_VALIDATION",
34 | "details": {
35 | "id": [
36 | "Invalid key: Expected \"id\" but received undefined",
37 | ],
38 | "source": [
39 | "Invalid key: Expected \"source\" but received undefined",
40 | ],
41 | },
42 | "error": "Bad Request",
43 | "message": "Validation error",
44 | "statusCode": 400,
45 | }
46 | `;
47 |
--------------------------------------------------------------------------------
/src/routes/v1/ids/handler.test.ts:
--------------------------------------------------------------------------------
1 | import { testClient } from "hono/testing"
2 | import { afterAll, afterEach, describe, expect, it } from "vitest"
3 |
4 | import { createApp } from "../../../app.ts"
5 | import { knex, type Relation, Source } from "../../../db.ts"
6 |
7 | let id = 1
8 | const createRelations = async <N extends number>(
9 | amount: N,
10 | ): Promise<N extends 1 ? Relation : Relation[]> => {
11 | const relations = Array.from({ length: amount }).map(() => ({
12 | anilist: id++,
13 | anidb: id++,
14 | kitsu: id++,
15 | myanimelist: id++,
16 | }))
17 |
18 | await knex.insert(relations).into("relations")
19 |
20 | if (amount === 1) {
21 | return relations[0] as never
22 | }
23 |
24 | return relations as never
25 | }
26 |
27 | const app = createApp()
28 |
29 | afterEach(() => knex.delete().from("relations"))
30 |
31 | afterAll(async () => {
32 | await knex.destroy()
33 | })
34 |
35 | describe("query params", () => {
36 | it("fetches relation correctly", async () => {
37 | const relation = await createRelations(1)
38 |
39 | const response = await testClient(app).api.ids.$get({
40 | query: {
41 | source: Source.AniList,
42 | id: relation.anilist!.toString(),
43 | },
44 | })
45 |
46 | expect(await response.json()).toStrictEqual(relation)
47 | expect(response.status).toBe(200)
48 | expect(response.headers.get("content-type")).toContain("application/json")
49 | })
50 |
51 | it("returns null when id doesn't exist", async () => {
52 | const response = await testClient(app).api.ids.$get({
53 | query: {
54 | source: Source.Kitsu,
55 | id: "404",
56 | },
57 | })
58 |
59 | await expect(response.json()).resolves.toBe(null)
60 | expect(response.status).toBe(200)
61 | expect(response.headers.get("content-type")).toContain("application/json")
62 | })
63 |
64 | it("can return a partial response", async () => {
65 | const relation: Relation = {
66 | anidb: 1337,
67 | anilist: 1337,
68 | // TODO
69 | myanimelist: null!,
70 | kitsu: null!,
71 | }
72 | await knex.insert(relation).into("relations")
73 |
74 | const response = await testClient(app).api.ids.$get({
75 | query: {
76 | source: Source.AniList,
77 | id: relation.anilist!.toString(),
78 | },
79 | })
80 |
81 | await expect(response.json()).resolves.toStrictEqual(relation)
82 | expect(response.status).toBe(200)
83 | expect(response.headers.get("content-type")).toContain("application/json")
84 | })
85 | })
86 |
87 | describe("json body", () => {
88 | describe("object input", () => {
89 | it("gET fails with json body", async () => {
90 | const relations = await createRelations(4)
91 |
92 | const response = await testClient(app).api.ids.$get({
93 | // @ts-expect-error: We want to make an invalid request
94 | json: {
95 | [Source.AniDB]: relations[0].anidb,
96 | },
97 | })
98 |
99 | await expect(response.json()).resolves.toMatchSnapshot()
100 | expect(response.status).toBe(400)
101 | expect(response.headers.get("content-type")).toContain("application/json")
102 | })
103 |
104 | it("fetches a single relation", async () => {
105 | const relations = await createRelations(4)
106 |
107 | const response = await testClient(app).api.ids.$post({
108 | json: {
109 | [Source.AniDB]: relations[0].anidb,
110 | },
111 | })
112 |
113 | await expect(response.json()).resolves.toStrictEqual(relations[0])
114 | expect(response.status).toBe(200)
115 | expect(response.headers.get("content-type")).toContain("application/json")
116 | })
117 |
118 | it("errors correctly on an empty object", async () => {
119 | await createRelations(4)
120 |
121 | const response = await testClient(app).api.ids.$post({
122 | json: {},
123 | })
124 |
125 | await expect(response.json()).resolves.toMatchSnapshot()
126 | expect(response.status).toBe(400)
127 | expect(response.headers.get("content-type")).toContain("application/json")
128 | })
129 |
130 | it("returns null if not found", async () => {
131 | await createRelations(4)
132 |
133 | const response = await testClient(app).api.ids.$post({
134 | json: { anidb: 100_000 },
135 | })
136 |
137 | await expect(response.json()).resolves.toBe(null)
138 | expect(response.status).toBe(200)
139 | expect(response.headers.get("content-type")).toContain("application/json")
140 | })
141 |
142 | it("can return a partial response", async () => {
143 | const relation: Relation = {
144 | anidb: 1337,
145 | anilist: 1337,
146 | myanimelist: null as never,
147 | kitsu: null as never,
148 | }
149 | await knex.insert(relation).into("relations")
150 |
151 | const response = await testClient(app).api.ids.$post({
152 | json: { anilist: 1337 },
153 | })
154 |
155 | await expect(response.json()).resolves.toStrictEqual(relation)
156 | expect(response.status).toBe(200)
157 | expect(response.headers.get("content-type")).toContain("application/json")
158 | })
159 | })
160 |
161 | describe("array input", () => {
162 | it("fetches relations correctly", async () => {
163 | const relations = await createRelations(4)
164 |
165 | const body = [
166 | { [Source.AniDB]: relations[0].anidb },
167 | { [Source.AniList]: 1000 },
168 | { [Source.Kitsu]: relations[2].kitsu },
169 | ]
170 |
171 | const result = [relations[0], null, relations[2]]
172 |
173 | const response = await testClient(app).api.ids.$post({
174 | json: body,
175 | })
176 |
177 | await expect(response.json()).resolves.toStrictEqual(result)
178 | expect(response.status).toBe(200)
179 | expect(response.headers.get("content-type")).toContain("application/json")
180 | })
181 |
182 | it("responds correctly on no finds", async () => {
183 | const body = [{ [Source.AniList]: 1000 }, { [Source.Kitsu]: 1000 }]
184 |
185 | const result = [null, null]
186 |
187 | const response = await testClient(app).api.ids.$post({
188 | json: body,
189 | })
190 |
191 | await expect(response.json()).resolves.toStrictEqual(result)
192 | expect(response.status).toBe(200)
193 | expect(response.headers.get("content-type")).toContain("application/json")
194 | })
195 |
196 | it("requires at least one source", async () => {
197 | const body = [{}]
198 |
199 | const response = await testClient(app).api.ids.$post({
200 | json: body,
201 | })
202 |
203 | await expect(response.json()).resolves.toMatchSnapshot()
204 | expect(response.status).toBe(400)
205 | expect(response.headers.get("content-type")).toContain("application/json")
206 | })
207 | })
208 | })
209 |
--------------------------------------------------------------------------------
/src/routes/v1/ids/handler.ts:
--------------------------------------------------------------------------------
1 | import { sValidator } from "@hono/standard-validator"
2 | import { Hono } from "hono"
3 | import type { InferOutput } from "valibot"
4 |
5 | import { knex, type OldRelation, type Relation, type Source } from "../../../db.ts"
6 | import { cacheReply, CacheTimes, validationHook } from "../../../utils.ts"
7 |
8 | import { bodyInputSchema } from "./schemas/json-body.ts"
9 | import { queryInputSchema } from "./schemas/query-params.ts"
10 |
11 | export const v1Routes = new Hono()
12 | .get("/ids", sValidator("query", queryInputSchema, validationHook), async (c) => {
13 | const query = c.req.query()
14 | 
15 | // v1 only ever exposes the four original source columns.
16 | const row = (await knex
17 | .select(["anidb", "anilist", "myanimelist", "kitsu"])
18 | .where({ [query.source]: query.id })
19 | .from("relations")
20 | .first()) as Relation | undefined
21 | 
22 | cacheReply(c.res, CacheTimes.SIX_HOURS)
23 | 
24 | // `null` with a 200 — not a 404 — is the v1 contract for "no match".
25 | return c.json((row as OldRelation) ?? null)
26 | })
27 | .post("/ids", sValidator("json", bodyInputSchema, validationHook), async (c) => {
28 | // Type argument restored (the source had it garbled to `json>()` by
29 | // angle-bracket stripping); the otherwise-unused `InferOutput` import
30 | // indicates the body type is the schema's output: one item or an array.
31 | const input = await c.req.json<InferOutput<typeof bodyInputSchema>>()
32 | 
33 | if (!Array.isArray(input)) {
34 | // Single lookup: match on every provided source ID at once.
35 | const relation = (await knex
36 | .select(["anidb", "anilist", "myanimelist", "kitsu"])
37 | .where(input)
38 | .from("relations")
39 | .first()) as Relation | undefined
40 | 
41 | return c.json(relation ?? null)
42 | }
43 | 
44 | // Batch lookup: one query OR-ing every requested item...
45 | let relations: Array<Relation | null> = await knex
46 | .select(["anidb", "anilist", "myanimelist", "kitsu"])
47 | .where(function () {
48 | for (const item of input) this.orWhere(item)
49 | })
50 | .from("relations")
51 | 
52 | // ...then re-align the results with the input order, inserting null for
53 | // misses, so the response reads like [{item}, null, {item}].
54 | relations = input.map((item) => {
55 | const realItem = Object.entries(item)[0] as [Source, number]
56 | 
57 | return relations.find((relation) => relation![realItem[0]] === realItem[1]) ?? null
58 | })
59 | 
60 | return c.json(relations)
61 | })
57 |
--------------------------------------------------------------------------------
/src/routes/v1/ids/schemas/json-body.test.ts:
--------------------------------------------------------------------------------
1 | import type { JsonValue } from "type-fest"
2 | import { safeParse } from "valibot"
3 | import { describe, expect, it } from "vitest"
4 |
5 | import type { Relation } from "../../../../db.ts"
6 |
7 | import { bodyInputSchema } from "./json-body.ts"
8 |
9 | type Case<V extends JsonValue = JsonValue> = [V, boolean]
10 | type Cases<V extends JsonValue = JsonValue> = Array<Case<V>>
11 |
12 | const okCases = [
13 | [{ anilist: 1337 }, true],
14 | [{ anidb: 1337 }, true],
15 | [{ anidb: 1337, anilist: 1337 }, true],
16 | [{ anidb: 1337, anilist: 1337, myanimelist: 1337, kitsu: 1337 }, true],
17 | ] satisfies Cases
18 |
19 | const badCases = [
20 | // No source
21 | [{}, false],
22 | // Invalid ID (negative)
23 | [{ anilist: -1 }, false],
24 | // Invalid ID (not integer)
25 | [{ anilist: 1.5 }, false],
26 | [{ anidb: 1.5 }, false],
27 | // Invalid source
28 | [{ aniDb: 1337 }, false],
29 | [{ aniList: 1337 }, false],
30 | [{ anidb: 1337, test: 123 }, false],
31 | ] satisfies Cases
32 |
33 | const mapToSingularArrayInput = (cases: Cases): Cases =>
34 | cases.map(([input, expected]) => [[input], expected])
35 |
36 | describe("schema", () => {
37 | const inputs = [
38 | ...okCases,
39 | ...badCases,
40 | [[], false],
41 | ...mapToSingularArrayInput(okCases),
42 | ...mapToSingularArrayInput(badCases),
43 | ] satisfies Cases
44 |
45 | it.each(inputs)("%o = %s", (input, expected) => {
46 | const result = safeParse(bodyInputSchema, input)
47 |
48 | if (expected) {
49 | expect(result.issues?.[0]).not.toBeDefined()
50 | } else {
51 | expect(result.issues?.length ?? 0).toBeGreaterThanOrEqual(1)
52 | }
53 | })
54 | })
55 |
--------------------------------------------------------------------------------
/src/routes/v1/ids/schemas/json-body.ts:
--------------------------------------------------------------------------------
1 | import * as v from "valibot"
2 |
3 | import type { Source } from "../../../../db.ts"
4 | import { numberIdSchema, oldSourceSchema } from "../../../../shared-schemas.ts"
5 |
6 | export const singularItemInputSchema = v.pipe(
7 | v.record(oldSourceSchema, numberIdSchema),
8 | v.check((data) => Object.keys(data).length > 0, "At least one source is required."),
9 | )
10 |
11 | const arrayInputSchema = v.pipe(
12 | v.array(singularItemInputSchema),
13 | v.check((data) => data.length > 0, "At least one source is required."),
14 | )
15 |
16 | export const bodyInputSchema = v.union([singularItemInputSchema, arrayInputSchema])
17 |
18 | type BodyItem = {
19 | [key in Source]?: number
20 | }
21 |
22 | export type BodyQuery = BodyItem | BodyItem[]
23 |
--------------------------------------------------------------------------------
/src/routes/v1/ids/schemas/query-params.test.ts:
--------------------------------------------------------------------------------
1 | import type { JsonValue } from "type-fest"
2 | import { safeParse } from "valibot"
3 | import { describe, expect, it } from "vitest"
4 |
5 | import { Source } from "../../../../db.ts"
6 |
7 | import { queryInputSchema } from "./query-params.ts"
8 |
9 | type Case = [JsonValue, boolean]
10 | type Cases = Case[]
11 |
12 | const okCases: Cases = [
13 | [{ source: Source.AniList, id: 1337 }, true],
14 | [{ source: Source.AniDB, id: 1337 }, true],
15 | [{ source: Source.MAL, id: 1337 }, true],
16 | [{ source: Source.Kitsu, id: 1337 }, true],
17 | [{ source: Source.Kitsu, id: 133_700 }, true],
18 | ]
19 |
20 | const badCases: Cases = [
21 | [{}, false],
22 | [{ id: 1337 }, false],
23 | [{ source: Source.AniList }, false],
24 | [{ source: Source.AniList, id: null }, false],
25 | [{ source: Source.AniList, id: -1234 }, false],
26 | [{ source: Source.AniList, id: 50_000_001 }, false],
27 | ]
28 |
29 | describe("schema", () => {
30 | const inputs: Cases = [...okCases, ...badCases]
31 |
32 | it.each(inputs)("%o = %s", (input, expected) => {
33 | const result = safeParse(queryInputSchema, input)
34 |
35 | if (expected) {
36 | expect(result.issues?.[0]).not.toBeDefined()
37 | } else {
38 | expect(result.issues?.length ?? 0).toBeGreaterThanOrEqual(1)
39 | }
40 | })
41 | })
42 |
--------------------------------------------------------------------------------
/src/routes/v1/ids/schemas/query-params.ts:
--------------------------------------------------------------------------------
1 | import * as v from "valibot"
2 |
3 | import { numberIdSchema, oldSourceSchema } from "../../../../shared-schemas.ts"
4 |
5 | export const queryInputSchema = v.pipe(
6 | v.object({
7 | source: oldSourceSchema,
8 | id: numberIdSchema,
9 | }),
10 | v.check((data) => Object.keys(data).length > 0, "At least one source is required."),
11 | )
12 |
13 | export type QueryParamQuery = v.InferOutput
14 |
--------------------------------------------------------------------------------
/src/routes/v2/ids/__snapshots__/handler.test.ts.snap:
--------------------------------------------------------------------------------
1 | // Vitest Snapshot v1, https://vitest.dev/guide/snapshot.html
2 |
3 | exports[`json body > array input > requires at least one source 1`] = `
4 | {
5 | "code": "FST_ERR_VALIDATION",
6 | "details": {
7 | "0": [
8 | "At least one ID must be provided",
9 | ],
10 | },
11 | "error": "Bad Request",
12 | "message": "Validation error",
13 | "statusCode": 400,
14 | }
15 | `;
16 |
17 | exports[`json body > object input > errors correctly on an empty object 1`] = `
18 | {
19 | "code": "FST_ERR_VALIDATION",
20 | "details": {
21 | "$": [
22 | "At least one ID must be provided",
23 | ],
24 | },
25 | "error": "Bad Request",
26 | "message": "Validation error",
27 | "statusCode": 400,
28 | }
29 | `;
30 |
31 | exports[`json body > object input > gET fails with json body 1`] = `
32 | {
33 | "code": "FST_ERR_VALIDATION",
34 | "details": {
35 | "$": [
36 | "Invalid type: Expected Object but received Object",
37 | ],
38 | },
39 | "error": "Bad Request",
40 | "message": "Validation error",
41 | "statusCode": 400,
42 | }
43 | `;
44 |
--------------------------------------------------------------------------------
/src/routes/v2/ids/handler.test.ts:
--------------------------------------------------------------------------------
1 | import { testClient } from "hono/testing"
2 | import { afterAll, afterEach, describe, expect, it } from "vitest"
3 |
4 | import { createApp } from "../../../app.ts"
5 | import { knex, type Relation, Source } from "../../../db.ts"
6 | import { testIncludeQueryParam } from "../include.test-utils.ts"
7 |
8 | let id = 1
9 | const createRelations = async (
10 | amount: N,
11 | ): Promise => {
12 | const relations = Array.from({ length: amount }).map(() => ({
13 | anidb: id++,
14 | anilist: id++,
15 | "anime-planet": `${id++}`,
16 | anisearch: id++,
17 | imdb: `tt${id++}`,
18 | kitsu: id++,
19 | livechart: id++,
20 | "notify-moe": `${id++}`,
21 | themoviedb: id++,
22 | thetvdb: id++,
23 | myanimelist: id++,
24 | }))
25 |
26 | await knex.insert(relations).into("relations")
27 |
28 | if (amount === 1) {
29 | return relations[0] as never
30 | }
31 |
32 | return relations as never
33 | }
34 |
const app = createApp()

// Keep the table clean between tests; tear the connection down at the end.
afterEach(() => knex.delete().from("relations"))

afterAll(async () => {
	await knex.destroy()
})
42 |
// GET /api/v2/ids — single lookup via `source` + `id` query params.
describe("query params", () => {
	it("fetches relation correctly", async () => {
		const relation = await createRelations(1)

		const response = await testClient(app).api.v2.ids.$get({
			query: {
				source: Source.AniList,
				id: relation.anilist!.toString(),
			},
		})

		expect(await response.json()).toStrictEqual(relation)
		expect(response.status).toBe(200)
		expect(response.headers.get("content-type")).toContain("application/json")
	})

	it("returns null when id doesn't exist", async () => {
		const response = await testClient(app).api.v2.ids.$get({
			query: {
				source: Source.Kitsu,
				id: "404" as never,
			},
		})

		expect(await response.json()).toStrictEqual(null)
		expect(response.status).toBe(200)
		expect(response.headers.get("content-type")).toContain("application/json")
	})

	// Rows with null columns should round-trip as-is in the response.
	it("can return a partial response", async () => {
		const relation: Relation = {
			anidb: 1337,
			anilist: 1337,
			"anime-planet": null!,
			anisearch: null!,
			imdb: null!,
			kitsu: null!,
			livechart: null!,
			"notify-moe": null!,
			themoviedb: null!,
			thetvdb: null!,
			myanimelist: null!,
		}
		await knex.insert(relation).into("relations")

		const response = await testClient(app).api.v2.ids.$get({
			query: {
				source: Source.AniList,
				id: relation.anilist!.toString(),
			},
		})

		expect(await response.json()).toStrictEqual(relation)
		expect(response.status).toBe(200)
		expect(response.headers.get("content-type")).toContain("application/json")
	})
})
100 |
// POST /api/v2/ids — lookup via a JSON body: either one object, or an array
// of objects mapped 1:1 to results (null for misses).
describe("json body", () => {
	describe("object input", () => {
		it("gET fails with json body", async () => {
			const relations = await createRelations(4)

			const response = await testClient(app).api.v2.ids.$get({
				// @ts-expect-error: We want to make an invalid request
				json: {
					[Source.AniDB]: relations[0].anidb,
				},
			})

			await expect(response.json()).resolves.toMatchSnapshot()
			expect(response.status).toBe(400)
			expect(response.headers.get("content-type")).toContain("application/json")
		})

		it("fetches a single relation", async () => {
			const relations = await createRelations(4)

			const response = await testClient(app).api.v2.ids.$post({
				query: {},
				json: {
					[Source.AniDB]: relations[0].anidb,
				},
			})

			await expect(response.json()).resolves.toStrictEqual(relations[0])
			expect(response.status).toBe(200)
			expect(response.headers.get("content-type")).toContain("application/json")
		})

		it("errors correctly on an empty object", async () => {
			await createRelations(4)

			const response = await testClient(app).api.v2.ids.$post({
				query: {},
				json: {},
			})

			await expect(response.json()).resolves.toMatchSnapshot()
			expect(response.status).toBe(400)
			expect(response.headers.get("content-type")).toContain("application/json")
		})

		it("returns null if not found", async () => {
			await createRelations(4)

			const response = await testClient(app).api.v2.ids.$post({
				query: {},
				json: { anidb: 100_000 },
			})

			await expect(response.json()).resolves.toBe(null)
			expect(response.status).toBe(200)
			expect(response.headers.get("content-type")).toContain("application/json")
		})

		it("can return a partial response", async () => {
			const relation: Relation = {
				anidb: 1337,
				anilist: 1337,
				"anime-planet": null!,
				anisearch: null!,
				imdb: null!,
				kitsu: null!,
				livechart: null!,
				"notify-moe": null!,
				themoviedb: null!,
				thetvdb: null!,
				myanimelist: null!,
			}
			await knex.insert(relation).into("relations")

			const response = await testClient(app).api.v2.ids.$post({
				query: {},
				json: { anilist: 1337 },
			})

			await expect(response.json()).resolves.toStrictEqual(relation)
			expect(response.status).toBe(200)
			expect(response.headers.get("content-type")).toContain("application/json")
		})
	})

	describe("array input", () => {
		// Results must preserve input order, with null for unmatched items.
		it("fetches relations correctly", async () => {
			const relations = await createRelations(4)

			const body = [
				{ [Source.AniDB]: relations[0].anidb },
				{ [Source.AniList]: 1000 },
				{ [Source.Kitsu]: relations[2].kitsu },
			]

			const result = [relations[0], null, relations[2]]

			const response = await testClient(app).api.v2.ids.$post({
				query: {},
				json: body,
			})

			await expect(response.json()).resolves.toStrictEqual(result)
			expect(response.status).toBe(200)
			expect(response.headers.get("content-type")).toContain("application/json")
		})

		it("responds correctly on no finds", async () => {
			const body = [{ [Source.AniList]: 1000 }, { [Source.Kitsu]: 1000 }]

			const result = [null, null]

			const response = await testClient(app).api.v2.ids.$post({
				query: {},
				json: body,
			})

			await expect(response.json()).resolves.toStrictEqual(result)
			expect(response.status).toBe(200)
			expect(response.headers.get("content-type")).toContain("application/json")
		})

		it("requires at least one source", async () => {
			const body = [{}]

			const response = await testClient(app).api.v2.ids.$post({
				query: {},
				json: body,
			})

			await expect(response.json()).resolves.toMatchSnapshot()
			expect(response.status).toBe(400)
			expect(response.headers.get("content-type")).toContain("application/json")
		})
	})
})

testIncludeQueryParam(app, "/api/v2/ids")
239 |
--------------------------------------------------------------------------------
/src/routes/v2/ids/handler.ts:
--------------------------------------------------------------------------------
1 | import { sValidator } from "@hono/standard-validator"
2 | import { Hono } from "hono"
3 | import type { InferOutput } from "valibot"
4 |
5 | import { knex, type Relation, type Source } from "../../../db.ts"
6 | import { cacheReply, CacheTimes, validationHook } from "../../../utils.ts"
7 | import { buildSelectFromInclude, includeSchema } from "../include.ts"
8 |
9 | import { bodyInputSchema } from "./schemas/json-body.ts"
10 | import { queryInputSchema } from "./schemas/query-params.ts"
11 |
12 | export const v2Routes = new Hono()
13 | .get("/ids", sValidator("query", queryInputSchema, validationHook), async (c) => {
14 | const query = c.req.query()
15 | const data = (await knex
16 | .select(buildSelectFromInclude(query.include))
17 | .where({ [query.source]: query.id })
18 | .from("relations")
19 | .first()) as Relation | undefined
20 |
21 | cacheReply(c.res, CacheTimes.SIX_HOURS)
22 |
23 | return c.json((data as Relation | null) ?? null)
24 | })
25 | .post(
26 | "/ids",
27 | sValidator("json", bodyInputSchema, validationHook),
28 | sValidator("query", includeSchema, validationHook),
29 | async (c) => {
30 | const input = await c.req.json>()
31 | const query = c.req.query()
32 |
33 | const select = buildSelectFromInclude(query.include)
34 |
35 | if (!Array.isArray(input)) {
36 | const relation = (await knex
37 | .select(select)
38 | .where(input)
39 | .from("relations")
40 | .first()) as Relation | undefined
41 |
42 | return c.json(relation ?? null)
43 | }
44 |
45 | let relations: Array = []
46 |
47 | // Get relations
48 | relations = await knex
49 | .select(select)
50 | .where(function () {
51 | for (const item of input) void this.orWhere(item)
52 | })
53 | .from("relations")
54 |
55 | // Map them against the input, so we get results like [{item}, null, {item}]
56 | relations = input.map((item) => {
57 | const realItem = Object.entries(item)[0] as [Source, number]
58 |
59 | return (
60 | relations.find((relation) => relation![realItem[0]] === realItem[1]) ?? null
61 | )
62 | })
63 |
64 | return c.json(relations)
65 | },
66 | )
67 |
--------------------------------------------------------------------------------
/src/routes/v2/ids/schemas/common.ts:
--------------------------------------------------------------------------------
1 | import * as v from "valibot"
2 |
3 | // Does not include `thetvdb` due to the one-to-many issue
4 | export const numberIdSourceSchema = v.picklist([
5 | "anilist",
6 | "anidb",
7 | "anisearch",
8 | "kitsu",
9 | "livechart",
10 | "myanimelist",
11 | ])
12 |
13 | export const stringIdSourceSchema = v.picklist(["anime-planet", "notify-moe"])
14 |
--------------------------------------------------------------------------------
/src/routes/v2/ids/schemas/json-body.test.ts:
--------------------------------------------------------------------------------
1 | import type { JsonValue } from "type-fest"
2 | import { safeParse } from "valibot"
3 | import { describe, expect, it } from "vitest"
4 |
5 | import type { Relation } from "../../../../db.ts"
6 |
7 | import { bodyInputSchema } from "./json-body.ts"
8 |
9 | type Case = [V, boolean]
10 | type Cases = Array>
11 |
// Inputs that must parse successfully
const okCases = [
	[{ anilist: 1337 }, true],
	[{ anidb: 1337 }, true],
	[{ anidb: 1337, anilist: 1337 }, true],
	[{ anidb: 1337, anilist: 1337, myanimelist: 1337, kitsu: 1337 }, true],
	[
		{
			anidb: 1337,
			anilist: 1337,
			"anime-planet": "1337",
			anisearch: 1337,
			kitsu: 1337,
			livechart: 1337,
			"notify-moe": "1337",
			myanimelist: 1337,
		},
		true,
	],
] satisfies Cases

// Inputs that must be rejected
const badCases = [
	// No source
	[{}, false],
	// Invalid ID (negative)
	[{ anilist: -1 }, false],
	// Invalid ID (not integer)
	[{ anilist: 1.5 }, false],
	[{ anidb: 1.5 }, false],
	// Invalid source
	[{ aniDb: 1337 }, false],
	[{ aniList: 1337 }, false],
	[{ anidb: 1337, test: 123 }, false],
	// Invalid IMDB IDs
	[{ imdb: "1337" }, false],
	// No filtering by special dbs in this endpoint
	[{ imdb: 1337 }, false],
	[{ themoviedb: 1337 }, false],
	[{ thetvdb: 1337 }, false],
] satisfies Cases

// Wraps each case's input in an array; the expected outcome is unchanged,
// since the schema accepts both a single object and an array of them.
const mapToSingularArrayInput = (cases: Cases): Cases =>
	cases.map(([input, expected]) => [[input], expected])

describe("schema", () => {
	const inputs = [
		[[], false],
		...okCases,
		...badCases,
		...mapToSingularArrayInput(okCases),
		...mapToSingularArrayInput(badCases),
	] satisfies Cases

	it.each(inputs)("%o = %s", (input, expected) => {
		const result = safeParse(bodyInputSchema, input)

		if (expected) {
			expect(result.issues?.[0]).not.toBeDefined()
		} else {
			expect(result.issues?.length ?? 0).toBeGreaterThanOrEqual(1)
		}
	})
})
74 |
--------------------------------------------------------------------------------
/src/routes/v2/ids/schemas/json-body.ts:
--------------------------------------------------------------------------------
1 | import * as v from "valibot"
2 |
3 | import type { Relation } from "../../../../db.ts"
4 | import { numberIdSchema, stringIdSchema } from "../../../../shared-schemas.ts"
5 |
6 | // Does not include `thetvdb` due to the one-to-many issue
7 | type BodyItem = Omit
8 | export const singularItemInputSchema = v.pipe(
9 | v.partial(
10 | v.strictObject({
11 | anidb: numberIdSchema,
12 | anilist: numberIdSchema,
13 | "anime-planet": stringIdSchema,
14 | anisearch: numberIdSchema,
15 | kitsu: numberIdSchema,
16 | livechart: numberIdSchema,
17 | "notify-moe": stringIdSchema,
18 | myanimelist: numberIdSchema,
19 | }),
20 | ),
21 | v.check(
22 | (value) => Object.values(value).some((id) => id != null),
23 | "At least one ID must be provided",
24 | ),
25 | )
26 |
27 | export type BodyQuery = BodyItem | BodyItem[]
28 |
29 | const arrayInputSchema = v.pipe(
30 | v.array(singularItemInputSchema),
31 | v.minLength(1),
32 | v.maxLength(100),
33 | )
34 |
35 | export const bodyInputSchema = v.union([singularItemInputSchema, arrayInputSchema])
36 |
--------------------------------------------------------------------------------
/src/routes/v2/ids/schemas/query-params.test.ts:
--------------------------------------------------------------------------------
1 | import type { JsonValue } from "type-fest"
2 | import { safeParse } from "valibot"
3 | import { describe, expect, it } from "vitest"
4 |
5 | import { Source } from "../../../../db.ts"
6 |
7 | import { queryInputSchema, type QueryParamQuery } from "./query-params.ts"
8 |
9 | type Case = [V, boolean]
10 | type Cases = Array>
11 |
// Inputs that must parse successfully
const okCases = [
	[{ source: Source.AniList, id: 1337 }, true],
	[{ source: Source.AniDB, id: 1337 }, true],
	[{ source: Source.MAL, id: 1337 }, true],
	[{ source: Source.Kitsu, id: 1337 }, true],
	[{ source: Source.Kitsu, id: 133_700 }, true],
	[{ source: Source.AnimePlanet, id: "1337" }, true],
] satisfies Cases

// Inputs that must be rejected (missing/invalid source, out-of-range ID,
// or a special-db source this endpoint does not serve)
const badCases: Cases = [
	[{}, false],
	[{ id: 1337 }, false],
	[{ source: Source.AniList }, false],
	[{ source: Source.AniList, id: null }, false],
	[{ source: Source.AniList, id: -1234 }, false],
	[{ source: Source.AniList, id: 50_000_001 }, false],
	[{ source: Source.IMDB, id: "tt1337" }, false],
	[{ source: Source.TheTVDB, id: 1337 }, false],
]

describe("schema", () => {
	const inputs = [...okCases, ...badCases] satisfies Cases

	it.each(inputs)("%o = %s", (input, expected) => {
		const result = safeParse(queryInputSchema, input)

		if (expected) {
			expect(result.issues?.[0]).not.toBeDefined()
		} else {
			expect(result.issues?.length ?? 0).toBeGreaterThanOrEqual(1)
		}
	})
})
45 |
--------------------------------------------------------------------------------
/src/routes/v2/ids/schemas/query-params.ts:
--------------------------------------------------------------------------------
1 | import * as v from "valibot"
2 |
3 | import { numberIdSchema, stringIdSchema } from "../../../../shared-schemas.ts"
4 | import { includeSchema } from "../../include.ts"
5 |
6 | import { numberIdSourceSchema, stringIdSourceSchema } from "./common.ts"
7 |
8 | export const queryInputSchema = v.intersect([
9 | v.union([
10 | v.object({
11 | source: numberIdSourceSchema,
12 | id: numberIdSchema,
13 | }),
14 | v.object({
15 | source: stringIdSourceSchema,
16 | id: stringIdSchema,
17 | }),
18 | ]),
19 | includeSchema,
20 | ])
21 |
22 | export type QueryParamQuery = v.InferOutput
23 |
--------------------------------------------------------------------------------
/src/routes/v2/include.test-utils.ts:
--------------------------------------------------------------------------------
1 | import type { Hono } from "hono"
2 | import { describe, expect, test } from "vitest"
3 |
4 | import { knex, Source } from "../../db.ts"
5 |
6 | export const testIncludeQueryParam = (
7 | app: Hono,
8 | path: string,
9 | source = Source.AniList,
10 | ) => {
11 | const arrayify = (data: T) => (source !== Source.AniList ? [data] : data)
12 | const prefixify = (source: S, input: T) =>
13 | source === "imdb" ? (`tt${input}` as const) : input
14 |
15 | describe("?include", () => {
16 | test("single source", async () => {
17 | await knex
18 | .insert({ anilist: 1337, thetvdb: 1337, themoviedb: 1337, imdb: "tt1337" })
19 | .into("relations")
20 |
21 | const query = new URLSearchParams({
22 | source,
23 | id: prefixify(source, "1337"),
24 | include: source,
25 | })
26 | const response = await app.fetch(
27 | new Request(`http://localhost${path}?${query.toString()}`),
28 | )
29 |
30 | await expect(response.json()).resolves.toStrictEqual(
31 | arrayify({ [source]: prefixify(source, 1337) }),
32 | )
33 | expect(response.status).toBe(200)
34 | expect(response.headers.get("content-type")).toContain("application/json")
35 | })
36 |
37 | test("multiple sources (anilist,thetvdb,themoviedb)", async () => {
38 | await knex
39 | .insert({ anilist: 1337, thetvdb: 1337, themoviedb: 1337, imdb: "tt1337" })
40 | .into("relations")
41 |
42 | const query = new URLSearchParams({
43 | source,
44 | id: prefixify(source, "1337"),
45 | include: [Source.AniList, Source.TheTVDB, Source.TheMovieDB, Source.IMDB].join(
46 | ",",
47 | ),
48 | })
49 | const response = await app.fetch(
50 | new Request(`http://localhost${path}?${query.toString()}`),
51 | )
52 |
53 | await expect(response.json()).resolves.toStrictEqual(
54 | arrayify({ anilist: 1337, thetvdb: 1337, themoviedb: 1337, imdb: "tt1337" }),
55 | )
56 | expect(response.status).toBe(200)
57 | expect(response.headers.get("content-type")).toContain("application/json")
58 | })
59 |
60 | test("all the sources", async () => {
61 | await knex
62 | .insert({ anilist: 1337, [source]: prefixify(source, 1337) })
63 | .into("relations")
64 |
65 | const query = new URLSearchParams({
66 | source,
67 | id: prefixify(source, "1337"),
68 | include: Object.values(Source).join(","),
69 | })
70 | const response = await app.fetch(
71 | new Request(`http://localhost${path}?${query.toString()}`),
72 | )
73 |
74 | const expectedResult: Record = {
75 | anidb: null,
76 | anilist: 1337,
77 | "anime-planet": null,
78 | anisearch: null,
79 | imdb: null,
80 | kitsu: null,
81 | livechart: null,
82 | "notify-moe": null,
83 | themoviedb: null,
84 | thetvdb: null,
85 | myanimelist: null,
86 | }
87 | expectedResult[source] = prefixify(source, 1337) as never
88 |
89 | await expect(response.json()).resolves.toStrictEqual(arrayify(expectedResult))
90 | expect(response.status).toBe(200)
91 | expect(response.headers.get("content-type")).toContain("application/json")
92 | })
93 | })
94 | }
95 |
--------------------------------------------------------------------------------
/src/routes/v2/include.test.ts:
--------------------------------------------------------------------------------
1 | import { sValidator } from "@hono/standard-validator"
2 | import type { Context } from "hono"
3 | import { Hono } from "hono"
4 | import { testClient } from "hono/testing"
5 | import { afterAll, beforeEach, describe, expect, it, vi } from "vitest"
6 |
7 | import { knex, Source } from "../../db.ts"
8 | import { validationHook } from "../../utils.ts"
9 |
10 | import { includeSchema } from "./include.ts"
11 |
// Spy handler so the tests can observe that the validator let a request pass
const handlerFn = vi.fn((c: Context) => c.json({ message: "ok" }))
const app = new Hono().get(
	"/test",
	sValidator("query", includeSchema, validationHook),
	handlerFn,
)

beforeEach(async () => {
	await knex.delete().from("relations")
})

afterAll(async () => {
	await knex.destroy()
})

describe("schema", () => {
	it("single source (anilist)", async () => {
		const response = await testClient(app).test.$get({
			query: {
				include: Source.AniList,
			},
		})

		await expect(response.json()).resolves.toStrictEqual({ message: "ok" })
		expect(response.status).toBe(200)
		expect(response.headers.get("content-type")).toContain("application/json")
	})

	it("multiple sources (anilist,thetvdb)", async () => {
		const response = await testClient(app).test.$get({
			query: {
				include: [Source.AniList, Source.TheTVDB].join(","),
			},
		})

		await expect(response.json()).resolves.toStrictEqual({ message: "ok" })
		expect(response.status).toBe(200)
		expect(response.headers.get("content-type")).toContain("application/json")
	})

	it("all the sources", async () => {
		const response = await testClient(app).test.$get({
			query: {
				include: Object.values(Source).join(","),
			},
		})

		await expect(response.json()).resolves.toStrictEqual({ message: "ok" })
		expect(response.status).toBe(200)
		expect(response.headers.get("content-type")).toContain("application/json")
	})
})
64 |
--------------------------------------------------------------------------------
/src/routes/v2/include.ts:
--------------------------------------------------------------------------------
1 | import * as v from "valibot"
2 |
3 | import { Source } from "../../db.ts"
4 |
5 | export const includeSchema = v.object({
6 | include: v.optional(
7 | v.pipe(
8 | v.string(),
9 | v.regex(/^[\-a-z,]+$/, "Invalid `include` query"),
10 | v.minLength(1),
11 | v.maxLength(100),
12 | ),
13 | ),
14 | })
15 |
16 | export type IncludeQuery = v.InferOutput
17 |
18 | const sources = Object.values(Source)
19 | export const buildSelectFromInclude = (include: string | null | undefined) => {
20 | if (include == null) {
21 | return "*"
22 | }
23 |
24 | return include.split(",").filter((inclusion) => sources.includes(inclusion as Source))
25 | }
26 |
--------------------------------------------------------------------------------
/src/routes/v2/special/handler.test.ts:
--------------------------------------------------------------------------------
1 | import { testClient } from "hono/testing"
2 | import { afterAll, beforeEach, describe, expect, it } from "vitest"
3 |
4 | import { createApp } from "../../../app.ts"
5 | import { knex, type Relation, Source } from "../../../db.ts"
6 | import { testIncludeQueryParam } from "../include.test-utils.ts"
7 |
8 | let id = 1
9 | const createRelations = async (
10 | amount: N,
11 | specialId?: number,
12 | ): Promise => {
13 | const relations = Array.from({ length: amount }).map(() => ({
14 | anidb: id++,
15 | anilist: id++,
16 | "anime-planet": `${id++}`,
17 | anisearch: id++,
18 | imdb: `tt${specialId ?? id++}`,
19 | kitsu: id++,
20 | livechart: id++,
21 | "notify-moe": `${id++}`,
22 | themoviedb: specialId ?? id++,
23 | thetvdb: specialId ?? id++,
24 | myanimelist: id++,
25 | }))
26 |
27 | await knex.insert(relations).into("relations")
28 |
29 | if (amount === 1) {
30 | return relations[0] as never
31 | }
32 |
33 | return relations as never
34 | }
35 |
const app = createApp()

// Fresh table per test; close the connection when the suite finishes.
beforeEach(async () => {
	await knex.delete().from("relations")
})

afterAll(async () => {
	await knex.destroy()
})
45 |
// GET /api/v2/imdb — one-to-many lookups, so responses are always arrays.
describe("imdb", () => {
	it("fetches relations correctly", async () => {
		await createRelations(4, 1336)
		const relations = await createRelations(3, 1337)

		const response = await testClient(app).api.v2.imdb.$get({
			query: {
				id: relations[0].imdb!,
			},
		})

		await expect(response.json()).resolves.toStrictEqual(relations)
		expect(response.status).toBe(200)
		expect(response.headers.get("content-type")).toContain("application/json")
	})

	it("returns empty array when id doesn't exist", async () => {
		const response = await testClient(app).api.v2.imdb.$get({
			query: {
				id: "tt404",
			},
		})

		await expect(response.json()).resolves.toStrictEqual([])
		expect(response.status).toBe(200)
		expect(response.headers.get("content-type")).toContain("application/json")
	})

	it("can return a partial response", async () => {
		const relation: Relation = {
			anidb: 1337,
			anilist: 1337,
			"anime-planet": null!,
			anisearch: null!,
			imdb: "tt1337",
			kitsu: null!,
			livechart: null!,
			"notify-moe": null!,
			themoviedb: null!,
			thetvdb: null!,
			myanimelist: null!,
		}
		await knex.insert(relation).into("relations")

		const response = await testClient(app).api.v2.imdb.$get({
			query: {
				id: relation.imdb!,
			},
		})

		await expect(response.json()).resolves.toStrictEqual([relation])
		expect(response.status).toBe(200)
		expect(response.headers.get("content-type")).toContain("application/json")
	})

	testIncludeQueryParam(app, "/api/v2/imdb", Source.IMDB)
})

// GET /api/v2/thetvdb — same shape as the imdb suite above.
describe("thetvdb", () => {
	it("fetches relations correctly", async () => {
		await createRelations(4, 1336)
		const relations = await createRelations(3, 1337)

		const response = await testClient(app).api.v2.thetvdb.$get({
			query: {
				id: relations[0].thetvdb!.toString(),
			},
		})

		await expect(response.json()).resolves.toStrictEqual(relations)
		expect(response.status).toBe(200)
		expect(response.headers.get("content-type")).toContain("application/json")
	})

	it("returns empty array when id doesn't exist", async () => {
		const response = await testClient(app).api.v2.thetvdb.$get({
			query: {
				id: (404).toString(),
			},
		})

		await expect(response.json()).resolves.toStrictEqual([])
		expect(response.status).toBe(200)
		expect(response.headers.get("content-type")).toContain("application/json")
	})

	it("can return a partial response", async () => {
		const relation: Relation = {
			anidb: 1337,
			anilist: 1337,
			"anime-planet": null!,
			anisearch: null!,
			imdb: null!,
			kitsu: null!,
			livechart: null!,
			"notify-moe": null!,
			themoviedb: null!,
			thetvdb: 1337,
			myanimelist: null!,
		}
		await knex.insert(relation).into("relations")

		const response = await testClient(app).api.v2.thetvdb.$get({
			query: {
				id: relation.thetvdb!.toString(),
			},
		})

		await expect(response.json()).resolves.toStrictEqual([relation])
		expect(response.status).toBe(200)
		expect(response.headers.get("content-type")).toContain("application/json")
	})

	testIncludeQueryParam(app, "/api/v2/thetvdb", Source.TheTVDB)
})

// GET /api/v2/themoviedb — same shape as the imdb suite above.
describe("themoviedb", () => {
	it("fetches relations correctly", async () => {
		await createRelations(4, 1336)
		const relations = await createRelations(3, 1337)

		const response = await testClient(app).api.v2.themoviedb.$get({
			query: {
				id: relations[0].themoviedb!.toString(),
			},
		})

		await expect(response.json()).resolves.toStrictEqual(relations)
		expect(response.status).toBe(200)
		expect(response.headers.get("content-type")).toContain("application/json")
	})

	it("returns empty array when id doesn't exist", async () => {
		const response = await testClient(app).api.v2.themoviedb.$get({
			query: {
				id: (404).toString(),
			},
		})

		await expect(response.json()).resolves.toStrictEqual([])
		expect(response.status).toBe(200)
		expect(response.headers.get("content-type")).toContain("application/json")
	})

	it("can return a partial response", async () => {
		const relation: Relation = {
			anidb: 1337,
			anilist: 1337,
			"anime-planet": null!,
			anisearch: null!,
			imdb: null!,
			kitsu: null!,
			livechart: null!,
			"notify-moe": null!,
			themoviedb: 1337,
			thetvdb: null!,
			myanimelist: null!,
		}
		await knex.insert(relation).into("relations")

		const response = await testClient(app).api.v2.themoviedb.$get({
			query: {
				id: relation.themoviedb!.toString(),
			},
		})

		await expect(response.json()).resolves.toStrictEqual([relation])
		expect(response.status).toBe(200)
		expect(response.headers.get("content-type")).toContain("application/json")
	})

	testIncludeQueryParam(app, "/api/v2/themoviedb", Source.TheMovieDB)
})
219 |
--------------------------------------------------------------------------------
/src/routes/v2/special/handler.ts:
--------------------------------------------------------------------------------
1 | import { sValidator } from "@hono/standard-validator"
2 | import { Hono } from "hono"
3 |
4 | import { knex, Source } from "../../../db.ts"
5 | import { cacheReply, CacheTimes, validationHook } from "../../../utils.ts"
6 | import { buildSelectFromInclude } from "../include.ts"
7 |
8 | import { specialImdbInputSchema, specialInputSchema } from "./schemas/special.ts"
9 |
10 | export const specialRoutes = new Hono()
11 | .get(
12 | "/imdb",
13 | sValidator("query", specialImdbInputSchema, validationHook),
14 | async (c) => {
15 | const query = c.req.query()
16 |
17 | const data = await knex
18 | .select(buildSelectFromInclude(query.include))
19 | .where({ [Source.IMDB]: query.id })
20 | .from("relations")
21 |
22 | cacheReply(c.res, CacheTimes.SIX_HOURS)
23 |
24 | return c.json(data)
25 | },
26 | )
27 | .get(
28 | "/themoviedb",
29 | sValidator("query", specialInputSchema, validationHook),
30 | async (c) => {
31 | const query = c.req.query()
32 |
33 | const data = await knex
34 | .select(buildSelectFromInclude(query.include))
35 | .where({ [Source.TheMovieDB]: query.id })
36 | .from("relations")
37 |
38 | cacheReply(c.res, CacheTimes.SIX_HOURS)
39 |
40 | return c.json(data)
41 | },
42 | )
43 | .get("/thetvdb", sValidator("query", specialInputSchema, validationHook), async (c) => {
44 | const query = c.req.query()
45 |
46 | const data = await knex
47 | .select(buildSelectFromInclude(query.include))
48 | .where({ [Source.TheTVDB]: query.id })
49 | .from("relations")
50 |
51 | cacheReply(c.res, CacheTimes.SIX_HOURS)
52 |
53 | return c.json(data)
54 | })
55 |
--------------------------------------------------------------------------------
/src/routes/v2/special/schemas/special.ts:
--------------------------------------------------------------------------------
1 | import * as v from "valibot"
2 |
3 | import { imdbIdSchema, numberIdSchema } from "../../../../shared-schemas.ts"
4 | import { includeSchema } from "../../include.ts"
5 |
6 | export const specialInputSchema = v.intersect([
7 | v.object({ id: numberIdSchema }),
8 | includeSchema,
9 | ])
10 |
11 | export const specialImdbInputSchema = v.intersect([
12 | v.object({ id: imdbIdSchema }),
13 | includeSchema,
14 | ])
15 |
16 | export type SpecialQuery = v.InferOutput
17 |
--------------------------------------------------------------------------------
/src/shared-schemas.ts:
--------------------------------------------------------------------------------
1 | import * as v from "valibot"
2 |
// Sources accepted by the legacy v1 endpoint
export const oldSourceSchema = v.picklist(
	["anilist", "anidb", "myanimelist", "kitsu"],
	"Invalid source",
)

// Coerces the input with Number() first, so numeric strings from query
// params are accepted; NaN then fails the integer check.
export const numberIdSchema = v.pipe(
	v.unknown(),
	v.transform(Number),
	v.integer("Invalid ID"),
	v.minValue(1),
	v.maxValue(50_000_000),
)

export const stringIdSchema = v.pipe(
	v.string("Invalid ID"),
	v.minLength(1),
	v.maxLength(150),
)

// IMDB IDs look like `tt1337`
export const imdbIdSchema = v.pipe(
	v.string("Invalid IMDB ID"),
	v.startsWith("tt"),
	v.minLength(3),
	v.maxLength(50),
)
28 |
--------------------------------------------------------------------------------
/src/shims.d.ts:
--------------------------------------------------------------------------------
// Gives the CommonJS knexfile a typed shape when imported from TypeScript.
declare module "*/knexfile" {
	import type { Config } from "knex"

	const config: {
		development: Config
		production: Config
	}

	export = config
}
11 |
--------------------------------------------------------------------------------
/src/update.test.ts:
--------------------------------------------------------------------------------
1 | import { FetchMocker, MockServer } from "mentoss"
2 | import { afterAll, afterEach, beforeEach, expect, it, vi } from "vitest"
3 |
4 | import { knex, type Relation, Source } from "./db.ts"
5 | import {
6 | type AnimeListsSchema,
7 | formatEntry,
8 | removeDuplicates,
9 | updateRelations,
10 | } from "./update.ts"
11 |
// Mock server for the GitHub raw-content host that update.ts downloads from;
// the FetchMocker routes global fetch calls to it.
const server = new MockServer("https://raw.githubusercontent.com")
const mocker = new FetchMocker({ servers: [server] })
15 |
16 | beforeEach(() => {
17 | mocker.mockGlobal()
18 | })
19 |
20 | afterEach(async () => {
21 | mocker.clearAll()
22 | vi.resetAllMocks()
23 | await knex.delete().from("relations")
24 | })
25 |
26 | afterAll(async () => {
27 | mocker.unmockGlobal()
28 | await Promise.all([knex.destroy()])
29 | })
30 |
31 | it("handles bad values", async () => {
32 | server.get("/Fribb/anime-lists/master/anime-list-full.json", {
33 | status: 200,
34 | body: [
35 | { anidb_id: 1337, themoviedb_id: "unknown" },
36 | { anidb_id: 1338, thetvdb_id: "unknown" as never },
37 | { anidb_id: 1339, imdb_id: "tt1337,tt1338,tt1339" },
38 | { anidb_id: 1340, themoviedb_id: "unknown" },
39 | { anidb_id: 1341, themoviedb_id: 1341 },
40 | ] satisfies AnimeListsSchema,
41 | })
42 |
43 | await updateRelations()
44 |
45 | await expect(
46 | knex.from("relations").select(["anidb", "imdb", "themoviedb", "thetvdb"]),
47 | ).resolves.toMatchInlineSnapshot(`
48 | [
49 | {
50 | "anidb": 1337,
51 | "imdb": null,
52 | "themoviedb": null,
53 | "thetvdb": null,
54 | },
55 | {
56 | "anidb": 1338,
57 | "imdb": null,
58 | "themoviedb": null,
59 | "thetvdb": null,
60 | },
61 | {
62 | "anidb": 1339,
63 | "imdb": null,
64 | "themoviedb": null,
65 | "thetvdb": null,
66 | },
67 | {
68 | "anidb": 1340,
69 | "imdb": null,
70 | "themoviedb": null,
71 | "thetvdb": null,
72 | },
73 | {
74 | "anidb": 1341,
75 | "imdb": null,
76 | "themoviedb": 1341,
77 | "thetvdb": null,
78 | },
79 | ]
80 | `)
81 | })
82 |
83 | it("handles duplicates", async () => {
84 | mocker.unmockGlobal()
85 |
86 | const entries: Relation[] = await fetch(
87 | "https://raw.githubusercontent.com/Fribb/anime-lists/master/anime-list-full.json",
88 | )
89 | .then(async (r) => r.json())
90 | .then((e) => (e as any[]).map(formatEntry))
91 |
92 | // There should be >=5 Konosuba entries
93 | const konosubaEntries = entries.filter(({ themoviedb }) => themoviedb === 65844)
94 | expect(konosubaEntries.length).toBeGreaterThanOrEqual(5)
95 |
96 | const results = removeDuplicates(entries)
97 |
98 | // There should still be 5 Konosuba entries
99 | expect(results.filter(({ themoviedb }) => themoviedb === 65844).length).toBe(
100 | konosubaEntries.length,
101 | )
102 |
103 | const goodSources = [
104 | Source.AniDB,
105 | Source.AniList,
106 | Source.AnimePlanet,
107 | Source.AniSearch,
108 | Source.Kitsu,
109 | Source.LiveChart,
110 | Source.NotifyMoe,
111 | Source.MAL,
112 | ]
113 |
114 | // Check if any sources have duplicate ids
115 | const duplicates = Object.fromEntries(
116 | goodSources.map((source) => {
117 | const groups = Object.groupBy(results, (e) => e[source]?.toString() ?? "undefined")
118 |
119 | return [
120 | source,
121 | Object.fromEntries(
122 | Object.entries(groups)
123 | .filter(([id, g]) => id !== "undefined" && id !== "null" && g!.length > 1)
124 | .map(([id, g]) => [id, g!.length]),
125 | ),
126 | ]
127 | }),
128 | )
129 | for (const goodSource of goodSources) {
130 | expect(duplicates[goodSource], `${goodSource} has duplicates`).toStrictEqual({})
131 | }
132 |
133 | const findEntry = (source: Source, id: number | string) =>
134 | results.find((entry) => entry[source] === id)
135 | expect(findEntry(Source.AniDB, 11261)).toBeDefined()
136 | expect(findEntry(Source.AniDB, 11992)).toBeDefined()
137 | })
138 |
--------------------------------------------------------------------------------
/src/update.ts:
--------------------------------------------------------------------------------
1 | import xior, { type XiorError } from "xior"
2 | import errorRetryPlugin from "xior/plugins/error-retry"
3 |
4 | import { knex, type Relation, Source } from "./db.ts"
5 | import { logger } from "./lib/logger.ts"
6 | import { updateBasedOnManualRules } from "./manual-rules.ts"
7 |
// Shared HTTP client: parses responses as JSON and retries failed requests
// up to 5 times before surfacing the error.
const http = xior.create({ responseType: "json" })
http.plugins.use(errorRetryPlugin({ retryTimes: 5 }))
10 |
11 | const isXiorError = (response: T | XiorError): response is XiorError =>
12 | "stack" in (response as XiorError)
13 |
// Shape of entries in Fribb/anime-lists' anime-list-full.json. Placeholder
// values such as "unknown" appear in numeric fields upstream and are cleaned
// up later by formatEntry/handleBadValues.
export type AnimeListsSchema = Array<{
	anidb_id?: number
	anilist_id?: number
	"anime-planet_id"?: string
	anisearch_id?: number
	imdb_id?: `tt${string}` | ""
	kitsu_id?: number
	livechart_id?: number
	mal_id?: number
	"notify.moe_id"?: string
	themoviedb_id?: number | "unknown"
	thetvdb_id?: number
}>
27 |
28 | const fetchDatabase = async (): Promise => {
29 | const response = await http
30 | .get(
31 | "https://raw.githubusercontent.com/Fribb/anime-lists/master/anime-list-full.json",
32 | )
33 | .catch((error: XiorError) => error)
34 |
35 | if (isXiorError(response)) {
36 | const error = new Error("Could not fetch updated database!!", {
37 | cause: response,
38 | })
39 |
40 | console.error(error)
41 |
42 | return null
43 | }
44 |
45 | return response.data
46 | }
47 |
48 | const badValues = ["", "unknown", "tv special"] as const
49 |
50 | const handleBadValues = (
51 | value: T | (typeof badValues)[number],
52 | ): T | undefined => {
53 | if (
54 | typeof value === "string" &&
55 | (badValues.includes(value as never) || value.includes(","))
56 | ) {
57 | return undefined
58 | }
59 |
60 | return value as T
61 | }
62 |
63 | // Removes duplicate source-id pairs from the list, except for thetvdb and themoviedb ids
64 | export const removeDuplicates = (entries: Relation[]): Relation[] => {
65 | const sources = (Object.values(Source) as Source[]).filter(
66 | (source) =>
67 | source !== Source.TheTVDB && source !== Source.TheMovieDB && source !== Source.IMDB,
68 | )
69 | const existing = new Map>(sources.map((name) => [name, new Set()]))
70 |
71 | const goodEntries = entries.filter((entry) => {
72 | for (const source of Object.keys(entry) as (keyof typeof entry)[]) {
73 | const id = entry[source]
74 |
75 | // Ignore nulls
76 | if (id == null) continue
77 | // Ignore sources with one-to-many relations
78 | if (
79 | source === Source.TheTVDB ||
80 | source === Source.TheMovieDB ||
81 | source === Source.IMDB
82 | ) {
83 | continue
84 | }
85 |
86 | if (existing.get(source)!.has(id)) return false
87 |
88 | existing.get(source)!.add(id)
89 | }
90 |
91 | return true
92 | })
93 |
94 | return goodEntries
95 | }
96 |
// Maps an upstream anime-lists entry onto our Relation column names,
// dropping placeholder/bad values per field via handleBadValues.
export const formatEntry = (entry: AnimeListsSchema[number]): Relation => ({
	anidb: handleBadValues(entry.anidb_id),
	anilist: handleBadValues(entry.anilist_id),
	"anime-planet": handleBadValues(entry["anime-planet_id"]),
	anisearch: handleBadValues(entry.anisearch_id),
	imdb: handleBadValues(entry.imdb_id),
	kitsu: handleBadValues(entry.kitsu_id),
	livechart: handleBadValues(entry.livechart_id),
	myanimelist: handleBadValues(entry.mal_id),
	"notify-moe": handleBadValues(entry["notify.moe_id"]),
	themoviedb: handleBadValues(entry.themoviedb_id),
	thetvdb: handleBadValues(entry.thetvdb_id),
})
110 |
// Fetches the upstream anime-lists dataset, normalizes and deduplicates it,
// atomically replaces the contents of the "relations" table, and finally
// applies the manual override rules. Aborts early (with a logged error) if
// the download failed.
export const updateRelations = async () => {
	logger.debug(`Using ${process.env.NODE_ENV!} database configuration...`)

	logger.info("Fetching updated Database...")
	const data = await fetchDatabase()
	logger.info("Fetched updated Database.")

	if (data == null) {
		logger.error("got no data")
		return
	}

	logger.info("Formatting entries...")
	// Drop entries that end up with no ids at all after bad-value cleanup.
	const formattedEntries = data
		.map(formatEntry)
		.filter((entry) => Object.values(entry).some((value) => value != null))
	logger.info({ remaining: formattedEntries.length }, `Formatted entries.`)

	logger.info(`Removing duplicates.`)
	const goodEntries = removeDuplicates(formattedEntries)
	logger.info({ remaining: goodEntries.length }, `Removed duplicates.`)

	logger.info("Updating database...")
	// Delete-then-insert inside one transaction so readers never observe a
	// partially replaced table.
	await knex.transaction(async (trx) =>
		knex
			.delete()
			.from("relations")
			.transacting(trx)
			.then(async () => {
				await knex.batchInsert("relations", goodEntries, 100).transacting(trx)
			}),
	)
	logger.info("Updated database.")

	logger.info("Executing manual rules...")
	await updateBasedOnManualRules()

	logger.info("Done.")

	// When invoked as a one-off CLI task (--exit), release the connection
	// pool so the process can terminate.
	if (process.argv.includes("--exit")) {
		await knex.destroy()
	}
}
154 |
--------------------------------------------------------------------------------
/src/utils.ts:
--------------------------------------------------------------------------------
1 | import type { StandardSchemaV1 } from "@standard-schema/spec"
2 | import type { Context } from "hono"
3 | import type { HTTPException } from "hono/http-exception"
4 | import type { StatusCode } from "hono/utils/http-status"
5 |
// Wire shape of the JSON error payload produced by createErrorJson.
type ErrorJson = {
	code?: string
	statusCode: number
	error: string
	message?: string
}
12 |
13 | const getErrorText = (code: StatusCode) => {
14 | // eslint-disable-next-line ts/switch-exhaustiveness-check
15 | switch (code) {
16 | case 400:
17 | return "Bad Request"
18 | case 401:
19 | return "Unauthorized"
20 | case 403:
21 | return "Forbidden"
22 | case 404:
23 | return "Not Found"
24 | case 405:
25 | return "Method Not Allowed"
26 | case 406:
27 | return "Not Acceptable"
28 | case 408:
29 | return "Request Timeout"
30 | case 409:
31 | return "Conflict"
32 | case 410:
33 | return "Gone"
34 | case 411:
35 | return "Length Required"
36 | case 412:
37 | return "Precondition Failed"
38 | case 413:
39 | return "Payload Too Large"
40 | case 414:
41 | return "URI Too Long"
42 | case 415:
43 | return "Unsupported Media Type"
44 | case 416:
45 | return "Range Not Satisfiable"
46 | case 417:
47 | return "Expectation Failed"
48 | case 418:
49 | return "I'm a teapot"
50 | case 421:
51 | return "Misdirected Request"
52 | case 422:
53 | return "Unprocessable Entity"
54 | case 423:
55 | return "Locked"
56 | case 424:
57 | return "Failed Dependency"
58 | case 425:
59 | return "Too Early"
60 | case 426:
61 | return "Upgrade Required"
62 | case 428:
63 | return "Precondition Required"
64 | case 429:
65 | return "Too Many Requests"
66 | case 431:
67 | return "Request Header Fields Too Large"
68 | case 451:
69 | return "Unavailable For Legal Reasons"
70 | case 500:
71 | return "Internal Server Error"
72 | case 501:
73 | return "Not Implemented"
74 | case 502:
75 | return "Bad Gateway"
76 | case 503:
77 | return "Service Unavailable"
78 | case 504:
79 | return "Gateway Timeout"
80 | case 505:
81 | return "HTTP Version Not Supported"
82 | case 506:
83 | return "Variant Also Negotiates"
84 | case 507:
85 | return "Insufficient Storage"
86 | case 508:
87 | return "Loop Detected"
88 | case 510:
89 | return "Not Extended"
90 | case 511:
91 | return "Network Authentication Required"
92 | default:
93 | return "Error"
94 | }
95 | }
96 |
97 | export const createErrorJson = (
98 | c: Context,
99 | input: Pick & {
100 | code?: string
101 | details?: Record
102 | },
103 | ) => {
104 | const status: StatusCode = input.status
105 | const body: Omit & {
106 | details?: Record
107 | } = {
108 | message: input.message ?? "An error occurred.",
109 | }
110 |
111 | if (input.code != null) {
112 | body.code = input.code
113 | }
114 | if (input.details != null) {
115 | body.details = input.details
116 | }
117 |
118 | c.status(status)
119 | return c.json({
120 | ...body,
121 | statusCode: status,
122 | error: getErrorText(status),
123 | })
124 | }
125 |
126 | export const validationHook = (
127 | result:
128 | | { success: true; data: Data }
129 | | { success: false; error: ReadonlyArray; data: Data },
130 | c: Context,
131 | ) => {
132 | if (result.success) return
133 |
134 | const issuesByPath = {} as Record
135 | for (const { path, message } of result.error) {
136 | const issuePath =
137 | path
138 | ?.map((p) => (typeof p === "object" ? p.key.toString() : p.toString()))
139 | .join(".") ?? "$"
140 |
141 | issuesByPath[issuePath] ??= []
142 | issuesByPath[issuePath].push(message)
143 | }
144 |
145 | return createErrorJson(c, {
146 | status: 400,
147 | message: "Validation error",
148 | code: "FST_ERR_VALIDATION",
149 | details: issuesByPath,
150 | })
151 | }
152 |
// Cache durations in seconds for Cache-Control max-age values.
export enum CacheTimes {
	HOUR = 3600,
	SIX_HOURS = 21_600,
	DAY = 86_400,
	// NOTE(review): 1_209_600 s is 14 days, not one week — confirm whether
	// the name or the value is the intended one.
	WEEK = 1_209_600,
}
159 |
160 | export const cacheReply = (response: Response, value: CacheTimes | number | string) => {
161 | response.headers.set("Cache-Control", `public, max-age=${value.toString()}`)
162 |
163 | return response
164 | }
165 |
--------------------------------------------------------------------------------
/tsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 | "extends": ["@tsconfig/node22/tsconfig.json", "@tsconfig/strictest/tsconfig.json"],
3 | "compilerOptions": {
4 | "noEmit": true,
5 | "sourceMap": true,
6 |
7 | "module": "esnext",
8 | "moduleResolution": "bundler",
9 | "resolveJsonModule": true,
10 | "esModuleInterop": true,
11 | "allowImportingTsExtensions": true,
12 |
13 | "lib": ["esnext"],
14 | "importsNotUsedAsValues": "remove",
15 | "noImplicitReturns": false,
16 | "noPropertyAccessFromIndexSignature": false,
17 | "noUncheckedIndexedAccess": false,
18 | "exactOptionalPropertyTypes": false,
19 | "verbatimModuleSyntax": false,
20 |
21 | "types": ["node"]
22 | },
23 | "include": ["**/*.ts"],
24 | "exclude": ["node_modules"]
25 | }
26 |
--------------------------------------------------------------------------------
/tsup.config.ts:
--------------------------------------------------------------------------------
1 | import { cpSync } from "node:fs"
2 |
3 | import { defineConfig } from "tsup"
4 |
5 | import pkgJson from "./package.json" with { type: "json" }
6 |
export default defineConfig({
	entry: ["src/index.ts"],
	// Native addon — kept external so it is loaded from disk at runtime
	// instead of being bundled.
	external: ["sqlite3"],
	outDir: "dist",

	bundle: true,
	sourcemap: false,
	clean: true,
	minify: true,

	// Values inlined at build time. The boolean casts exist because tsup's
	// `env` option is typed as strings; they are stringified during the build.
	env: {
		NODE_ENV: process.env.NODE_ENV ?? "production",
		DEV: (process.env.NODE_ENV === "development") as unknown as string,
		PROD: (process.env.NODE_ENV === "production") as unknown as string,
		TEST: false as unknown as string,
		HOMEPAGE: JSON.stringify(pkgJson.homepage),
	},

	shims: true,
	target: "node22",
	format: ["esm"],
	// Recreate a CommonJS-style `require` for dependencies that call it
	// from the ESM output bundle.
	banner: {
		js: "import {createRequire} from 'module';const require=createRequire(import.meta.url);",
	},
	esbuildOptions: (options) => {
		options.supported = {
			// For better performance: https://github.com/evanw/esbuild/issues/951
			"object-rest-spread": false,
		}
	},
	esbuildPlugins: [
		{
			// Copies the prebuilt better-sqlite3 native binary next to the
			// bundle after each build.
			name: "better-sqlite3-copy",
			setup({ onEnd }) {
				onEnd(() => {
					cpSync(
						"node_modules/better-sqlite3/build/Release/better_sqlite3.node",
						"dist/better_sqlite3.node",
						{ recursive: true },
					)
				})
			},
		},
	],
})
52 |
--------------------------------------------------------------------------------
/vitest.config.ts:
--------------------------------------------------------------------------------
1 | import { defineConfig } from "vitest/config"
2 |
export default defineConfig(async ({ command }) => ({
	test: {
		reporters: ["verbose"],

		setupFiles: ["./vitest.setup.ts"],
		// Run every test file in a single fork — the tests share one SQLite
		// database, so concurrent workers would conflict.
		poolOptions: {
			forks: { singleFork: true, minForks: 1, maxForks: 1 },
		},

		env: {
			NODE_ENV: "test",
		},

		coverage: {
			// Collect coverage only when Vitest runs in "build" command mode.
			enabled: command === "build",
			exclude: ["config.ts"],

			// Minimum coverage thresholds (percent).
			lines: 90,
			functions: 85,
			branches: 85,
			statements: 90,
		},
	},
}))
27 |
--------------------------------------------------------------------------------
/vitest.setup.ts:
--------------------------------------------------------------------------------
import Knex from "knex"

import knexfile from "./knexfile"

// NOTE(review): shims.d.ts types the knexfile as { development, production },
// yet the whole object is passed to Knex here — presumably knexfile.js
// resolves the environment itself at runtime; verify against knexfile.js.
const knex = Knex(knexfile)

// Bring the test database schema up to date before any test runs.
await knex.migrate.latest()
8 |
--------------------------------------------------------------------------------