├── .dockerignore
├── .eslintignore
├── .eslintrc.json
├── .gitattributes
├── .github
├── ISSUE_TEMPLATE
│ ├── bug_report.md
│ └── feature_request.md
└── workflows
│ └── build-artifacts.yml
├── .gitignore
├── Changelog.md
├── Dockerfile
├── LICENSE
├── README.md
├── api.js
├── app.js
├── auth
└── utils.js
├── config.js
├── database
├── db.js
├── init.js
├── knex-migrate.js
├── knexfile.js
├── memdb.js
├── migrations
│ ├── 20210105232420_fix_duplicate_title.js
│ ├── 20210106013350_create_t_review.js
│ ├── 20210108093032_alter_t_reviews.js
│ ├── 20210115014202_add_on_delete_cascade.js
│ ├── 20210206135836_drop_t_favorite.js
│ ├── 20210206141840_drop_not_null_constraints.js
│ ├── 20210213224539_alter_t_va.js
│ ├── 20210213233544_fill_va_uuid.js
│ ├── 20210223200240_drop_not_null_constraints.js
│ ├── 20210301141407_fix_title_redundant_chars.js
│ ├── 20210304101412_fix_view_userMetatdata.js
│ ├── 20210305161906_remove_ddl_from_review_api.js
│ ├── 20210307061415_refactor_queries.js
│ └── 20210502081522_remove_obsolete_view.js
├── schema.js
└── storage.js
├── dist
├── css
│ ├── app.0a399f4f.css
│ ├── app.0a399f4f.css.map
│ ├── app.3273c4c2.css
│ ├── app.3273c4c2.css.map
│ ├── app.5cbae10b.css
│ ├── app.5cbae10b.css.map
│ ├── app.f8322749.css
│ └── app.f8322749.css.map
├── fonts
│ ├── KFOkCnqEu92Fr1MmgVxIIzQ.9391e6e2.woff
│ ├── KFOlCnqEu92Fr1MmEU9fBBc-.ddd11dab.woff
│ ├── KFOlCnqEu92Fr1MmSU5fBBc-.877b9231.woff
│ ├── KFOlCnqEu92Fr1MmWUlfBBc-.0344cc3c.woff
│ ├── KFOlCnqEu92Fr1MmYUtfBBc-.b555d228.woff
│ ├── KFOmCnqEu92Fr1Mu4mxM.9b78ea3b.woff
│ ├── flUhRq6tzZclQEJ-Vdg-IuiaDsNa.0d57c481.woff
│ ├── flUhRq6tzZclQEJ-Vdg-IuiaDsNa.6b3adb7e.woff
│ ├── flUhRq6tzZclQEJ-Vdg-IuiaDsNcIhQ8tQ.ae520e14.woff2
│ └── flUhRq6tzZclQEJ-Vdg-IuiaDsNcIhQ8tQ.b833408f.woff2
├── index.html
├── js
│ ├── 2.052836f6.js
│ ├── 2.052836f6.js.map
│ ├── 3.4efbaad5.js
│ ├── 3.4efbaad5.js.map
│ ├── app.0d3b9cf5.js
│ ├── app.0d3b9cf5.js.map
│ ├── app.1ddf2869.js
│ ├── app.1ddf2869.js.map
│ ├── app.5c5e1e42.js
│ ├── app.5c5e1e42.js.map
│ ├── app.612f3191.js
│ ├── app.612f3191.js.map
│ ├── app.6ab52ada.js
│ ├── app.6ab52ada.js.map
│ ├── app.6ad3b034.js
│ ├── app.6ad3b034.js.map
│ ├── app.8cdb667e.js
│ ├── app.8cdb667e.js.map
│ ├── app.980148e7.js
│ ├── app.980148e7.js.map
│ ├── app.9fbd38f4.js
│ ├── app.9fbd38f4.js.map
│ ├── app.a4dce0d3.js
│ ├── app.a4dce0d3.js.map
│ ├── app.ada637aa.js
│ ├── app.ada637aa.js.map
│ ├── app.b691a13e.js
│ ├── app.b691a13e.js.map
│ ├── app.bd01b415.js
│ ├── app.bd01b415.js.map
│ ├── app.f3d36aee.js
│ ├── app.f3d36aee.js.map
│ ├── runtime.4271bde0.js
│ ├── runtime.4271bde0.js.map
│ ├── vendor.11de91d7.js
│ ├── vendor.11de91d7.js.map
│ ├── vendor.691318be.js
│ ├── vendor.691318be.js.map
│ ├── vendor.c4acbea1.js
│ └── vendor.c4acbea1.js.map
├── manifest.json
├── precache-manifest.886b655a7197ec328a3b6fbb5262cccf.js
├── service-worker.js
├── statics
│ ├── app-logo-128x128.png
│ ├── default_avatar.gif
│ ├── icons
│ │ ├── apple-icon-120x120.png
│ │ ├── apple-icon-152x152.png
│ │ ├── apple-icon-167x167.png
│ │ ├── apple-icon-180x180.png
│ │ ├── favicon-16x16.png
│ │ ├── favicon-32x32.png
│ │ ├── favicon-96x96.png
│ │ ├── favicon.ico
│ │ ├── icon-128x128.png
│ │ ├── icon-192x192.png
│ │ ├── icon-256x256.png
│ │ ├── icon-384x384.png
│ │ ├── icon-512x512.png
│ │ ├── ms-icon-144x144.png
│ │ └── safari-pinned-tab.svg
│ └── material.png
└── workbox-v4.3.1
│ ├── workbox-background-sync.dev.js
│ ├── workbox-background-sync.dev.js.map
│ ├── workbox-background-sync.prod.js
│ ├── workbox-background-sync.prod.js.map
│ ├── workbox-broadcast-update.dev.js
│ ├── workbox-broadcast-update.dev.js.map
│ ├── workbox-broadcast-update.prod.js
│ ├── workbox-broadcast-update.prod.js.map
│ ├── workbox-cacheable-response.dev.js
│ ├── workbox-cacheable-response.dev.js.map
│ ├── workbox-cacheable-response.prod.js
│ ├── workbox-cacheable-response.prod.js.map
│ ├── workbox-core.dev.js
│ ├── workbox-core.dev.js.map
│ ├── workbox-core.prod.js
│ ├── workbox-core.prod.js.map
│ ├── workbox-expiration.dev.js
│ ├── workbox-expiration.dev.js.map
│ ├── workbox-expiration.prod.js
│ ├── workbox-expiration.prod.js.map
│ ├── workbox-navigation-preload.dev.js
│ ├── workbox-navigation-preload.dev.js.map
│ ├── workbox-navigation-preload.prod.js
│ ├── workbox-navigation-preload.prod.js.map
│ ├── workbox-offline-ga.dev.js
│ ├── workbox-offline-ga.dev.js.map
│ ├── workbox-offline-ga.prod.js
│ ├── workbox-offline-ga.prod.js.map
│ ├── workbox-precaching.dev.js
│ ├── workbox-precaching.dev.js.map
│ ├── workbox-precaching.prod.js
│ ├── workbox-precaching.prod.js.map
│ ├── workbox-range-requests.dev.js
│ ├── workbox-range-requests.dev.js.map
│ ├── workbox-range-requests.prod.js
│ ├── workbox-range-requests.prod.js.map
│ ├── workbox-routing.dev.js
│ ├── workbox-routing.dev.js.map
│ ├── workbox-routing.prod.js
│ ├── workbox-routing.prod.js.map
│ ├── workbox-strategies.dev.js
│ ├── workbox-strategies.dev.js.map
│ ├── workbox-strategies.prod.js
│ ├── workbox-strategies.prod.js.map
│ ├── workbox-streams.dev.js
│ ├── workbox-streams.dev.js.map
│ ├── workbox-streams.prod.js
│ ├── workbox-streams.prod.js.map
│ ├── workbox-sw.js
│ ├── workbox-sw.js.map
│ ├── workbox-window.dev.es5.mjs
│ ├── workbox-window.dev.es5.mjs.map
│ ├── workbox-window.dev.mjs
│ ├── workbox-window.dev.mjs.map
│ ├── workbox-window.dev.umd.js
│ ├── workbox-window.dev.umd.js.map
│ ├── workbox-window.prod.es5.mjs
│ ├── workbox-window.prod.es5.mjs.map
│ ├── workbox-window.prod.mjs
│ ├── workbox-window.prod.mjs.map
│ ├── workbox-window.prod.umd.js
│ └── workbox-window.prod.umd.js.map
├── docker-compose.yml
├── filesystem
├── scanner.js
├── scannerModules.js
├── updater.js
└── utils.js
├── package-lock.json
├── package.json
├── routes
├── auth.js
├── config.js
├── credentials.js
├── index.js
├── media.js
├── metadata.js
├── review.js
├── utils
│ ├── normalize.js
│ ├── strftime.js
│ ├── url.js
│ └── validate.js
└── version.js
├── scraper
├── axios.js
├── dlsite.js
├── hvdb.js
└── utils.js
├── socket.js
├── static
└── no-image.jpg
├── test
├── migration..js
├── spinup
│ ├── spinup-0.3.0.js
│ └── spinup-0.6.0-rc4.js
├── teardown
│ └── teardown-0.6.0.js
└── urljoin.js
├── upgrade.js
└── 用户文档.md
/.dockerignore:
--------------------------------------------------------------------------------
1 | # Git
2 | .git
3 |
4 | # Package
5 | package/*
6 | package-macos/*
7 |
8 | # Config
9 | config/*
10 |
11 | # Covers
12 | covers/*
13 |
14 | # SQLite
15 | sqlite/*
16 |
17 | # Node modules
18 | node_modules
19 |
20 | *.md
21 |
22 | # Storage
23 | VoiceWork
24 |
25 | # HTTPS
26 | kikoeru.crt
27 | kikoeru.key
28 |
29 | # Release template
30 | Release-Template.md
31 |
32 | # GitHub
33 | .github
34 |
35 | # Env
36 | .env
37 |
38 | # Use build artifact from muveex/kikoeru-quasar instead
39 | dist
40 |
41 | # GitHub Actions
42 | .github
--------------------------------------------------------------------------------
/.eslintignore:
--------------------------------------------------------------------------------
1 | dist/
2 |
--------------------------------------------------------------------------------
/.eslintrc.json:
--------------------------------------------------------------------------------
1 | {
2 | "env": {
3 | "commonjs": true,
4 | "es2021": true,
5 | "node": true,
6 | "mocha": true
7 | },
8 | "extends": [
9 | "eslint:recommended",
10 | "plugin:node/recommended"
11 | ],
12 | "parserOptions": {
13 | "ecmaVersion": 12
14 | },
15 | "rules": {
16 | "no-prototype-builtins": "off",
17 | "no-process-exit": "off"
18 | }
19 | }
20 |
--------------------------------------------------------------------------------
/.gitattributes:
--------------------------------------------------------------------------------
1 | * text=auto
2 | *.txt text
3 | *.js text eol=lf
4 | LICENSE text eol=lf
5 | .gitattributes text eol=lf
6 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/bug_report.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: 错误报告
3 | about: 请尽可能详细描述问题以帮助改进此项目
4 | title: "[Bug] "
5 | labels: bug
6 | assignees: ''
7 |
8 | ---
9 |
10 | **问题描述**
11 |
12 |
13 | **错误信息**
14 |
19 |
20 | **版本**
21 |
22 | - 系统:Windows/Linux/Mac OS/群晖
23 | - 安装方式:Windows打包版/Mac OS打包版/Docker/源码安装
24 | - 浏览器:Chrome/Firefox/Safari
25 | - 程序版本:例如v0.6.0-rc.3
26 |
27 | **截图**
28 |
29 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/feature_request.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: 功能建议
3 | about: 新功能或者改进建议
4 | title: "[Feature request] "
5 | labels: enhancement
6 | assignees: ''
7 |
8 | ---
9 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Logs
2 | logs
3 | *.log
4 | npm-debug.log*
5 | yarn-debug.log*
6 | yarn-error.log*
7 | lerna-debug.log*
8 |
9 | # Diagnostic reports (https://nodejs.org/api/report.html)
10 | report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
11 |
12 | # Runtime data
13 | pids
14 | *.pid
15 | *.seed
16 | *.pid.lock
17 |
18 | # Directory for instrumented libs generated by jscoverage/JSCover
19 | lib-cov
20 |
21 | # Coverage directory used by tools like istanbul
22 | coverage
23 | *.lcov
24 |
25 | # nyc test coverage
26 | .nyc_output
27 |
28 | # Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
29 | .grunt
30 |
31 | # Bower dependency directory (https://bower.io/)
32 | bower_components
33 |
34 | # node-waf configuration
35 | .lock-wscript
36 |
37 | # Compiled binary addons (https://nodejs.org/api/addons.html)
38 | build/Release
39 |
40 | # Dependency directories
41 | node_modules/
42 | jspm_packages/
43 |
44 | # TypeScript v1 declaration files
45 | typings/
46 |
47 | # TypeScript cache
48 | *.tsbuildinfo
49 |
50 | # Optional npm cache directory
51 | .npm
52 |
53 | # Optional eslint cache
54 | .eslintcache
55 |
56 | # Microbundle cache
57 | .rpt2_cache/
58 | .rts2_cache_cjs/
59 | .rts2_cache_es/
60 | .rts2_cache_umd/
61 |
62 | # Optional REPL history
63 | .node_repl_history
64 |
65 | # Output of 'npm pack'
66 | *.tgz
67 |
68 | # Yarn Integrity file
69 | .yarn-integrity
70 |
71 | # dotenv environment variables file
72 | .env
73 | .env.test
74 |
75 | # parcel-bundler cache (https://parceljs.org/)
76 | .cache
77 |
78 | # Next.js build output
79 | .next
80 |
81 | # Nuxt.js build / generate output
82 | .nuxt
83 |
84 | # Gatsby files
85 | .cache/
86 | # Comment in the public line in if your project uses Gatsby and *not* Next.js
87 | # https://nextjs.org/blog/next-9-1#public-directory-support
88 | # public
89 |
90 | # vuepress build output
91 | .vuepress/dist
92 |
93 | # Serverless directories
94 | .serverless/
95 |
96 | # FuseBox cache
97 | .fusebox/
98 |
99 | # DynamoDB Local files
100 | .dynamodb/
101 |
102 | # TernJS port file
103 | .tern-port
104 |
105 | # Visual Studio Code
106 | .vscode/
107 |
108 | # Windows Batch
109 | /*.bat
110 |
111 | # Package
112 | package
113 | package-macos
114 |
115 | # Config
116 | config
117 |
118 | # Covers
119 | covers
120 |
121 | # SQLite
122 | sqlite
123 |
124 | # VoiceWork
125 | VoiceWork
126 |
127 | # HTTPS
128 | kikoeru.crt
129 | kikoeru.key
130 |
131 | # Release template
132 | Release-Template.md
133 |
134 | # Dependency graph
135 | graph.svg
136 |
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
1 | # This dockerfile generates a single-container application
2 | # It copies build artifacts from the front-end image
3 | # If you want to separate the front-end from the back-end, it should work as well
4 |
5 | FROM node:14-alpine as build-dep
6 |
7 | # Create app directory
8 | WORKDIR /usr/src/kikoeru
9 |
10 | RUN apk update && apk add python3 make gcc g++
11 |
12 | # Install app dependencies
13 | # A wildcard is used to ensure both package.json AND package-lock.json are copied
14 | # where available (npm@5+)
15 | COPY package*.json ./
16 | RUN npm ci --only=production
17 |
18 | # Build SPA and PWA
19 | FROM node:14 as build-frontend
20 | WORKDIR /frontend
21 | # @quasar/app v1 requires node-sass, which takes 30 minutes to compile libsass in CI for arm64 and armv7
22 | # So I prebuilt the binaries for arm64 and armv7
23 | # @quasar/app v2 no longer uses this deprecated package, so this line will be removed in the future
24 | ENV SASS_BINARY_SITE="https://github.com/umonaca/node-sass/releases/download"
25 | RUN npm install -g @quasar/cli
26 | ARG FRONTEND_VERSION="unstable"
27 | # Workaround docker cache
28 | # https://stackoverflow.com/questions/36996046/how-to-prevent-dockerfile-caching-git-clone
29 | ADD https://api.github.com/repos/KirieHaruna/kikoeru-quasar/git/refs/heads/unstable /tmp/version.json
30 | RUN git clone -b ${FRONTEND_VERSION} https://github.com/KirieHaruna/kikoeru-quasar.git .
31 | RUN npm ci
32 | RUN quasar build && quasar build -m pwa
33 |
34 | # Final stage
35 | FROM node:14-alpine
36 | ENV IS_DOCKER=true
37 | WORKDIR /usr/src/kikoeru
38 |
39 | # Copy build artifacts
40 | COPY --from=build-dep /usr/src/kikoeru /usr/src/kikoeru
41 | ARG FRONTEND_TYPE="pwa"
42 | COPY --from=build-frontend /frontend/dist/${FRONTEND_TYPE} /usr/src/kikoeru/dist
43 |
44 | # Bundle app source
45 | COPY . .
46 |
47 | # Tini
48 | RUN apk add --no-cache tini
49 | ENTRYPOINT ["/sbin/tini", "--"]
50 |
51 | # 持久化
52 | VOLUME [ "/usr/src/kikoeru/sqlite", "/usr/src/kikoeru/config", "/usr/src/kikoeru/covers"]
53 |
54 | EXPOSE 8888
55 | CMD [ "node", "app.js" ]
56 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Kikoeru
2 | 一个同人音声专用的音乐流媒体服务器,详细的使用说明见[**用户文档**](https://github.com/umonaca/kikoeru-express/wiki/%E4%BD%BF%E7%94%A8%E8%AF%B4%E6%98%8E)
3 |
4 | [](https://github.com/umonaca/kikoeru-express/actions)
5 |
6 | ### 对比kikoeru-project/kikoeru-express的更新内容
7 | - 主界面更换为暗色
8 | - 支持8位RJ号
9 | - 排序支持按有字幕新作和添加时间排序
10 | - 增加无图模式
11 | - 增加视频播放功能
12 | - 增加播放历史记录功能
13 | - 增加是否显示lrc文件功能(关闭不影响字幕显示)
14 |
15 | ### 唯一指定部署方式
16 | - 请使用docker pull kirieharuna/kikoeru:latest
17 |
18 |
19 | ### 功能介绍
20 | - 从 DLSite 爬取音声元数据
21 | - 对音声标记进度、打星、写评语
22 | - 通过标签或关键字快速检索想要找到的音声
23 | - 根据音声元数据对检索结果进行排序
24 | - 可以选择通过 JWT 验证用户或关闭用户认证功能
25 | - 支持在 Web 端修改配置文件和扫描音声库
26 | - 支持为音声库添加多个根文件夹
27 |
28 | ### ~~源码安装部署~~
29 | ~~将kikoeru-quasar项目生成的SPA或PWA文件夹全部文件置于`dist`文件夹下,确保`dist/index.html`存在,然后:~~
30 | ~~```bash~~
31 | ~~# 安装依赖~~
32 | ~~npm install~~
33 |
34 | ~~# 启动服务器~~
35 | ~~npm start~~
36 |
37 | ~~# Express listening on http://[::]:8888~~
38 | ~~```~~
39 | ~~关于选择PWA还是SPA:~~
40 | ~~区别仅仅在于有无Service Worker,由于Service Worker只能在本地和HTTPS上运行,因此如果远程以HTTP方式打开,PWA和SPA二者没有任何区别。也就是说,如果Kikoeru的主要用途是在移动设备上局域网播放,并且您没有配置HTTPS证书,那么实际上打开的都是SPA。~~
41 | ~~PWA的优点:基本页面零延迟,可以像手机APP一样通过浏览器“添加到桌面”的方式安装成App。作者自己使用的前端版本。~~
42 | ~~缺点:更新新版本时需要至少多刷新一次。~~
43 |
44 | ~~本项目还有打包好的 **Windows 系统下可用的 exe 可执行文件**与 **docker 镜像**版本,docker镜像及docker-compose的使用说明详见[**用户文档**](https://github.com/umonaca/kikoeru-express/wiki/%E4%BD%BF%E7%94%A8%E8%AF%B4%E6%98%8E)
45 | 使用docker-compose只需调整`docker-compose.yml`内的挂载位置以符合您的存储路径即可。~~
46 |
47 | ### 技术栈
48 | - axios (网络请求)
49 | - express (构建后端服务)
50 | - sqlite3 (文件型数据库)
51 | - knexjs (操作数据库)
52 | - knex-migrate (数据库迁移)
53 | - cheerio (将 html 解析为 jQuery 对象)
54 | - jsonwebtoken (用户认证)
55 | - socket.io (用于将扫描音声库的结果实时传给客户端)
56 | - lrc-file-parser (解析播放LRC歌词文件)
57 | - jschardet (判断文本文件编码)
58 | - child_process (nodejs 子进程)
59 | - pkg (打包为可执行文件)
60 |
61 |
62 | ### 项目目录结构
63 | ```
64 | ├── routes/ # 主要路由
65 | ├── config/ # 存放配置文件
66 | ├── covers/ # 存放音声封面
67 | ├── database/ # 操作数据库相关代码
68 | ├── dist/ # 存放前端项目 kikoeru-quasar 构建的 PWA
69 | ├── filesystem/ # 存放扫描相关代码
70 | ├── package/ # 存放 pkg 打包后的可执行文件
71 | ├── package-macos/ # 存放 pkg 打包后的可执行文件
72 | ├── scraper/ # 存放爬虫相关代码
73 | ├── sqlite/ # 存放 sqlite 数据库文件
74 | ├── static/ # 存放静态资源
75 | ├── .gitignore # git 忽略路径
76 | ├── .dockerignore # Docker 忽略路径
77 | ├── api.js # 为 express 实例添加路由与 jwt 验证中间件
78 | ├── app.js # 项目入口文件
79 | ├── socket.js # 用于初始化socket.io
80 | ├── config.js # 用于生成与修改 config.json 配置文件,导出公共配置以及升级锁
81 | ├── Dockerfile # 用于构建 docker 镜像的文本文件
82 | ├── docker-compose.yml # 用于使用docker-compose一键构建环境
83 | ├── package.json # npm 脚本和依赖项
84 | ├── .eslintrc.json # ESLint
85 | ├── Changelog.md # 最近的版本历史
86 | ```
87 |
88 |
89 | ### TODO
90 | - [x] 可拖动歌词控件
91 | - [x] 二级页面返回按钮
92 | - [x] 手动星标
93 | - [x] 评价过的作品优先
94 | - [x] 星标前端 CRUD
95 | - [x] 星标后端 CRUD
96 | - [x] 进度标记页面
97 | - [x] 用户评价
98 | - [x] 修复面条代码里的placeholders
99 | - [x] 升级sqlite等
100 | - [x] 刷新元数据
101 | - [x] 不清理作品
102 | - [x] 修复扫描阻塞
103 | - [ ] 使用ID标识文件夹
104 | - [x] 整理路由等
105 | - [ ] 单元测试、CI
106 | - [ ] Intersection Observer
107 | - [ ] 可编辑标签
108 | - [ ] 重新扫描
109 | - [ ] Dark Mode
110 | - [ ] 重构WorkCard和WorkDetail
111 | - [ ] 使用vuex重构收藏
112 | - [x] 检查启用foreign key是否会出错
113 | - [ ] 导入导出评价、进度
114 | - [ ] 重构config和schema,添加多用户支持(目前实际上仍然是单用户架构)
115 | - [ ] 重构鉴权逻辑, cookie, CSRF, 不向管理员传递md5 salt...
116 | - [x] Knex error catch
117 | - [x] 写迁移脚本
118 | - [x] 重写创建数据库逻辑(与迁移脚本冲突了)
119 | - [ ] 播放列表功能(目前只有一个)
120 | - [ ] docker适当的权限与进程监控
121 | - [ ] 添加计划任务,定期更新音声的动态元数据
122 | - [ ] 手动添加音声
123 | - [x] 首次扫描bug
124 | - [x] 扫描设置
125 |
126 | ### 感谢
127 | 本项目的大部分后端代码来自于开源项目 [kikoeru](https://github.com/nortonandrews/kikoeru)
128 |
129 | ### 声明
130 | 本项目作为开源软件,本身不包含任何版权内容或其它违反法律的内容。项目中的程序是为了个人用户管理自己所有的合法数据资料而设计的。
131 | 程序作者并不能防止内容提供商(如各类网站)或其它用户使用本程序提供侵权或其它非法内容。程序作者与使用本程序的各类内容提供商并无联系,不为其提供技术支持,也不为其不当使用承担法律责任。
132 |
133 | ### 许可协议
134 | GNU General Public License v3.0
135 |
--------------------------------------------------------------------------------
/api.js:
--------------------------------------------------------------------------------
1 | const routes = require('./routes')
2 | const expressJwt = require('express-jwt'); // 把 JWT 的 payload 部分赋值于 req.user
3 |
4 | const { config } = require('./config');
5 | const { issuer, audience } = require('./auth/utils')
6 |
7 | /**
8 | * Get token from header or query string.
9 | */
10 | const getToken = (req) => {
11 | if (req.headers.authorization && req.headers.authorization.split(' ')[0] === 'Bearer') {
12 | return req.headers.authorization.split(' ')[1];
13 | } else if (req.query && req.query.token) {
14 | return req.query.token;
15 | }
16 | return null;
17 | }
18 |
19 |
20 | module.exports = (app) => {
21 | if (config.auth) {
22 | // expressJwt 中间件
23 | // 验证指定 http 请求的 JsonWebTokens 的有效性, 如果有效就将 JsonWebTokens 的值设置到 req.user 里面, 然后路由到相应的 router
24 | app.use('/api', expressJwt({ secret: config.jwtsecret, audience: audience, issuer: issuer, getToken, algorithms: ['HS256'] }).unless({ path: ['/api/auth/me', '/api/health'] }));
25 | }
26 |
27 | app.use('/api', routes);
28 | };
--------------------------------------------------------------------------------
/app.js:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env node
2 |
3 | require('dotenv').config()
4 | const path = require('path');
5 | const express = require('express');
6 |
7 | const compression = require('compression');
8 | const bodyParser = require('body-parser'); // 获取 req.body
9 | const history = require('connect-history-api-fallback');
10 | const http = require('http');
11 | const https = require('https');
12 | const fs = require('fs');
13 |
14 | // Crash the process on "unhandled promise rejection" when NODE_ENV=test or CRASH_ON_UNHANDLED exists
15 | if (process.env.NODE_ENV === 'test' || process.env.CRASH_ON_UNHANDLED) {
16 | process.on('unhandledRejection', (reason, promise) => {
17 | console.error(new Date().toJSON(), 'Kikoeru log: Unhandled rejection at ', promise, `reason: ${reason}`);
18 | console.error('Crashing the process because of NODE_ENV or CRASH_ON_UNHANDLED settings');
19 | process.exit(1)
20 | })
21 | }
22 |
23 | const { initApp }= require('./database/init');
24 | const initSocket = require('./socket');
25 | const { config } = require('./config');
26 | const api = require('./api');
27 | const app = express();
28 |
29 | // Initialize database if not exists
30 | // Init or migrate database and config
31 | // Note: non-blocking
32 | initApp().catch(err => console.error(err));
33 |
34 | if (config.behindProxy) {
35 | // Only useful if you are using a reverse proxy e.g. nginx
36 | // This is used to detect correct remote IP address which will be used in express-brute and some routes
37 | // You MUST set a X-Forwarded-For header in your reverse proxy to make it work
38 | // By default, behindProxy is false
39 | app.set('trust proxy', 'loopback')
40 | }
41 |
42 | if (config.enableGzip) {
43 | app.use(compression());
44 | }
45 |
46 | // parse application/x-www-form-urlencoded
47 | app.use(bodyParser.urlencoded({ extended: true }));
48 | // parse application/json
49 | app.use(bodyParser.json());
50 |
51 | // For dev purpose only
52 | if (process.env.NODE_ENV === 'development') {
53 | // eslint-disable-next-line node/no-unpublished-require
54 | app.use('/media/stream/VoiceWork', express.static('VoiceWork'), require('serve-index')('VoiceWork', {'icons': true}));
55 | // eslint-disable-next-line node/no-unpublished-require
56 | app.use('/media/download/VoiceWork', express.static('VoiceWork'), require('serve-index')('VoiceWork', {'icons': true}));
57 | }
58 |
59 | // connect-history-api-fallback 中间件后所有的 GET 请求都会变成 index (default: './index.html').
60 | app.use(history({
61 | // 将所有带 api 的 GET 请求都代理到 parsedUrl.path, 其实就是原来的路径
62 | rewrites: [
63 | {
64 | from: /^\/api\/.*$/,
65 | to: context => context.parsedUrl.path
66 | }
67 | ]
68 | }));
69 | // Expose API routes
70 | api(app);
71 |
72 | // Serve WebApp routes
73 | app.use(express.static(path.join(__dirname, './dist')));
74 |
75 | // 返回错误响应
76 | // eslint-disable-next-line no-unused-vars
77 | app.use((err, req, res, next) => {
78 | if (err.name === 'UnauthorizedError') {
79 | // 验证错误
80 | res.set("WWW-Authenticate", "Bearer realm=\"Authorization Required\"");
81 | res.status(401).send({ error: err.message });
82 | } else if (err.code === 'SQLITE_ERROR') {
83 | if (err.message.indexOf('no such table') !== -1) {
84 | res.status(500).send({ error: '数据库结构尚未建立,请先执行扫描.'});
85 | }
86 | } else {
87 | console.error(new Date().toJSON(), 'Kikoeru log:', err);
88 | if (process.env.NODE_ENV === 'production' || config.production) {
89 | // Do not send excess error messages to the client on production mode
90 | res.status(500).send({ error: '服务器错误' });
91 | } else {
92 | res.status(500).send({ error: err.message || err });
93 | }
94 | }
95 | });
96 |
97 | // Create HTTP and HTTPS server
98 | const server = http.createServer(app);
99 | let httpsServer = null;
100 | let httpsSuccess = false;
101 | if (config.httpsEnabled) {
102 | try {
103 | httpsServer = https.createServer({
104 | key: fs.readFileSync(config.httpsPrivateKey),
105 | cert: fs.readFileSync(config.httpsCert),
106 | },app);
107 | httpsSuccess = true;
108 | } catch (err) {
109 | console.error('HTTPS服务器启动失败,请检查证书位置以及是否文件可读')
110 | console.error(err);
111 | }
112 | }
113 |
114 | // websocket 握手依赖 http 服务
115 | initSocket(server);
116 | if (config.httpsEnabled && httpsSuccess) {
117 | initSocket(httpsServer);
118 | }
119 |
120 | const listenPort = process.env.PORT || config.listenPort || 8888;
121 | const localOnly = config.blockRemoteConnection;
122 |
123 | // Note: for some unknown reasons, :: does not always work
124 | localOnly ? server.listen(listenPort, 'localhost') : server.listen(listenPort)
125 | if (config.httpsEnabled && httpsSuccess) {
126 | localOnly ? httpsServer.listen(config.httpsPort, 'localhost') : httpsServer.listen(config.httpsPort)
127 | }
128 |
129 | server.on('listening', () => {
130 | console.log('Express server started on port %s at %s', server.address().port, server.address().address);
131 | })
132 |
133 | if (config.httpsEnabled && httpsSuccess) {
134 | httpsServer.on('listening', () => {
135 | console.log('Express server started on port %s at %s', httpsServer.address().port, httpsServer.address().address);
136 | })
137 | }
138 |
--------------------------------------------------------------------------------
/auth/utils.js:
--------------------------------------------------------------------------------
1 | const jwt = require('jsonwebtoken');
2 | const md5 = require('md5');
3 |
4 | const { config } = require('../config');
5 |
6 | const issuer = 'http://kikoeru'
7 | const audience = 'http://kikoeru/api'
8 |
9 |
10 | const signPayload = (payload) => jwt.sign(payload, config.jwtsecret, {expiresIn: config.expiresIn});
11 |
12 | const signToken = (user) => {
13 | // RFC 7519
14 | const payload = {
15 | iss: issuer,
16 | sub: user.name,
17 | aud: audience,
18 | name: user.name,
19 | group: user.group
20 | }
21 | return signPayload(payload)
22 | }
23 |
24 | const cmd5 = (str) => md5(str + config.md5secret);
25 |
26 |
27 |
28 | module.exports = {
29 | signToken,
30 | md5: cmd5,
31 | issuer,
32 | audience
33 | }
34 |
--------------------------------------------------------------------------------
/database/init.js:
--------------------------------------------------------------------------------
1 | const fs = require('fs');
2 | const { md5 } = require('../auth/utils');
3 | const knexMigrate = require('./knex-migrate');
4 | const { databaseExist, createUser } = require('./db');
5 | const pjson = require('../package.json');
6 | const compareVersions = require('compare-versions');
7 | const { config, updateConfig } = require('../config');
8 | const { applyFix } = require('../upgrade');
9 | const { createSchema } = require('./schema');
10 |
11 | const initApp = async () => {
12 | let configVersion = config.version;
13 | let currentVersion = pjson.version;
14 |
15 |
16 | async function runMigrations () {
17 | const log = ({ action, migration }) => console.log('Doing ' + action + ' on ' + migration);
18 | await knexMigrate('up', {}, log);
19 | }
20 |
21 | async function skipMigrations () {
22 | await knexMigrate('skipAll', {});
23 | }
24 |
25 | // Fix a nasty bug introduced in v0.5.1
26 | async function fixMigrations () {
27 | if (compareVersions.compare(configVersion, 'v0.5.1', '>=') && compareVersions.compare(configVersion, 'v0.5.3', '<')) {
28 | await knexMigrate('skipAll', {to: '20210108093032'});
29 | }
30 | }
31 |
32 | function initDatabaseDir () {
33 | const databaseFolderDir = config.databaseFolderDir;
34 | if (!fs.existsSync(databaseFolderDir)) {
35 | try {
36 | fs.mkdirSync(databaseFolderDir, { recursive: true });
37 | } catch(err) {
38 | console.error(` ! 在创建存放数据库文件的文件夹时出错: ${err.message}`);
39 | }
40 | }
41 | }
42 |
43 | // 迁移或创建数据库结构
44 | if (databaseExist && compareVersions.compare(currentVersion, configVersion, '>')) {
45 | console.log('升级中');
46 | const oldVersion = config.version;
47 | try {
48 | await applyFix(oldVersion);
49 | await fixMigrations();
50 | await runMigrations();
51 | updateConfig();
52 | } catch (error) {
53 | console.log('升级迁移过程中出错,请在GitHub issues中报告作者')
54 | console.error(error);
55 | }
56 | } else if (!databaseExist) {
57 | initDatabaseDir();
58 | await createSchema();
59 | try { // 创建内置的管理员账号
60 | await createUser({
61 | name: 'admin',
62 | password: md5('admin'),
63 | group: 'administrator'
64 | });
65 | } catch(err) {
66 | // 无法创建管理员账号时立即退出:没有该账号应用无法使用
67 | console.error(err.message);
68 | process.exit(1);
69 | }
70 | try {
71 | await skipMigrations()
72 | } catch (err) {
73 | console.error(` ! 在构建数据库结构过程中出错: ${err.message}`);
74 | process.exit(1);
75 | }
76 | if (compareVersions.compare(currentVersion, configVersion, '>')) {
77 | // Update config only. Do not apply fix to database.
78 | updateConfig();
79 | }
80 | }
81 | }
82 |
83 | module.exports = { initApp };
84 |
--------------------------------------------------------------------------------
/database/knex-migrate.js:
--------------------------------------------------------------------------------
1 | // 来自knex-migrate,用于解决knex migratio API和knex-migrate在Windows上打包后
2 | // 仍然使用绝对路径导致找不到文件的问题
3 |
4 | const { join } = require('path')
5 | const { existsSync} = require('fs')
6 | const Umzug = require('@umonaca/umzug')
7 | const { omitBy, isNil} = require('lodash')
8 | const Promise = require('bluebird')
9 | const knex = require('knex')
10 |
11 | function normalizeFlags (flags) {
12 | flags.knexfile = flags.knexfile || 'knexfile.js'
13 |
14 | flags.knexfile = join(__dirname, flags.knexfile)
15 |
16 | flags.env =
17 | flags.env || process.env.KNEX_ENV || process.env.NODE_ENV || 'upgrade'
18 | }
19 |
20 | function knexInit (flags) {
21 | normalizeFlags(flags)
22 |
23 | let config
24 |
25 | if (flags.config) {
26 | config = flags.config
27 | } else {
28 | try {
29 | config = require(flags.knexfile)
30 | } catch (err) {
31 | if (/Cannot find module/.test(err.message)) {
32 | console.error(`No knexfile at '${flags.knexfile}'`)
33 | console.error("Please create one or bootstrap using 'knex init'")
34 | process.exit(1)
35 | }
36 |
37 | throw err
38 | }
39 | }
40 |
41 | if (config[flags.env]) {
42 | config = config[flags.env]
43 | }
44 |
45 | if (typeof config !== 'object') {
46 | console.error(`Malformed knex config:`)
47 | console.error(JSON.stringify(config, null, 2))
48 | process.exit(1)
49 | }
50 |
51 | flags.migrations =
52 | flags.migrations ||
53 | (config.migrations && config.migrations.directory) ||
54 | 'migrations'
55 | flags.migrations = join(__dirname, flags.migrations)
56 |
57 | if (!existsSync(flags.migrations)) {
58 | console.error(`No migrations directory at '${flags.migrations}'`)
59 | }
60 |
61 | if (config.client === 'sqlite3') {
62 | config.useNullAsDefault = true
63 | }
64 |
65 | config.pool = { max: 10, min: 0, idleTimeoutMillis: 1000 }
66 |
67 | return knex(config)
68 | }
69 |
70 | function umzugKnex (flags, connection) {
71 | return new Umzug({
72 | storage: join(__dirname, 'storage'),
73 | storageOptions: { connection },
74 | migrations: {
75 | params: [connection, Promise],
76 | path: flags.migrations,
77 | pattern: /^\d+_.+\.[j|t]s$/,
78 | wrap: fn => (knex, Promise) => {
79 | if (flags.skip) {
80 | // Non standard. Mark as executed without actually executing the migration
81 | return Promise.resolve()
82 | }
83 | if (flags.raw) {
84 | return Promise.resolve(fn(knex, Promise))
85 | } else {
86 | return knex.transaction(tx => Promise.resolve(fn(tx, Promise)))
87 | }
88 | }
89 | },
90 | skipTargetMigrationCheck: true
91 | })
92 | }
93 |
94 | async function umzugOptions (command, flags, umzug) {
95 | if (isNil(flags.to) && isNil(flags.from) && !isNil(flags.only)) {
96 | return flags.only
97 | }
98 |
99 | if (flags.to === '0') {
100 | flags.to = 0
101 | }
102 |
103 | if (flags.from === '0') {
104 | flags.from = 0
105 | }
106 |
107 | const opts = omitBy({ to: flags.to, from: flags.from }, isNil)
108 |
109 | if (!isNil(flags.step)) {
110 | await applyStepOption(command, umzug, opts, flags.step)
111 | }
112 |
113 | return opts
114 | }
115 |
116 | async function applyStepOption (command, umzug, opts, steps) {
117 | // Default to 1 step if no number is provided
118 | if (steps === '') {
119 | steps = 1
120 | }
121 |
122 | // Use the list of pending or executed migrations to determine what would happen without --step
123 | let migrations =
124 | command === 'up'
125 | ? await umzug.pending()
126 | : await umzug.executed().then(m => m.reverse())
127 |
128 | // Remove migrations prior to the one used in --from
129 | // If it isn't in the list, the --from option has no effect
130 | if (opts.from) {
131 | const limit = migrations.find(m => m.file.startsWith(opts.from))
132 | migrations = migrations.slice(Math.max(0, migrations.indexOf(limit)))
133 | }
134 |
135 | // Remove migrations after the one used in --to
136 | // If it isn't in the list, we remove everything, causing a 'migration not pending' notice to show
137 | if (opts.to) {
138 | const limit = migrations.find(m => m.file.startsWith(opts.to))
139 | migrations = migrations.slice(0, migrations.indexOf(limit) + 1)
140 | }
141 |
142 | // Limit to the number of migrations available
143 | steps = Math.min(migrations.length, steps)
144 |
145 | // Override the --to option to limit the number of steps taken
146 | if (steps > 0) {
147 | opts.to = migrations[steps - 1].file
148 | }
149 | }
150 |
// Entry point: run a migration command ('up' or 'skipAll') with the given
// CLI-style flags, reporting progress through the optional callback.
// Always tears down the knex connection pool, even on failure.
async function knexMigrate (command, flags, progress) {
  flags = flags || {}
  progress = progress || function () {}

  const umzug = umzugKnex(flags, knexInit(flags))

  // Forward each umzug lifecycle event to the caller's progress callback
  const debug = action => migration => {
    progress({
      action,
      migration: join(flags.migrations, migration)
    })
  }

  umzug
    .on('migrating', debug('migrate'))
    .on('reverting', debug('revert'))
    .on('debug', debug('debug'))

  const api = {
    up: async () => {
      const opts = await umzugOptions('up', flags, umzug)
      await umzug.storage.ensureTable()
      return umzug.up(opts)
    },

    // Non standard, used in this project only:
    // marks migrations as executed without running them (see the `skip` flag)
    skipAll: async () => {
      flags.skip = true
      const opts = await umzugOptions('up', flags, umzug)
      await umzug.storage.ensureTable()
      return umzug.up(opts)
    }
  }

  if (!(command in api)) {
    throw new Error('Unknown command: ' + command)
  }

  try {
    // BUG FIX: the handlers take no arguments, and `flags` is a plain object,
    // not an argument array — `.apply(null, flags)` silently passed nothing
    // while suggesting otherwise. Call the handler directly.
    return await api[command]()
  } finally {
    // Await pool teardown so the process doesn't exit with a live handle
    await umzug.storage.knex.destroy()
  }
}
195 |
// Expose as both a CommonJS export and `.default` for ES-module interop
module.exports = knexMigrate
module.exports.default = knexMigrate
--------------------------------------------------------------------------------
/database/knexfile.js:
--------------------------------------------------------------------------------
1 | const path = require('path')
2 | const { config } = require('../config')
3 |
4 | module.exports = {
5 | // Default environment
6 | development: {
7 | client: 'sqlite3', // 数据库类型
8 | useNullAsDefault: true,
9 | connection: { // 连接参数
10 | filename: path.join(config.databaseFolderDir, 'db.sqlite3'),
11 | },
12 | acquireConnectionTimeout: 40000, // 连接计时器
13 | pool: {
14 | afterCreate: (conn, done) => {
15 | conn.run('PRAGMA foreign_keys = ON;', function (err) {
16 | if (err) {
17 | done(err, conn);
18 | } else {
19 | conn.run(`PRAGMA busy_timeout = ${config.dbBusyTimeout};`, function (err) {
20 | done(err, conn);
21 | });
22 | }
23 | });
24 | }
25 | }
26 | },
27 |
28 | // For migration only. Foreign keys are disabled (SQLite default)
29 | upgrade: {
30 | client: 'sqlite3',
31 | connection: {
32 | filename: path.join(config.databaseFolderDir, 'db.sqlite3')
33 | },
34 | migrations: {
35 | tableName: 'knex_migrations'
36 | }
37 | },
38 |
39 | test: {
40 | client: "sqlite3",
41 | connection: {
42 | filename: path.join(__dirname, '../test/db-test.sqlite3'),
43 | },
44 | useNullAsDefault: true,
45 | migrations: {
46 | tableName: 'knex_migrations'
47 | }
48 | }
49 | };
50 |
--------------------------------------------------------------------------------
/database/memdb.js:
--------------------------------------------------------------------------------
const Redis = require('redis');

// Cache availability flag: flips to false on the first Redis error and
// disables every cache operation from then on.
// FIX: declared before the error handler that reads it (was referenced in the
// handler above its `let` declaration, a latent TDZ hazard).
let useable = true;

// FIX: `createClient` is a factory function, not a constructor — drop `new`.
const client = Redis.createClient({
  url: 'redis://redisearch:6379'
});

client.on('error', (err) => {
  if (!useable) return;
  console.error(err);
  useable = false;
});
12 |
// Connect to Redis and create the RediSearch index over work:* JSON
// documents. Safe to call repeatedly: an existing index is skipped; any other
// failure disables the cache.
const initialize = async () => {
  if (!useable) return;
  try {
    await client.connect();
    await client.ft.create(
      'idx:works',
      {
        '$.index': {
          type: Redis.SchemaFieldTypes.NUMERIC,
          SORTABLE: true
        },
        '$.url': {
          type: Redis.SchemaFieldTypes.TEXT,
          AS: 'url'
        }
      },
      {
        ON: 'JSON',
        PREFIX: 'work:'
      }
    );
  } catch (e) {
    if (e.message === 'Index already exists') {
      console.log('Index exists already, skipped creation.');
      return;
    }
    useable = false;
    console.error(e);
  }
};
39 |
// Cache one work object as a JSON document under work:<id>.
const setWork = async (work) => {
  if (!useable) return;
  try {
    // BUG FIX: the promise was not awaited, so the catch below was dead code
    // and failures surfaced as unhandled rejections.
    await client.json.set(`work:${work.id}`, '$', work);
  } catch (e) {
    if (e.message === 'idx:works: no such index') {
      // Index was dropped (e.g. after a flush) — recreate it lazily.
      await initialize();
    } else {
      console.error(e);
    }
  }
}
52 |
// Full-text search the works index by URL. Returns up to 100 matched work
// documents, or undefined when the cache is disabled or the query fails.
const getWorks = async (url) => {
  if (!useable) return;
  const options = {
    LIMIT: { from: 0, size: 100 }
  };
  try {
    const result = await client.ft.search('idx:works', url, options);
    return result.documents.map((doc) => doc.value);
  } catch (e) {
    if (e.message === 'idx:works: no such index') {
      initialize();
    } else {
      console.error(e);
    }
  }
}
75 |
// Flush the entire Redis database (server-side async flush).
const drop = async () => {
  if (!useable) return;
  // BUG FIX: await the flush so callers can sequence on completion and a
  // rejection does not become an unhandled promise rejection.
  await client.FLUSHDB('ASYNC');
}
80 |
// Prepend a play-history entry onto work:<id>, replacing any previous entry
// from the same user.
const setHistory = async (body) => {
  if (!useable) return;
  try {
    // Fetch the cached work document.
    // NOTE(review): with a '$' path node-redis v4 may return an array of
    // matches rather than the bare object — confirm against the write side.
    let work = await client.json.get(`work:${body.id}`, '$');
    if (work === null || work === undefined) {
      return;
    }
    let newValue = [{
      play_time: body.play_time,
      user_name: body.username,
      track_name: body.track_name
    }];
    // BUG FIX: also treat a missing (undefined) history as empty — previously
    // it fell into the else branch and threw inside the catch-all.
    if (work.history === null || work.history === undefined) {
      await client.json.set(`work:${body.id}`, `$.history`, newValue);
    } else {
      // Drop this user's previous entry (at most one), then prepend the new one.
      for (let i = 0; i < work.history.length; i++) {
        if (work.history[i].user_name === body.username) {
          work.history.splice(i, 1);
          break;
        }
      }
      work.history.unshift(newValue[0]);
      await client.json.set(`work:${body.id}`, `$.history`, work.history);
    }
  } catch (e) {
    if (e.message === 'idx:works: no such index') {
      initialize();
    } else {
      console.error(e);
    }
  }
}
114 |
// Fire-and-forget warm-up; errors are handled inside initialize().
initialize();

// NOTE(review): `useable` is exported by value — consumers see the load-time
// snapshot, not later flips to false. The exported functions re-check the
// live flag themselves, so prefer calling them over reading `useable`.
module.exports = {setWork, getWorks, drop, setHistory, useable};
--------------------------------------------------------------------------------
/database/migrations/20210105232420_fix_duplicate_title.js:
--------------------------------------------------------------------------------
// Fix work titles stored with the name duplicated (e.g. "FooFoo" -> "Foo"):
// when the newline/space-trimmed first half equals the trimmed second half,
// keep only the first half.
// NOTE(review): errors are logged and swallowed, so this migration is marked
// applied even if the UPDATE fails — presumably intentional best-effort.
exports.up = async function(knex) {
  await knex.raw(`UPDATE t_work SET title = trim(substr(title, 1, length(title)/2 + 1), X'0A' || ' ')
  WHERE trim(substr(title, 1, length(title)/2 + 1), X'0A' || ' ') =
  trim(substr(title, length(title)/2 + 2, length(title)), X'0A' || ' ');`)
  .catch(err => console.error(err));
};
7 |
// Irreversible data fix — the duplicated titles cannot be reconstructed.
exports.down = async function() {
  // Do nothing
};
--------------------------------------------------------------------------------
/database/migrations/20210106013350_create_t_review.js:
--------------------------------------------------------------------------------
1 | exports.up = async function(knex) {
2 | let exist = await knex.schema.hasTable('t_review')
3 | if (!exist) {
4 | await knex.schema.createTable("t_review", function(table) {
5 | table.string('user_name').notNullable();
6 | table.string('work_id').notNullable();
7 | table.integer('rating'); // 用户评分1-5
8 | table.string('review_text'); // 用户评价文字
9 | table.timestamps(true, true); // 时间戳created_at, updated_at 默认当前时间
10 | table.foreign('user_name').references('name').inTable('t_user'); // FOREIGN KEY
11 | table.foreign('work_id').references('id').inTable('t_work'); // FOREIGN KEY
12 | table.primary(['user_name', 'work_id']); // PRIMARY KEY
13 | })
14 | }
15 | };
16 |
17 | exports.down = async function(knex) {
18 | await knex.schema.dropTable("t_review");
19 | };
20 |
--------------------------------------------------------------------------------
/database/migrations/20210108093032_alter_t_reviews.js:
--------------------------------------------------------------------------------
1 | exports.up = async function(knex) {
2 | await knex.schema.alterTable('t_review', function(table) {
3 | table.string('progress'); // add column ['marked', 'listening', 'listened', 'postponed', null]
4 | });
5 | };
6 |
7 | exports.down = async function(knex) {
8 | await knex.schema.alterTable('t_review', function(table) {
9 | table.dropColumn('progress'); // drop column progress
10 | });
11 | };
12 |
--------------------------------------------------------------------------------
/database/migrations/20210115014202_add_on_delete_cascade.js:
--------------------------------------------------------------------------------
1 | exports.up = async function(knex) {
2 |
3 | await knex.raw(`PRAGMA foreign_keys=off;`);
4 |
5 | await knex.transaction(async (trx) => {
6 | // Add on delete cascade
7 | // SQLite allows DDL in transaction
8 | await trx.raw(`
9 | CREATE TABLE "t_review_new" (
10 | "user_name" varchar(255) NOT NULL,
11 | "work_id" varchar(255) NOT NULL,
12 | "rating" integer,
13 | "review_text" varchar(255),
14 | "created_at" datetime DEFAULT CURRENT_TIMESTAMP,
15 | "updated_at" datetime DEFAULT CURRENT_TIMESTAMP,
16 | "progress" varchar(255),
17 | PRIMARY KEY("user_name","work_id"),
18 | FOREIGN KEY("work_id") REFERENCES "t_work"("id") ON DELETE CASCADE,
19 | FOREIGN KEY("user_name") REFERENCES "t_user"("name") ON DELETE CASCADE
20 | );
21 | `);
22 | await trx.raw(`INSERT INTO t_review_new SELECT * FROM t_review;`);
23 | await trx.raw(`DROP TABLE t_review;`);
24 | // Will be recreated by the main program
25 | await trx.raw(`DROP VIEW IF EXISTS userMetadata;`);
26 | await trx.raw(`ALTER TABLE t_review_new RENAME TO t_review;`);
27 | }).catch(err => console.error(err));
28 |
29 | await knex.raw(`PRAGMA foreign_keys=on;`);
30 | };
31 |
32 | exports.down = async function(knex) {
33 | await knex.raw(`PRAGMA foreign_keys=off;`);
34 |
35 | await knex.transaction(async (trx) => {
36 | await trx.raw(`
37 | CREATE TABLE "t_review_new" (
38 | "user_name" varchar(255) NOT NULL,
39 | "work_id" varchar(255) NOT NULL,
40 | "rating" integer,
41 | "review_text" varchar(255),
42 | "created_at" datetime,
43 | "updated_at" datetime,
44 | "progress" varchar(255),
45 | PRIMARY KEY("user_name","work_id"),
46 | FOREIGN KEY("work_id") REFERENCES "t_work"("id"),
47 | FOREIGN KEY("user_name") REFERENCES "t_user"("name")
48 | );
49 | `);
50 | await trx.raw(`INSERT INTO t_review_new SELECT * FROM t_review;`);
51 | await trx.raw(`DROP TABLE t_review;`);
52 | // Will be recreated by the main program
53 | await trx.raw(`DROP VIEW IF EXISTS userMetadata;`);
54 | await trx.raw(`ALTER TABLE t_review_new RENAME TO t_review;`);
55 | }).catch(err => console.error(err));
56 |
57 | await knex.raw(`PRAGMA foreign_keys=on;`);
58 | };
59 |
--------------------------------------------------------------------------------
/database/migrations/20210206135836_drop_t_favorite.js:
--------------------------------------------------------------------------------
1 | // This table is from upstream, which is no longer in use
2 | exports.up = async function(knex) {
3 | await knex.schema.dropTableIfExists('t_favorite');
4 | };
5 |
6 | exports.down = async function(knex) {
7 | let exist = await knex.schema.hasTable('t_favorite');
8 | if (!exist) {
9 | await knex.schema.createTable("t_favorite", function(table) {
10 | table.string('user_name').notNullable();
11 | table.string('name').notNullable();
12 | table.text('works').notNullable(); // TEXT 类型 [评价分布明细]
13 | table.foreign('user_name').references('name').inTable('t_user'); // FOREIGN KEY 外键
14 | table.primary(['user_name', 'name']); // PRIMARY KEYprimary 主键
15 | })
16 | }
17 | };
18 |
--------------------------------------------------------------------------------
/database/migrations/20210206141840_drop_not_null_constraints.js:
--------------------------------------------------------------------------------
1 | exports.up = async function(knex) {
2 | await knex.raw(`PRAGMA foreign_keys=off;`);
3 |
4 | await knex.transaction(async (trx) => {
5 | trx.raw('DROP INDEX IF EXISTS t_work_circle_id_release_dl_count_review_count_price_rate_average_2dp_index');
6 |
7 | // SQLite allows DDL in transaction
8 | await trx.schema.createTable('t_work_new', (table) => {
9 | table.increments(); // id自增列(INTEGER 类型),会被用作主键 [音声id]
10 | table.string('root_folder').notNullable(); // VARCHAR 类型 [根文件夹别名]
11 | table.string('dir').notNullable(); // VARCHAR 类型 [相对存储路径]
12 | table.string('title').notNullable(); // VARCHAR 类型 [音声名称]
13 | table.integer('circle_id').notNullable(); // INTEGER 类型 [社团id]
14 | table.boolean('nsfw'); // BOOLEAN 类型
15 | table.string('release'); // VARCHAR 类型 [贩卖日 (YYYY-MM-DD)]
16 |
17 | table.integer('dl_count'); // INTEGER 类型 [售出数]
18 | table.integer('price'); // INTEGER 类型 [价格]
19 | table.integer('review_count'); // INTEGER 类型 [评论数量]
20 | table.integer('rate_count'); // INTEGER 类型 [评价数量]
21 | table.float('rate_average_2dp'); // FLOAT 类型 [平均评价]
22 | table.text('rate_count_detail'); // TEXT 类型 [评价分布明细]
23 | table.text('rank'); // TEXT 类型 [历史销售业绩]
24 |
25 | table.foreign('circle_id').references('id').inTable('t_circle'); // FOREIGN KEY 外键
26 | table.index(['circle_id', 'release', 'dl_count', 'review_count', 'price', 'rate_average_2dp'], 't_work_index'); // INDEX 索引
27 | })
28 | await trx.raw(`INSERT INTO t_work_new SELECT * FROM t_work;`);
29 | await trx.raw(`DROP TABLE t_work;`);
30 | await trx.raw(`ALTER TABLE t_work_new RENAME TO t_work;`);
31 | }).catch(err => console.error(err));
32 |
33 | await knex.raw(`PRAGMA foreign_keys=on;`);
34 | };
35 |
36 | exports.down = async function(knex) {
37 | await knex.raw(`PRAGMA foreign_keys=off;`);
38 |
39 | await knex.transaction(async (trx) => {
40 | trx.raw('DROP INDEX IF EXISTS t_work_index');
41 |
42 | // SQLite allows DDL in transaction
43 | await trx.schema.createTable('t_work_new', (table) => {
44 | table.increments(); // id自增列(INTEGER 类型),会被用作主键 [音声id]
45 | table.string('root_folder').notNullable(); // VARCHAR 类型 [根文件夹别名]
46 | table.string('dir').notNullable(); // VARCHAR 类型 [相对存储路径]
47 | table.string('title').notNullable(); // VARCHAR 类型 [音声名称]
48 | table.integer('circle_id').notNullable(); // INTEGER 类型 [社团id]
49 | table.boolean('nsfw').notNullable(); // BOOLEAN 类型
50 | table.string('release').notNullable(); // VARCHAR 类型 [贩卖日 (YYYY-MM-DD)]
51 |
52 | table.integer('dl_count').notNullable(); // INTEGER 类型 [售出数]
53 | table.integer('price').notNullable(); // INTEGER 类型 [价格]
54 | table.integer('review_count').notNullable(); // INTEGER 类型 [评论数量]
55 | table.integer('rate_count').notNullable(); // INTEGER 类型 [评价数量]
56 | table.float('rate_average_2dp').notNullable(); // FLOAT 类型 [平均评价]
57 | table.text('rate_count_detail').notNullable(); // TEXT 类型 [评价分布明细]
58 | table.text('rank'); // TEXT 类型 [历史销售业绩]
59 |
60 | table.foreign('circle_id').references('id').inTable('t_circle'); // FOREIGN KEY 外键
61 | table.index(['circle_id', 'release', 'dl_count', 'review_count', 'price', 'rate_average_2dp'], 't_work_circle_id_release_dl_count_review_count_price_rate_average_2dp_index'); // INDEX 索引
62 | })
63 | await trx.raw(`INSERT INTO t_work_new SELECT * FROM t_work;`);
64 | await trx.raw(`DROP TABLE t_work;`);
65 | await trx.raw(`ALTER TABLE t_work_new RENAME TO t_work;`);
66 | }).catch(err => console.error(err));
67 |
68 | await knex.raw(`PRAGMA foreign_keys=on;`);
69 | };
70 |
--------------------------------------------------------------------------------
/database/migrations/20210213224539_alter_t_va.js:
--------------------------------------------------------------------------------
// Purpose: change t_va(id) to store string UUID instead of old hash integer
// See GitHub issue #22
exports.up = async function(knex) {
  // Note: SQLite disables foreign keys by default.
  // I did not set PRAGMA foreign_keys=on in afterCreate()
  // this up() function is wrapped inside a transaction, and SQLite does not support changing PRAGMA in transactions.
  try {
    // Will be recreated by the main program
    await knex.raw('DROP VIEW IF EXISTS userMetadata;');
    // Change t_va(id) data type to TEXT via a table swap —
    // SQLite has very limited support for ALTER TABLE.
    await knex.raw(`
      CREATE TABLE t_va_new (
        id TEXT,
        name TEXT NOT NULL,
        PRIMARY KEY(id)
      );
    `);
    await knex.raw('INSERT INTO t_va_new SELECT * FROM t_va;');
    await knex.raw('DROP TABLE t_va;');
    await knex.raw('ALTER TABLE t_va_new RENAME TO t_va;');

    // Change r_va_work(va_id) data type to TEXT, adding CASCADE actions,
    // using the same table-swap technique.
    await knex.raw(`
      CREATE TABLE r_va_work_new (
        va_id TEXT,
        work_id INTEGER,
        FOREIGN KEY(va_id) REFERENCES t_va(id) ON DELETE CASCADE ON UPDATE CASCADE,
        FOREIGN KEY(work_id) REFERENCES t_work(id) ON DELETE CASCADE ON UPDATE CASCADE,
        PRIMARY KEY(va_id, work_id)
      );
    `);
    await knex.raw('INSERT INTO r_va_work_new(va_id, work_id) SELECT va_id, work_id FROM r_va_work;');
    await knex.raw('DROP TABLE r_va_work;');
    await knex.raw('ALTER TABLE r_va_work_new RENAME TO r_va_work;');
  } catch(err) {
    // Log and rethrow so the migration is recorded as failed.
    console.error(err);
    throw err;
  }
};
42 |
// Revert: restore the old INTEGER hash ids (the UUID -> hash data conversion
// itself happens in the 20210213233544 migration's down()).
exports.down = async function(knex) {
  try {
    // Will be recreated by the main program
    await knex.raw('DROP VIEW IF EXISTS userMetadata;');
    // Change t_va(id) data type back to INTEGER via a table swap —
    // SQLite has very limited support for ALTER TABLE.
    // (The original comment here incorrectly said "to TEXT".)
    await knex.raw(`
      CREATE TABLE t_va_new (
        id INTEGER,
        name TEXT NOT NULL,
        PRIMARY KEY(id)
      );
    `);
    await knex.raw('INSERT INTO t_va_new SELECT * FROM t_va;');
    await knex.raw('DROP TABLE t_va;');
    await knex.raw('ALTER TABLE t_va_new RENAME TO t_va;');

    // Change r_va_work(va_id) back to INTEGER, without CASCADE actions,
    // using the same table-swap technique.
    await knex.raw(`
      CREATE TABLE r_va_work_new (
        va_id INTEGER,
        work_id INTEGER,
        FOREIGN KEY(va_id) REFERENCES t_va(id),
        FOREIGN KEY(work_id) REFERENCES t_work(id),
        PRIMARY KEY(va_id, work_id)
      );
    `);
    await knex.raw('INSERT INTO r_va_work_new(va_id, work_id) SELECT va_id, work_id FROM r_va_work;');
    await knex.raw('DROP TABLE r_va_work;');
    await knex.raw('ALTER TABLE r_va_work_new RENAME TO r_va_work;');
  } catch(err) {
    // Log and rethrow so the rollback is recorded as failed.
    console.error(err);
    throw err;
  }
};
79 |
--------------------------------------------------------------------------------
/database/migrations/20210213233544_fill_va_uuid.js:
--------------------------------------------------------------------------------
1 | const { v5: uuidv5 } = require('uuid');
2 |
3 | exports.up = async function(knex) {
4 | const nameToUUID = (name) => {
5 | const namespace = '699d9c07-b965-4399-bafd-18a3cacf073c';
6 | return uuidv5(name, namespace);
7 | };
8 |
9 | const records = await knex('t_va').select('id', 'name');
10 | try {
11 | await knex.transaction(async (trx) => {
12 | for (const item of records) {
13 | // Note: foreign key integrity checks and actions are disabled by default in SQLite
14 | // I keep them disabled in migrations.
15 | const oldHash = item['id'];
16 | const name = item['name'];
17 | const newHash = nameToUUID(name);
18 | await trx('t_va').update('id', newHash).where('name', name);
19 | await trx('r_va_work').update('va_id', newHash).where('va_id', oldHash);
20 | }
21 | });
22 | } catch (err) {
23 | console.error(err);
24 | throw err;
25 | }
26 | };
27 |
28 | exports.down = async function(knex) {
29 | const hashNameIntoInt = (name) => {
30 | let hash = '';
31 |
32 | for (let i = 0; i < name.length; i += 1) {
33 | const char = name.charCodeAt(i);
34 | // eslint-disable-next-line no-bitwise
35 | hash = ((hash << 5) - hash) + char;
36 | }
37 |
38 | // eslint-disable-next-line no-bitwise
39 | hash |= 0;
40 | hash = Math.abs(Math.round(hash / 1000));
41 | return hash;
42 | };
43 |
44 | const records = await knex('t_va').select('id', 'name');
45 | try {
46 | await knex.transaction(async (trx) => {
47 | for (const item of records) {
48 | const name = item['name'];
49 | await trx('t_va').update('id', hashNameIntoInt(name)).where('name', name);
50 | await trx('r_va_work').update('va_id', hashNameIntoInt(name)).where('va_id', item['id']);
51 | }
52 | });
53 | } catch (err) {
54 | console.error(err);
55 | throw err;
56 | }
57 | };
58 |
--------------------------------------------------------------------------------
/database/migrations/20210223200240_drop_not_null_constraints.js:
--------------------------------------------------------------------------------
// I forgot to change schema.js for db initialization
// This migration tries to fix the inconsistency: rebuild t_work so freshly
// initialized and migrated databases end up with the same (relaxed) schema.
exports.up = async function(knex) {
  try {
    // Drop whichever of the two index names the database currently has.
    await knex.raw('DROP INDEX IF EXISTS t_work_circle_id_release_dl_count_review_count_price_rate_average_2dp_index');
    await knex.raw('DROP INDEX IF EXISTS t_work_index');
    // Will be recreated by the main program
    await knex.raw('DROP VIEW IF EXISTS userMetadata');

    // SQLite allows DDL in transaction
    await knex.schema.createTable('t_work_new', (table) => {
      table.increments(); // auto-increment id, used as primary key [work id]
      table.string('root_folder').notNullable(); // VARCHAR [root folder alias]
      table.string('dir').notNullable(); // VARCHAR [relative storage path]
      table.string('title').notNullable(); // VARCHAR [work title]
      table.integer('circle_id').notNullable(); // INTEGER [circle id]
      table.boolean('nsfw'); // BOOLEAN
      table.string('release'); // VARCHAR [release date (YYYY-MM-DD)]

      table.integer('dl_count'); // INTEGER [sales count]
      table.integer('price'); // INTEGER [price]
      table.integer('review_count'); // INTEGER [review count]
      table.integer('rate_count'); // INTEGER [rating count]
      table.float('rate_average_2dp'); // FLOAT [average rating]
      table.text('rate_count_detail'); // TEXT [rating distribution detail]
      table.text('rank'); // TEXT [historical sales ranking]

      table.foreign('circle_id').references('id').inTable('t_circle'); // FOREIGN KEY
      table.index(['circle_id', 'release', 'dl_count', 'review_count', 'price', 'rate_average_2dp'], 't_work_index'); // INDEX
    })

    await knex.raw(`INSERT INTO t_work_new SELECT * FROM t_work;`);
    await knex.raw(`DROP TABLE t_work;`);
    await knex.raw(`ALTER TABLE t_work_new RENAME TO t_work;`);
  } catch (error) {
    console.error(error);
    throw error;
  }
};
40 |
// Cannot be reverted: the dropped NOT NULL constraints carry no data, and the
// previous schema variant is ambiguous (see up()).
// eslint-disable-next-line no-unused-vars
exports.down = async function(knex) {

};
46 |
--------------------------------------------------------------------------------
/database/migrations/20210301141407_fix_title_redundant_chars.js:
--------------------------------------------------------------------------------
1 | // Issue #29
2 | exports.up = async function(knex) {
3 | const fixTitle = (title) => {
4 | const pattern = /(円|日元)$/u;
5 | return title.replace(pattern, '');
6 | };
7 |
8 | const records = await knex('t_work').select('id', 'title');
9 | try {
10 | for (const item of records) {
11 | const oldTitle = item['title'];
12 | const newTitle = fixTitle(oldTitle);
13 | await knex('t_work').update('title', newTitle).where('id', item['id']);
14 | }
15 | } catch (err) {
16 | console.error(err);
17 | throw err;
18 | }
19 | };
20 |
// Cannot be reverted: the stripped suffixes are lost.
// eslint-disable-next-line no-unused-vars
exports.down = async function(knex) {

};
26 |
--------------------------------------------------------------------------------
/database/migrations/20210304101412_fix_view_userMetatdata.js:
--------------------------------------------------------------------------------
1 | exports.up = async function(knex) {
2 | try {
3 | // Will be recreated by the main program
4 | await knex.raw('DROP VIEW IF EXISTS userMetadata');
5 | } catch (error) {
6 | console.error(error);
7 | throw error;
8 | }
9 | };
10 |
// Problematic queries ! This recreates the OLD, buggy view: it groups only by
// t_work.id (unlike the fixed version, which groups by work AND user), so
// with multiple reviewers per work rows collapse arbitrarily.
// eslint-disable-next-line no-unused-vars
exports.down = async function(knex) {
  try {
    await knex.raw('DROP VIEW IF EXISTS userMetadata');
    await knex.raw(
      `CREATE VIEW IF NOT EXISTS userMetadata AS
      SELECT t_work.id,
        t_work.title,
        json_object('id', t_work.circle_id, 'name', t_circle.name) AS circleObj,
        t_work.release,
        t_work.review_count,
        t_work.dl_count,
        t_work.nsfw,
        t_va.id AS vaid,
        t_va.name AS vaname,
        userrate.userRating,
        userrate.review_text,
        userrate.progress,
        userrate.updated_at,
        json_object('vas', json_group_array(json_object('id', t_va.id, 'name', t_va.name))) AS vaObj,
        userrate.user_name
      FROM t_work
      JOIN t_circle on t_circle.id = t_work.circle_id
      JOIN r_va_work on r_va_work.work_id = t_work.id
      join t_va on t_va.id = r_va_work.va_id
      JOIN (
        SELECT t_review.work_id,
          t_review.rating AS userRating,
          t_review.review_text,
          t_review.progress,
          strftime('%Y-%m-%d %H-%M-%S', t_review.updated_at, 'localtime') AS updated_at,
          t_review.user_name
        FROM t_review
        JOIN t_work on t_work.id = t_review.work_id
      ) AS userrate
      ON userrate.work_id = t_work.id
      GROUP BY t_work.id
      `);
  } catch (error) {
    console.error(error);
    throw error;
  }
};
55 |
--------------------------------------------------------------------------------
/database/migrations/20210305161906_remove_ddl_from_review_api.js:
--------------------------------------------------------------------------------
// Create the userMetadata view here (grouped by work AND user) so the review
// API no longer needs to issue this DDL at runtime.
exports.up = async function(knex) {
  try {
    await knex.schema.raw(
      `CREATE VIEW IF NOT EXISTS userMetadata AS
      SELECT t_work.id,
        t_work.title,
        json_object('id', t_work.circle_id, 'name', t_circle.name) AS circleObj,
        t_work.release,
        t_work.review_count,
        t_work.dl_count,
        t_work.nsfw,
        userrate.userRating,
        userrate.review_text,
        userrate.progress,
        userrate.updated_at,
        json_object('vas', json_group_array(json_object('id', t_va.id, 'name', t_va.name))) AS vaObj,
        userrate.user_name
      FROM t_work
      JOIN t_circle on t_circle.id = t_work.circle_id
      JOIN r_va_work on r_va_work.work_id = t_work.id
      join t_va on t_va.id = r_va_work.va_id
      JOIN (
        SELECT t_review.work_id,
          t_review.rating AS userRating,
          t_review.review_text,
          t_review.progress,
          strftime('%Y-%m-%d %H-%M-%S', t_review.updated_at, 'localtime') AS updated_at,
          t_review.user_name
        FROM t_review
        JOIN t_work on t_work.id = t_review.work_id
      ) AS userrate
      ON userrate.work_id = t_work.id
      GROUP BY t_work.id, userrate.user_name
      `);
  } catch (error) {
    console.error(error);
    throw error;
  }
};
40 |
// Do nothing: the view is harmless to leave in place on rollback.
// eslint-disable-next-line no-unused-vars
exports.down = async function(knex) {

};
46 |
--------------------------------------------------------------------------------
/database/migrations/20210307061415_refactor_queries.js:
--------------------------------------------------------------------------------
// Create the staticMetadata view: one row per work, with the circle inlined
// and the VAs and tags aggregated into JSON objects (vaObj / tagObj).
exports.up = async function(knex) {
  await knex.raw(`
  CREATE VIEW IF NOT EXISTS staticMetadata AS
  SELECT baseQueryWithVA.*,
    json_object('tags', json_group_array(json_object('id', t_tag.id, 'name', t_tag.name))) AS tagObj
  FROM (
    SELECT baseQuery.*,
      json_object('vas', json_group_array(json_object('id', t_va.id, 'name', t_va.name))) AS vaObj
    FROM (
      SELECT t_work.id,
        t_work.title,
        t_work.circle_id,
        t_circle.name,
        json_object('id', t_work.circle_id, 'name', t_circle.name) AS circleObj,
        t_work.nsfw,
        t_work.release,
        t_work.dl_count,
        t_work.price,
        t_work.review_count,
        t_work.rate_count,
        t_work.rate_average_2dp,
        t_work.rate_count_detail,
        t_work.rank
      FROM t_work
      JOIN t_circle ON t_circle.id = t_work.circle_id
    ) AS baseQuery
    JOIN r_va_work ON r_va_work.work_id = baseQuery.id
    JOIN t_va ON t_va.id = r_va_work.va_id
    GROUP BY baseQuery.id
  ) AS baseQueryWithVA
  LEFT JOIN r_tag_work ON r_tag_work.work_id = baseQueryWithVA.id
  LEFT JOIN t_tag ON t_tag.id = r_tag_work.tag_id
  GROUP BY baseQueryWithVA.id;
  `)
};
36 |
37 | // Will break most of the queries! You will need to switch to an earlier version of code.
38 | exports.down = async function(knex) {
39 | knex.raw(`DROP VIEW IF EXISTS staticMetadata`)
40 | };
41 |
--------------------------------------------------------------------------------
/database/migrations/20210502081522_remove_obsolete_view.js:
--------------------------------------------------------------------------------
1 | // Replaced by staticMetadata
2 | exports.up = async function(knex) {
3 | await knex.schema.raw('DROP VIEW IF EXISTS userMetadata');
4 | };
5 |
// Bring up old userMetadata to revert (same definition as 20210305161906).
exports.down = async function(knex) {
  try {
    await knex.schema.raw(
      `CREATE VIEW IF NOT EXISTS userMetadata AS
      SELECT t_work.id,
        t_work.title,
        json_object('id', t_work.circle_id, 'name', t_circle.name) AS circleObj,
        t_work.release,
        t_work.review_count,
        t_work.dl_count,
        t_work.nsfw,
        userrate.userRating,
        userrate.review_text,
        userrate.progress,
        userrate.updated_at,
        json_object('vas', json_group_array(json_object('id', t_va.id, 'name', t_va.name))) AS vaObj,
        userrate.user_name
      FROM t_work
      JOIN t_circle on t_circle.id = t_work.circle_id
      JOIN r_va_work on r_va_work.work_id = t_work.id
      join t_va on t_va.id = r_va_work.va_id
      JOIN (
        SELECT t_review.work_id,
          t_review.rating AS userRating,
          t_review.review_text,
          t_review.progress,
          strftime('%Y-%m-%d %H-%M-%S', t_review.updated_at, 'localtime') AS updated_at,
          t_review.user_name
        FROM t_review
        JOIN t_work on t_work.id = t_review.work_id
      ) AS userrate
      ON userrate.work_id = t_work.id
      GROUP BY t_work.id, userrate.user_name
      `);
  } catch (error) {
    console.error(error);
    throw error;
  }
};
46 |
--------------------------------------------------------------------------------
/database/schema.js:
--------------------------------------------------------------------------------
1 | const { knex } = require('./db');
2 |
// Schema version stamp: the timestamp of the newest migration this schema
// already incorporates (see database/migrations/).
const dbVersion = '20210502081522';
4 |
// Database schema.
// Builds every table plus the staticMetadata view in a single knex
// schema-builder chain and returns the resulting promise. If the schema
// already exists the "table already exists" error is treated as success.
const createSchema = () => knex.schema
  .createTable('t_circle', (table) => {
    table.increments(); // auto-increment INTEGER id, used as primary key [circle id]
    table.string('name').notNullable(); // VARCHAR [circle name]
  })
  .createTable('t_work', (table) => {
    table.increments(); // auto-increment INTEGER id, used as primary key [work id]
    table.string('root_folder').notNullable(); // VARCHAR [root folder alias]
    table.string('dir').notNullable(); // VARCHAR [relative storage path]
    table.string('title').notNullable(); // VARCHAR [work title]
    table.integer('circle_id').notNullable(); // INTEGER [circle id]
    table.boolean('nsfw'); // BOOLEAN
    table.string('release'); // VARCHAR [release date (YYYY-MM-DD)]
    table.dateTime('created_at'); // DATETIME (was missing its statement terminator)
    table.boolean('lrc'); // BOOLEAN — presumably "has lyrics"; TODO confirm (was terminated with a comma operator)
    table.integer('dl_count'); // INTEGER [sales count]
    table.integer('price'); // INTEGER [price]
    table.integer('review_count'); // INTEGER [number of reviews]
    table.integer('rate_count'); // INTEGER [number of ratings]
    table.float('rate_average_2dp'); // FLOAT [average rating, 2 decimal places]
    table.text('rate_count_detail'); // TEXT [rating distribution detail]
    table.text('rank'); // TEXT [historical sales ranking]

    table.foreign('circle_id').references('id').inTable('t_circle'); // FOREIGN KEY
    table.index(['circle_id', 'release', 'dl_count', 'review_count', 'price', 'rate_average_2dp'], 't_work_index'); // INDEX
  })
  .createTable('t_tag', (table) => {
    table.increments(); // auto-increment INTEGER id, used as primary key [tag id]
    table.string('name').notNullable(); // VARCHAR [tag name]
  })
  .createTable('t_va', (table) => {
    table.string('id'); // UUID v5, deterministically derived from name
    table.string('name').notNullable(); // VARCHAR [voice actor name]
    table.primary('id');
  })
  .createTable('r_tag_work', (table) => {
    table.integer('tag_id');
    table.integer('work_id');
    table.foreign('tag_id').references('id').inTable('t_tag'); // FOREIGN KEY
    table.foreign('work_id').references('id').inTable('t_work'); // FOREIGN KEY
    table.primary(['tag_id', 'work_id']); // composite PRIMARY KEY
  })
  .createTable('r_va_work', (table) => {
    table.string('va_id');
    table.integer('work_id');
    table.foreign('va_id').references('id').inTable('t_va').onUpdate('CASCADE').onDelete('CASCADE'); // FOREIGN KEY
    table.foreign('work_id').references('id').inTable('t_work').onUpdate('CASCADE').onDelete('CASCADE'); // FOREIGN KEY
    table.primary(['va_id', 'work_id']); // composite PRIMARY KEY
  })
  .createTable('t_user', (table) => {
    table.string('name').notNullable();
    table.string('password').notNullable();
    table.string('group').notNullable(); // USER / ADMIN / guest
    table.primary(['name']); // PRIMARY KEY
  })
  .createTable('t_review', (table) => {
    table.string('user_name').notNullable();
    // NOTE(review): declared VARCHAR while t_work.id is INTEGER — kept as-is
    // to match the existing migrations; changing it would break live DBs.
    table.string('work_id').notNullable();
    table.integer('rating'); // user rating 1-5
    table.string('review_text'); // user review text
    table.timestamps(true, true); // created_at / updated_at timestamps
    table.string('progress'); // ['marked', 'listening', 'listened', 'replay','postponed', null]
    table.foreign('user_name').references('name').inTable('t_user').onDelete('CASCADE'); // FOREIGN KEY
    table.foreign('work_id').references('id').inTable('t_work').onDelete('CASCADE'); // FOREIGN KEY
    table.primary(['user_name', 'work_id']); // composite PRIMARY KEY
  })
  .createTable('t_history', (table) => {
    table.string('user_name', 255).notNullable();
    table.string('work_id', 255).notNullable();
    table.string('hash', 255);
    table.string('track_name', 255);
    table.decimal('play_time');
    table.timestamp('updateTime').defaultTo(knex.fn.now());
    table.primary(['user_name', 'work_id']);
  })
  // Denormalized read view: one row per work with circle, VA list, tag list
  // and playback history folded in as JSON. Works without any VA row are
  // excluded by the inner JOIN on r_va_work (pre-existing behavior).
  .raw(`
    CREATE VIEW IF NOT EXISTS staticMetadata AS
    SELECT baseQueryWithVA.*
      , json_object('tags', json_group_array(json_object('id', t_tag.id, 'name', t_tag.name))) AS tagObj
    FROM (
      SELECT baseQuery.*
        , json_object('vas', json_group_array(DISTINCT json_object('id', t_va.id, 'name', t_va.name))) AS vaObj
        , CASE
          WHEN COUNT(t_history.user_name) > 0 THEN json_object('history', json_group_array(json_object('play_time', t_history.play_time, 'user_name', t_history.user_name, 'track_name', t_history.track_name)))
          ELSE NULL
        END AS hisObj
      FROM (
        SELECT t_work.id, t_work.title, t_work.circle_id, t_circle.name
          , json_object('id', t_work.circle_id, 'name', t_circle.name) AS circleObj
          , t_work.nsfw, t_work.release, t_work.dl_count, t_work.price, t_work.created_at
          , t_work.lrc, t_work.review_count, t_work.rate_count, t_work.rate_average_2dp, t_work.rate_count_detail
          , t_work.rank
        FROM t_work
        JOIN t_circle ON t_circle.id = t_work.circle_id
      ) baseQuery
      JOIN r_va_work ON r_va_work.work_id = baseQuery.id
      JOIN t_va ON t_va.id = r_va_work.va_id
      LEFT JOIN t_history t_history ON t_history.work_id = baseQuery.id
      GROUP BY baseQuery.id
    ) baseQueryWithVA
    LEFT JOIN r_tag_work ON r_tag_work.work_id = baseQueryWithVA.id
    LEFT JOIN t_tag ON t_tag.id = r_tag_work.tag_id
    GROUP BY baseQueryWithVA.id
  `)
  .then(() => {
    console.log(' * 成功构建数据库结构.');
  })
  .catch((err) => {
    // Fragile by design: detects "already exists" by matching the driver's
    // error text for the first table in the chain. Anything else is fatal.
    if (err.toString().indexOf('table `t_circle` already exists') !== -1) {
      console.log(' * 数据库结构已经存在.');
    } else {
      throw err;
    }
  });
120 |
// Expose the schema builder and the expected schema version.
module.exports = { createSchema, dbVersion };
122 |
--------------------------------------------------------------------------------
/database/storage.js:
--------------------------------------------------------------------------------
// 来自knex-migrate,用于解决knex migration API和knex-migrate在Windows上打包后
// 仍然使用绝对路径导致找不到文件的问题
3 |
4 | const invariant = require('invariant')
5 | const { get } = require('lodash')
6 |
/**
 * Heuristically decides whether a query error means the given table is
 * missing, covering MySQL (error code), PostgreSQL and SQLite (message text).
 *
 * @param {Error & { code?: string }} err - error thrown by a knex query
 * @param {string} table - table name expected in the error message
 * @returns {boolean} true when the error indicates the table does not exist
 */
function tableDoesNotExist (err, table) {
  // Escape regex metacharacters so an unusual table name cannot break or
  // loosen the message patterns below (previously interpolated verbatim).
  const safeTable = table.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')
  return (
    err.code === 'ER_NO_SUCH_TABLE' || // MySQL
    new RegExp(`relation "${safeTable}" does not exist`).test(err.message) || // PostgreSQL
    new RegExp(`no such table: ${safeTable}`).test(err.message) // SQLite
  )
}
14 |
15 | module.exports = class KnexStorage {
16 | constructor (options) {
17 | this.knex = options.connection
18 | this.tableName = get(
19 | this.knex,
20 | 'client.config.migrations.tableName',
21 | 'knex_migrations'
22 | )
23 | invariant(
24 | this.knex,
25 | "The option 'options.storageOptions.connection' is required."
26 | )
27 | }
28 |
29 | ensureTable () {
30 | return this.knex(this.tableName)
31 | .count('id')
32 | .catch(err => {
33 | if (tableDoesNotExist(err, this.tableName)) {
34 | return this.knex.schema.createTable(this.tableName, table => {
35 | table.increments()
36 | table.string('name')
37 | table.integer('batch')
38 | table.dateTime('migration_time')
39 | })
40 | }
41 |
42 | throw err
43 | })
44 | }
45 |
46 | async logMigration (migrationName) {
47 | if (typeof this.currentBatch === 'undefined') {
48 | this.currentBatch = this.getCurrentBatch()
49 | }
50 |
51 | const currentBatch = await this.currentBatch
52 |
53 | return this.knex(this.tableName).insert({
54 | name: migrationName,
55 | batch: currentBatch + 1,
56 | migration_time: new Date() // eslint-disable-line camelcase
57 | })
58 | }
59 |
60 | unlogMigration (migrationName) {
61 | return this.knex(this.tableName)
62 | .where('name', migrationName)
63 | .del()
64 | }
65 |
66 | migrations () {
67 | return this.knex(this.tableName)
68 | .select()
69 | .orderBy('id', 'asc')
70 | }
71 |
72 | executed () {
73 | return this.knex(this.tableName)
74 | .orderBy('id', 'asc')
75 | .pluck('name')
76 | .catch(err => {
77 | if (tableDoesNotExist(err, this.tableName)) {
78 | return []
79 | }
80 |
81 | throw err
82 | })
83 | }
84 |
85 | getCurrentBatch () {
86 | return this.knex(this.tableName)
87 | .max('batch as max_batch')
88 | .then(obj => obj[0].max_batch || 0)
89 | }
90 | }
--------------------------------------------------------------------------------
/dist/fonts/KFOkCnqEu92Fr1MmgVxIIzQ.9391e6e2.woff:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/KirieHaruna/kikoeru-express/7f9521b43c60373fb05e589d8030cdacaee09662/dist/fonts/KFOkCnqEu92Fr1MmgVxIIzQ.9391e6e2.woff
--------------------------------------------------------------------------------
/dist/fonts/KFOlCnqEu92Fr1MmEU9fBBc-.ddd11dab.woff:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/KirieHaruna/kikoeru-express/7f9521b43c60373fb05e589d8030cdacaee09662/dist/fonts/KFOlCnqEu92Fr1MmEU9fBBc-.ddd11dab.woff
--------------------------------------------------------------------------------
/dist/fonts/KFOlCnqEu92Fr1MmSU5fBBc-.877b9231.woff:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/KirieHaruna/kikoeru-express/7f9521b43c60373fb05e589d8030cdacaee09662/dist/fonts/KFOlCnqEu92Fr1MmSU5fBBc-.877b9231.woff
--------------------------------------------------------------------------------
/dist/fonts/KFOlCnqEu92Fr1MmWUlfBBc-.0344cc3c.woff:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/KirieHaruna/kikoeru-express/7f9521b43c60373fb05e589d8030cdacaee09662/dist/fonts/KFOlCnqEu92Fr1MmWUlfBBc-.0344cc3c.woff
--------------------------------------------------------------------------------
/dist/fonts/KFOlCnqEu92Fr1MmYUtfBBc-.b555d228.woff:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/KirieHaruna/kikoeru-express/7f9521b43c60373fb05e589d8030cdacaee09662/dist/fonts/KFOlCnqEu92Fr1MmYUtfBBc-.b555d228.woff
--------------------------------------------------------------------------------
/dist/fonts/KFOmCnqEu92Fr1Mu4mxM.9b78ea3b.woff:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/KirieHaruna/kikoeru-express/7f9521b43c60373fb05e589d8030cdacaee09662/dist/fonts/KFOmCnqEu92Fr1Mu4mxM.9b78ea3b.woff
--------------------------------------------------------------------------------
/dist/fonts/flUhRq6tzZclQEJ-Vdg-IuiaDsNa.0d57c481.woff:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/KirieHaruna/kikoeru-express/7f9521b43c60373fb05e589d8030cdacaee09662/dist/fonts/flUhRq6tzZclQEJ-Vdg-IuiaDsNa.0d57c481.woff
--------------------------------------------------------------------------------
/dist/fonts/flUhRq6tzZclQEJ-Vdg-IuiaDsNa.6b3adb7e.woff:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/KirieHaruna/kikoeru-express/7f9521b43c60373fb05e589d8030cdacaee09662/dist/fonts/flUhRq6tzZclQEJ-Vdg-IuiaDsNa.6b3adb7e.woff
--------------------------------------------------------------------------------
/dist/fonts/flUhRq6tzZclQEJ-Vdg-IuiaDsNcIhQ8tQ.ae520e14.woff2:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/KirieHaruna/kikoeru-express/7f9521b43c60373fb05e589d8030cdacaee09662/dist/fonts/flUhRq6tzZclQEJ-Vdg-IuiaDsNcIhQ8tQ.ae520e14.woff2
--------------------------------------------------------------------------------
/dist/fonts/flUhRq6tzZclQEJ-Vdg-IuiaDsNcIhQ8tQ.b833408f.woff2:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/KirieHaruna/kikoeru-express/7f9521b43c60373fb05e589d8030cdacaee09662/dist/fonts/flUhRq6tzZclQEJ-Vdg-IuiaDsNcIhQ8tQ.b833408f.woff2
--------------------------------------------------------------------------------
/dist/index.html:
--------------------------------------------------------------------------------
1 |
Kikoeru
--------------------------------------------------------------------------------
/dist/js/runtime.4271bde0.js:
--------------------------------------------------------------------------------
1 | (function(e){function r(r){for(var n,a,i=r[0],c=r[1],l=r[2],f=0,s=[];f
--------------------------------------------------------------------------------
/dist/statics/material.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/KirieHaruna/kikoeru-express/7f9521b43c60373fb05e589d8030cdacaee09662/dist/statics/material.png
--------------------------------------------------------------------------------
/dist/workbox-v4.3.1/workbox-background-sync.prod.js:
--------------------------------------------------------------------------------
1 | this.workbox=this.workbox||{},this.workbox.backgroundSync=function(t,e,s){"use strict";try{self["workbox:background-sync:4.3.1"]&&_()}catch(t){}const i=3,n="workbox-background-sync",a="requests",r="queueName";class c{constructor(t){this.t=t,this.s=new s.DBWrapper(n,i,{onupgradeneeded:this.i})}async pushEntry(t){delete t.id,t.queueName=this.t,await this.s.add(a,t)}async unshiftEntry(t){const[e]=await this.s.getAllMatching(a,{count:1});e?t.id=e.id-1:delete t.id,t.queueName=this.t,await this.s.add(a,t)}async popEntry(){return this.h({direction:"prev"})}async shiftEntry(){return this.h({direction:"next"})}async getAll(){return await this.s.getAllMatching(a,{index:r,query:IDBKeyRange.only(this.t)})}async deleteEntry(t){await this.s.delete(a,t)}async h({direction:t}){const[e]=await this.s.getAllMatching(a,{direction:t,index:r,query:IDBKeyRange.only(this.t),count:1});if(e)return await this.deleteEntry(e.id),e}i(t){const e=t.target.result;t.oldVersion>0&&t.oldVersiont?await this.m.deleteEntry(i.id):s.push(f(i))}return s}async g({request:t,metadata:e,timestamp:s=Date.now()},i){const n={requestData:(await o.fromRequest(t.clone())).toObject(),timestamp:s};e&&(n.metadata=e),await this.m[`${i}Entry`](n),this.k?this.D=!0:await this.registerSync()}async R(t){const e=Date.now(),s=await this.m[`${t}Entry`]();if(s){const i=60*this.q*1e3;return e-s.timestamp>i?this.R(t):f(s)}}async replayRequests(){let t;for(;t=await this.shiftRequest();)try{await fetch(t.request.clone())}catch(s){throw await this.unshiftRequest(t),new e.WorkboxError("queue-replay-failed",{name:this.u})}}async registerSync(){if("sync"in registration)try{await registration.sync.register(`${u}:${this.u}`)}catch(t){}}p(){"sync"in registration?self.addEventListener("sync",t=>{if(t.tag===`${u}:${this.u}`){const e=async()=>{let e;this.k=!0;try{await this.l({queue:this})}catch(t){throw e=t}finally{!this.D||e&&!t.lastChance||await this.registerSync(),this.k=!1,this.D=!1}};t.waitUntil(e())}}):this.l({queue:this})}static 
get _(){return w}}const f=t=>{const e={request:new o(t.requestData).toRequest(),timestamp:t.timestamp};return t.metadata&&(e.metadata=t.metadata),e};return t.Queue=d,t.Plugin=class{constructor(...t){this.v=new d(...t),this.fetchDidFail=this.fetchDidFail.bind(this)}async fetchDidFail({request:t}){await this.v.pushRequest({request:t})}},t}({},workbox.core._private,workbox.core._private);
2 | //# sourceMappingURL=workbox-background-sync.prod.js.map
3 |
--------------------------------------------------------------------------------
/dist/workbox-v4.3.1/workbox-broadcast-update.prod.js:
--------------------------------------------------------------------------------
1 | this.workbox=this.workbox||{},this.workbox.broadcastUpdate=function(e,t){"use strict";try{self["workbox:broadcast-update:4.3.1"]&&_()}catch(e){}const s=(e,t,s)=>{return!s.some(s=>e.headers.has(s)&&t.headers.has(s))||s.every(s=>{const n=e.headers.has(s)===t.headers.has(s),a=e.headers.get(s)===t.headers.get(s);return n&&a})},n="workbox",a=1e4,i=["content-length","etag","last-modified"],o=async({channel:e,cacheName:t,url:s})=>{const n={type:"CACHE_UPDATED",meta:"workbox-broadcast-update",payload:{cacheName:t,updatedURL:s}};if(e)e.postMessage(n);else{const e=await clients.matchAll({type:"window"});for(const t of e)t.postMessage(n)}};class c{constructor({headersToCheck:e,channelName:t,deferNoticationTimeout:s}={}){this.t=e||i,this.s=t||n,this.i=s||a,this.o()}notifyIfUpdated({oldResponse:e,newResponse:t,url:n,cacheName:a,event:i}){if(!s(e,t,this.t)){const e=(async()=>{i&&i.request&&"navigate"===i.request.mode&&await this.h(i),await this.l({channel:this.u(),cacheName:a,url:n})})();if(i)try{i.waitUntil(e)}catch(e){}return e}}async l(e){await o(e)}u(){return"BroadcastChannel"in self&&!this.p&&(this.p=new BroadcastChannel(this.s)),this.p}h(e){if(!this.m.has(e)){const s=new t.Deferred;this.m.set(e,s);const n=setTimeout(()=>{s.resolve()},this.i);s.promise.then(()=>clearTimeout(n))}return this.m.get(e).promise}o(){this.m=new Map,self.addEventListener("message",e=>{if("WINDOW_READY"===e.data.type&&"workbox-window"===e.data.meta&&this.m.size>0){for(const e of this.m.values())e.resolve();this.m.clear()}})}}return e.BroadcastCacheUpdate=c,e.Plugin=class{constructor(e){this.l=new c(e)}cacheDidUpdate({cacheName:e,oldResponse:t,newResponse:s,request:n,event:a}){t&&this.l.notifyIfUpdated({cacheName:e,oldResponse:t,newResponse:s,event:a,url:n.url})}},e.broadcastUpdate=o,e.responsesAreSame=s,e}({},workbox.core._private);
2 | //# sourceMappingURL=workbox-broadcast-update.prod.js.map
3 |
--------------------------------------------------------------------------------
/dist/workbox-v4.3.1/workbox-cacheable-response.prod.js:
--------------------------------------------------------------------------------
1 | this.workbox=this.workbox||{},this.workbox.cacheableResponse=function(t){"use strict";try{self["workbox:cacheable-response:4.3.1"]&&_()}catch(t){}class s{constructor(t={}){this.t=t.statuses,this.s=t.headers}isResponseCacheable(t){let s=!0;return this.t&&(s=this.t.includes(t.status)),this.s&&s&&(s=Object.keys(this.s).some(s=>t.headers.get(s)===this.s[s])),s}}return t.CacheableResponse=s,t.Plugin=class{constructor(t){this.i=new s(t)}cacheWillUpdate({response:t}){return this.i.isResponseCacheable(t)?t:null}},t}({});
2 | //# sourceMappingURL=workbox-cacheable-response.prod.js.map
3 |
--------------------------------------------------------------------------------
/dist/workbox-v4.3.1/workbox-cacheable-response.prod.js.map:
--------------------------------------------------------------------------------
1 | {"version":3,"file":"workbox-cacheable-response.prod.js","sources":["../_version.mjs","../CacheableResponse.mjs","../Plugin.mjs"],"sourcesContent":["try{self['workbox:cacheable-response:4.3.1']&&_()}catch(e){}// eslint-disable-line","/*\n Copyright 2018 Google LLC\n\n Use of this source code is governed by an MIT-style\n license that can be found in the LICENSE file or at\n https://opensource.org/licenses/MIT.\n*/\n\nimport {WorkboxError} from 'workbox-core/_private/WorkboxError.mjs';\nimport {assert} from 'workbox-core/_private/assert.mjs';\nimport {getFriendlyURL} from 'workbox-core/_private/getFriendlyURL.mjs';\nimport {logger} from 'workbox-core/_private/logger.mjs';\nimport './_version.mjs';\n\n/**\n * This class allows you to set up rules determining what\n * status codes and/or headers need to be present in order for a\n * [`Response`](https://developer.mozilla.org/en-US/docs/Web/API/Response)\n * to be considered cacheable.\n *\n * @memberof workbox.cacheableResponse\n */\nclass CacheableResponse {\n /**\n * To construct a new CacheableResponse instance you must provide at least\n * one of the `config` properties.\n *\n * If both `statuses` and `headers` are specified, then both conditions must\n * be met for the `Response` to be considered cacheable.\n *\n * @param {Object} config\n * @param {Array} [config.statuses] One or more status codes that a\n * `Response` can have and be considered cacheable.\n * @param {Object} [config.headers] A mapping of header names\n * and expected values that a `Response` can have and be considered cacheable.\n * If multiple headers are provided, only one needs to be present.\n */\n constructor(config = {}) {\n if (process.env.NODE_ENV !== 'production') {\n if (!(config.statuses || config.headers)) {\n throw new WorkboxError('statuses-or-headers-required', {\n moduleName: 'workbox-cacheable-response',\n className: 'CacheableResponse',\n funcName: 'constructor',\n });\n }\n\n if (config.statuses) {\n 
assert.isArray(config.statuses, {\n moduleName: 'workbox-cacheable-response',\n className: 'CacheableResponse',\n funcName: 'constructor',\n paramName: 'config.statuses',\n });\n }\n\n if (config.headers) {\n assert.isType(config.headers, 'object', {\n moduleName: 'workbox-cacheable-response',\n className: 'CacheableResponse',\n funcName: 'constructor',\n paramName: 'config.headers',\n });\n }\n }\n\n this._statuses = config.statuses;\n this._headers = config.headers;\n }\n\n /**\n * Checks a response to see whether it's cacheable or not, based on this\n * object's configuration.\n *\n * @param {Response} response The response whose cacheability is being\n * checked.\n * @return {boolean} `true` if the `Response` is cacheable, and `false`\n * otherwise.\n */\n isResponseCacheable(response) {\n if (process.env.NODE_ENV !== 'production') {\n assert.isInstance(response, Response, {\n moduleName: 'workbox-cacheable-response',\n className: 'CacheableResponse',\n funcName: 'isResponseCacheable',\n paramName: 'response',\n });\n }\n\n let cacheable = true;\n\n if (this._statuses) {\n cacheable = this._statuses.includes(response.status);\n }\n\n if (this._headers && cacheable) {\n cacheable = Object.keys(this._headers).some((headerName) => {\n return response.headers.get(headerName) === this._headers[headerName];\n });\n }\n\n if (process.env.NODE_ENV !== 'production') {\n if (!cacheable) {\n logger.groupCollapsed(`The request for ` +\n `'${getFriendlyURL(response.url)}' returned a response that does ` +\n `not meet the criteria for being cached.`);\n\n logger.groupCollapsed(`View cacheability criteria here.`);\n logger.log(`Cacheable statuses: ` +\n JSON.stringify(this._statuses));\n logger.log(`Cacheable headers: ` +\n JSON.stringify(this._headers, null, 2));\n logger.groupEnd();\n\n const logFriendlyHeaders = {};\n response.headers.forEach((value, key) => {\n logFriendlyHeaders[key] = value;\n });\n\n logger.groupCollapsed(`View response status and headers here.`);\n 
logger.log(`Response status: ` + response.status);\n logger.log(`Response headers: ` +\n JSON.stringify(logFriendlyHeaders, null, 2));\n logger.groupEnd();\n\n logger.groupCollapsed(`View full response details here.`);\n logger.log(response.headers);\n logger.log(response);\n logger.groupEnd();\n\n logger.groupEnd();\n }\n }\n\n return cacheable;\n }\n}\n\nexport {CacheableResponse};\n","/*\n Copyright 2018 Google LLC\n\n Use of this source code is governed by an MIT-style\n license that can be found in the LICENSE file or at\n https://opensource.org/licenses/MIT.\n*/\n\nimport {CacheableResponse} from './CacheableResponse.mjs';\nimport './_version.mjs';\n\n/**\n * A class implementing the `cacheWillUpdate` lifecycle callback. This makes it\n * easier to add in cacheability checks to requests made via Workbox's built-in\n * strategies.\n *\n * @memberof workbox.cacheableResponse\n */\nclass Plugin {\n /**\n * To construct a new cacheable response Plugin instance you must provide at\n * least one of the `config` properties.\n *\n * If both `statuses` and `headers` are specified, then both conditions must\n * be met for the `Response` to be considered cacheable.\n *\n * @param {Object} config\n * @param {Array} [config.statuses] One or more status codes that a\n * `Response` can have and be considered cacheable.\n * @param {Object} [config.headers] A mapping of header names\n * and expected values that a `Response` can have and be considered cacheable.\n * If multiple headers are provided, only one needs to be present.\n */\n constructor(config) {\n this._cacheableResponse = new CacheableResponse(config);\n }\n\n /**\n * @param {Object} options\n * @param {Response} options.response\n * @return {boolean}\n * @private\n */\n cacheWillUpdate({response}) {\n if (this._cacheableResponse.isResponseCacheable(response)) {\n return response;\n }\n return null;\n }\n}\n\nexport 
{Plugin};\n"],"names":["self","_","e","CacheableResponse","constructor","config","_statuses","statuses","_headers","headers","isResponseCacheable","response","cacheable","this","includes","status","Object","keys","some","headerName","get","_cacheableResponse","cacheWillUpdate"],"mappings":"sFAAA,IAAIA,KAAK,qCAAqCC,IAAI,MAAMC,ICsBxD,MAAMC,EAeJC,YAAYC,EAAS,SA6BdC,EAAYD,EAAOE,cACnBC,EAAWH,EAAOI,QAYzBC,oBAAoBC,OAUdC,GAAY,SAEZC,KAAKP,IACPM,EAAYC,KAAKP,EAAUQ,SAASH,EAASI,SAG3CF,KAAKL,GAAYI,IACnBA,EAAYI,OAAOC,KAAKJ,KAAKL,GAAUU,KAAMC,GACpCR,EAASF,QAAQW,IAAID,KAAgBN,KAAKL,EAASW,KAqCvDP,yCCpHX,MAeER,YAAYC,QACLgB,EAAqB,IAAIlB,EAAkBE,GASlDiB,iBAAgBX,SAACA,WACXE,KAAKQ,EAAmBX,oBAAoBC,GACvCA,EAEF"}
--------------------------------------------------------------------------------
/dist/workbox-v4.3.1/workbox-core.prod.js:
--------------------------------------------------------------------------------
1 | this.workbox=this.workbox||{},this.workbox.core=function(e){"use strict";try{self["workbox:core:4.3.1"]&&_()}catch(e){}const t=(e,...t)=>{let n=e;return t.length>0&&(n+=` :: ${JSON.stringify(t)}`),n};class n extends Error{constructor(e,n){super(t(e,n)),this.name=e,this.details=n}}const s=new Set;const r={googleAnalytics:"googleAnalytics",precache:"precache-v2",prefix:"workbox",runtime:"runtime",suffix:self.registration.scope},a=e=>[r.prefix,e,r.suffix].filter(e=>e.length>0).join("-"),i={updateDetails:e=>{Object.keys(r).forEach(t=>{void 0!==e[t]&&(r[t]=e[t])})},getGoogleAnalyticsName:e=>e||a(r.googleAnalytics),getPrecacheName:e=>e||a(r.precache),getPrefix:()=>r.prefix,getRuntimeName:e=>e||a(r.runtime),getSuffix:()=>r.suffix},c=e=>{const t=new URL(e,location);return t.origin===location.origin?t.pathname:t.href};async function o(){for(const e of s)await e()}const l="cacheDidUpdate",u="cacheKeyWillBeUsed",h="cacheWillUpdate",f="cachedResponseWillBeUsed",w="fetchDidFail",g="fetchDidSucceed",d="requestWillFetch",p=(e,t)=>e.filter(e=>t in e),y=async({cacheName:e,request:t,event:n,matchOptions:s,plugins:r=[]})=>{const a=await caches.open(e),i=await q({plugins:r,request:t,mode:"read"});let c=await a.match(i,s);for(const t of r)f in t&&(c=await t[f].call(t,{cacheName:e,event:n,matchOptions:s,cachedResponse:c,request:i}));return c},m=async({request:e,response:t,event:n,plugins:s})=>{let r=t,a=!1;for(let t of s)if(h in t&&(a=!0,!(r=await t[h].call(t,{request:e,response:r,event:n}))))break;return a||(r=200===r.status?r:null),r||null},q=async({request:e,mode:t,plugins:n})=>{const s=p(n,u);let r=e;for(const e of s)"string"==typeof(r=await e[u].call(e,{mode:t,request:r}))&&(r=new Request(r));return r},v={put:async({cacheName:e,request:t,response:s,event:r,plugins:a=[],matchOptions:i}={})=>{const u=await q({plugins:a,request:t,mode:"write"});if(!s)throw new n("cache-put-with-no-response",{url:c(u.url)});let h=await m({event:r,plugins:a,response:s,request:u});if(!h)return;const 
f=await caches.open(e),w=p(a,l);let g=w.length>0?await y({cacheName:e,matchOptions:i,request:u}):null;try{await f.put(u,h)}catch(e){throw"QuotaExceededError"===e.name&&await o(),e}for(let t of w)await t[l].call(t,{cacheName:e,event:r,oldResponse:g,newResponse:h,request:u})},match:y};class x{constructor(e,t,{onupgradeneeded:n,onversionchange:s=this.t}={}){this.s=e,this.i=t,this.o=n,this.t=s,this.l=null}get db(){return this.l}async open(){if(!this.l)return this.l=await new Promise((e,t)=>{let n=!1;setTimeout(()=>{n=!0,t(new Error("The open request was blocked and timed out"))},this.OPEN_TIMEOUT);const s=indexedDB.open(this.s,this.i);s.onerror=(()=>t(s.error)),s.onupgradeneeded=(e=>{n?(s.transaction.abort(),e.target.result.close()):this.o&&this.o(e)}),s.onsuccess=(({target:t})=>{const s=t.result;n?s.close():(s.onversionchange=this.t.bind(this),e(s))})}),this}async getKey(e,t){return(await this.getAllKeys(e,t,1))[0]}async getAll(e,t,n){return await this.getAllMatching(e,{query:t,count:n})}async getAllKeys(e,t,n){return(await this.getAllMatching(e,{query:t,count:n,includeKeys:!0})).map(({key:e})=>e)}async getAllMatching(e,{index:t,query:n=null,direction:s="next",count:r,includeKeys:a}={}){return await this.transaction([e],"readonly",(i,c)=>{const o=i.objectStore(e),l=t?o.index(t):o,u=[];l.openCursor(n,s).onsuccess=(({target:e})=>{const t=e.result;if(t){const{primaryKey:e,key:n,value:s}=t;u.push(a?{primaryKey:e,key:n,value:s}:s),r&&u.length>=r?c(u):t.continue()}else c(u)})})}async transaction(e,t,n){return await this.open(),await new Promise((s,r)=>{const a=this.l.transaction(e,t);a.onabort=(({target:e})=>r(e.error)),a.oncomplete=(()=>s()),n(a,e=>s(e))})}async u(e,t,n,...s){return await this.transaction([t],n,(n,r)=>{n.objectStore(t)[e](...s).onsuccess=(({target:e})=>{r(e.result)})})}t(){this.close()}close(){this.l&&(this.l.close(),this.l=null)}}x.prototype.OPEN_TIMEOUT=2e3;const 
b={readonly:["get","count","getKey","getAll","getAllKeys"],readwrite:["add","put","clear","delete"]};for(const[e,t]of Object.entries(b))for(const n of t)n in IDBObjectStore.prototype&&(x.prototype[n]=async function(t,...s){return await this.u(n,t,e,...s)});const D={fetch:async({request:e,fetchOptions:t,event:s,plugins:r=[]})=>{if(s&&s.preloadResponse){const e=await s.preloadResponse;if(e)return e}"string"==typeof e&&(e=new Request(e));const a=p(r,w),i=a.length>0?e.clone():null;try{for(let t of r)d in t&&(e=await t[d].call(t,{request:e.clone(),event:s}))}catch(e){throw new n("plugin-error-request-will-fetch",{thrownError:e})}let c=e.clone();try{let n;n="navigate"===e.mode?await fetch(e):await fetch(e,t);for(const e of r)g in e&&(n=await e[g].call(e,{event:s,request:c,response:n}));return n}catch(e){for(const t of a)await t[w].call(t,{error:e,event:s,originalRequest:i.clone(),request:c.clone()});throw e}}};var E=Object.freeze({assert:null,cacheNames:i,cacheWrapper:v,DBWrapper:x,Deferred:class{constructor(){this.promise=new Promise((e,t)=>{this.resolve=e,this.reject=t})}},deleteDatabase:async e=>{await new Promise((t,n)=>{const s=indexedDB.deleteDatabase(e);s.onerror=(({target:e})=>{n(e.error)}),s.onblocked=(()=>{n(new Error("Delete blocked"))}),s.onsuccess=(()=>{t()})})},executeQuotaErrorCallbacks:o,fetchWrapper:D,getFriendlyURL:c,logger:null,WorkboxError:n});const N={get googleAnalytics(){return i.getGoogleAnalyticsName()},get precache(){return i.getPrecacheName()},get prefix(){return i.getPrefix()},get runtime(){return i.getRuntimeName()},get suffix(){return i.getSuffix()}};try{self.workbox.v=self.workbox.v||{}}catch(e){}return e._private=E,e.clientsClaim=(()=>{addEventListener("activate",()=>clients.claim())}),e.cacheNames=N,e.registerQuotaErrorCallback=function(e){s.add(e)},e.setCacheNameDetails=(e=>{i.updateDetails(e)}),e.skipWaiting=(()=>{addEventListener("install",()=>self.skipWaiting())}),e}({});
2 | //# sourceMappingURL=workbox-core.prod.js.map
3 |
--------------------------------------------------------------------------------
/dist/workbox-v4.3.1/workbox-expiration.prod.js:
--------------------------------------------------------------------------------
1 | this.workbox=this.workbox||{},this.workbox.expiration=function(t,e,s,i,a,n){"use strict";try{self["workbox:expiration:4.3.1"]&&_()}catch(t){}const h="workbox-expiration",c="cache-entries",r=t=>{const e=new URL(t,location);return e.hash="",e.href};class o{constructor(t){this.t=t,this.s=new e.DBWrapper(h,1,{onupgradeneeded:t=>this.i(t)})}i(t){const e=t.target.result.createObjectStore(c,{keyPath:"id"});e.createIndex("cacheName","cacheName",{unique:!1}),e.createIndex("timestamp","timestamp",{unique:!1}),s.deleteDatabase(this.t)}async setTimestamp(t,e){t=r(t),await this.s.put(c,{url:t,timestamp:e,cacheName:this.t,id:this.h(t)})}async getTimestamp(t){return(await this.s.get(c,this.h(t))).timestamp}async expireEntries(t,e){const s=await this.s.transaction(c,"readwrite",(s,i)=>{const a=s.objectStore(c),n=[];let h=0;a.index("timestamp").openCursor(null,"prev").onsuccess=(({target:s})=>{const a=s.result;if(a){const s=a.value;s.cacheName===this.t&&(t&&s.timestamp=e?n.push(a.value):h++),a.continue()}else i(n)})}),i=[];for(const t of s)await this.s.delete(c,t.id),i.push(t.url);return i}h(t){return this.t+"|"+r(t)}}class u{constructor(t,e={}){this.o=!1,this.u=!1,this.l=e.maxEntries,this.p=e.maxAgeSeconds,this.t=t,this.m=new o(t)}async expireEntries(){if(this.o)return void(this.u=!0);this.o=!0;const t=this.p?Date.now()-1e3*this.p:void 0,e=await this.m.expireEntries(t,this.l),s=await caches.open(this.t);for(const t of e)await s.delete(t);this.o=!1,this.u&&(this.u=!1,this.expireEntries())}async updateTimestamp(t){await this.m.setTimestamp(t,Date.now())}async isURLExpired(t){return await this.m.getTimestamp(t)this.deleteCacheAndMetadata())}k(t){if(t===a.cacheNames.getRuntimeName())throw new i.WorkboxError("expire-custom-caches-only");let e=this.g.get(t);return e||(e=new u(t,this.D),this.g.set(t,e)),e}cachedResponseWillBeUsed({event:t,request:e,cacheName:s,cachedResponse:i}){if(!i)return null;let a=this.N(i);const n=this.k(s);n.expireEntries();const 
h=n.updateTimestamp(e.url);if(t)try{t.waitUntil(h)}catch(t){}return a?i:null}N(t){if(!this.p)return!0;const e=this._(t);return null===e||e>=Date.now()-1e3*this.p}_(t){if(!t.headers.has("date"))return null;const e=t.headers.get("date"),s=new Date(e).getTime();return isNaN(s)?null:s}async cacheDidUpdate({cacheName:t,request:e}){const s=this.k(t);await s.updateTimestamp(e.url),await s.expireEntries()}async deleteCacheAndMetadata(){for(const[t,e]of this.g)await caches.delete(t),await e.delete();this.g=new Map}},t}({},workbox.core._private,workbox.core._private,workbox.core._private,workbox.core._private,workbox.core);
2 | //# sourceMappingURL=workbox-expiration.prod.js.map
3 |
--------------------------------------------------------------------------------
/dist/workbox-v4.3.1/workbox-navigation-preload.dev.js:
--------------------------------------------------------------------------------
1 | this.workbox = this.workbox || {};
2 | this.workbox.navigationPreload = (function (exports, logger_mjs) {
3 | 'use strict';
4 |
5 | try {
6 | self['workbox:navigation-preload:4.3.1'] && _();
7 | } catch (e) {} // eslint-disable-line
8 |
9 | /*
10 | Copyright 2018 Google LLC
11 |
12 | Use of this source code is governed by an MIT-style
13 | license that can be found in the LICENSE file or at
14 | https://opensource.org/licenses/MIT.
15 | */
16 | /**
17 | * @return {boolean} Whether or not the current browser supports enabling
18 | * navigation preload.
19 | *
20 | * @memberof workbox.navigationPreload
21 | */
22 |
23 | function isSupported() {
24 | return Boolean(self.registration && self.registration.navigationPreload);
25 | }
26 |
27 | /*
28 | Copyright 2018 Google LLC
29 |
30 | Use of this source code is governed by an MIT-style
31 | license that can be found in the LICENSE file or at
32 | https://opensource.org/licenses/MIT.
33 | */
34 | /**
35 | * If the browser supports Navigation Preload, then this will disable it.
36 | *
37 | * @memberof workbox.navigationPreload
38 | */
39 |
40 | function disable() {
41 | if (isSupported()) {
42 | self.addEventListener('activate', event => {
43 | event.waitUntil(self.registration.navigationPreload.disable().then(() => {
44 | {
45 | logger_mjs.logger.log(`Navigation preload is disabled.`);
46 | }
47 | }));
48 | });
49 | } else {
50 | {
51 | logger_mjs.logger.log(`Navigation preload is not supported in this browser.`);
52 | }
53 | }
54 | }
55 |
56 | /*
57 | Copyright 2018 Google LLC
58 |
59 | Use of this source code is governed by an MIT-style
60 | license that can be found in the LICENSE file or at
61 | https://opensource.org/licenses/MIT.
62 | */
63 | /**
64 | * If the browser supports Navigation Preload, then this will enable it.
65 | *
66 | * @param {string} [headerValue] Optionally, allows developers to
67 | * [override](https://developers.google.com/web/updates/2017/02/navigation-preload#changing_the_header)
68 | * the value of the `Service-Worker-Navigation-Preload` header which will be
69 | * sent to the server when making the navigation request.
70 | *
71 | * @memberof workbox.navigationPreload
72 | */
73 |
74 | function enable(headerValue) {
75 | if (isSupported()) {
76 | self.addEventListener('activate', event => {
77 | event.waitUntil(self.registration.navigationPreload.enable().then(() => {
78 | // Defaults to Service-Worker-Navigation-Preload: true if not set.
79 | if (headerValue) {
80 | self.registration.navigationPreload.setHeaderValue(headerValue);
81 | }
82 |
83 | {
84 | logger_mjs.logger.log(`Navigation preload is enabled.`);
85 | }
86 | }));
87 | });
88 | } else {
89 | {
90 | logger_mjs.logger.log(`Navigation preload is not supported in this browser.`);
91 | }
92 | }
93 | }
94 |
95 | /*
96 | Copyright 2018 Google LLC
97 |
98 | Use of this source code is governed by an MIT-style
99 | license that can be found in the LICENSE file or at
100 | https://opensource.org/licenses/MIT.
101 | */
102 |
103 | exports.disable = disable;
104 | exports.enable = enable;
105 | exports.isSupported = isSupported;
106 |
107 | return exports;
108 |
109 | }({}, workbox.core._private));
110 | //# sourceMappingURL=workbox-navigation-preload.dev.js.map
111 |
--------------------------------------------------------------------------------
/dist/workbox-v4.3.1/workbox-navigation-preload.dev.js.map:
--------------------------------------------------------------------------------
1 | {"version":3,"file":"workbox-navigation-preload.dev.js","sources":["../_version.mjs","../isSupported.mjs","../disable.mjs","../enable.mjs","../index.mjs"],"sourcesContent":["try{self['workbox:navigation-preload:4.3.1']&&_()}catch(e){}// eslint-disable-line","/*\n Copyright 2018 Google LLC\n\n Use of this source code is governed by an MIT-style\n license that can be found in the LICENSE file or at\n https://opensource.org/licenses/MIT.\n*/\n\nimport './_version.mjs';\n\n/**\n * @return {boolean} Whether or not the current browser supports enabling\n * navigation preload.\n *\n * @memberof workbox.navigationPreload\n */\nfunction isSupported() {\n return Boolean(self.registration && self.registration.navigationPreload);\n}\n\nexport {isSupported};\n","/*\n Copyright 2018 Google LLC\n\n Use of this source code is governed by an MIT-style\n license that can be found in the LICENSE file or at\n https://opensource.org/licenses/MIT.\n*/\n\nimport {logger} from 'workbox-core/_private/logger.mjs';\n\nimport {isSupported} from './isSupported.mjs';\n\nimport './_version.mjs';\n\n/**\n * If the browser supports Navigation Preload, then this will disable it.\n *\n * @memberof workbox.navigationPreload\n */\nfunction disable() {\n if (isSupported()) {\n self.addEventListener('activate', (event) => {\n event.waitUntil(\n self.registration.navigationPreload.disable().then(() => {\n if (process.env.NODE_ENV !== 'production') {\n logger.log(`Navigation preload is disabled.`);\n }\n })\n );\n });\n } else {\n if (process.env.NODE_ENV !== 'production') {\n logger.log(`Navigation preload is not supported in this browser.`);\n }\n }\n}\n\nexport {disable};\n","/*\n Copyright 2018 Google LLC\n\n Use of this source code is governed by an MIT-style\n license that can be found in the LICENSE file or at\n https://opensource.org/licenses/MIT.\n*/\n\nimport {logger} from 'workbox-core/_private/logger.mjs';\n\nimport {isSupported} from './isSupported.mjs';\n\nimport 
'./_version.mjs';\n\n/**\n * If the browser supports Navigation Preload, then this will enable it.\n *\n * @param {string} [headerValue] Optionally, allows developers to\n * [override](https://developers.google.com/web/updates/2017/02/navigation-preload#changing_the_header)\n * the value of the `Service-Worker-Navigation-Preload` header which will be\n * sent to the server when making the navigation request.\n *\n * @memberof workbox.navigationPreload\n */\nfunction enable(headerValue) {\n if (isSupported()) {\n self.addEventListener('activate', (event) => {\n event.waitUntil(\n self.registration.navigationPreload.enable().then(() => {\n // Defaults to Service-Worker-Navigation-Preload: true if not set.\n if (headerValue) {\n self.registration.navigationPreload.setHeaderValue(headerValue);\n }\n\n if (process.env.NODE_ENV !== 'production') {\n logger.log(`Navigation preload is enabled.`);\n }\n })\n );\n });\n } else {\n if (process.env.NODE_ENV !== 'production') {\n logger.log(`Navigation preload is not supported in this browser.`);\n }\n }\n}\n\nexport {enable};\n","/*\n Copyright 2018 Google LLC\n\n Use of this source code is governed by an MIT-style\n license that can be found in the LICENSE file or at\n https://opensource.org/licenses/MIT.\n*/\n\nimport {disable} from './disable.mjs';\nimport {enable} from './enable.mjs';\nimport {isSupported} from './isSupported.mjs';\nimport './_version.mjs';\n\n\n/**\n * @namespace workbox.navigationPreload\n */\n\nexport {\n disable,\n enable,\n 
isSupported,\n};\n"],"names":["self","_","e","isSupported","Boolean","registration","navigationPreload","disable","addEventListener","event","waitUntil","then","logger","log","enable","headerValue","setHeaderValue"],"mappings":";;;;EAAA,IAAG;EAACA,EAAAA,IAAI,CAAC,kCAAD,CAAJ,IAA0CC,CAAC,EAA3C;EAA8C,CAAlD,CAAkD,OAAMC,CAAN,EAAQ;;ECA1D;;;;;;;AAQA,EAEA;;;;;;;EAMA,SAASC,WAAT,GAAuB;EACrB,SAAOC,OAAO,CAACJ,IAAI,CAACK,YAAL,IAAqBL,IAAI,CAACK,YAAL,CAAkBC,iBAAxC,CAAd;EACD;;EClBD;;;;;;;AAQA,EAMA;;;;;;EAKA,SAASC,OAAT,GAAmB;EACjB,MAAIJ,WAAW,EAAf,EAAmB;EACjBH,IAAAA,IAAI,CAACQ,gBAAL,CAAsB,UAAtB,EAAmCC,KAAD,IAAW;EAC3CA,MAAAA,KAAK,CAACC,SAAN,CACIV,IAAI,CAACK,YAAL,CAAkBC,iBAAlB,CAAoCC,OAApC,GAA8CI,IAA9C,CAAmD,MAAM;EACvD,QAA2C;EACzCC,UAAAA,iBAAM,CAACC,GAAP,CAAY,iCAAZ;EACD;EACF,OAJD,CADJ;EAOD,KARD;EASD,GAVD,MAUO;EACL,IAA2C;EACzCD,MAAAA,iBAAM,CAACC,GAAP,CAAY,sDAAZ;EACD;EACF;EACF;;ECnCD;;;;;;;AAQA,EAMA;;;;;;;;;;;EAUA,SAASC,MAAT,CAAgBC,WAAhB,EAA6B;EAC3B,MAAIZ,WAAW,EAAf,EAAmB;EACjBH,IAAAA,IAAI,CAACQ,gBAAL,CAAsB,UAAtB,EAAmCC,KAAD,IAAW;EAC3CA,MAAAA,KAAK,CAACC,SAAN,CACIV,IAAI,CAACK,YAAL,CAAkBC,iBAAlB,CAAoCQ,MAApC,GAA6CH,IAA7C,CAAkD,MAAM;EACxD;EACE,YAAII,WAAJ,EAAiB;EACff,UAAAA,IAAI,CAACK,YAAL,CAAkBC,iBAAlB,CAAoCU,cAApC,CAAmDD,WAAnD;EACD;;EAED,QAA2C;EACzCH,UAAAA,iBAAM,CAACC,GAAP,CAAY,gCAAZ;EACD;EACF,OATD,CADJ;EAYD,KAbD;EAcD,GAfD,MAeO;EACL,IAA2C;EACzCD,MAAAA,iBAAM,CAACC,GAAP,CAAY,sDAAZ;EACD;EACF;EACF;;EC7CD;;;;;;;;;;;;;;;;;;"}
--------------------------------------------------------------------------------
/dist/workbox-v4.3.1/workbox-navigation-preload.prod.js:
--------------------------------------------------------------------------------
1 | this.workbox=this.workbox||{},this.workbox.navigationPreload=function(t){"use strict";try{self["workbox:navigation-preload:4.3.1"]&&_()}catch(t){}function e(){return Boolean(self.registration&&self.registration.navigationPreload)}return t.disable=function(){e()&&self.addEventListener("activate",t=>{t.waitUntil(self.registration.navigationPreload.disable().then(()=>{}))})},t.enable=function(t){e()&&self.addEventListener("activate",e=>{e.waitUntil(self.registration.navigationPreload.enable().then(()=>{t&&self.registration.navigationPreload.setHeaderValue(t)}))})},t.isSupported=e,t}({});
2 | //# sourceMappingURL=workbox-navigation-preload.prod.js.map
3 |
--------------------------------------------------------------------------------
/dist/workbox-v4.3.1/workbox-navigation-preload.prod.js.map:
--------------------------------------------------------------------------------
1 | {"version":3,"file":"workbox-navigation-preload.prod.js","sources":["../_version.mjs","../isSupported.mjs","../disable.mjs","../enable.mjs"],"sourcesContent":["try{self['workbox:navigation-preload:4.3.1']&&_()}catch(e){}// eslint-disable-line","/*\n Copyright 2018 Google LLC\n\n Use of this source code is governed by an MIT-style\n license that can be found in the LICENSE file or at\n https://opensource.org/licenses/MIT.\n*/\n\nimport './_version.mjs';\n\n/**\n * @return {boolean} Whether or not the current browser supports enabling\n * navigation preload.\n *\n * @memberof workbox.navigationPreload\n */\nfunction isSupported() {\n return Boolean(self.registration && self.registration.navigationPreload);\n}\n\nexport {isSupported};\n","/*\n Copyright 2018 Google LLC\n\n Use of this source code is governed by an MIT-style\n license that can be found in the LICENSE file or at\n https://opensource.org/licenses/MIT.\n*/\n\nimport {logger} from 'workbox-core/_private/logger.mjs';\n\nimport {isSupported} from './isSupported.mjs';\n\nimport './_version.mjs';\n\n/**\n * If the browser supports Navigation Preload, then this will disable it.\n *\n * @memberof workbox.navigationPreload\n */\nfunction disable() {\n if (isSupported()) {\n self.addEventListener('activate', (event) => {\n event.waitUntil(\n self.registration.navigationPreload.disable().then(() => {\n if (process.env.NODE_ENV !== 'production') {\n logger.log(`Navigation preload is disabled.`);\n }\n })\n );\n });\n } else {\n if (process.env.NODE_ENV !== 'production') {\n logger.log(`Navigation preload is not supported in this browser.`);\n }\n }\n}\n\nexport {disable};\n","/*\n Copyright 2018 Google LLC\n\n Use of this source code is governed by an MIT-style\n license that can be found in the LICENSE file or at\n https://opensource.org/licenses/MIT.\n*/\n\nimport {logger} from 'workbox-core/_private/logger.mjs';\n\nimport {isSupported} from './isSupported.mjs';\n\nimport './_version.mjs';\n\n/**\n * If the 
browser supports Navigation Preload, then this will enable it.\n *\n * @param {string} [headerValue] Optionally, allows developers to\n * [override](https://developers.google.com/web/updates/2017/02/navigation-preload#changing_the_header)\n * the value of the `Service-Worker-Navigation-Preload` header which will be\n * sent to the server when making the navigation request.\n *\n * @memberof workbox.navigationPreload\n */\nfunction enable(headerValue) {\n if (isSupported()) {\n self.addEventListener('activate', (event) => {\n event.waitUntil(\n self.registration.navigationPreload.enable().then(() => {\n // Defaults to Service-Worker-Navigation-Preload: true if not set.\n if (headerValue) {\n self.registration.navigationPreload.setHeaderValue(headerValue);\n }\n\n if (process.env.NODE_ENV !== 'production') {\n logger.log(`Navigation preload is enabled.`);\n }\n })\n );\n });\n } else {\n if (process.env.NODE_ENV !== 'production') {\n logger.log(`Navigation preload is not supported in this browser.`);\n }\n }\n}\n\nexport {enable};\n"],"names":["self","_","e","isSupported","Boolean","registration","navigationPreload","addEventListener","event","waitUntil","disable","then","headerValue","enable","setHeaderValue"],"mappings":"sFAAA,IAAIA,KAAK,qCAAqCC,IAAI,MAAMC,ICgBxD,SAASC,WACAC,QAAQJ,KAAKK,cAAgBL,KAAKK,aAAaC,oCCExD,WACMH,KACFH,KAAKO,iBAAiB,WAAaC,IACjCA,EAAMC,UACFT,KAAKK,aAAaC,kBAAkBI,UAAUC,KAAK,qBCC7D,SAAgBC,GACVT,KACFH,KAAKO,iBAAiB,WAAaC,IACjCA,EAAMC,UACFT,KAAKK,aAAaC,kBAAkBO,SAASF,KAAK,KAE5CC,GACFZ,KAAKK,aAAaC,kBAAkBQ,eAAeF"}
--------------------------------------------------------------------------------
/dist/workbox-v4.3.1/workbox-offline-ga.prod.js:
--------------------------------------------------------------------------------
1 | this.workbox=this.workbox||{},this.workbox.googleAnalytics=function(e,t,o,n,a,c,w){"use strict";try{self["workbox:google-analytics:4.3.1"]&&_()}catch(e){}const r=/^\/(\w+\/)?collect/,s=e=>async({queue:t})=>{let o;for(;o=await t.shiftRequest();){const{request:n,timestamp:a}=o,c=new URL(n.url);try{const w="POST"===n.method?new URLSearchParams(await n.clone().text()):c.searchParams,r=a-(Number(w.get("qt"))||0),s=Date.now()-r;if(w.set("qt",s),e.parameterOverrides)for(const t of Object.keys(e.parameterOverrides)){const o=e.parameterOverrides[t];w.set(t,o)}"function"==typeof e.hitFilter&&e.hitFilter.call(null,w),await fetch(new Request(c.origin+c.pathname,{body:w.toString(),method:"POST",mode:"cors",credentials:"omit",headers:{"Content-Type":"text/plain"}}))}catch(e){throw await t.unshiftRequest(o),e}}},i=e=>{const t=({url:e})=>"www.google-analytics.com"===e.hostname&&r.test(e.pathname),o=new w.NetworkOnly({plugins:[e]});return[new n.Route(t,o,"GET"),new n.Route(t,o,"POST")]},l=e=>{const t=new c.NetworkFirst({cacheName:e});return new n.Route(({url:e})=>"www.google-analytics.com"===e.hostname&&"/analytics.js"===e.pathname,t,"GET")},m=e=>{const t=new c.NetworkFirst({cacheName:e});return new n.Route(({url:e})=>"www.googletagmanager.com"===e.hostname&&"/gtag/js"===e.pathname,t,"GET")},u=e=>{const t=new c.NetworkFirst({cacheName:e});return new n.Route(({url:e})=>"www.googletagmanager.com"===e.hostname&&"/gtm.js"===e.pathname,t,"GET")};return e.initialize=((e={})=>{const n=o.cacheNames.getGoogleAnalyticsName(e.cacheName),c=new t.Plugin("workbox-google-analytics",{maxRetentionTime:2880,onSync:s(e)}),w=[u(n),l(n),m(n),...i(c)],r=new a.Router;for(const e of w)r.registerRoute(e);r.addFetchListener()}),e}({},workbox.backgroundSync,workbox.core._private,workbox.routing,workbox.routing,workbox.strategies,workbox.strategies);
2 | //# sourceMappingURL=workbox-offline-ga.prod.js.map
3 |
--------------------------------------------------------------------------------
/dist/workbox-v4.3.1/workbox-precaching.prod.js:
--------------------------------------------------------------------------------
1 | this.workbox=this.workbox||{},this.workbox.precaching=function(t,e,n,s,c){"use strict";try{self["workbox:precaching:4.3.1"]&&_()}catch(t){}const o=[],i={get:()=>o,add(t){o.push(...t)}};const a="__WB_REVISION__";function r(t){if(!t)throw new c.WorkboxError("add-to-cache-list-unexpected-type",{entry:t});if("string"==typeof t){const e=new URL(t,location);return{cacheKey:e.href,url:e.href}}const{revision:e,url:n}=t;if(!n)throw new c.WorkboxError("add-to-cache-list-unexpected-type",{entry:t});if(!e){const t=new URL(n,location);return{cacheKey:t.href,url:t.href}}const s=new URL(n,location),o=new URL(n,location);return o.searchParams.set(a,e),{cacheKey:o.href,url:s.href}}class l{constructor(t){this.t=e.cacheNames.getPrecacheName(t),this.s=new Map}addToCacheList(t){for(const e of t){const{cacheKey:t,url:n}=r(e);if(this.s.has(n)&&this.s.get(n)!==t)throw new c.WorkboxError("add-to-cache-list-conflicting-entries",{firstEntry:this.s.get(n),secondEntry:t});this.s.set(n,t)}}async install({event:t,plugins:e}={}){const n=[],s=[],c=await caches.open(this.t),o=await c.keys(),i=new Set(o.map(t=>t.url));for(const t of this.s.values())i.has(t)?s.push(t):n.push(t);const a=n.map(n=>this.o({event:t,plugins:e,url:n}));return await Promise.all(a),{updatedURLs:n,notUpdatedURLs:s}}async activate(){const t=await caches.open(this.t),e=await t.keys(),n=new Set(this.s.values()),s=[];for(const c of e)n.has(c.url)||(await t.delete(c),s.push(c.url));return{deletedURLs:s}}async o({url:t,event:e,plugins:o}){const i=new Request(t,{credentials:"same-origin"});let a,r=await s.fetchWrapper.fetch({event:e,plugins:o,request:i});for(const t of o||[])"cacheWillUpdate"in t&&(a=t.cacheWillUpdate.bind(t));if(!(a?a({event:e,request:i,response:r}):r.status<400))throw new c.WorkboxError("bad-precaching-response",{url:t,status:r.status});r.redirected&&(r=await async function(t){const e=t.clone(),n="body"in e?Promise.resolve(e.body):e.blob(),s=await n;return new 
Response(s,{headers:e.headers,status:e.status,statusText:e.statusText})}(r)),await n.cacheWrapper.put({event:e,plugins:o,request:i,response:r,cacheName:this.t,matchOptions:{ignoreSearch:!0}})}getURLsToCacheKeys(){return this.s}getCachedURLs(){return[...this.s.keys()]}getCacheKeyForURL(t){const e=new URL(t,location);return this.s.get(e.href)}}let u;const h=()=>(u||(u=new l),u);const d=(t,e)=>{const n=h().getURLsToCacheKeys();for(const s of function*(t,{ignoreURLParametersMatching:e,directoryIndex:n,cleanURLs:s,urlManipulation:c}={}){const o=new URL(t,location);o.hash="",yield o.href;const i=function(t,e){for(const n of[...t.searchParams.keys()])e.some(t=>t.test(n))&&t.searchParams.delete(n);return t}(o,e);if(yield i.href,n&&i.pathname.endsWith("/")){const t=new URL(i);t.pathname+=n,yield t.href}if(s){const t=new URL(i);t.pathname+=".html",yield t.href}if(c){const t=c({url:o});for(const e of t)yield e.href}}(t,e)){const t=n.get(s);if(t)return t}};let w=!1;const f=t=>{w||((({ignoreURLParametersMatching:t=[/^utm_/],directoryIndex:n="index.html",cleanURLs:s=!0,urlManipulation:c=null}={})=>{const o=e.cacheNames.getPrecacheName();addEventListener("fetch",e=>{const i=d(e.request.url,{cleanURLs:s,directoryIndex:n,ignoreURLParametersMatching:t,urlManipulation:c});if(!i)return;let a=caches.open(o).then(t=>t.match(i)).then(t=>t||fetch(i));e.respondWith(a)})})(t),w=!0)},y=t=>{const e=h(),n=i.get();t.waitUntil(e.install({event:t,plugins:n}).catch(t=>{throw t}))},p=t=>{const e=h(),n=i.get();t.waitUntil(e.activate({event:t,plugins:n}))},L=t=>{h().addToCacheList(t),t.length>0&&(addEventListener("install",y),addEventListener("activate",p))};return t.addPlugins=(t=>{i.add(t)}),t.addRoute=f,t.cleanupOutdatedCaches=(()=>{addEventListener("activate",t=>{const n=e.cacheNames.getPrecacheName();t.waitUntil((async(t,e="-precache-")=>{const n=(await caches.keys()).filter(n=>n.includes(e)&&n.includes(self.registration.scope)&&n!==t);return await 
Promise.all(n.map(t=>caches.delete(t))),n})(n).then(t=>{}))})}),t.getCacheKeyForURL=(t=>{return h().getCacheKeyForURL(t)}),t.precache=L,t.precacheAndRoute=((t,e)=>{L(t),f(e)}),t.PrecacheController=l,t}({},workbox.core._private,workbox.core._private,workbox.core._private,workbox.core._private);
2 | //# sourceMappingURL=workbox-precaching.prod.js.map
3 |
--------------------------------------------------------------------------------
/dist/workbox-v4.3.1/workbox-range-requests.prod.js:
--------------------------------------------------------------------------------
1 | this.workbox=this.workbox||{},this.workbox.rangeRequests=function(e,n){"use strict";try{self["workbox:range-requests:4.3.1"]&&_()}catch(e){}async function t(e,t){try{if(206===t.status)return t;const s=e.headers.get("range");if(!s)throw new n.WorkboxError("no-range-header");const a=function(e){const t=e.trim().toLowerCase();if(!t.startsWith("bytes="))throw new n.WorkboxError("unit-must-be-bytes",{normalizedRangeHeader:t});if(t.includes(","))throw new n.WorkboxError("single-range-only",{normalizedRangeHeader:t});const s=/(\d*)-(\d*)/.exec(t);if(null===s||!s[1]&&!s[2])throw new n.WorkboxError("invalid-range-values",{normalizedRangeHeader:t});return{start:""===s[1]?null:Number(s[1]),end:""===s[2]?null:Number(s[2])}}(s),r=await t.blob(),i=function(e,t,s){const a=e.size;if(s>a||t<0)throw new n.WorkboxError("range-not-satisfiable",{size:a,end:s,start:t});let r,i;return null===t?(r=a-s,i=a):null===s?(r=t,i=a):(r=t,i=s+1),{start:r,end:i}}(r,a.start,a.end),o=r.slice(i.start,i.end),u=o.size,l=new Response(o,{status:206,statusText:"Partial Content",headers:t.headers});return l.headers.set("Content-Length",u),l.headers.set("Content-Range",`bytes ${i.start}-${i.end-1}/`+r.size),l}catch(e){return new Response("",{status:416,statusText:"Range Not Satisfiable"})}}return e.createPartialResponse=t,e.Plugin=class{async cachedResponseWillBeUsed({request:e,cachedResponse:n}){return n&&e.headers.has("range")?await t(e,n):n}},e}({},workbox.core._private);
2 | //# sourceMappingURL=workbox-range-requests.prod.js.map
3 |
--------------------------------------------------------------------------------
/dist/workbox-v4.3.1/workbox-routing.prod.js:
--------------------------------------------------------------------------------
1 | this.workbox=this.workbox||{},this.workbox.routing=function(t,e,r){"use strict";try{self["workbox:routing:4.3.1"]&&_()}catch(t){}const s="GET",n=t=>t&&"object"==typeof t?t:{handle:t};class o{constructor(t,e,r){this.handler=n(e),this.match=t,this.method=r||s}}class i extends o{constructor(t,{whitelist:e=[/./],blacklist:r=[]}={}){super(t=>this.t(t),t),this.s=e,this.o=r}t({url:t,request:e}){if("navigate"!==e.mode)return!1;const r=t.pathname+t.search;for(const t of this.o)if(t.test(r))return!1;return!!this.s.some(t=>t.test(r))}}class u extends o{constructor(t,e,r){super(({url:e})=>{const r=t.exec(e.href);return r?e.origin!==location.origin&&0!==r.index?null:r.slice(1):null},e,r)}}class c{constructor(){this.i=new Map}get routes(){return this.i}addFetchListener(){self.addEventListener("fetch",t=>{const{request:e}=t,r=this.handleRequest({request:e,event:t});r&&t.respondWith(r)})}addCacheListener(){self.addEventListener("message",async t=>{if(t.data&&"CACHE_URLS"===t.data.type){const{payload:e}=t.data,r=Promise.all(e.urlsToCache.map(t=>{"string"==typeof t&&(t=[t]);const e=new Request(...t);return this.handleRequest({request:e})}));t.waitUntil(r),t.ports&&t.ports[0]&&(await r,t.ports[0].postMessage(!0))}})}handleRequest({request:t,event:e}){const r=new URL(t.url,location);if(!r.protocol.startsWith("http"))return;let s,{params:n,route:o}=this.findMatchingRoute({url:r,request:t,event:e}),i=o&&o.handler;if(!i&&this.u&&(i=this.u),i){try{s=i.handle({url:r,request:t,event:e,params:n})}catch(t){s=Promise.reject(t)}return s&&this.h&&(s=s.catch(t=>this.h.handle({url:r,event:e,err:t}))),s}}findMatchingRoute({url:t,request:e,event:r}){const s=this.i.get(e.method)||[];for(const n of s){let s,o=n.match({url:t,request:e,event:r});if(o)return 
Array.isArray(o)&&o.length>0?s=o:o.constructor===Object&&Object.keys(o).length>0&&(s=o),{route:n,params:s}}return{}}setDefaultHandler(t){this.u=n(t)}setCatchHandler(t){this.h=n(t)}registerRoute(t){this.i.has(t.method)||this.i.set(t.method,[]),this.i.get(t.method).push(t)}unregisterRoute(t){if(!this.i.has(t.method))throw new r.WorkboxError("unregister-route-but-not-found-with-method",{method:t.method});const e=this.i.get(t.method).indexOf(t);if(!(e>-1))throw new r.WorkboxError("unregister-route-route-not-registered");this.i.get(t.method).splice(e,1)}}let a;const h=()=>(a||((a=new c).addFetchListener(),a.addCacheListener()),a);return t.NavigationRoute=i,t.RegExpRoute=u,t.registerNavigationRoute=((t,r={})=>{const s=e.cacheNames.getPrecacheName(r.cacheName),n=new i(async()=>{try{const e=await caches.match(t,{cacheName:s});if(e)return e;throw new Error(`The cache ${s} did not have an entry for `+`${t}.`)}catch(e){return fetch(t)}},{whitelist:r.whitelist,blacklist:r.blacklist});return h().registerRoute(n),n}),t.registerRoute=((t,e,s="GET")=>{let n;if("string"==typeof t){const r=new URL(t,location);n=new o(({url:t})=>t.href===r.href,e,s)}else if(t instanceof RegExp)n=new u(t,e,s);else if("function"==typeof t)n=new o(t,e,s);else{if(!(t instanceof o))throw new r.WorkboxError("unsupported-route-type",{moduleName:"workbox-routing",funcName:"registerRoute",paramName:"capture"});n=t}return h().registerRoute(n),n}),t.Route=o,t.Router=c,t.setCatchHandler=(t=>{h().setCatchHandler(t)}),t.setDefaultHandler=(t=>{h().setDefaultHandler(t)}),t}({},workbox.core._private,workbox.core._private);
2 | //# sourceMappingURL=workbox-routing.prod.js.map
3 |
--------------------------------------------------------------------------------
/dist/workbox-v4.3.1/workbox-strategies.prod.js:
--------------------------------------------------------------------------------
1 | this.workbox=this.workbox||{},this.workbox.strategies=function(e,t,s,n,r){"use strict";try{self["workbox:strategies:4.3.1"]&&_()}catch(e){}class i{constructor(e={}){this.t=t.cacheNames.getRuntimeName(e.cacheName),this.s=e.plugins||[],this.i=e.fetchOptions||null,this.h=e.matchOptions||null}async handle({event:e,request:t}){return this.makeRequest({event:e,request:t||e.request})}async makeRequest({event:e,request:t}){"string"==typeof t&&(t=new Request(t));let n,i=await s.cacheWrapper.match({cacheName:this.t,request:t,event:e,matchOptions:this.h,plugins:this.s});if(!i)try{i=await this.u(t,e)}catch(e){n=e}if(!i)throw new r.WorkboxError("no-response",{url:t.url,error:n});return i}async u(e,t){const r=await n.fetchWrapper.fetch({request:e,event:t,fetchOptions:this.i,plugins:this.s}),i=r.clone(),h=s.cacheWrapper.put({cacheName:this.t,request:e,response:i,event:t,plugins:this.s});if(t)try{t.waitUntil(h)}catch(e){}return r}}class h{constructor(e={}){this.t=t.cacheNames.getRuntimeName(e.cacheName),this.s=e.plugins||[],this.h=e.matchOptions||null}async handle({event:e,request:t}){return this.makeRequest({event:e,request:t||e.request})}async makeRequest({event:e,request:t}){"string"==typeof t&&(t=new Request(t));const n=await s.cacheWrapper.match({cacheName:this.t,request:t,event:e,matchOptions:this.h,plugins:this.s});if(!n)throw new r.WorkboxError("no-response",{url:t.url});return n}}const u={cacheWillUpdate:({response:e})=>200===e.status||0===e.status?e:null};class a{constructor(e={}){if(this.t=t.cacheNames.getRuntimeName(e.cacheName),e.plugins){let t=e.plugins.some(e=>!!e.cacheWillUpdate);this.s=t?e.plugins:[u,...e.plugins]}else this.s=[u];this.o=e.networkTimeoutSeconds,this.i=e.fetchOptions||null,this.h=e.matchOptions||null}async handle({event:e,request:t}){return this.makeRequest({event:e,request:t||e.request})}async makeRequest({event:e,request:t}){const s=[];"string"==typeof t&&(t=new Request(t));const n=[];let 
i;if(this.o){const{id:r,promise:h}=this.l({request:t,event:e,logs:s});i=r,n.push(h)}const h=this.q({timeoutId:i,request:t,event:e,logs:s});n.push(h);let u=await Promise.race(n);if(u||(u=await h),!u)throw new r.WorkboxError("no-response",{url:t.url});return u}l({request:e,logs:t,event:s}){let n;return{promise:new Promise(t=>{n=setTimeout(async()=>{t(await this.p({request:e,event:s}))},1e3*this.o)}),id:n}}async q({timeoutId:e,request:t,logs:r,event:i}){let h,u;try{u=await n.fetchWrapper.fetch({request:t,event:i,fetchOptions:this.i,plugins:this.s})}catch(e){h=e}if(e&&clearTimeout(e),h||!u)u=await this.p({request:t,event:i});else{const e=u.clone(),n=s.cacheWrapper.put({cacheName:this.t,request:t,response:e,event:i,plugins:this.s});if(i)try{i.waitUntil(n)}catch(e){}}return u}p({event:e,request:t}){return s.cacheWrapper.match({cacheName:this.t,request:t,event:e,matchOptions:this.h,plugins:this.s})}}class c{constructor(e={}){this.t=t.cacheNames.getRuntimeName(e.cacheName),this.s=e.plugins||[],this.i=e.fetchOptions||null}async handle({event:e,request:t}){return this.makeRequest({event:e,request:t||e.request})}async makeRequest({event:e,request:t}){let s,i;"string"==typeof t&&(t=new Request(t));try{i=await n.fetchWrapper.fetch({request:t,event:e,fetchOptions:this.i,plugins:this.s})}catch(e){s=e}if(!i)throw new r.WorkboxError("no-response",{url:t.url,error:s});return i}}class o{constructor(e={}){if(this.t=t.cacheNames.getRuntimeName(e.cacheName),this.s=e.plugins||[],e.plugins){let t=e.plugins.some(e=>!!e.cacheWillUpdate);this.s=t?e.plugins:[u,...e.plugins]}else this.s=[u];this.i=e.fetchOptions||null,this.h=e.matchOptions||null}async handle({event:e,request:t}){return this.makeRequest({event:e,request:t||e.request})}async makeRequest({event:e,request:t}){"string"==typeof t&&(t=new Request(t));const n=this.u({request:t,event:e});let i,h=await 
s.cacheWrapper.match({cacheName:this.t,request:t,event:e,matchOptions:this.h,plugins:this.s});if(h){if(e)try{e.waitUntil(n)}catch(i){}}else try{h=await n}catch(e){i=e}if(!h)throw new r.WorkboxError("no-response",{url:t.url,error:i});return h}async u({request:e,event:t}){const r=await n.fetchWrapper.fetch({request:e,event:t,fetchOptions:this.i,plugins:this.s}),i=s.cacheWrapper.put({cacheName:this.t,request:e,response:r.clone(),event:t,plugins:this.s});if(t)try{t.waitUntil(i)}catch(e){}return r}}const l={cacheFirst:i,cacheOnly:h,networkFirst:a,networkOnly:c,staleWhileRevalidate:o},q=e=>{const t=l[e];return e=>new t(e)},w=q("cacheFirst"),p=q("cacheOnly"),v=q("networkFirst"),y=q("networkOnly"),m=q("staleWhileRevalidate");return e.CacheFirst=i,e.CacheOnly=h,e.NetworkFirst=a,e.NetworkOnly=c,e.StaleWhileRevalidate=o,e.cacheFirst=w,e.cacheOnly=p,e.networkFirst=v,e.networkOnly=y,e.staleWhileRevalidate=m,e}({},workbox.core._private,workbox.core._private,workbox.core._private,workbox.core._private);
2 | //# sourceMappingURL=workbox-strategies.prod.js.map
3 |
--------------------------------------------------------------------------------
/dist/workbox-v4.3.1/workbox-streams.prod.js:
--------------------------------------------------------------------------------
1 | this.workbox=this.workbox||{},this.workbox.streams=function(e){"use strict";try{self["workbox:streams:4.3.1"]&&_()}catch(e){}function n(e){const n=e.map(e=>Promise.resolve(e).then(e=>(function(e){return e.body&&e.body.getReader?e.body.getReader():e.getReader?e.getReader():new Response(e).body.getReader()})(e)));let t,r;const s=new Promise((e,n)=>{t=e,r=n});let o=0;return{done:s,stream:new ReadableStream({pull(e){return n[o].then(e=>e.read()).then(r=>{if(r.done)return++o>=n.length?(e.close(),void t()):this.pull(e);e.enqueue(r.value)}).catch(e=>{throw r(e),e})},cancel(){t()}})}}function t(e={}){const n=new Headers(e);return n.has("content-type")||n.set("content-type","text/html"),n}function r(e,r){const{done:s,stream:o}=n(e),a=t(r);return{done:s,response:new Response(o,{headers:a})}}let s=void 0;function o(){if(void 0===s)try{new ReadableStream({start(){}}),s=!0}catch(e){s=!1}return s}return e.concatenate=n,e.concatenateToResponse=r,e.isSupported=o,e.strategy=function(e,n){return async({event:s,url:a,params:c})=>{if(o()){const{done:t,response:o}=r(e.map(e=>e({event:s,url:a,params:c})),n);return s.waitUntil(t),o}const i=await Promise.all(e.map(e=>e({event:s,url:a,params:c})).map(async e=>{const n=await e;return n instanceof Response?n.blob():n})),u=t(n);return new Response(new Blob(i),{headers:u})}},e}({});
2 | //# sourceMappingURL=workbox-streams.prod.js.map
3 |
--------------------------------------------------------------------------------
/dist/workbox-v4.3.1/workbox-sw.js:
--------------------------------------------------------------------------------
1 | !function(){"use strict";try{self["workbox:sw:4.3.1"]&&_()}catch(t){}const t="https://storage.googleapis.com/workbox-cdn/releases/4.3.1",e={backgroundSync:"background-sync",broadcastUpdate:"broadcast-update",cacheableResponse:"cacheable-response",core:"core",expiration:"expiration",googleAnalytics:"offline-ga",navigationPreload:"navigation-preload",precaching:"precaching",rangeRequests:"range-requests",routing:"routing",strategies:"strategies",streams:"streams"};self.workbox=new class{constructor(){return this.v={},this.t={debug:"localhost"===self.location.hostname,modulePathPrefix:null,modulePathCb:null},this.s=this.t.debug?"dev":"prod",this.o=!1,new Proxy(this,{get(t,s){if(t[s])return t[s];const o=e[s];return o&&t.loadModule(`workbox-${o}`),t[s]}})}setConfig(t={}){if(this.o)throw new Error("Config must be set before accessing workbox.* modules");Object.assign(this.t,t),this.s=this.t.debug?"dev":"prod"}loadModule(t){const e=this.i(t);try{importScripts(e),this.o=!0}catch(s){throw console.error(`Unable to import module '${t}' from '${e}'.`),s}}i(e){if(this.t.modulePathCb)return this.t.modulePathCb(e,this.t.debug);let s=[t];const o=`${e}.${this.s}.js`,r=this.t.modulePathPrefix;return r&&""===(s=r.split("/"))[s.length-1]&&s.splice(s.length-1,1),s.push(o),s.join("/")}}}();
2 | //# sourceMappingURL=workbox-sw.js.map
3 |
--------------------------------------------------------------------------------
/dist/workbox-v4.3.1/workbox-sw.js.map:
--------------------------------------------------------------------------------
1 | {"version":3,"file":"workbox-sw.js","sources":["../_version.mjs","../controllers/WorkboxSW.mjs","../index.mjs"],"sourcesContent":["try{self['workbox:sw:4.3.1']&&_()}catch(e){}// eslint-disable-line","/*\n Copyright 2018 Google LLC\n\n Use of this source code is governed by an MIT-style\n license that can be found in the LICENSE file or at\n https://opensource.org/licenses/MIT.\n*/\n\nimport '../_version.mjs';\n\nconst CDN_PATH = `WORKBOX_CDN_ROOT_URL`;\n\nconst MODULE_KEY_TO_NAME_MAPPING = {\n // TODO(philipwalton): add jsdoc tags to associate these with their module.\n // @name backgroundSync\n // @memberof workbox\n // @see module:workbox-background-sync\n backgroundSync: 'background-sync',\n broadcastUpdate: 'broadcast-update',\n cacheableResponse: 'cacheable-response',\n core: 'core',\n expiration: 'expiration',\n googleAnalytics: 'offline-ga',\n navigationPreload: 'navigation-preload',\n precaching: 'precaching',\n rangeRequests: 'range-requests',\n routing: 'routing',\n strategies: 'strategies',\n streams: 'streams',\n};\n\n/**\n * This class can be used to make it easy to use the various parts of\n * Workbox.\n *\n * @private\n */\nexport class WorkboxSW {\n /**\n * Creates a proxy that automatically loads workbox namespaces on demand.\n *\n * @private\n */\n constructor() {\n this.v = {};\n this._options = {\n debug: self.location.hostname === 'localhost',\n modulePathPrefix: null,\n modulePathCb: null,\n };\n\n this._env = this._options.debug ? 'dev' : 'prod';\n this._modulesLoaded = false;\n\n return new Proxy(this, {\n get(target, key) {\n if (target[key]) {\n return target[key];\n }\n\n const moduleName = MODULE_KEY_TO_NAME_MAPPING[key];\n if (moduleName) {\n target.loadModule(`workbox-${moduleName}`);\n }\n\n return target[key];\n },\n });\n }\n\n /**\n * Updates the configuration options. 
You can specify whether to treat as a\n * debug build and whether to use a CDN or a specific path when importing\n * other workbox-modules\n *\n * @param {Object} [options]\n * @param {boolean} [options.debug] If true, `dev` builds are using, otherwise\n * `prod` builds are used. By default, `prod` is used unless on localhost.\n * @param {Function} [options.modulePathPrefix] To avoid using the CDN with\n * `workbox-sw` set the path prefix of where modules should be loaded from.\n * For example `modulePathPrefix: '/third_party/workbox/v3.0.0/'`.\n * @param {workbox~ModulePathCallback} [options.modulePathCb] If defined,\n * this callback will be responsible for determining the path of each\n * workbox module.\n *\n * @alias workbox.setConfig\n */\n setConfig(options = {}) {\n if (!this._modulesLoaded) {\n Object.assign(this._options, options);\n this._env = this._options.debug ? 'dev' : 'prod';\n } else {\n throw new Error('Config must be set before accessing workbox.* modules');\n }\n }\n\n /**\n * Load a Workbox module by passing in the appropriate module name.\n *\n * This is not generally needed unless you know there are modules that are\n * dynamically used and you want to safe guard use of the module while the\n * user may be offline.\n *\n * @param {string} moduleName\n *\n * @alias workbox.loadModule\n */\n loadModule(moduleName) {\n const modulePath = this._getImportPath(moduleName);\n try {\n importScripts(modulePath);\n this._modulesLoaded = true;\n } catch (err) {\n // TODO Add context of this error if using the CDN vs the local file.\n\n // We can't rely on workbox-core being loaded so using console\n // eslint-disable-next-line\n console.error(\n `Unable to import module '${moduleName}' from '${modulePath}'.`);\n throw err;\n }\n }\n\n /**\n * This method will get the path / CDN URL to be used for importScript calls.\n *\n * @param {string} moduleName\n * @return {string} URL to the desired module.\n *\n * @private\n */\n _getImportPath(moduleName) {\n 
if (this._options.modulePathCb) {\n return this._options.modulePathCb(moduleName, this._options.debug);\n }\n\n // TODO: This needs to be dynamic some how.\n let pathParts = [CDN_PATH];\n\n const fileName = `${moduleName}.${this._env}.js`;\n\n const pathPrefix = this._options.modulePathPrefix;\n if (pathPrefix) {\n // Split to avoid issues with developers ending / not ending with slash\n pathParts = pathPrefix.split('/');\n\n // We don't need a slash at the end as we will be adding\n // a filename regardless\n if (pathParts[pathParts.length - 1] === '') {\n pathParts.splice(pathParts.length - 1, 1);\n }\n }\n\n pathParts.push(fileName);\n\n return pathParts.join('/');\n }\n}\n","/*\n Copyright 2018 Google LLC\n\n Use of this source code is governed by an MIT-style\n license that can be found in the LICENSE file or at\n https://opensource.org/licenses/MIT.\n*/\n\nimport {WorkboxSW} from './controllers/WorkboxSW.mjs';\nimport './_version.mjs';\n\n/**\n * @namespace workbox\n */\n\n// Don't export anything, just expose a global.\nself.workbox = new 
WorkboxSW();\n"],"names":["self","_","e","CDN_PATH","MODULE_KEY_TO_NAME_MAPPING","backgroundSync","broadcastUpdate","cacheableResponse","core","expiration","googleAnalytics","navigationPreload","precaching","rangeRequests","routing","strategies","streams","workbox","constructor","v","_options","debug","location","hostname","modulePathPrefix","modulePathCb","_env","this","_modulesLoaded","Proxy","get","target","key","moduleName","loadModule","setConfig","options","Error","Object","assign","modulePath","_getImportPath","importScripts","err","console","error","pathParts","fileName","pathPrefix","split","length","splice","push","join"],"mappings":"yBAAA,IAAIA,KAAK,qBAAqBC,IAAI,MAAMC,ICUxC,MAAMC,EAAY,4DAEZC,EAA6B,CAKjCC,eAAgB,kBAChBC,gBAAiB,mBACjBC,kBAAmB,qBACnBC,KAAM,OACNC,WAAY,aACZC,gBAAiB,aACjBC,kBAAmB,qBACnBC,WAAY,aACZC,cAAe,iBACfC,QAAS,UACTC,WAAY,aACZC,QAAS,WCZXhB,KAAKiB,QAAU,IDqBR,MAMLC,0BACOC,EAAI,QACJC,EAAW,CACdC,MAAkC,cAA3BrB,KAAKsB,SAASC,SACrBC,iBAAkB,KAClBC,aAAc,WAGXC,EAAOC,KAAKP,EAASC,MAAQ,MAAQ,YACrCO,GAAiB,EAEf,IAAIC,MAAMF,KAAM,CACrBG,IAAIC,EAAQC,MACND,EAAOC,UACFD,EAAOC,SAGVC,EAAa7B,EAA2B4B,UAC1CC,GACFF,EAAOG,sBAAsBD,KAGxBF,EAAOC,MAsBpBG,UAAUC,EAAU,OACbT,KAAKC,QAIF,IAAIS,MAAM,yDAHhBC,OAAOC,OAAOZ,KAAKP,EAAUgB,QACxBV,EAAOC,KAAKP,EAASC,MAAQ,MAAQ,OAiB9Ca,WAAWD,SACHO,EAAab,KAAKc,EAAeR,OAErCS,cAAcF,QACTZ,GAAiB,EACtB,MAAOe,SAKPC,QAAQC,kCACwBZ,YAAqBO,OAC/CG,GAYVF,EAAeR,MACTN,KAAKP,EAASK,oBACTE,KAAKP,EAASK,aAAaQ,EAAYN,KAAKP,EAASC,WAI1DyB,EAAY,CAAC3C,SAEX4C,KAAcd,KAAcN,KAAKD,OAEjCsB,EAAarB,KAAKP,EAASI,wBAC7BwB,GAMsC,MAJxCF,EAAYE,EAAWC,MAAM,MAIfH,EAAUI,OAAS,IAC/BJ,EAAUK,OAAOL,EAAUI,OAAS,EAAG,GAI3CJ,EAAUM,KAAKL,GAERD,EAAUO,KAAK"}
--------------------------------------------------------------------------------
/dist/workbox-v4.3.1/workbox-window.prod.es5.mjs:
--------------------------------------------------------------------------------
1 | try{self["workbox:window:4.3.1"]&&_()}catch(n){}var n=function(n,t){return new Promise(function(i){var e=new MessageChannel;e.port1.onmessage=function(n){return i(n.data)},n.postMessage(t,[e.port2])})};function t(n,t){for(var i=0;i0||!r(n.scriptURL,this.t)||performance.now()>this.L+6e4?(this.W=n,this.B.removeEventListener("updatefound",this.g)):(this.O=n,this.u.resolve(n)),++this.o,n.addEventListener("statechange",this.l)},d.l=function(n){var t=this,i=n.target,e=i.state,r=i===this.W,u=r?"external":"",a={sw:i,originalEvent:n};!r&&this.p&&(a.isUpdate=!0),this.dispatchEvent(new o(u+e,a)),"installed"===e?this._=setTimeout(function(){"installed"===e&&t.B.waiting===i&&t.dispatchEvent(new o(u+"waiting",a))},200):"activating"===e&&(clearTimeout(this._),r||this.s.resolve(i))},d.m=function(n){var t=this.O;t===navigator.serviceWorker.controller&&(this.dispatchEvent(new o("controlling",{sw:t,originalEvent:n})),this.h.resolve(t))},d.v=function(n){var t=n.data;this.dispatchEvent(new o("message",{data:t,originalEvent:n}))},l=v,(w=[{key:"active",get:function(){return this.s.promise}},{key:"controlling",get:function(){return this.h.promise}}])&&t(l.prototype,w),g&&t(l,g),v}(function(){function n(){this.D={}}var t=n.prototype;return t.addEventListener=function(n,t){this.T(n).add(t)},t.removeEventListener=function(n,t){this.T(n).delete(t)},t.dispatchEvent=function(n){n.target=this,this.T(n.type).forEach(function(t){return t(n)})},t.T=function(n){return this.D[n]=this.D[n]||new Set},n}());export{c as Workbox,n as messageSW};
2 | //# sourceMappingURL=workbox-window.prod.es5.mjs.map
3 |
--------------------------------------------------------------------------------
/dist/workbox-v4.3.1/workbox-window.prod.mjs:
--------------------------------------------------------------------------------
1 | try{self["workbox:window:4.3.1"]&&_()}catch(t){}const t=(t,s)=>new Promise(i=>{let e=new MessageChannel;e.port1.onmessage=(t=>i(t.data)),t.postMessage(s,[e.port2])});try{self["workbox:core:4.3.1"]&&_()}catch(t){}class s{constructor(){this.promise=new Promise((t,s)=>{this.resolve=t,this.reject=s})}}class i{constructor(){this.t={}}addEventListener(t,s){this.s(t).add(s)}removeEventListener(t,s){this.s(t).delete(s)}dispatchEvent(t){t.target=this,this.s(t.type).forEach(s=>s(t))}s(t){return this.t[t]=this.t[t]||new Set}}const e=(t,s)=>new URL(t,location).href===new URL(s,location).href;class n{constructor(t,s){Object.assign(this,s,{type:t})}}const h=200,a=6e4;class o extends i{constructor(t,i={}){super(),this.i=t,this.h=i,this.o=0,this.l=new s,this.g=new s,this.u=new s,this.m=this.m.bind(this),this.v=this.v.bind(this),this.p=this.p.bind(this),this._=this._.bind(this)}async register({immediate:t=!1}={}){t||"complete"===document.readyState||await new Promise(t=>addEventListener("load",t)),this.C=Boolean(navigator.serviceWorker.controller),this.W=this.L(),this.S=await this.B(),this.W&&(this.R=this.W,this.g.resolve(this.W),this.u.resolve(this.W),this.P(this.W),this.W.addEventListener("statechange",this.v,{once:!0}));const s=this.S.waiting;return s&&e(s.scriptURL,this.i)&&(this.R=s,Promise.resolve().then(()=>{this.dispatchEvent(new n("waiting",{sw:s,wasWaitingBeforeRegister:!0}))})),this.R&&this.l.resolve(this.R),this.S.addEventListener("updatefound",this.p),navigator.serviceWorker.addEventListener("controllerchange",this._,{once:!0}),"BroadcastChannel"in self&&(this.T=new BroadcastChannel("workbox"),this.T.addEventListener("message",this.m)),navigator.serviceWorker.addEventListener("message",this.m),this.S}get active(){return this.g.promise}get controlling(){return this.u.promise}async getSW(){return this.R||this.l.promise}async messageSW(s){const i=await this.getSW();return t(i,s)}L(){const t=navigator.serviceWorker.controller;if(t&&e(t.scriptURL,this.i))return t}async 
B(){try{const t=await navigator.serviceWorker.register(this.i,this.h);return this.U=performance.now(),t}catch(t){throw t}}P(s){t(s,{type:"WINDOW_READY",meta:"workbox-window"})}p(){const t=this.S.installing;this.o>0||!e(t.scriptURL,this.i)||performance.now()>this.U+a?(this.k=t,this.S.removeEventListener("updatefound",this.p)):(this.R=t,this.l.resolve(t)),++this.o,t.addEventListener("statechange",this.v)}v(t){const s=t.target,{state:i}=s,e=s===this.k,a=e?"external":"",o={sw:s,originalEvent:t};!e&&this.C&&(o.isUpdate=!0),this.dispatchEvent(new n(a+i,o)),"installed"===i?this.D=setTimeout(()=>{"installed"===i&&this.S.waiting===s&&this.dispatchEvent(new n(a+"waiting",o))},h):"activating"===i&&(clearTimeout(this.D),e||this.g.resolve(s))}_(t){const s=this.R;s===navigator.serviceWorker.controller&&(this.dispatchEvent(new n("controlling",{sw:s,originalEvent:t})),this.u.resolve(s))}m(t){const{data:s}=t;this.dispatchEvent(new n("message",{data:s,originalEvent:t}))}}export{o as Workbox,t as messageSW};
2 | //# sourceMappingURL=workbox-window.prod.mjs.map
3 |
--------------------------------------------------------------------------------
/dist/workbox-v4.3.1/workbox-window.prod.umd.js:
--------------------------------------------------------------------------------
1 | !function(n,t){"object"==typeof exports&&"undefined"!=typeof module?t(exports):"function"==typeof define&&define.amd?define(["exports"],t):t((n=n||self).workbox={})}(this,function(n){"use strict";try{self["workbox:window:4.3.1"]&&_()}catch(n){}var t=function(n,t){return new Promise(function(i){var e=new MessageChannel;e.port1.onmessage=function(n){return i(n.data)},n.postMessage(t,[e.port2])})};function i(n,t){for(var i=0;i0||!o(n.scriptURL,this.t)||performance.now()>this.C+6e4?(this.L=n,this.R.removeEventListener("updatefound",this.g)):(this._=n,this.u.resolve(n)),++this.o,n.addEventListener("statechange",this.l)},g.l=function(n){var t=this,i=n.target,e=i.state,r=i===this.L,o=r?"external":"",s={sw:i,originalEvent:n};!r&&this.p&&(s.isUpdate=!0),this.dispatchEvent(new u(o+e,s)),"installed"===e?this.W=setTimeout(function(){"installed"===e&&t.R.waiting===i&&t.dispatchEvent(new u(o+"waiting",s))},200):"activating"===e&&(clearTimeout(this.W),r||this.s.resolve(i))},g.m=function(n){var t=this._;t===navigator.serviceWorker.controller&&(this.dispatchEvent(new u("controlling",{sw:t,originalEvent:n})),this.h.resolve(t))},g.v=function(n){var t=n.data;this.dispatchEvent(new u("message",{data:t,originalEvent:n}))},l=v,(w=[{key:"active",get:function(){return this.s.promise}},{key:"controlling",get:function(){return this.h.promise}}])&&i(l.prototype,w),d&&i(l,d),v}(function(){function n(){this.D={}}var t=n.prototype;return t.addEventListener=function(n,t){this.M(n).add(t)},t.removeEventListener=function(n,t){this.M(n).delete(t)},t.dispatchEvent=function(n){n.target=this,this.M(n.type).forEach(function(t){return t(n)})},t.M=function(n){return this.D[n]=this.D[n]||new Set},n}());n.Workbox=f,n.messageSW=t,Object.defineProperty(n,"__esModule",{value:!0})});
2 | //# sourceMappingURL=workbox-window.prod.umd.js.map
3 |
--------------------------------------------------------------------------------
/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: '3.8'
2 | services:
3 | kikoeru:
4 | ports:
5 | - '8888:8888'
6 | volumes:
7 | # Change the next line to your location of storage
8 | # e.g. /storage/DLsite/xxx:/usr/src/kikoeru/VoiceWork
9 | # Then fill in the location /usr/src/kikoeru/VoiceWork in the admin UI
10 | - ./VoiceWork:/usr/src/kikoeru/VoiceWork
11 | - ./sqlite:/usr/src/kikoeru/sqlite
12 | - ./covers:/usr/src/kikoeru/covers
13 | - ./config:/usr/src/kikoeru/config
14 | image: 'kirieharuna/kikoeru'
15 | restart: always
16 |
--------------------------------------------------------------------------------
/filesystem/scanner.js:
--------------------------------------------------------------------------------
// Standalone CLI entry point: kick off a full library scan.
const { performScan } = require('./scannerModules')
// NOTE(review): the return value is ignored — if performScan is async
// (its sibling performUpdate returns a promise), rejections here are
// unhandled; confirm against scannerModules.
performScan();
3 |
--------------------------------------------------------------------------------
/filesystem/updater.js:
--------------------------------------------------------------------------------
// Standalone CLI entry point: refresh work metadata in the database.
const yargs = require('yargs/yargs')
const { hideBin } = require('yargs/helpers')
const { performUpdate } = require('./scannerModules')

const argv = yargs(hideBin(process.argv))
  .option('refreshAll', {
    alias: 'all',
    description: 'Refresh both dynamic and static metadata',
    type: 'boolean',
  })
  .option('includeNSFW', {
    alias: 'nsfw',
    description: 'Refresh dynamic metadata and nsfw field',
    type: 'boolean',
  })
  .option('includeTags', {
    alias: 'tags',
    description: 'Refresh dynamic metadata and tags',
    type: 'boolean',
  })
  .option('includeVA', {
    alias: 'vas',
    description: 'Refresh dynamic metadata and voice actors',
    type: 'boolean',
  })
  .argv;

// Only one refresh mode is honored, in priority order:
// refreshAll > includeNSFW > includeTags > includeVA.
const updateOptions = {};

if (argv.refreshAll) {
  updateOptions.refreshAll = true
} else if (argv.includeNSFW) {
  updateOptions.includeNSFW = true
} else if (argv.includeTags) {
  updateOptions.includeTags = true
} else if (argv.includeVA) {
  updateOptions.includeVA = true
}

performUpdate(updateOptions)
  .then(() => {
    process.exit(0);
  })
  .catch(err => {
    // Fix: rethrowing inside .catch() produced an unhandled rejection,
    // which on Node 12 (the minimum supported engine) only prints a
    // warning and exits with code 0, masking failures from shell scripts
    // and CI. Log the error and exit non-zero instead.
    console.error(err);
    process.exit(1);
  });
47 |
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "kikoeru-express",
3 | "version": "0.6.2",
4 | "description": "A self-hosted media server for listening to your DLsite voice works.",
5 | "homepage": "https://github.com/kikoeru-project/kikoeru-express",
6 | "repository": {
7 | "type": "git",
8 | "url": "git@github.com:kikoeru-project/kikoeru-express.git"
9 | },
10 | "keywords": [
11 | "kikoeru"
12 | ],
13 | "engines": {
14 | "node": ">=12.0.0"
15 | },
16 | "bin": {
17 | "kikoeru-express": "app.js"
18 | },
19 | "pkg": {
20 | "scripts": [
21 | "filesystem/scanner.js",
22 | "filesystem/updater.js",
23 | "dist/**/*.js",
24 | "database/migrations/*",
25 | "database/knexfile.js",
26 | "database/storage.js"
27 | ],
28 | "assets": [
29 | "dist/**/*",
30 | "static/**/*"
31 | ]
32 | },
33 | "nodemonConfig": {
34 | "ignore": [
35 | "config/*",
36 | "sqlite/*"
37 | ],
38 | "delay": "2500"
39 | },
40 | "scripts": {
41 | "start": "node --trace-warnings app.js",
42 | "dev": "nodemon --trace-warnings app.js",
43 | "scan": "node --trace-warnings ./filesystem/scanner.js",
44 | "build": "pkg package.json --out-path package",
45 | "test": "cross-env eslint . && cross-env NODE_ENV=test mocha test/"
46 | },
47 | "author": "Watanuki-Kimihiro",
48 | "contributors": [
49 | "umonaca "
50 | ],
51 | "license": "GPL-3.0-or-later",
52 | "dependencies": {
53 | "@umonaca/umzug": "^2.3.1",
54 | "axios": "^0.21.1",
55 | "bluebird": "^3.7.2",
56 | "body-parser": "^1.19.0",
57 | "cheerio": "^1.0.0-rc.6",
58 | "compare-versions": "^3.6.0",
59 | "compression": "^1.7.4",
60 | "connect-history-api-fallback": "^1.6.0",
61 | "crypto": "^1.0.1",
62 | "dotenv": "^8.2.0",
63 | "express": "^4.17.1",
64 | "express-jwt": "^6.0.0",
65 | "express-validator": "^6.10.1",
66 | "htmlparser2": "^4.1.0",
67 | "https": "^1.0.0",
68 | "invariant": "^2.2.4",
69 | "jschardet": "^3.0.0",
70 | "jsonwebtoken": "^8.5.1",
71 | "knex": "^0.21.17",
72 | "knex-migrate": "^1.7.4",
73 | "limit-promise": "^1.0.6",
74 | "lodash": "^4.17.21",
75 | "md5": "^2.3.0",
76 | "natural-orderby": "^2.0.3",
77 | "recursive-readdir": "^2.2.2",
78 | "redis": "^4.6.10",
79 | "socket.io": "^2.4.1",
80 | "socketio-jwt-auth": "^0.2.1",
81 | "sqlite3": "~5.0.2",
82 | "string-random": "^0.1.3",
83 | "tunnel-agent": "^0.6.0",
84 | "url-join": "^4.0.1",
85 | "uuid": "^8.3.2",
86 | "yargs": "^16.2.0"
87 | },
88 | "devDependencies": {
89 | "chai": "^4.3.4",
90 | "chai-string": "^1.5.0",
91 | "cross-env": "^7.0.3",
92 | "eslint": "^7.25.0",
93 | "eslint-plugin-node": "^11.1.0",
94 | "mocha": "^8.3.2",
95 | "serve-index": "^1.9.1"
96 | }
97 | }
98 |
--------------------------------------------------------------------------------
/routes/auth.js:
--------------------------------------------------------------------------------
1 | const express = require('express');
2 | const { check, validationResult } = require('express-validator'); // 后端校验
3 | const expressJwt = require('express-jwt'); // 把 JWT 的 payload 部分赋值于 req.user
4 |
5 | const { signToken, md5 } = require('../auth/utils');
6 | const db = require('../database/db');
7 |
8 | const { config } = require('../config');
9 |
10 | const router = express.Router();
11 |
// User login: verify the supplied credentials and issue a JWT on success.
router.post('/me', [
  check('name')
    .isLength({ min: 5 })
    .withMessage('用户名长度至少为 5'),
  check('password')
    .isLength({ min: 5 })
    .withMessage('密码长度至少为 5')
  // eslint-disable-next-line no-unused-vars
], async (req, res, next) => {
  // Bail out early when back-end validation failed.
  const errors = validationResult(req);
  if (!errors.isEmpty()) {
    return res.status(422).send({ errors: errors.array() });
  }

  const { name, password } = req.body;

  try {
    // NOTE(review): passwords are compared as unsalted md5 hashes — weak
    // by modern standards; confirm whether an upgrade path exists.
    const user = await db.knex('t_user')
      .where('name', '=', name)
      .andWhere('password', '=', md5(password))
      .first();

    if (user) {
      const token = signToken(user);
      res.send({ token });
    } else {
      res.set("WWW-Authenticate", "Bearer realm=\"Authorization Required\"");
      res.status(401).send({error: '用户名或密码错误.'});
    }
  } catch (err) {
    console.error(err);
    res.status(500).send({error: '服务器错误'});
    // next(err);
  }
});
50 |
// When authentication is enabled, protect GET /me with the JWT middleware
// so the handler below can read the decoded payload from req.user.
// When auth is disabled this middleware is never registered at all.
if (config.auth) {
  router.get('/me', expressJwt({ secret: config.jwtsecret, algorithms: ['HS256'] }));
}
54 |
// Return the current user's identity, and tell the client whether the
// server has authentication enabled at all.
// eslint-disable-next-line no-unused-vars
router.get('/me', (req, res, next) => {
  const auth = config.auth;
  let user;
  if (auth) {
    user = { name: req.user.name, group: req.user.group };
  } else {
    // Auth disabled: every request acts as the built-in administrator.
    user = { name: 'admin', group: 'administrator' };
  }
  res.send({ user, auth });
});
65 |
66 | module.exports = router;
67 |
--------------------------------------------------------------------------------
/routes/config.js:
--------------------------------------------------------------------------------
1 | const _ = require('lodash');
2 | const express = require('express');
3 | const router = express.Router();
4 | const { config, setConfig, sharedConfigHandle } = require('../config');
5 |
/**
 * Produce a sanitized deep copy of a config object for transfer over the API.
 * Secrets (md5secret, jwtsecret) are always stripped. For 'write' operations
 * the read-only `production` flag is dropped, and `auth` cannot be toggled
 * while the server runs in production mode.
 */
const filterConfig = (_config, option = 'read') => {
  const sanitized = _.cloneDeep(_config);
  delete sanitized.md5secret;
  delete sanitized.jwtsecret;

  if (option !== 'write') {
    return sanitized;
  }

  delete sanitized.production;
  const inProduction = process.env.NODE_ENV === 'production' || config.production;
  if (inProduction) {
    delete sanitized.auth;
  }
  return sanitized;
}
19 |
// Update the configuration file (admin only when auth is enabled).
router.put('/admin', (req, res, next) => {
  const isAdmin = !config.auth || req.user.name === 'admin';
  if (!isAdmin) {
    return res.status(403).send({ error: '只有 admin 账号能修改配置文件.' });
  }
  try {
    // Note: setConfig uses Object.assign to merge new configs
    setConfig(filterConfig(req.body.config, 'write'));
    res.send({ message: '保存成功.' })
  } catch(err) {
    next(err);
  }
});
34 |
// Read back the (sanitized) configuration file (admin only when auth is enabled).
router.get('/admin', (req, res, next) => {
  if (config.auth && req.user.name !== 'admin') {
    res.status(403).send({ error: '只有 admin 账号能读取管理配置文件.' });
    return;
  }
  try {
    res.send({ config: filterConfig(config, 'read') });
  } catch(err) {
    next(err);
  }
});
47 |
// Public endpoint: export the subset of configuration shared with all clients
// (no auth check — sharedConfigHandle decides what is safe to expose).
router.get('/shared', (req, res, next) => {
  try {
    res.send({ sharedConfig: sharedConfigHandle.export() });
  } catch(err) {
    next(err);
  }
});
55 |
56 | module.exports = router;
--------------------------------------------------------------------------------
/routes/credentials.js:
--------------------------------------------------------------------------------
1 | const express = require('express');
2 | const router = express.Router();
3 | const { check, validationResult } = require('express-validator'); // 后端校验
4 | const { md5 } = require('../auth/utils');
5 | const { config} = require('../config');
6 | const db = require('../database/db');
7 |
8 |
// Create a new user (only the admin account is allowed to do this).
router.post('/user', [
  check('name')
    .isLength({ min: 5 })
    .withMessage('用户名长度至少为 5'),
  check('password')
    .isLength({ min: 5 })
    .withMessage('密码长度至少为 5'),
  check('group')
    .custom(value => {
      if (value !== 'user' && value !== 'guest') {
        throw new Error(`用户组名称必须为 ['user', 'guest'] 的一个.`)
      }
      return true
    })
], (req, res, next) => {
  // Bail out early when back-end validation failed.
  const errors = validationResult(req);
  if (!errors.isEmpty()) {
    return res.status(422).send({ errors: errors.array() });
  }

  if (config.auth && req.user.name !== 'admin') {
    return res.status(403).send({ error: '只有 admin 账号能创建新用户.' });
  }

  const { name, password, group } = req.body;
  db.createUser({
    name,
    password: md5(password),
    group
  })
    .then(() => res.send({ message: `用户 ${name} 创建成功.` }))
    .catch((err) => {
      // db.createUser signals a duplicate user through its error message.
      if (err.message.indexOf('已存在') !== -1) {
        res.status(403).send({ error: err.message });
      } else {
        next(err);
      }
    });
});
55 |
// Change a user's password. The admin may change anyone's password;
// regular users may only change their own.
router.put('/user', [
  check('name')
    .isLength({ min: 5 })
    .withMessage('用户名长度至少为 5'),
  check('newPassword')
    .isLength({ min: 5 })
    .withMessage('密码长度至少为 5')
], (req, res, next) => {
  // Bail out early when back-end validation failed.
  const errors = validationResult(req);
  if (!errors.isEmpty()) {
    return res.status(422).json({errors: errors.array()});
  }

  const user = { name: req.body.name };
  const newPassword = md5(req.body.newPassword);

  const allowed = !config.auth
    || req.user.name === 'admin'
    || req.user.name === user.name;
  if (!allowed) {
    return res.status(403).send({ error: '只能修改自己账号的密码.' });
  }

  db.updateUserPassword(user, newPassword)
    .then(() => res.send({ message: '密码修改成功.' }))
    .catch((err) => {
      // db.updateUserPassword reports an unknown user through its error message.
      if (err.message.indexOf('用户名错误.') !== -1) {
        res.status(403).send({ error: '用户名错误.' });
      } else {
        next(err);
      }
    });
});
90 |
// Delete users (admin only). The built-in admin account itself is protected.
router.delete('/user', (req, res, next) => {
  const users = req.body.users;

  if (config.auth && req.user.name !== 'admin') {
    return res.status(403).send({ error: '只有 admin 账号能删除用户.' });
  }

  // Fix: a missing or non-array `users` body previously crashed inside
  // users.find() and surfaced as a 500; reject malformed requests with 422
  // instead, consistent with the validator responses elsewhere in this file.
  if (!Array.isArray(users)) {
    return res.status(422).send({ error: '请求参数错误: users 必须为数组.' });
  }

  if (users.find(user => user.name === 'admin')) {
    return res.status(403).send({ error: '不能删除内置的管理员账号.' });
  }

  db.deleteUser(users)
    .then(() => {
      res.send({ message: '删除成功.' });
    })
    .catch((err) => {
      next(err);
    });
});
111 |
// List all users (admin only). Only name and group are selected —
// password hashes never leave the database layer here.
router.get('/users', (req, res, next) => {
  if (config.auth && req.user.name !== 'admin') {
    res.status(403).send({ error: '只有 admin 账号能浏览用户.' });
    return;
  }
  db.knex('t_user')
    .select('name', 'group')
    .then((users) => res.send({ users }))
    .catch(next);
});
127 |
128 | module.exports = router;
--------------------------------------------------------------------------------
/routes/index.js:
--------------------------------------------------------------------------------
// Top-level API router: mounts every feature router under its prefix.
const express = require('express');
const router = express.Router();

// Health check endpoint
router.get('/health', (req, res) => {
  res.send('OK');
})

// Eliminate error message from old PWA
// Will be deleted in the future
router.get('/me', (req, res) => {
  res.redirect('/api/auth/me');
})

// Feature routers, each scoped to its own prefix.
router.use('/auth', require('./auth'));
router.use('/credentials', require('./credentials'));
router.use('/version', require('./version'));
router.use('/config', require('./config'));
router.use('/media', require('./media'));
router.use('/review', require('./review'));
// Other routes
// Mounted last at '/' so it only sees paths no earlier router claimed.
router.use('/', require('./metadata'));

module.exports = router;
--------------------------------------------------------------------------------
/routes/review.js:
--------------------------------------------------------------------------------
1 | const express = require('express');
2 | const router = express.Router();
3 | const { query, body } = require('express-validator');
4 | const { config } = require('../config');
5 | const db = require('../database/db');
6 | const normalize = require('./utils/normalize');
7 | const { isValidRequest } = require('./utils/validate');
8 |
9 | const PAGE_SIZE = config.pageSize || 12;
10 |
11 |
// List works together with the current user's reviews/progress, paginated.
router.get('/',
  query('page').optional({nullable: true}).isInt(),
  query('sort').optional({nullable: true}).isIn(['desc', 'asc']),
  query('seed').optional({nullable: true}).isInt(),
  query('filter').optional({nullable: true}).isIn(['marked', 'listening', 'listened', 'replay', 'postponed']),
  // Fix: `order` was read below and forwarded to the DB layer without any
  // validation, unlike every other query parameter. Restrict it to the
  // sortable columns the original in-code comment enumerated so arbitrary
  // identifiers never reach the query builder.
  query('order').optional({nullable: true}).isIn(['id', 'release', 'rating', 'dl_count', 'review_count', 'price', 'rate_average_2dp', 'nsfw']),
  // eslint-disable-next-line no-unused-vars
  async (req, res, next) => {
    if(!isValidRequest(req, res)) return;

    const currentPage = parseInt(req.query.page) || 1;
    // Sortable columns: work id, release date, rating, user rating, sales
    // count, review count, price, average rating, all-ages flag.
    // ['id', 'release', 'rating', 'dl_count', 'review_count', 'price', 'rate_average_2dp', 'nsfw']
    const order = req.query.order || 'release';
    const sort = req.query.sort || 'desc';
    const offset = (currentPage - 1) * PAGE_SIZE;
    // When auth is disabled every request acts as the built-in admin account.
    const username = config.auth ? req.user.name : 'admin';
    const filter = req.query.filter;

    try {
      const {works, totalCount} = await db.getWorksWithReviews({username: username, limit: PAGE_SIZE, offset: offset, orderBy: order, sortOption: sort, filter});

      normalize(works, {dateOnly: true});

      res.send({
        works,
        pagination: {
          currentPage,
          pageSize: PAGE_SIZE,
          totalCount: totalCount[0]['count']
        }
      });
    } catch(err) {
      res.status(500).send({error: '查询过程中出错'});
      console.error(err)
    }
  });
48 |
// Submit or update the current user's rating / review / progress for a work.
router.put('/',
  body('work_id').isInt(),
  body('rating').optional({nullable: true}).isInt(),
  body('progress').optional({nullable: true}).isIn(['marked', 'listening', 'listened', 'replay', 'postponed']),
  body('starOnly').optional({nullable: true}).isBoolean(),
  body('progressOnly').optional({nullable: true}).isBoolean(),
  // eslint-disable-next-line no-unused-vars
  (req, res, next) => {
    if(!isValidRequest(req, res)) return;

    let username = config.auth ? req.user.name : 'admin';
    // Fix: starOnly/progressOnly are validated on the request *body* above,
    // but were previously read from the *query string* only, so the validated
    // values were silently ignored. Prefer the body value and keep the query
    // string as a backward-compatible fallback for older clients.
    const rawStarOnly = req.body.starOnly !== undefined ? req.body.starOnly : req.query.starOnly;
    const rawProgressOnly = req.body.progressOnly !== undefined ? req.body.progressOnly : req.query.progressOnly;
    let starOnly = true;
    let progressOnly = false;
    // Accept both real booleans (JSON body) and string flags (query string).
    if (rawStarOnly === false || rawStarOnly === 'false') {
      starOnly = false;
    }
    if (rawProgressOnly === true || rawProgressOnly === 'true') {
      progressOnly = true
    }

    db.updateUserReview(username, req.body.work_id, req.body.rating, req.body.review_text, req.body.progress, starOnly, progressOnly)
      .then(() => {
        if (progressOnly) {
          res.send({ message: '更新进度成功' });
        } else {
          res.send({ message: '评价成功' });
        }
      }).catch((err) =>{
        res.status(500).send({ error: '评价失败,服务器错误' });
        console.error(err);
      })
  });
82 |
// Remove the current user's review / progress marker for a work.
router.delete('/',
  query('work_id').isInt(),
  async (req, res, next) => {
    if(!isValidRequest(req, res)) return;

    const username = config.auth ? req.user.name : 'admin';
    try {
      await db.deleteUserReview(username, req.query.work_id);
      res.send({message: '删除标记成功'});
    } catch (err) {
      next(err);
    }
  });
95 |
96 | module.exports = router;
--------------------------------------------------------------------------------
/routes/utils/normalize.js:
--------------------------------------------------------------------------------
1 | const strftime = require('./strftime')
2 |
// Normalize a page of work records coming from the aggregate SQL query:
// decode the JSON-string columns into real objects/arrays, coerce the nsfw
// flag to a boolean, and drop the raw *Obj columns. Mutates `works` in
// place and returns the same array.
const normalize = (works, options = {}) => {
  // Fix (idiom): forEach instead of map — each record is mutated in place
  // and the callback's return value was never used.
  works.forEach(record => {
    record.nsfw = Boolean(record.nsfw);
    record.circle = JSON.parse(record.circleObj);
    record.rate_count_detail = JSON.parse(record.rate_count_detail);
    // rank and hisObj may be null/absent; the other *Obj columns are
    // assumed to always be populated by the query — TODO confirm.
    record.rank = record.rank ? JSON.parse(record.rank) : null;
    record.vas = JSON.parse(record.vaObj)['vas'];
    record.tags = JSON.parse(record.tagObj)['tags'];
    record.history = record.hisObj ? JSON.parse(record.hisObj)['history'] : null;
    delete record.circleObj;
    delete record.vaObj;
    delete record.tagObj;
    delete record.hisObj;
    if (options.dateOnly && record.updated_at) {
      // Trim the timestamp to its date portion (%F => YYYY-MM-DD).
      record.updated_at = strftime('%F', record.updated_at);
    }
  })
  return works
}
23 |
24 | module.exports = normalize;
--------------------------------------------------------------------------------
/routes/utils/strftime.js:
--------------------------------------------------------------------------------
1 | /* Port of strftime() by T. H. Doan (https://thdoan.github.io/strftime/)
2 | *
3 | * Day of year (%j) code based on Joe Orost's answer:
4 | * http://stackoverflow.com/questions/8619879/javascript-calculate-the-day-of-the-year-1-366
5 | *
6 | * Week number (%V) code based on Taco van den Broek's prototype:
7 | * http://techblog.procurios.nl/k/news/view/33796/14863/calculate-iso-8601-week-and-year-in-javascript.html
8 | */
/**
 * Format a Date according to a strftime-style format string.
 * @param {string} sFormat Format string containing %-specifiers (see table below).
 * @param {Date} date Date to format; any non-Date value is replaced with "now".
 * @returns {string} The formatted string; unknown specifiers are left as-is.
 */
function strftime(sFormat, date) {
  if (!(date instanceof Date)) date = new Date();
  // All components below come from local time (getDay/getDate/...),
  // with the exceptions noted inline (%c, %F use UTC/ISO).
  var nDay = date.getDay(),
    nDate = date.getDate(),
    nMonth = date.getMonth(),
    nYear = date.getFullYear(),
    nHour = date.getHours(),
    aDays = ['Sunday', 'Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday'],
    aMonths = ['January', 'February', 'March', 'April', 'May', 'June', 'July', 'August', 'September', 'October', 'November', 'December'],
    // Cumulative day count at the start of each month (non-leap year), for %j
    aDayCount = [0, 31, 59, 90, 120, 151, 181, 212, 243, 273, 304, 334],
    isLeapYear = function() {
      return (nYear%4===0 && nYear%100!==0) || nYear%400===0;
    },
    // The Thursday of the current ISO-8601 week, used for %G/%g/%V
    getThursday = function() {
      var target = new Date(date);
      target.setDate(nDate - ((nDay+6)%7) + 3);
      return target;
    },
    // Left-pad nNum with zeros to nPad digits (assumes nNum < 10^nPad)
    zeroPad = function(nNum, nPad) {
      return ((Math.pow(10, nPad) + nNum) + '').slice(1);
    };
  // Replace each %X token via the lookup table; tokens not in the table
  // (lookup yields undefined -> '') fall through to sMatch unchanged.
  return sFormat.replace(/%[a-z]/gi, function(sMatch) {
    return (({
      '%a': aDays[nDay].slice(0,3),
      '%A': aDays[nDay],
      '%b': aMonths[nMonth].slice(0,3),
      '%B': aMonths[nMonth],
      '%c': date.toUTCString(),
      '%C': Math.floor(nYear/100),
      '%d': zeroPad(nDate, 2),
      '%e': nDate,
      // NOTE(review): %F is derived from toISOString() and therefore UTC,
      // while most other specifiers use local time — confirm this is intended.
      '%F': date.toISOString().slice(0,10),
      '%G': getThursday().getFullYear(),
      '%g': (getThursday().getFullYear() + '').slice(2),
      '%H': zeroPad(nHour, 2),
      '%I': zeroPad((nHour+11)%12 + 1, 2), // 12-hour clock, zero-padded
      '%j': zeroPad(aDayCount[nMonth] + nDate + ((nMonth>1 && isLeapYear()) ? 1 : 0), 3), // day of year
      '%k': nHour,
      '%l': (nHour+11)%12 + 1, // 12-hour clock, unpadded
      '%m': zeroPad(nMonth + 1, 2),
      '%n': nMonth + 1,
      '%M': zeroPad(date.getMinutes(), 2),
      '%p': (nHour<12) ? 'AM' : 'PM',
      '%P': (nHour<12) ? 'am' : 'pm',
      '%s': Math.round(date.getTime()/1000), // Unix timestamp (seconds)
      '%S': zeroPad(date.getSeconds(), 2),
      '%u': nDay || 7, // ISO weekday: Monday=1 .. Sunday=7
      // ISO-8601 week number: distance in weeks from this week's Thursday
      // to the first Thursday of its year
      '%V': (function() {
        var target = getThursday(),
          n1stThu = target.valueOf();
        target.setMonth(0, 1);
        var nJan1 = target.getDay();
        if (nJan1!==4) target.setMonth(0, 1 + ((4-nJan1)+7)%7);
        return zeroPad(1 + Math.ceil((n1stThu-target)/604800000), 2);
      })(),
      '%w': nDay,
      '%x': date.toLocaleDateString(),
      '%X': date.toLocaleTimeString(),
      '%y': (nYear + '').slice(2),
      '%Y': nYear,
      '%z': date.toTimeString().replace(/.+GMT([+-]\d+).+/, '$1'),
      '%Z': date.toTimeString().replace(/.+\((.+?)\)$/, '$1')
    }[sMatch] || '') + '') || sMatch;
  });
}
74 |
75 | module.exports = strftime;
76 |
--------------------------------------------------------------------------------
/routes/utils/url.js:
--------------------------------------------------------------------------------
1 | const path = require('path');
2 | const urljoin = require('url-join');
3 |
// work.dir and track.subtitle may contain '/' or '\' if they have subfolders.
// ["RJ123456", "画像/好きな人?"] => ["RJ123456", "%E7%94%BB%E5%83%8F", "%E5%A5%BD%E3%81%8D%E3%81%AA%E4%BA%BA%3F"]
const encodeSplitFragments = (fragments) => {
  const encoded = [];
  for (const fragment of fragments) {
    // On windows, replace "dir\RJ123456" => "dir/RJ123456", then split on '/'
    const pieces = fragment.replace(/\\/g, '/').split('/');
    for (const piece of pieces) {
      encoded.push(encodeURIComponent(piece));
    }
  }
  return encoded;
}
11 |
/**
 * Join a base URL or local path with path fragments.
 *
 * When `baseUrl` is an absolute http(s) URL each fragment is split on
 * '/'-'\' and percent-encoded before joining; for a local path the
 * fragments are joined verbatim (backslashes normalized to '/').
 *
 * Fixes vs. previous version: the scheme check used `new RegExp(/…/)`
 * (a redundant wrapper allocated on every call), and fragments were
 * encoded even on the local-path branch where the result was discarded.
 *
 * @param {string} baseUrl e.g. 'https://cdn.example.com/media/' or '/media/stream'
 * @param {...string} fragments path pieces to append
 * @returns {string}
 */
const joinFragments = (baseUrl, ...fragments) => {
  // http(s)://example.com/ -> URL join with percent-encoded fragments
  if (/^https?:\/\//.test(baseUrl)) {
    return urljoin(baseUrl, ...encodeSplitFragments(fragments));
  }
  // /media/stream/ -> plain path join (normalize Windows separators)
  return path.join(baseUrl, ...fragments).replace(/\\/g, '/');
}
24 |
25 | module.exports = { joinFragments }
--------------------------------------------------------------------------------
/routes/utils/validate.js:
--------------------------------------------------------------------------------
1 | const { validationResult } = require('express-validator');
2 |
// Run express-validator's collected checks for this request.
// Returns true when the request passed validation; otherwise returns false
// and (unless sendMessage is false) answers 400 with the error list.
const isValidRequest = (req, res, sendMessage = true) => {
  const result = validationResult(req);
  if (result.isEmpty()) return true;
  if (sendMessage) {
    res.status(400).json({ errors: result.array() });
  }
  return false;
}
14 |
15 | module.exports = { isValidRequest };
--------------------------------------------------------------------------------
/routes/version.js:
--------------------------------------------------------------------------------
1 | const express = require('express');
2 | const router = express.Router();
3 | const { config } = require('../config');
4 | const { updateLock } = require('../upgrade');
5 | const axios = require('axios');
6 | const pjson = require('../package.json');
7 | const compareVersions = require('compare-versions');
8 |
9 | // Last connection time to GitHub
10 | // Not specific to a user
11 | // Prevent API throttling
12 | let lastGitHubCheck = null;
13 | let lastGitHubResponse = {
14 | latest_stable: null,
15 | latest_release: null,
16 | update_available: null
17 | };
18 |
// eslint-disable-next-line no-unused-vars
router.get('/', (req, res, next) => {
  const lockReason = '新版解决了旧版扫描时将かの仔和こっこ识别为同一个人的问题,建议进行扫描以自动修复这一问题'

  // Fallback response (throttled / GitHub unreachable / unparsable):
  // echoes the last cached GitHub answer, possibly all nulls.
  const throttledResponse = {
    current: pjson.version,
    ...lastGitHubResponse,
    notifyUser: config.checkUpdate,
    lockFileExists: updateLock.isLockFilePresent,
    lockReason: updateLock.isLockFilePresent ? lockReason : null
  }

  // GitHub API allows 60 unauthenticated requests/hour, so query at most
  // once every 5 minutes.
  // Fixes vs. previous version:
  //  - elapsed time was computed with new Date(diff).getMinutes(), which is
  //    wrong for gaps >= 1 hour and for timezones with fractional-hour offsets;
  //  - lastGitHubCheck was only ever assigned on the first request, so the
  //    throttle stopped working 5 minutes after process start.
  const currentTime = Date.now();
  const FIVE_MINUTES_MS = 5 * 60 * 1000;
  if (lastGitHubCheck !== null && currentTime - lastGitHubCheck < FIVE_MINUTES_MS) {
    res.send(throttledResponse);
    return;
  }
  // Side effect: restart the throttle window for this fetch attempt
  lastGitHubCheck = currentTime;

  const urlLatestStable = 'https://api.github.com/repos/umonaca/kikoeru-express/releases/latest';
  const urlLatestRelease = 'https://api.github.com/repos/umonaca/kikoeru-express/releases';
  const requestLatestStable = axios.get(urlLatestStable);
  const requestLatestRelease = axios.get(urlLatestRelease);

  axios.all([requestLatestStable, requestLatestRelease])
    .then(axios.spread((responseStable, responseLatest) => {
      if (responseStable.data && responseLatest.data && responseStable.data.tag_name && responseLatest.data[0].tag_name) {
        const current = pjson.version;
        const latest_stable = responseStable.data.tag_name;
        const latest_release = responseLatest.data[0].tag_name;
        // Compare against the beta channel when the user opted in
        const newVerAvailable = () => {
          if (config.checkBetaUpdate) {
            return compareVersions.compare(latest_release, current, '>')
          }
          return compareVersions.compare(latest_stable, current, '>')
        }

        // Side effect: cache for future throttled responses
        lastGitHubResponse = {
          latest_stable: latest_stable,
          latest_release: latest_release,
          update_available: newVerAvailable()
        };

        res.send({
          current: current,
          latest_stable: latest_stable,
          latest_release: latest_release,
          update_available: newVerAvailable(),
          notifyUser: config.checkUpdate,
          lockFileExists: updateLock.isLockFilePresent,
          lockReason: updateLock.isLockFilePresent ? lockReason : null
        });
      } else {
        // Empty result or no tag
        res.send(throttledResponse);
      }
    }))
    .catch(function () {
      // Fix: previously sent { throttledResponse }, nesting the payload one
      // level too deep and breaking the response shape clients expect.
      res.send(throttledResponse);
    })
});
88 |
89 | module.exports = router;
--------------------------------------------------------------------------------
/scraper/axios.js:
--------------------------------------------------------------------------------
1 | const originAxios = require('axios');
2 | const { httpsOverHttp, httpOverHttp } = require('tunnel-agent');
3 |
4 | const { config } = require('../config');
5 | const Config = config;
6 |
7 | const axios = originAxios.create();
8 | // axios.defaults.timeout = Config.timeout || 2000; // 请求超时的毫秒数
9 | // // 拦截请求 (添加自定义默认参数)
10 | // axios.interceptors.request.use(function (config) {
11 | // config.retry = Config.retry || 5; // 重试次数
12 | // config.retryDelay = Config.retryDelay || 1000; // 请求间隔的毫秒数
13 | // return config;
14 | // });
15 |
// Proxy settings for the outbound tunnel agent.
const TUNNEL_OPTIONS = {
  proxy: {
    port: Config.httpProxyPort
  }
}
if (Config.httpProxyHost) {
  TUNNEL_OPTIONS.proxy.host = Config.httpProxyHost;
}

// Request interceptor (HTTP proxy): when a proxy port is configured, route
// both http and https requests through an HTTP CONNECT tunnel agent.
axios.interceptors.request.use(function (config) {
  if (Config.httpProxyPort) {
    config.proxy = false; // force-disable any proxy picked up from environment variables
    config.httpAgent = httpOverHttp(TUNNEL_OPTIONS);
    config.httpsAgent = httpsOverHttp(TUNNEL_OPTIONS);
  }

  return config
});
36 |
37 | // // 拦截响应 (遇到错误时, 重新发起新请求)
38 | // axios.interceptors.response.use(undefined, function axiosRetryInterceptor(err) {
39 | // var config = err.config;
40 | // // If config does not exist or the retry option is not set, reject
41 | // if(!config || !config.retry) return Promise.reject(err);
42 |
43 | // // Set the variable for keeping track of the retry count
44 | // config.__retryCount = config.__retryCount || 0;
45 |
46 | // // Check if we've maxed out the total number of retries
47 | // if(config.__retryCount >= config.retry) {
48 | // // Reject with the error
49 | // return Promise.reject(err);
50 | // }
51 |
52 | // // Increase the retry count
53 | // config.__retryCount += 1;
54 |
55 | // // Create new promise to handle exponential backoff
56 | // var backoff = new Promise(function(resolve) {
57 | // setTimeout(function() {
58 | // resolve();
59 | // }, config.retryDelay || 1);
60 | // });
61 |
62 | // // Return the promise in which recalls axios to retry the request
63 | // return backoff.then(function() {
64 | // return axios(config);
65 | // });
66 | // });
67 |
68 |
69 |
70 |
/**
 * axios.get() wrapper with per-request timeout and retry-with-delay.
 *
 * Only network-level failures (no HTTP response: timeout, connection reset)
 * are retried, up to `config.retry.limit` times with `retryDelay` ms between
 * attempts; HTTP error responses are rethrown immediately.
 *
 * Fixes vs. previous version:
 *  - the dlsite/hvdb timeout fallback used defaultLimit (5 -> a 5 ms timeout!)
 *    instead of defaultTimeout (10000 ms);
 *  - the timeout timer was only cleared on success, leaking on every failure;
 *  - the backoff promise was created before deciding whether to retry.
 *
 * @param {string} url
 * @param {object} config axios request config; config.retry/cancelToken are
 *   filled in (mutated) so retry state survives the recursive calls.
 * @returns {Promise<object>} the axios response
 */
const retryGet = async (url, config) => {
  const defaultLimit = Config.retry || 5;
  const defaultRetryDelay = Config.retryDelay || 2000;
  let defaultTimeout = 10000;

  // Per-site timeout overrides
  if (url.indexOf('dlsite') !== -1) {
    defaultTimeout = Config.dlsiteTimeout || defaultTimeout;
  } else if (url.indexOf('hvdb') !== -1) {
    defaultTimeout = Config.hvdbTimeout || defaultTimeout;
  }

  config.retry = {
    limit: (config.retry && config.retry.limit) ? config.retry.limit : defaultLimit,
    retryCount: (config.retry && config.retry.retryCount) ? config.retry.retryCount : 0,
    retryDelay: (config.retry && config.retry.retryDelay) ? config.retry.retryDelay : defaultRetryDelay,
    timeout: (config.retry && config.retry.timeout) ? config.retry.timeout : defaultTimeout
  };

  // Enforce the timeout ourselves via a cancel token
  const abort = originAxios.CancelToken.source();
  const timeoutId = setTimeout(
    () => abort.cancel(`Timeout of ${config.retry.timeout}ms.`),
    config.retry.timeout
  );
  config.cancelToken = abort.token;

  try {
    return await axios.get(url, config);
  } catch (error) {
    // Retry only while under the limit and when no HTTP response arrived
    if (config.retry.retryCount < config.retry.limit && !error.response) {
      config.retry.retryCount += 1;
      await new Promise((resolve) => {
        setTimeout(() => resolve(), config.retry.retryDelay);
      });
      console.log(`${url} 第 ${config.retry.retryCount} 次重试请求`);
      return retryGet(url, config);
    }
    throw error;
  } finally {
    clearTimeout(timeoutId); // previously leaked on the error path
  }
};
axios.retryGet = retryGet;
116 |
117 |
118 | module.exports = axios;
119 |
--------------------------------------------------------------------------------
/scraper/hvdb.js:
--------------------------------------------------------------------------------
1 | const htmlparser = require('htmlparser2'); // 解析器
2 |
3 | const axios = require('./axios'); // 数据请求
4 | const { nameToUUID } = require('./utils');
5 |
/**
 * Scrapes work metadata from public HVDB page HTML.
 * Resolves with { id, title, nsfw, circle, tags, vas }; rejects when the page
 * cannot be fetched or when neither tags nor VAs could be parsed from it.
 * @param {number} id Work id.
 */
const scrapeWorkMetadataFromHVDB = id => new Promise((resolve, reject) => {
  // Zero-padded RJ code: 8 digits for ids >= 1,000,000, otherwise 6 digits
  let rjcode ;
  if (id>=1000000) {
    rjcode = (`00000000${id}`).slice(-8);
  } else {
    rjcode = (`000000${id}`).slice(-6);
  }
  const url = `https://hvdb.me/Dashboard/WorkDetails/${id}`;

  console.log(`[RJ${rjcode}] 从 HVDB 抓取元数据...`);
  axios.retryGet(url, { retry: {} })
    .then(response => {
      console.log('res HVDB')
      return response.data
    })
    .then((data) => { // parse the returned HTML
      const work = { id, tags: [], vas: [] };
      // Tracks which field the next text node belongs to:
      // 'circle.name' | 'tag.name' | 'va.name' | undefined/null
      let writeTo;

      const parser = new htmlparser.Parser({
        onopentag: (name, attrs) => { // tag name / attributes
          if (name === 'input') {
            if (attrs.id === 'Name') {
              work.title = attrs.value;
            } else if (attrs.name === 'SFW') {
              // HVDB stores an SFW flag; invert it to get nsfw
              work.nsfw = attrs.value === 'false';
            }
          }

          // Anchor hrefs identify circle / tag / VA links; the entity id is
          // the last path segment, and the link text (captured in ontext)
          // carries the display name.
          if (name === 'a') {
            if (attrs.href.indexOf('CircleWorks') !== -1) {
              work.circle = {
                id: attrs.href.substring(attrs.href.lastIndexOf('/') + 1),
              };
              writeTo = 'circle.name';
            } else if (attrs.href.indexOf('TagWorks') !== -1) {
              work.tags.push({
                id: attrs.href.substring(attrs.href.lastIndexOf('/') + 1),
              });
              writeTo = 'tag.name';
            } else if (attrs.href.indexOf('CVWorks') !== -1) {
              work.vas.push({
                //id: hashNameIntoInt(attrs.href), // TODO: RESHNIX!!!
              });
              writeTo = 'va.name';
            }
          }
        },
        onclosetag: () => { writeTo = null; },
        ontext: (text) => {
          switch (writeTo) {
            case 'circle.name':
              work.circle.name = text;
              break;
            case 'tag.name':
              work.tags[work.tags.length - 1].name = text;
              break;
            case 'va.name':
              work.vas[work.vas.length - 1].name = text;
              // VA id is a deterministic UUIDv5 derived from the name
              work.vas[work.vas.length - 1].id = nameToUUID(text);
              break;
            default:
          }
        },
      }, { decodeEntities: true });
      parser.write(data);
      parser.end();

      // A page with neither tags nor VAs means the markup did not match
      if (work.tags.length === 0 && work.vas.length === 0) {
        reject(new Error('Couldn\'t parse data from HVDB work page.'));
      } else {
        console.log(`[RJ${rjcode}] 成功从 HVDB 抓取元数据...`);
        resolve(work);
      }
    })
    .catch((error) => {
      if (error.response) {
        // Request was sent but the server answered with a non-2xx status code
        reject(new Error(`Couldn't request work page HTML (${url}), received: ${error.response.status}.`));
      } else if (error.request) {
        // Request was sent but no response was received
        reject(error);
        console.log(error.request);
      } else {
        console.log('Error', error.message);
        reject(error);
      }
    });
});
98 |
99 |
100 | module.exports = scrapeWorkMetadataFromHVDB;
101 |
--------------------------------------------------------------------------------
/scraper/utils.js:
--------------------------------------------------------------------------------
1 | const { v5: uuidv5 } = require('uuid');
2 |
/**
 * Derive a stable UUIDv5 from a name, so the same name always maps to the
 * same id across scans.
 * @param {String} name
 * @returns {String} UUID v5 string
 */
const nameToUUID = (name) => {
  // Fixed project-wide namespace for v5 hashing
  const NAMESPACE = '699d9c07-b965-4399-bafd-18a3cacf073c';
  return uuidv5(name, NAMESPACE);
};
7 |
/**
 * Whether a string contains at least one ASCII letter (A-Z or a-z).
 * Replaces a manual for...in loop over string indices (an anti-idiom:
 * for...in iterates enumerable keys, not characters) with a regex test.
 * @param {String} str
 * @returns {Boolean}
 */
const hasLetter = (str) => /[A-Za-z]/.test(str);
21 |
22 | module.exports = {
23 | nameToUUID, hasLetter
24 | };
--------------------------------------------------------------------------------
/socket.js:
--------------------------------------------------------------------------------
1 | const path = require('path');
2 | const socket = require('socket.io');
3 | const jwtAuth = require('socketio-jwt-auth'); // 用于 JWT 验证的 socket.io 中间件
4 | const child_process = require('child_process'); // 子进程
5 | const { config } = require('./config');
6 |
/**
 * Attach the socket.io admin channel to an HTTP server.
 *
 * When auth is enabled, only the 'admin' JWT identity may connect.
 * A single child process ("scanner") at a time performs scans/updates;
 * its messages are re-broadcast to every connected client.
 *
 * Fixes vs. previous version: KILL_SCAN_PROCESS dereferenced `scanner`
 * without a null check and crashed when no scan was running; the identical
 * fork-wiring for PERFORM_SCAN / PERFORM_UPDATE is now a shared helper.
 *
 * @param {import('http').Server} server
 */
const initSocket = (server) => {
  const io = socket(server);
  if (config.auth) {
    io.use(jwtAuth.authenticate({
      secret: config.jwtsecret
    }, (payload, done) => {
      const user = {
        name: payload.name,
        group: payload.group
      };

      // Only the admin account may use the management backend
      if (user.name === 'admin') {
        done(null, user);
      } else {
        done(null, false, '只有 admin 账号能登录管理后台.');
      }
    }));
  }

  // The single running worker child process, or null when idle
  let scanner = null;

  // Fork a worker script and wire its lifecycle/messages to all clients
  const startWorker = (scriptPath, args = []) => {
    scanner = child_process.fork(scriptPath, args, { silent: false });
    scanner.on('exit', (code) => {
      scanner = null;
      if (code) {
        io.emit('SCAN_ERROR');
      }
    });

    scanner.on('message', (m) => {
      if (m.event) {
        io.emit(m.event, m.payload);
      }
    });
  };

  // Fired whenever a new client connects
  io.on('connection', function (socket) {
    socket.emit('success', {
      message: '成功登录管理后台.',
      user: socket.request.user,
      auth: config.auth
    });

    socket.on('ON_SCANNER_PAGE', () => {
      if (scanner) {
        // In case the user refreshed the page mid-scan, resend current state
        scanner.send({
          emit: 'SCAN_INIT_STATE'
        });
      }
    });

    socket.on('PERFORM_SCAN', () => {
      if (!scanner) {
        startWorker(path.join(__dirname, './filesystem/scanner.js'));
      }
    });

    socket.on('PERFORM_UPDATE', () => {
      if (!scanner) {
        startWorker(path.join(__dirname, './filesystem/updater.js'), ['--refreshAll']);
      }
    });

    socket.on('KILL_SCAN_PROCESS', () => {
      // Guard: previously crashed when no scan was running
      if (scanner) {
        scanner.send({
          exit: 1
        });
      }
    });

    // Fired on socket errors
    socket.on('error', (err) => {
      console.error(err);
    });
  });
}
98 |
99 | module.exports = initSocket;
--------------------------------------------------------------------------------
/static/no-image.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/KirieHaruna/kikoeru-express/7f9521b43c60373fb05e589d8030cdacaee09662/static/no-image.jpg
--------------------------------------------------------------------------------
/test/migration..js:
--------------------------------------------------------------------------------
1 | /* eslint-disable node/no-unpublished-require */
2 | // This test checks whether migration from 0.3.0 works
3 | // There is no easy way to check verify that the schema after the migration is the same as the one created from scratch by createSchema()
4 | // Every time when I finish checking the schema I set dbVersion in schema.js to the number in the latest migration file
5 |
6 | // Prevent writing config files (side effect of config.js)
7 | process.env.FREEZE_CONFIG_FILE = true;
8 | process.env.NODE_ENV='test';
9 |
10 | const chai = require('chai');
11 | const expect = chai.expect;
12 | chai.use(require('chai-string'));
13 | const { unlink } = require('fs');
14 | const { join } = require('path');
15 |
16 | const db = require('../database/db').knex;
17 |
18 | const knexMigrate = require('../database/knex-migrate');
19 | const { dbVersion } = require('../database/schema');
20 |
// Spins up the legacy v0.3.0 schema from scratch and migrates it to HEAD,
// then sanity-checks the result against schema.js's dbVersion.
describe('Database', function() {
  before('Spin up v0.3.0 database schema', async function() {
    const { createOldSchema } = require('./spinup/spinup-0.3.0');
    await createOldSchema();
  })

  it('v0.3.0 should work', async function() {
    const result = await db.raw(`pragma table_info('t_va')`);
    // t_va id column type was integer in 0.3.0
    expect(result[0]['type']).to.equal('integer');
  })

  it('should be able to migrate to latest', async function() {
    const log = ({ action, migration }) => console.log('Doing ' + action + ' on ' + migration);
    await knexMigrate('up', {}, log);
  })

  it('schema after migration', async function() {
    // There is no easy way to verify that the schema after the migration is the same as the one created from scratch by createSchema()
    // There were some mistakes in previous migrations causing inconsistencies like 'text' vs 'varchar(255)'
    // Most inconsistencies do not cause bugs, because of SQLite type affinity. For example, SQLite stores all string types in 'text' and ignores the 255 limit.
    // You will have to read and verify manually
    // const schema = await db.raw('select sql from sqlite_master where sql not NULL');
    // console.log(schema);

    const tableNames = (await db.raw(`SELECT name FROM sqlite_master WHERE type ='table' AND name NOT LIKE 'sqlite_%'`)).map(record => record['name']);
    console.log(tableNames)

    // Dump every table's columns for manual inspection in the test log
    for (const table of tableNames) {
      console.log(table)
      const tableInfo = await db.raw(`pragma table_info(${table})`);
      console.table(tableInfo);
    }

    // Every time when I finish checking the schema I set dbVersion in schema.js to the number in the last migration file
    // e.g. '20210307061415'
    // I use this test to prevent me from making mistakes
    // If you want to fork this code and don't understand how this works, just comment out the following lines
    const lastMigration = await db.raw('select name from knex_migrations order by id desc limit 1');
    expect(dbVersion).to.be.a('string');
    expect(lastMigration[0].name).to.startsWith(dbVersion);
  })

  after('Tear down test database', async function() {
    const { dropDatabase } = require('./teardown/teardown-0.6.0');
    await dropDatabase();
  })
})
69 |
// Spins up the v0.6.0-rc4 schema and verifies the remaining migrations
// (up to 20210502081522) apply cleanly on top of it.
describe('Database v0.6.0-rc4', function() {
  before('Spin up v0.6.0-rc4 database schema', async function() {
    const { createOldSchema } = require('./spinup/spinup-0.6.0-rc4');
    await createOldSchema();
    // Mark every migration up to fill_va_uuid as already applied,
    // since the rc4 schema already includes their effects
    await knexMigrate('skipAll', {to: '20210213233544_fill_va_uuid'});
  })

  it('should work', async function() {
    const result = await db.raw(`pragma table_info('t_va')`);
    // t_va id column type has changed to text in v0.6.0-rc4
    expect(result[0]['type']).to.equal('varchar(255)');
  })

  it('should be able to migrate to 20210502081522_remove_obsolete_view', async function() {
    const log = ({ action, migration }) => console.log('Doing ' + action + ' on ' + migration);
    await knexMigrate('up', { to: '20210502081522' }, log);
  })

  it('should have null constraints removed', async function() {
    const tableInfo = await db.raw(`pragma table_info('t_work')`);
    console.table(tableInfo);
    // dl_count lost its NOT NULL constraint in a later migration
    for (const field of tableInfo) {
      if (field['name'] === 'dl_count') {
        expect(field['notnull']).to.equal(0);
      }
    }
  })

  after('Delete test database', function(done) {
    // Close the knex pool before removing the on-disk SQLite file
    db.destroy(() => {
      unlink(join(__dirname, 'db-test.sqlite3'), (err) => {
        if (err) throw err;
      });
      done();
    });
  })
})
--------------------------------------------------------------------------------
/test/spinup/spinup-0.3.0.js:
--------------------------------------------------------------------------------
1 | const { knex } = require('../../database/db');
2 |
// Recreates the kikoeru v0.3.0 database schema from scratch; used as the
// starting point for the migration tests.
const createOldSchema = () => knex.schema
  .createTable('t_circle', (table) => {
    table.increments(); // auto-increment INTEGER id, used as primary key [circle id]
    table.string('name').notNullable(); // VARCHAR [circle name]
  })
  .createTable('t_work', (table) => {
    table.increments(); // auto-increment INTEGER id, used as primary key [work id]
    table.string('root_folder').notNullable(); // VARCHAR [root folder alias]
    table.string('dir').notNullable(); // VARCHAR [relative storage path]
    table.string('title').notNullable(); // VARCHAR [work title]
    table.integer('circle_id').notNullable(); // INTEGER [circle id]
    table.boolean('nsfw').notNullable(); // BOOLEAN
    table.string('release').notNullable(); // VARCHAR [release date (YYYY-MM-DD)]

    table.integer('dl_count').notNullable(); // INTEGER [sales count]
    table.integer('price').notNullable(); // INTEGER [price]
    table.integer('review_count').notNullable(); // INTEGER [review count]
    table.integer('rate_count').notNullable(); // INTEGER [rating count]
    table.float('rate_average_2dp').notNullable(); // FLOAT [average rating, 2 decimal places]
    table.text('rate_count_detail').notNullable(); // TEXT [rating distribution detail]
    table.text('rank'); // TEXT [historical sales ranking]

    table.foreign('circle_id').references('id').inTable('t_circle'); // FOREIGN KEY
    table.index(['circle_id', 'release', 'dl_count', 'review_count', 'price', 'rate_average_2dp']); // INDEX
  })
  .createTable('t_tag', (table) => {
    table.increments(); // auto-increment INTEGER id, used as primary key [tag id]
    table.string('name').notNullable(); // VARCHAR [tag name]
  })
  .createTable('t_va', (table) => {
    table.increments(); // auto-increment INTEGER id, used as primary key [voice actor id]
    table.string('name').notNullable(); // VARCHAR [voice actor name]
  })
  .createTable('r_tag_work', (table) => {
    table.integer('tag_id');
    table.integer('work_id');
    table.foreign('tag_id').references('id').inTable('t_tag'); // FOREIGN KEY
    table.foreign('work_id').references('id').inTable('t_work'); // FOREIGN KEY
    table.primary(['tag_id', 'work_id']); // composite PRIMARY KEY
  })
  .createTable('r_va_work', (table) => {
    table.integer('va_id');
    table.integer('work_id');
    table.foreign('va_id').references('id').inTable('t_va'); // FOREIGN KEY
    table.foreign('work_id').references('id').inTable('t_work'); // FOREIGN KEY
    table.primary(['va_id', 'work_id']); // composite PRIMARY KEY
  })
  .createTable('t_user', (table) => {
    table.string('name').notNullable();
    table.string('password').notNullable();
    table.string('group').notNullable(); // USER ADMIN GAUST ("GAUST" is the historical spelling used by v0.3.0 — presumably GUEST)
    table.primary(['name']); // PRIMARY KEY
  })
  .createTable('t_favorite', (table) => {
    table.string('user_name').notNullable();
    table.string('name').notNullable();
    table.text('works').notNullable(); // TEXT [serialized list of works in this favorite list]
    table.foreign('user_name').references('name').inTable('t_user'); // FOREIGN KEY
    table.primary(['user_name', 'name']); // composite PRIMARY KEY
  })

module.exports = { createOldSchema };
65 |
--------------------------------------------------------------------------------
/test/spinup/spinup-0.6.0-rc4.js:
--------------------------------------------------------------------------------
1 | const { knex } = require('../../database/db');
2 |
// Recreates the kikoeru v0.6.0-rc4 database schema from scratch; used as an
// intermediate starting point for the migration tests.
const createOldSchema = () => knex.schema
  .createTable('t_circle', (table) => {
    table.increments(); // auto-increment INTEGER id, used as primary key [circle id]
    table.string('name').notNullable(); // VARCHAR [circle name]
  })
  .createTable('t_work', (table) => {
    table.increments(); // auto-increment INTEGER id, used as primary key [work id]
    table.string('root_folder').notNullable(); // VARCHAR [root folder alias]
    table.string('dir').notNullable(); // VARCHAR [relative storage path]
    table.string('title').notNullable(); // VARCHAR [work title]
    table.integer('circle_id').notNullable(); // INTEGER [circle id]
    table.boolean('nsfw').notNullable(); // BOOLEAN
    table.string('release').notNullable(); // VARCHAR [release date (YYYY-MM-DD)]

    table.integer('dl_count').notNullable(); // INTEGER [sales count]
    table.integer('price').notNullable(); // INTEGER [price]
    table.integer('review_count').notNullable(); // INTEGER [review count]
    table.integer('rate_count').notNullable(); // INTEGER [rating count]
    table.float('rate_average_2dp').notNullable(); // FLOAT [average rating, 2 decimal places]
    table.text('rate_count_detail').notNullable(); // TEXT [rating distribution detail]
    table.text('rank'); // TEXT [historical sales ranking]

    table.foreign('circle_id').references('id').inTable('t_circle'); // FOREIGN KEY
    table.index(['circle_id', 'release', 'dl_count', 'review_count', 'price', 'rate_average_2dp']); // INDEX
  })
  .createTable('t_tag', (table) => {
    table.increments(); // auto-increment INTEGER id, used as primary key [tag id]
    table.string('name').notNullable(); // VARCHAR [tag name]
  })
  .createTable('t_va', (table) => {
    table.string('id'); // UUID v5, deterministically derived from the name
    table.string('name').notNullable(); // VARCHAR [voice actor name]
    table.primary('id');
  })
  .createTable('r_tag_work', (table) => {
    table.integer('tag_id');
    table.integer('work_id');
    table.foreign('tag_id').references('id').inTable('t_tag'); // FOREIGN KEY
    table.foreign('work_id').references('id').inTable('t_work'); // FOREIGN KEY
    table.primary(['tag_id', 'work_id']); // composite PRIMARY KEY
  })
  .createTable('r_va_work', (table) => {
    table.string('va_id');
    table.integer('work_id');
    table.foreign('va_id').references('id').inTable('t_va').onUpdate('CASCADE').onDelete('CASCADE'); // FOREIGN KEY
    table.foreign('work_id').references('id').inTable('t_work').onUpdate('CASCADE').onDelete('CASCADE'); // FOREIGN KEY
    table.primary(['va_id', 'work_id']); // composite PRIMARY KEY
  })
  .createTable('t_user', (table) => {
    table.string('name').notNullable();
    table.string('password').notNullable();
    table.string('group').notNullable(); // USER ADMIN guest
    table.primary(['name']); // PRIMARY KEY
  })
  .createTable('t_review', (table) => {
    table.string('user_name').notNullable();
    table.string('work_id').notNullable();
    table.integer('rating'); // user rating 1-5
    table.string('review_text'); // user review text
    table.timestamps(true, true); // created_at / updated_at timestamps
    table.string('progress'); // ['marked', 'listening', 'listened', 'postponed', null]
    table.foreign('user_name').references('name').inTable('t_user').onDelete('CASCADE'); // FOREIGN KEY
    table.foreign('work_id').references('id').inTable('t_work').onDelete('CASCADE'); // FOREIGN KEY
    table.primary(['user_name', 'work_id']); // composite PRIMARY KEY
  })

module.exports = { createOldSchema };
70 |
--------------------------------------------------------------------------------
/test/teardown/teardown-0.6.0.js:
--------------------------------------------------------------------------------
1 | const { knex } = require('../../database/db');
2 |
/**
 * Drops every table and view created by the migration tests.
 * Fix: the original issued `DROP TABLE IF EXISTS t_work` twice; the list
 * below contains each object exactly once and is iterated in order.
 */
const dropDatabase = async () => {
  const tables = [
    'knex_migrations',
    't_circle',
    't_tag',
    'r_tag_work',
    't_user',
    't_review',
    't_va',
    'r_va_work',
    't_work'
  ];
  for (const table of tables) {
    await knex.schema.raw(`DROP TABLE IF EXISTS ${table}`);
  }
  await knex.schema.raw('DROP VIEW IF EXISTS userMetadata');
  await knex.schema.raw('DROP VIEW IF EXISTS staticMetadata');
}
17 |
18 | module.exports = { dropDatabase };
19 |
--------------------------------------------------------------------------------
/test/urljoin.js:
--------------------------------------------------------------------------------
1 | //eslint-disable-next-line node/no-unpublished-require
2 | const chai = require('chai');
3 | const expect = chai.expect;
4 | const { joinFragments } = require('../routes/utils/url')
5 |
// Unit tests for joinFragments(baseUrl, ...fragments).
// The cases below exercise the behavior demonstrated by these fixtures:
// - a path-style base ('/media/stream/') yields a plain, unencoded path;
// - an absolute-URL base ('https://...') yields a percent-encoded URL;
// - a missing trailing slash on the base is tolerated;
// - empty fragments (trackSubfolder = '') do not produce double slashes;
// - Windows-style backslash separators are normalized to '/'.
describe('joinfragments()', function(){
  it('should return correct offload path for base case', function() {
    const baseUrl = '/media/stream/'
    const rootFolderName = 'VoiceWork'
    const workDir = 'RJ157474'
    const trackSubfolder = ''
    const trackTitle = 't4 おやすみ.mp3'
    expect(joinFragments(baseUrl, rootFolderName, workDir, trackSubfolder, trackTitle)).to.equal(
      '/media/stream/VoiceWork/RJ157474/t4 おやすみ.mp3'
    )
  })

  it('should return correct offload URL for base case', function() {
    const baseUrl = 'https://cdn.example.com/media/stream/'
    const rootFolderName = 'VoiceWork'
    const workDir = 'RJ157474'
    const trackSubfolder = ''
    const trackTitle = 't4 おやすみ.mp3'
    // With an absolute-URL base, the joined path segments are percent-encoded.
    expect(joinFragments(baseUrl, rootFolderName, workDir, trackSubfolder, trackTitle)).to.equal(
      'https://cdn.example.com/media/stream/VoiceWork/RJ157474/t4%20%E3%81%8A%E3%82%84%E3%81%99%E3%81%BF.mp3'
    )
  })

  it('should return correct offload path for baseUrl without trailing slash', function() {
    const baseUrl = '/media/stream'
    const rootFolderName = 'VoiceWork'
    const workDir = 'RJ157474'
    const trackSubfolder = ''
    const trackTitle = 't4 おやすみ.mp3'
    expect(joinFragments(baseUrl, rootFolderName, workDir, trackSubfolder, trackTitle)).to.equal(
      '/media/stream/VoiceWork/RJ157474/t4 おやすみ.mp3'
    )
  })

  it('should return correct offload URL for baseUrl without trailing slash', function() {
    const baseUrl = 'https://cdn.example.com/media/stream'
    const rootFolderName = 'VoiceWork'
    const workDir = 'RJ157474'
    const trackSubfolder = ''
    const trackTitle = 't4 おやすみ.mp3'
    expect(joinFragments(baseUrl, rootFolderName, workDir, trackSubfolder, trackTitle)).to.equal(
      'https://cdn.example.com/media/stream/VoiceWork/RJ157474/t4%20%E3%81%8A%E3%82%84%E3%81%99%E3%81%BF.mp3'
    )
  })

  it('should return correct offload path for subdirectories', function() {
    const baseUrl = '/media/stream/'
    const rootFolderName = 'VoiceWork'
    // workDir and trackSubfolder may themselves contain '/' separators.
    const workDir = 'second/RJ290139 【CV: 上坂すみれ】'
    const trackSubfolder = 'Necogurashi ep01/mp3'
    const trackTitle = '01 ようこそ猫鳴館(ねこめいかん)へ。~ミケ猫の場合~.mp3'
    expect(joinFragments(baseUrl, rootFolderName, workDir, trackSubfolder, trackTitle)).to.equal(
      '/media/stream/VoiceWork/second/RJ290139 【CV: 上坂すみれ】/Necogurashi ep01/mp3/01 ようこそ猫鳴館(ねこめいかん)へ。~ミケ猫の場合~.mp3'
    )
  })

  it('should return correct encoded offload URL for subdirectories', function() {
    const baseUrl = 'https://cdn.example.com/media/stream/'
    const rootFolderName = 'VoiceWork'
    const workDir = 'second/RJ290139 【CV: 上坂すみれ】'
    const trackSubfolder = 'Necogurashi ep01/mp3'
    const trackTitle = '01 ようこそ猫鳴館(ねこめいかん)へ。~ミケ猫の場合~.mp3'
    // Each segment is encoded individually; the '/' separators stay literal.
    expect(joinFragments(baseUrl, rootFolderName, workDir, trackSubfolder, trackTitle)).to.equal(
      'https://cdn.example.com/media/stream/VoiceWork/second/RJ290139%20%E3%80%90CV%EF%BC%9A%20%E4%B8%8A%E5%9D%82%E3%81%99%E3%81%BF%E3%82%8C%E3%80%91/Necogurashi%20ep01/mp3/01%20%E3%82%88%E3%81%86%E3%81%93%E3%81%9D%E7%8C%AB%E9%B3%B4%E9%A4%A8(%E3%81%AD%E3%81%93%E3%82%81%E3%81%84%E3%81%8B%E3%82%93)%E3%81%B8%E3%80%82%EF%BD%9E%E3%83%9F%E3%82%B1%E7%8C%AB%E3%81%AE%E5%A0%B4%E5%90%88%EF%BD%9E.mp3'
    )
  })

  it('should return correct offload path for subdirectories on Windows', function() {
    const baseUrl = '/media/stream/'
    const rootFolderName = 'VoiceWork'
    // Windows-style '\\' separators must be normalized to '/'.
    const workDir = 'second\\RJ290139 【CV: 上坂すみれ】'
    const trackSubfolder = 'Necogurashi ep01\\mp3'
    const trackTitle = '01 ようこそ猫鳴館(ねこめいかん)へ。~ミケ猫の場合~.mp3'
    expect(joinFragments(baseUrl, rootFolderName, workDir, trackSubfolder, trackTitle)).to.equal(
      '/media/stream/VoiceWork/second/RJ290139 【CV: 上坂すみれ】/Necogurashi ep01/mp3/01 ようこそ猫鳴館(ねこめいかん)へ。~ミケ猫の場合~.mp3'
    )
  })

  it('should return correct encoded offload URL for subdirectories on Windows', function() {
    const baseUrl = 'https://cdn.example.com/media/stream/'
    const rootFolderName = 'VoiceWork'
    const workDir = 'second\\RJ290139 【CV: 上坂すみれ】'
    const trackSubfolder = 'Necogurashi ep01\\mp3'
    const trackTitle = '01 ようこそ猫鳴館(ねこめいかん)へ。~ミケ猫の場合~.mp3'
    expect(joinFragments(baseUrl, rootFolderName, workDir, trackSubfolder, trackTitle)).to.equal(
      'https://cdn.example.com/media/stream/VoiceWork/second/RJ290139%20%E3%80%90CV%EF%BC%9A%20%E4%B8%8A%E5%9D%82%E3%81%99%E3%81%BF%E3%82%8C%E3%80%91/Necogurashi%20ep01/mp3/01%20%E3%82%88%E3%81%86%E3%81%93%E3%81%9D%E7%8C%AB%E9%B3%B4%E9%A4%A8(%E3%81%AD%E3%81%93%E3%82%81%E3%81%84%E3%81%8B%E3%82%93)%E3%81%B8%E3%80%82%EF%BD%9E%E3%83%9F%E3%82%B1%E7%8C%AB%E3%81%AE%E5%A0%B4%E5%90%88%EF%BD%9E.mp3'
    )
  })

  it('should return encoded offload URL for special characters on Linux', function() {
    const baseUrl = 'https://cdn.example.com/media/stream/'
    const rootFolderName = 'VoiceWork'
    const workDir = 'RJ295760'
    const trackSubfolder = 'かそけきの夜~廻る夏~/MP3'
    // A half-width '?' is not allowed in Windows file names. However, rclone
    // sometimes incorrectly converts the full-width question mark (U+FF1F) to
    // a half-width '?' when the target machine is on Linux, so such names can
    // occur and must be encoded (as %3F) in the resulting URL.
    const trackTitle = '2.よければ、お背中をお流し致しましょうか?.mp3'
    expect(joinFragments(baseUrl, rootFolderName, workDir, trackSubfolder, trackTitle)).to.equal(
      'https://cdn.example.com/media/stream/VoiceWork/RJ295760/%E3%81%8B%E3%81%9D%E3%81%91%E3%81%8D%E3%81%AE%E5%A4%9C~%E5%BB%BB%E3%82%8B%E5%A4%8F~/MP3/2.%E3%82%88%E3%81%91%E3%82%8C%E3%81%B0%E3%80%81%E3%81%8A%E8%83%8C%E4%B8%AD%E3%82%92%E3%81%8A%E6%B5%81%E3%81%97%E8%87%B4%E3%81%97%E3%81%BE%E3%81%97%E3%82%87%E3%81%86%E3%81%8B%3F.mp3'
    )
  })
})
107 |
--------------------------------------------------------------------------------
/upgrade.js:
--------------------------------------------------------------------------------
1 |
2 | const fs = require('fs');
3 | const path = require('path');
4 | const compareVersions = require('compare-versions');
5 | const { configFolderDir } = require('./config');
6 | const knexMigrate = require('./database/knex-migrate');
7 | const { knex } = require('./database/db');
8 |
9 | // Before the following version, there is a hash collision issue in the VA table
10 | const versionVAHashCollisionFixed = '0.6.0-rc.2'
11 | // Before the following version, the knexfile path uses relative path to CWD, which causes a bunch of problems on Mac OS
12 | const versionKnexfilePathFixed = '0.6.0-rc.4'
13 |
/**
 * Applies one-time data fixes when upgrading from an older installed version.
 *
 * Two fixes are handled here:
 * 1. VA hash collision (< 0.6.0-rc.2): writes an update-lock file so a rescan
 *    can repair VA records.
 * 2. Broken knexfile path on macOS (< 0.6.0-rc.4): fast-forwards or replays
 *    migrations depending on the current t_va schema.
 *
 * @param {string} oldVersion - semver string of the previously installed version
 *   (compared with compare-versions).
 */
const applyFix = async (oldVersion) => {
  if (compareVersions.compare(oldVersion, versionVAHashCollisionFixed, '<')) {
    console.log('\n');
    // User-facing notice (Chinese): the new version fixes VA records that were
    // merged into one person by the old scanner; a rescan is recommended.
    console.log(' ! 新版解决了旧版扫描时将かの仔和こっこ识别为同一个人的问题');
    console.log(' ! 建议进行扫描以自动修复这一问题');
    // The lock file flags that a VA-fixing scan is pending; updateLock is the
    // module-level singleton created below (safe: applyFix runs after module load).
    const lockConfig = { fixVA: true };
    updateLock.createLockFile(lockConfig);
  }

  // A nasty bug in Mac OS version only, >= v0.6.0-rc.0 and <= v0.6.0.rc.3
  // Caused by relative path in knexfile.js
  // On Mac OS, the current working directory is not the location of the program
  // The bug is not obvious on Windows since everyone is double clicking the program
  if (compareVersions.compare(oldVersion, versionKnexfilePathFixed, '<')) {
    if (process.platform === 'darwin') {
      // Skip to v0.6.0-rc.0
      await knexMigrate('skipAll', { to: '20210206141840' });
      const results = await knex.raw('PRAGMA table_info(\'t_va\')');
      // NOTE(review): assumes the first row of PRAGMA table_info is the VA id
      // column and that type 'integer' means the UUID fill migration has not
      // run yet — confirm against migration 20210213233544.
      if (results[0]['type'] === 'integer') {
        // Fill VA ids, migrate to v0.6.0-rc.3
        const log = ({ action, migration }) => console.log('Doing ' + action + ' on ' + migration);
        await knexMigrate('up', { to: '20210213233544' }, log);
      } else {
        // Already fixed VA ids, skip to v0.6.0-rc.3
        await knexMigrate('skipAll', { to: '20210213233544' });
      }
    }
  }
}
43 |
44 | // Upgrade lock for VA bug fix (maybe needed in the future)
45 | // TODO: refactor to split upgrade lock from upgrade lock file
// Manages the on-disk upgrade lock file (JSON) used to flag pending
// post-upgrade fixes. The in-memory copy of the config is kept in
// `lockFileConfig` and mirrored to `lockFilePath` under configFolderDir.
class upgradeLock {
  constructor(fileName = 'update.lock') {
    this.lockFilePath = path.join(configFolderDir, fileName);
    this.lockFileConfig = {};
    this._init();
  }

  // Load any pre-existing lock file into memory on construction.
  _init() {
    if (this.isLockFilePresent) {
      this.readLockFileConfig();
    }
  }

  // True when the lock file currently exists on disk.
  get isLockFilePresent() {
    return fs.existsSync(this.lockFilePath);
  }

  // Parse the on-disk lock file and replace the in-memory config.
  readLockFileConfig() {
    const raw = fs.readFileSync(this.lockFilePath);
    this.lockFileConfig = JSON.parse(raw);
  }

  // Persist the given config as the lock file (tab-indented JSON).
  createLockFile(lockConfig) {
    this.lockFileConfig = lockConfig;
    const serialized = JSON.stringify(this.lockFileConfig, null, "\t");
    fs.writeFileSync(this.lockFilePath, serialized);
  }

  // Overwrite the lock file with a new config (alias of createLockFile).
  updateLockFile(lockConfig) {
    this.createLockFile(lockConfig);
  }

  // Delete the lock file if it exists and clear the in-memory config.
  removeLockFile() {
    if (this.isLockFilePresent) {
      fs.unlinkSync(this.lockFilePath);
    }
    this.lockFileConfig = {};
  }
}
77 |
78 | const updateLock = new upgradeLock();
79 |
80 | module.exports = {
81 | applyFix,
82 | updateLock
83 | }
--------------------------------------------------------------------------------
/用户文档.md:
--------------------------------------------------------------------------------
1 | ### 介绍
2 | #### kikoeru 是一个网络应用程序
3 | > 网络应用程序(英语:web application,英文简称 Webapp)分为客户端到服务器架构或无服务器后端架构。其中的客户端就是网页浏览器。常见的网页应用程序有Webmail、网络购物、网络拍卖、wiki、网络论坛、博客、网络游戏等诸多应用。
4 | 网络应用程序风行的原因之一,是因为可以直接在各种电脑平台上运行,不需要事先安装或定期升级等程序。
5 | (来自维基百科: [网络应用程序](https://zh.wikipedia.org/wiki/%E7%BD%91%E7%BB%9C%E5%BA%94%E7%94%A8%E7%A8%8B%E5%BA%8F))
6 |
7 | ### 使用说明
8 | #### 1.下载并打开可执行文件运行服务端程序 [releases](https://github.com/umonaca/kikoeru-express/releases)
9 |
10 | #### 2.使用浏览器连接服务端
11 | 在运行服务端的电脑上,直接通过浏览器访问 http://localhost:8888
12 |
13 | 如果你还有其它设备与运行服务端的电脑在同一局域网内,并且想在这些设备上使用,这时就需要知道运行服务端的电脑在局域网内的 IP。 以 Windows 系统的电脑为例,首先在运行服务端的电脑上按照教程查看内网 IP https://jingyan.baidu.com/article/9f63fb91f0fa9889400f0ed9.html
14 |
15 | 例如查到运行服务端的电脑的 IP 为 192.168.123.164,再在局域网内的其它的设备(电脑或手机)上,通过浏览器访问 http://192.168.123.164:8888
16 |
17 | #### 3.初次运行,点击左侧边栏的 "设定" 进入后台管理页面
18 | 
19 |
20 | #### 4.添加根文件夹并保存
21 | 
22 |
23 | #### 5.执行扫描
24 | **注意: 文件夹名称中不带 RJ 号的音声文件夹是扫描不到的**
25 |
26 | 点击展开可以看到日志
27 |
28 | 常见错误有以下四种:
29 | 1. ➜ 在抓取元数据/下载封面过程中出错: timeout of 2000ms exceeded
30 | 2. ➜ 在抓取元数据/下载封面过程中出错: Client network socket disconnected before secure TLS connection was established
31 | 3. ➜ 在抓取元数据/下载封面过程中出错: read ECONNRESET
32 | 4. ➜ 在抓取元数据/下载封面过程中出错: Couldn't request work page HTML (https://www.dlsite.com/maniax/work/=/product_id/RJ200879.html), received: 404.
33 |
34 | 第一种错误是请求超时,一般稍后重试可以解决,如果屡次产生都失败,就要考虑是不是网络问题,如果是因为 DLsite 被墙了就需要到 [高级设置] 页面中的 "爬虫相关设置" 中设置代理。
35 |
36 | 第二种错误与第三种错误一般会批量出现,当发现有大量这种错误出现时,可以考虑先终止扫描进程,稍后重试,一般可以解决。
37 |
第四种错误是因为该音声在 DLSite 上的页面已经不存在了,可能是因为下架或被收录到一个合集里打包售卖。这种错误目前无法解决,后续会考虑增加手动添加音声的功能。
39 |
40 | 
41 |
42 | #### 6.初次扫描后**不再需要**重启服务端程序
43 | 这个问题已经修复,现在任何时候都可以随扫随用。
44 |
45 | #### 7.注意数据库中只会存储一种语言的标签
46 | 
47 |
48 | #### 8.关于如何启用 http 代理
49 | 你应该只在 DLsite 被墙时启用 http 代理
50 |
51 | 下面以 v2rayN 为例,找到 http 代理的端口号
52 | 
53 |
54 | 然后在 [高级设置] 页面中的 "爬虫相关设置" 中设置代理并点击保存按钮保存设置
55 |
56 | 注意填写错误的端口号会在爬虫时出错:
57 |
58 | 1. ➜ 在抓取元数据过程中出错: tunneling socket could not be established, cause=socket hang up (错填成 socks 代理服务的端口号)
59 | 2. ➜ 在抓取元数据过程中出错: tunneling socket could not be established, cause=connect ECONNREFUSED 127.0.0.1:10887 (错填成无效的端口号,端口 10887 上并没有启用任何服务)
60 |
61 | 
62 |
63 | #### 9. 关于收藏功能
64 | 目前已经实现了作品打星、标记进度、撰写评论的功能。
65 |
66 | - 作品打星
67 | 点击任意作品的星标,可以看到星标颜色由黄色变为蓝色,表示您自己的评分。
68 |
69 | - 标记进度
70 | 在任意作品详情页可以标记进度
71 |
72 | - 撰写评论
73 | 在任意作品详情页可以撰写评论
74 |
75 | 
76 |
77 | 除了在作品详情页以外,凡是打过星、标记过进度、写过评论的作品都会出现在**我的收藏**中。您可以在里面查看、创建、修改和删除星标、进度和评论。
78 |
79 | 
80 |
81 | 
82 |
83 | 
84 |
85 | #### 10. 默认用户名密码
86 | 如果未开启用户验证,无需用户名密码即可登录。
87 | 如果开启了用户验证,默认用户名和密码都是`admin`。
88 | 更改“是否开启用户验证”选项以后需要重启本程序。
89 |
90 | #### 11.关于升级
91 | 请阅读GitHub releases页面内的更新说明,目前绝大多数情况下不需要额外操作。
92 | 只需要用新版程序覆盖旧版,或者放在与旧版相同的目录下即可完成自动升级。
93 |
94 | ### docker 镜像使用说明
95 | #### 1.下载 docker 镜像文件
96 |
97 | #### 2.从文件中添加镜像
98 | 
99 |
100 | #### 3.挂载数据卷
101 | **使用docker-compose的用户请自行根据存储位置设定docker-compose.yml,在填写设置时应当保持一致。**
102 | 例如:
103 | - `存储位置:/usr/src/kikoeru/VoiceWork`,那么音声库位置就填写`/usr/src/kikoeru/VoiceWork`。
104 | - `存储位置:/usr/src/kikoeru/Folder/VoiceWork`,那么音声库位置就填写`/usr/src/kikoeru/Folder/VoiceWork`。
105 |
106 | 
107 |
108 | #### 4.映射端口
109 | 
110 |
111 | #### 5.注意在 [音声库] 页面添加新的根文件夹时,要填写容器内部的路径
112 | 
113 |
--------------------------------------------------------------------------------