├── .editorconfig ├── .eslintrc.js ├── .github ├── pull_request_template.md └── workflows │ ├── db-backup.yml │ ├── docker-build.yml │ ├── error-server.yml │ ├── generate-sqlite-base.yml │ ├── sb-server.yml │ ├── take-action.yml │ └── test.yaml ├── .gitignore ├── .nycrc.json ├── DatabaseSchema.md ├── Dockerfile ├── LICENSE ├── LICENSE.old ├── README.MD ├── ci.json ├── codecov.yml ├── config.json.example ├── containers ├── backup-db │ ├── Dockerfile │ ├── backup.sh │ └── forget.sh ├── error-server │ ├── Dockerfile │ ├── default.conf │ └── nginx.conf └── rsync │ └── Dockerfile ├── databases ├── _private.db.sql ├── _private_indexes.sql ├── _sponsorTimes.db.sql ├── _sponsorTimes_indexes.sql ├── _upgrade_private_1.sql ├── _upgrade_private_10.sql ├── _upgrade_private_11.sql ├── _upgrade_private_12.sql ├── _upgrade_private_13.sql ├── _upgrade_private_2.sql ├── _upgrade_private_3.sql ├── _upgrade_private_4.sql ├── _upgrade_private_5.sql ├── _upgrade_private_6.sql ├── _upgrade_private_7.sql ├── _upgrade_private_8.sql ├── _upgrade_private_9.sql ├── _upgrade_sponsorTimes_1.sql ├── _upgrade_sponsorTimes_10.sql ├── _upgrade_sponsorTimes_11.sql ├── _upgrade_sponsorTimes_12.sql ├── _upgrade_sponsorTimes_13.sql ├── _upgrade_sponsorTimes_14.sql ├── _upgrade_sponsorTimes_15.sql ├── _upgrade_sponsorTimes_16.sql ├── _upgrade_sponsorTimes_17.sql ├── _upgrade_sponsorTimes_18.sql ├── _upgrade_sponsorTimes_19.sql ├── _upgrade_sponsorTimes_2.sql ├── _upgrade_sponsorTimes_20.sql ├── _upgrade_sponsorTimes_21.sql ├── _upgrade_sponsorTimes_22.sql ├── _upgrade_sponsorTimes_23.sql ├── _upgrade_sponsorTimes_24.sql ├── _upgrade_sponsorTimes_25.sql ├── _upgrade_sponsorTimes_26.sql ├── _upgrade_sponsorTimes_27.sql ├── _upgrade_sponsorTimes_28.sql ├── _upgrade_sponsorTimes_29.sql ├── _upgrade_sponsorTimes_3.sql ├── _upgrade_sponsorTimes_30.sql ├── _upgrade_sponsorTimes_31.sql ├── _upgrade_sponsorTimes_32.sql ├── _upgrade_sponsorTimes_33.sql ├── _upgrade_sponsorTimes_34.sql ├── 
_upgrade_sponsorTimes_35.sql ├── _upgrade_sponsorTimes_36.sql ├── _upgrade_sponsorTimes_37.sql ├── _upgrade_sponsorTimes_38.sql ├── _upgrade_sponsorTimes_39.sql ├── _upgrade_sponsorTimes_4.sql ├── _upgrade_sponsorTimes_40.sql ├── _upgrade_sponsorTimes_41.sql ├── _upgrade_sponsorTimes_42.sql ├── _upgrade_sponsorTimes_43.sql ├── _upgrade_sponsorTimes_44.sql ├── _upgrade_sponsorTimes_5.sql ├── _upgrade_sponsorTimes_6.sql ├── _upgrade_sponsorTimes_7.sql ├── _upgrade_sponsorTimes_8.sql └── _upgrade_sponsorTimes_9.sql ├── docker ├── database.env.example ├── docker-compose-ci.yml ├── docker-compose.yml ├── migrate │ └── database.pgload.txt ├── newleaf │ └── configuration.py ├── redis │ └── redis.conf └── rsync │ └── rsyncd.conf ├── entrypoint.sh ├── nodemon.json ├── package-lock.json ├── package.json ├── src ├── app.ts ├── config.ts ├── cronjob │ ├── downvoteSegmentArchiveJob.ts │ └── index.ts ├── databases │ ├── IDatabase.ts │ ├── Postgres.ts │ ├── Sqlite.ts │ └── databases.ts ├── index.ts ├── middleware │ ├── apiCsp.ts │ ├── cors.ts │ ├── etag.ts │ ├── hostHeader.ts │ ├── logger.ts │ ├── requestRateLimit.ts │ └── userCounter.ts ├── routes │ ├── addFeature.ts │ ├── addUnlistedVideo.ts │ ├── addUserAsTempVIP.ts │ ├── addUserAsVIP.ts │ ├── deleteLockCategories.ts │ ├── dumpDatabase.ts │ ├── generateToken.ts │ ├── getBranding.ts │ ├── getBrandingStats.ts │ ├── getChapterNames.ts │ ├── getConfig.ts │ ├── getDaysSavedFormatted.ts │ ├── getFeatureFlag.ts │ ├── getIsUserVIP.ts │ ├── getLockCategories.ts │ ├── getLockCategoriesByHash.ts │ ├── getLockReason.ts │ ├── getMetrics.ts │ ├── getReady.ts │ ├── getSavedTimeForUser.ts │ ├── getSearchSegments.ts │ ├── getSegmentID.ts │ ├── getSegmentInfo.ts │ ├── getSkipSegments.ts │ ├── getSkipSegmentsByHash.ts │ ├── getStatus.ts │ ├── getTopBrandingUsers.ts │ ├── getTopCategoryUsers.ts │ ├── getTopUsers.ts │ ├── getTotalStats.ts │ ├── getUserID.ts │ ├── getUserInfo.ts │ ├── getUserStats.ts │ ├── getUsername.ts │ ├── getVideoLabel.ts │ 
├── getVideoLabelByHash.ts │ ├── getViewsForUser.ts │ ├── oldSubmitSponsorTimes.ts │ ├── postBranding.ts │ ├── postCasual.ts │ ├── postClearCache.ts │ ├── postLockCategories.ts │ ├── postPurgeAllSegments.ts │ ├── postSegmentShift.ts │ ├── postSkipSegments.ts │ ├── postWarning.ts │ ├── setConfig.ts │ ├── setUsername.ts │ ├── shadowBanUser.ts │ ├── verifyToken.ts │ ├── viewedVideoSponsorTime.ts │ ├── voteOnSponsorTime.ts │ └── youtubeApiProxy.ts ├── types │ ├── branding.model.ts │ ├── config.model.ts │ ├── hash.model.ts │ ├── innerTubeApi.model.ts │ ├── lib.model.ts │ ├── ratings.model.ts │ ├── segments.model.ts │ ├── user.model.ts │ ├── warning.model.ts │ └── youtubeApi.model.ts └── utils │ ├── array.ts │ ├── checkBan.ts │ ├── createMemoryCache.ts │ ├── diskCache.ts │ ├── features.ts │ ├── getCWSUsers.ts │ ├── getCommit.ts │ ├── getFormattedTime.ts │ ├── getHash.ts │ ├── getHashCache.ts │ ├── getIP.ts │ ├── getService.ts │ ├── getSubmissionUUID.ts │ ├── getVideoDetails.ts │ ├── hashPrefixTester.ts │ ├── innerTubeAPI.ts │ ├── isUserTempVIP.ts │ ├── isUserVIP.ts │ ├── logger.ts │ ├── parseParams.ts │ ├── parseSkipSegments.ts │ ├── permissions.ts │ ├── promise.ts │ ├── queryCacher.ts │ ├── redis.ts │ ├── redisKeys.ts │ ├── redisLock.ts │ ├── reputation.ts │ ├── requestValidator.ts │ ├── serverConfig.ts │ ├── tokenUtils.ts │ ├── userAgent.ts │ ├── webhookUtils.ts │ ├── youtubeApi.ts │ └── youtubeID.ts ├── test.json ├── test ├── case_boilerplate.txt ├── cases │ ├── addFeatures.ts │ ├── addUserAsVIP.ts │ ├── dbUpgrade.ts │ ├── downvoteSegmentArchiveJob.ts │ ├── eTag.ts │ ├── environment.ts │ ├── generateVerifyToken.ts │ ├── getBranding.ts │ ├── getChapterNames.ts │ ├── getDaysSavedFormatted.ts │ ├── getHash.ts │ ├── getHashCache.ts │ ├── getIP.ts │ ├── getIsUserVIP.ts │ ├── getLockCategories.ts │ ├── getLockCategoriesByHash.ts │ ├── getLockReason.ts │ ├── getSavedTimeForUser.ts │ ├── getSearchSegments.ts │ ├── getSearchSegments4xx.ts │ ├── getSegmentInfo.ts │ ├── 
getService.ts │ ├── getSkipSegments.ts │ ├── getSkipSegmentsByHash.ts │ ├── getStatus.ts │ ├── getSubmissionUUID.ts │ ├── getTopCategoryUsers.ts │ ├── getTopUsers.ts │ ├── getTotalStats.ts │ ├── getUserID.ts │ ├── getUserInfo.ts │ ├── getUserInfoFree.ts │ ├── getUserStats.ts │ ├── getUsername.ts │ ├── getVideoLabelByHash.ts │ ├── getVideoLabels.ts │ ├── getViewsForUser.ts │ ├── highLoad.ts │ ├── innerTubeApi.ts │ ├── lockCategoriesHttp.ts │ ├── lockCategoriesRecords.ts │ ├── oldGetSponsorTime.ts │ ├── oldSubmitSponsorTimes.ts │ ├── postBranding.ts │ ├── postCasual.ts │ ├── postClearCache.ts │ ├── postPurgeAllSegments.ts │ ├── postSkipSegments.ts │ ├── postSkipSegments400.ts │ ├── postSkipSegments400Stub.ts │ ├── postSkipSegmentsAutomod.ts │ ├── postSkipSegmentsDuration.ts │ ├── postSkipSegmentsFeatures.ts │ ├── postSkipSegmentsLocked.ts │ ├── postSkipSegmentsShadowban.ts │ ├── postSkipSegmentsUserAgent.ts │ ├── postSkipSegmentsWarnings.ts │ ├── postWarning.ts │ ├── redisTest.ts │ ├── reputation.ts │ ├── requestValidator.ts │ ├── segmentShift.ts │ ├── setUsername.ts │ ├── setUsernamePrivate.ts │ ├── shadowBanUser.ts │ ├── shadowBanUser4xx.ts │ ├── tempVip.ts │ ├── testUtils.ts │ ├── tokenUtils.ts │ ├── unBan.ts │ ├── userAgentTest.ts │ ├── userCounter.ts │ ├── validateVideoIDs.ts │ └── voteOnSponsorTime.ts ├── mocks.ts ├── mocks │ ├── UserCounter.ts │ ├── gumroadMock.ts │ ├── mockExpressRequest.ts │ ├── patreonMock.ts │ └── youtubeMock.ts ├── test.ts └── utils │ ├── genUser.ts │ ├── getRandom.ts │ ├── httpClient.ts │ ├── partialDeepEquals.ts │ ├── queryGen.ts │ ├── reset.ts │ └── segmentQueryGen.ts └── tsconfig.json /.editorconfig: -------------------------------------------------------------------------------- 1 | # EditorConfig is awesome: https://EditorConfig.org 2 | 3 | # top-most EditorConfig file 4 | root = true 5 | 6 | # Unix-style newlines with a newline ending every file 7 | [*] 8 | end_of_line = lf 9 | insert_final_newline = true 10 | 11 | 
[*.{js,json,ts,tsx}] 12 | charset = utf-8 13 | indent_style = space 14 | indent_size = 4 15 | 16 | [package.json] 17 | indent_size = 2 18 | -------------------------------------------------------------------------------- /.eslintrc.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | env: { 3 | browser: false, 4 | es2021: true, 5 | node: true, 6 | }, 7 | extends: [ 8 | "eslint:recommended", 9 | "plugin:@typescript-eslint/recommended", 10 | ], 11 | parser: "@typescript-eslint/parser", 12 | parserOptions: { 13 | ecmaVersion: 12, 14 | sourceType: "module", 15 | }, 16 | plugins: ["@typescript-eslint"], 17 | rules: { 18 | // TODO: Remove warn rules when not needed anymore 19 | "@typescript-eslint/no-empty-interface": "off", 20 | "@typescript-eslint/no-explicit-any": "off", 21 | "indent": ["warn", 4, { "SwitchCase": 1 }], 22 | "no-multiple-empty-lines": ["error", { max: 2, maxEOF: 0 }], 23 | "no-self-assign": "off", 24 | "no-trailing-spaces": "warn", 25 | "object-curly-spacing": ["warn", "always"], 26 | "prefer-template": "warn", 27 | "quotes": ["warn", "double", { "avoidEscape": true, "allowTemplateLiterals": true }], 28 | "require-await": "warn", 29 | "semi": "warn", 30 | "no-console": "warn" 31 | }, 32 | overrides: [ 33 | { 34 | files: ["src/**/*.ts"], 35 | 36 | parserOptions: { 37 | project: ["./tsconfig.json"], 38 | }, 39 | 40 | rules: { 41 | "@typescript-eslint/no-misused-promises": "warn", 42 | "@typescript-eslint/no-floating-promises" : "warn" 43 | } 44 | }, 45 | ], 46 | }; 47 | -------------------------------------------------------------------------------- /.github/pull_request_template.md: -------------------------------------------------------------------------------- 1 | - [ ] I agree to license my contribution under AGPL-3.0-only with my contribution automatically being licensed under LGPL-3.0 additionally after 6 months 2 | 3 | *** 
-------------------------------------------------------------------------------- /.github/workflows/db-backup.yml: -------------------------------------------------------------------------------- 1 | name: Docker image builds 2 | on: 3 | push: 4 | branches: 5 | - master 6 | paths: 7 | - containers/backup-db/** 8 | workflow_dispatch: 9 | 10 | jobs: 11 | backup-db: 12 | uses: ./.github/workflows/docker-build.yml 13 | with: 14 | name: "db-backup" 15 | username: "ajayyy" 16 | folder: "./containers/backup-db" 17 | secrets: 18 | GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} -------------------------------------------------------------------------------- /.github/workflows/docker-build.yml: -------------------------------------------------------------------------------- 1 | # Based on https://github.com/ajayyy/sb-mirror/blob/main/.github/workflows/docker-build.yml 2 | name: multi-build-docker 3 | on: 4 | workflow_call: 5 | inputs: 6 | name: 7 | required: true 8 | type: string 9 | username: 10 | required: true 11 | type: string 12 | folder: 13 | required: true 14 | type: string 15 | secrets: 16 | GH_TOKEN: 17 | required: true 18 | 19 | jobs: 20 | build_container: 21 | runs-on: ubuntu-latest 22 | permissions: 23 | packages: write 24 | steps: 25 | - uses: actions/checkout@v4 26 | - name: Docker meta 27 | id: meta 28 | uses: docker/metadata-action@v5 29 | with: 30 | images: | 31 | ghcr.io/${{ inputs.username }}/${{ inputs.name }} 32 | tags: | 33 | type=raw,value=alpine 34 | flavor: | 35 | latest=true 36 | - name: Login to GHCR 37 | uses: docker/login-action@v3 38 | with: 39 | registry: ghcr.io 40 | username: ${{ github.repository_owner }} 41 | password: ${{ secrets.GH_TOKEN }} 42 | - name: Set up QEMU 43 | uses: docker/setup-qemu-action@v3 44 | with: 45 | platforms: arm,arm64 46 | - name: Set up buildx 47 | uses: docker/setup-buildx-action@v3 48 | - name: push 49 | uses: docker/build-push-action@v6 50 | with: 51 | context: ${{ inputs.folder }} 52 | platforms: linux/amd64,linux/arm64 
53 | push: true 54 | tags: ${{ steps.meta.outputs.tags }} -------------------------------------------------------------------------------- /.github/workflows/error-server.yml: -------------------------------------------------------------------------------- 1 | name: Docker image builds 2 | on: 3 | push: 4 | branches: 5 | - master 6 | workflow_dispatch: 7 | 8 | jobs: 9 | error-server: 10 | uses: ./.github/workflows/docker-build.yml 11 | with: 12 | name: "error-server" 13 | username: "ajayyy" 14 | folder: "./containers/error-server" 15 | secrets: 16 | GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} -------------------------------------------------------------------------------- /.github/workflows/generate-sqlite-base.yml: -------------------------------------------------------------------------------- 1 | name: create-sqlite-base 2 | 3 | on: 4 | push: 5 | branches: 6 | - master 7 | paths: 8 | - databases/** 9 | workflow_dispatch: 10 | 11 | jobs: 12 | make-base-db: 13 | name: Generate SQLite base .db 14 | runs-on: ubuntu-latest 15 | 16 | steps: 17 | - uses: actions/checkout@v4 18 | - uses: actions/setup-node@v4 19 | with: 20 | node-version: 18 21 | cache: npm 22 | - run: npm ci 23 | - name: Set config 24 | run: | 25 | echo '{"mode": "init-db-and-exit"}' > config.json 26 | - name: Run Server 27 | timeout-minutes: 10 28 | run: npm start 29 | - uses: actions/upload-artifact@v4 30 | with: 31 | name: SponsorTimesDB.db 32 | path: databases/sponsorTimes.db 33 | - uses: mchangrh/s3cmd-sync@f4f36b9705bdd9af7ac91964136989ac17e3b513 34 | with: 35 | args: --acl-public 36 | env: 37 | S3_ENDPOINT: ${{ secrets.S3_ENDPOINT }} 38 | S3_BUCKET: ${{ secrets.S3_BUCKET }} 39 | S3_ACCESS_KEY_ID: ${{ secrets.S3_ACCESS_KEY_ID }} 40 | S3_ACCESS_KEY_SECRET: ${{ secrets.S3_ACCESS_KEY_SECRET }} 41 | SOURCE_DIR: 'databases/sponsorTimes.db' -------------------------------------------------------------------------------- /.github/workflows/sb-server.yml: 
-------------------------------------------------------------------------------- 1 | name: Docker image builds 2 | on: 3 | push: 4 | branches: 5 | - master 6 | workflow_dispatch: 7 | 8 | jobs: 9 | sb-server: 10 | uses: ./.github/workflows/docker-build.yml 11 | with: 12 | name: "sb-server" 13 | username: "ajayyy" 14 | folder: "." 15 | secrets: 16 | GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} 17 | rsync-host: 18 | needs: sb-server 19 | uses: ./.github/workflows/docker-build.yml 20 | with: 21 | name: "rsync-host" 22 | username: "ajayyy" 23 | folder: "./containers/rsync" 24 | secrets: 25 | GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} -------------------------------------------------------------------------------- /.github/workflows/take-action.yml: -------------------------------------------------------------------------------- 1 | name: Assign issue to contributor 2 | on: [issue_comment] 3 | 4 | jobs: 5 | assign: 6 | name: Take an issue 7 | runs-on: ubuntu-latest 8 | steps: 9 | - name: take the issue 10 | uses: bdougie/take-action@28b86cd8d25593f037406ecbf96082db2836e928 11 | env: 12 | GITHUB_TOKEN: ${{ github.token }} 13 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Logs 2 | logs 3 | *.log 4 | npm-debug.log* 5 | 6 | # Dependency directories 7 | node_modules/ 8 | 9 | # TypeScript cache 10 | *.tsbuildinfo 11 | 12 | # Optional npm cache directory 13 | .npm 14 | 15 | # Optional eslint cache 16 | .eslintcache 17 | 18 | # dotenv environment variables file 19 | .env 20 | .env.test 21 | 22 | # Databases 23 | databases/sponsorTimes.db 24 | databases/sponsorTimes.db-shm 25 | databases/sponsorTimes.db-wal 26 | databases/private.db 27 | databases/private.db-shm 28 | databases/private.db-wal 29 | databases/sponsorTimesReal.db 30 | test/databases/sponsorTimes.db 31 | test/databases/sponsorTimes.db-shm 32 | test/databases/sponsorTimes.db-wal 33 | 
test/databases/private.db 34 | databases/cache 35 | docker/database-export 36 | 37 | # Config files 38 | config.json 39 | docker/database.env 40 | 41 | # Other 42 | working 43 | 44 | # Mac files 45 | .DS_Store 46 | /.idea/ 47 | /dist/ 48 | 49 | # nyc coverage output 50 | .nyc_output/ 51 | coverage/ 52 | 53 | .vscode -------------------------------------------------------------------------------- /.nycrc.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "@istanbuljs/nyc-config-typescript", 3 | "check-coverage": false, 4 | "skip-full": true, 5 | "reporter": ["text", "html"], 6 | "include": [ 7 | "src/**/*.ts" 8 | ], 9 | "exclude": [ 10 | "src/routes/addUnlistedVideo.ts", 11 | "src/cronjob/downvoteSegmentArchiveJob.ts", 12 | "src/databases/*" 13 | ] 14 | } -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM node:18-alpine as builder 2 | RUN apk add --no-cache --virtual .build-deps python3 make g++ 3 | COPY package.json package-lock.json tsconfig.json entrypoint.sh ./ 4 | COPY src src 5 | RUN npm ci && npm run tsc 6 | 7 | FROM node:18-alpine as app 8 | WORKDIR /usr/src/app 9 | RUN apk add --no-cache git postgresql-client 10 | COPY --from=builder ./node_modules ./node_modules 11 | COPY --from=builder ./dist ./dist 12 | COPY ./.git/ ./.git 13 | COPY entrypoint.sh . 14 | COPY databases/*.sql databases/ 15 | EXPOSE 8080 16 | CMD ./entrypoint.sh 17 | -------------------------------------------------------------------------------- /LICENSE.old: -------------------------------------------------------------------------------- 1 | License for code prior to commit d738e89f203e9cea21b7df4acb9f4b3505f0acdd 2 | 3 | You must follow LICENSE instead if you want to use any newer version. 
4 | 5 | ---- 6 | 7 | MIT License 8 | 9 | Copyright (c) 2019 Ajay Ramachandran 10 | 11 | Permission is hereby granted, free of charge, to any person obtaining a copy 12 | of this software and associated documentation files (the "Software"), to deal 13 | in the Software without restriction, including without limitation the rights 14 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 15 | copies of the Software, and to permit persons to whom the Software is 16 | furnished to do so, subject to the following conditions: 17 | 18 | The above copyright notice and this permission notice shall be included in all 19 | copies or substantial portions of the Software. 20 | 21 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 22 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 23 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 24 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 25 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 26 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 27 | SOFTWARE. 28 | -------------------------------------------------------------------------------- /README.MD: -------------------------------------------------------------------------------- 1 | # SponsorBlock Server 2 | 3 | SponsorBlock is an extension that will skip over sponsored segments of YouTube videos. SponsorBlock is a crowdsourced browser extension that lets anyone submit the start and end times of sponsored segments of YouTube videos. Once one person submits this information, everyone else with this extension will skip right over the sponsored segment. 4 | 5 | This is the server backend for it 6 | 7 | # Server 8 | 9 | This uses a Postgres or Sqlite database to hold all the timing data. 
10 | 11 | To make sure that this project doesn't die, I have made the database publicly downloadable at https://sponsor.ajay.app/database. You can download a backup or get archive.org to take a backup if you do desire. The database is under [this license](https://creativecommons.org/licenses/by-nc-sa/4.0/) unless you get explicit permission from me. 12 | 13 | Hopefully this project can be combined with projects like [this](https://github.com/Sponsoff/sponsorship_remover) and use this data to create a neural network to predict when sponsored segments happen. That project is sadly abandoned now, so I have decided to attempt to revive this idea. 14 | 15 | # Client 16 | 17 | The client web browser extension is available here: https://github.com/ajayyy/SponsorBlock 18 | 19 | # Build Yourself 20 | 21 | This is a node.js server, so clone this repo and run `npm install` to install all dependencies. 22 | 23 | Make sure to put the database files in the `./databases` folder if you want to use a pre-existing database. Otherwise, a fresh database will be created. 24 | 25 | Rename `config.json.example` to `config.json` and fill the parameters inside. Make sure to remove the comments as comments are not supported in JSON. 26 | 27 | Ensure all the tests pass with `npm test` 28 | 29 | Run the server with `npm start`. 30 | 31 | # Developing 32 | 33 | If you want to make changes, run `npm run dev` to automatically reload the server and run tests whenever a file is saved. 34 | 35 | # API Docs 36 | 37 | Available [here](https://wiki.sponsor.ajay.app/index.php/API_Docs) 38 | 39 | # License 40 | 41 | This is licensed under AGPL-3.0-only. 
-------------------------------------------------------------------------------- /ci.json: -------------------------------------------------------------------------------- 1 | { 2 | "port": 8080, 3 | "mockPort": 8081, 4 | "globalSalt": "testSalt", 5 | "adminUserID": "4bdfdc9cddf2c7d07a8a87b57bf6d25389fb75d1399674ee0e0938a6a60f4c3b", 6 | "newLeafURLs": ["placeholder"], 7 | "discordReportChannelWebhookURL": "http://127.0.0.1:8081/webhook/ReportChannel", 8 | "discordFirstTimeSubmissionsWebhookURL": "http://127.0.0.1:8081/webhook/FirstTimeSubmissions", 9 | "discordCompletelyIncorrectReportWebhookURL": "http://127.0.0.1:8081/webhook/CompletelyIncorrectReport", 10 | "discordNeuralBlockRejectWebhookURL": "http://127.0.0.1:8081/webhook/NeuralBlockReject", 11 | "neuralBlockURL": "http://127.0.0.1:8081/NeuralBlock", 12 | "userCounterURL": "http://127.0.0.1:8081/UserCounter", 13 | "behindProxy": true, 14 | "postgres": { 15 | "user": "ci_db_user", 16 | "password": "ci_db_pass", 17 | "host": "localhost", 18 | "port": 5432 19 | }, 20 | "redis": { 21 | "enabled": true, 22 | "socket": { 23 | "host": "localhost", 24 | "port": 6379 25 | }, 26 | "expiryTime": 86400 27 | }, 28 | "createDatabaseIfNotExist": true, 29 | "schemaFolder": "./databases", 30 | "dbSchema": "./databases/_sponsorTimes.db.sql", 31 | "privateDBSchema": "./databases/_private.db.sql", 32 | "categoryList": ["sponsor", "selfpromo", "exclusive_access", "interaction", "intro", "outro", "preview", "music_offtopic", "filler", "poi_highlight", "chapter"], 33 | "mode": "test", 34 | "readOnly": false, 35 | "webhooks": [ 36 | { 37 | "url": "http://127.0.0.1:8081/CustomWebhook", 38 | "key": "superSecretKey", 39 | "scopes": [ 40 | "vote.up", 41 | "vote.down" 42 | ] 43 | }, { 44 | "url": "http://127.0.0.1:8081/FailedWebhook", 45 | "key": "superSecretKey", 46 | "scopes": [ 47 | "vote.up", 48 | "vote.down" 49 | ] 50 | }, { 51 | "url": "http://127.0.0.1:8099/WrongPort", 52 | "key": "superSecretKey", 53 | "scopes": [ 54 | "vote.up", 
55 | "vote.down" 56 | ] 57 | } 58 | ], 59 | "hoursAfterWarningExpires": 24, 60 | "rateLimit": { 61 | "vote": { 62 | "windowMs": 900000, 63 | "max": 20, 64 | "message": "Too many votes, please try again later", 65 | "statusCode": 429 66 | }, 67 | "view": { 68 | "windowMs": 900000, 69 | "max": 20, 70 | "statusCode": 200 71 | } 72 | }, 73 | "patreon": { 74 | "clientId": "testClientID", 75 | "clientSecret": "testClientSecret", 76 | "redirectUri": "http://127.0.0.1/fake/callback" 77 | }, 78 | "minReputationToSubmitFiller": -1, 79 | "minUserIDLength": 0 80 | } 81 | -------------------------------------------------------------------------------- /codecov.yml: -------------------------------------------------------------------------------- 1 | comment: false 2 | coverage: 3 | status: 4 | project: 5 | default: 6 | informational: true 7 | patch: 8 | default: 9 | informational: true -------------------------------------------------------------------------------- /containers/backup-db/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM alpine as builder 2 | WORKDIR /scripts 3 | COPY ./backup.sh ./backup.sh 4 | COPY ./forget.sh ./forget.sh 5 | 6 | FROM alpine 7 | RUN apk add --no-cache postgresql-client restic 8 | COPY --from=builder --chmod=755 /scripts /usr/src/app/ 9 | 10 | RUN echo '30 * * * * /usr/src/app/backup.sh' >> /etc/crontabs/root 11 | RUN echo '10 0 * * */2 /usr/src/app/forget.sh' >> /etc/crontabs/root 12 | 13 | CMD crond -l 2 -f 14 | -------------------------------------------------------------------------------- /containers/backup-db/backup.sh: -------------------------------------------------------------------------------- 1 | mkdir ./dump 2 | 3 | pg_dump -f ./dump/sponsorTimes.dump sponsorTimes 4 | pg_dump -f ./dump/privateDB.dump privateDB 5 | 6 | restic backup ./dump -------------------------------------------------------------------------------- /containers/backup-db/forget.sh: 
-------------------------------------------------------------------------------- 1 | restic forget --prune --keep-hourly 24 --keep-daily 7 --keep-weekly 8 -------------------------------------------------------------------------------- /containers/error-server/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM nginx as app 2 | EXPOSE 80 3 | COPY nginx.conf /etc/nginx/nginx.conf 4 | COPY default.conf /etc/nginx/conf.d/default.conf -------------------------------------------------------------------------------- /containers/error-server/default.conf: -------------------------------------------------------------------------------- 1 | server { 2 | listen 80; 3 | listen [::]:80; 4 | server_name localhost; 5 | 6 | location / { 7 | return 503; 8 | } 9 | } -------------------------------------------------------------------------------- /containers/error-server/nginx.conf: -------------------------------------------------------------------------------- 1 | user nginx; 2 | worker_processes auto; 3 | 4 | error_log /var/log/nginx/error.log notice; 5 | pid /var/run/nginx.pid; 6 | 7 | events { 8 | worker_connections 4096; 9 | } 10 | 11 | http { 12 | include /etc/nginx/mime.types; 13 | default_type application/octet-stream; 14 | 15 | access_log off; 16 | error_log /dev/null crit; 17 | 18 | include /etc/nginx/conf.d/*.conf; 19 | } -------------------------------------------------------------------------------- /containers/rsync/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM ghcr.io/ajayyy/sb-server:latest 2 | EXPOSE 873/tcp 3 | RUN apk add rsync>3.4.1-r0 4 | RUN mkdir /usr/src/app/database-export 5 | 6 | CMD rsync --no-detach --daemon & ./entrypoint.sh 7 | -------------------------------------------------------------------------------- /databases/_private.db.sql: -------------------------------------------------------------------------------- 1 | BEGIN TRANSACTION; 2 | 3 | 
CREATE TABLE IF NOT EXISTS "votes" ( 4 | "UUID" TEXT NOT NULL, 5 | "userID" TEXT NOT NULL, 6 | "hashedIP" TEXT NOT NULL, 7 | "type" INTEGER NOT NULL 8 | ); 9 | 10 | CREATE TABLE IF NOT EXISTS "categoryVotes" ( 11 | "UUID" TEXT NOT NULL, 12 | "userID" TEXT NOT NULL, 13 | "hashedIP" TEXT NOT NULL, 14 | "category" TEXT NOT NULL, 15 | "timeSubmitted" INTEGER NOT NULL 16 | ); 17 | 18 | CREATE TABLE IF NOT EXISTS "sponsorTimes" ( 19 | "videoID" TEXT NOT NULL, 20 | "hashedIP" TEXT NOT NULL, 21 | "timeSubmitted" INTEGER NOT NULL 22 | ); 23 | 24 | CREATE TABLE IF NOT EXISTS "config" ( 25 | "key" TEXT NOT NULL, 26 | "value" TEXT NOT NULL 27 | ); 28 | 29 | CREATE TABLE IF NOT EXISTS "titleVotes" ( 30 | "id" SERIAL PRIMARY KEY, 31 | "videoID" TEXT NOT NULL, 32 | "UUID" TEXT NOT NULL, 33 | "userID" TEXT NOT NULL, 34 | "hashedIP" TEXT NOT NULL, 35 | "type" INTEGER NOT NULL 36 | ); 37 | 38 | CREATE TABLE IF NOT EXISTS "thumbnailVotes" ( 39 | "id" SERIAL PRIMARY KEY, 40 | "videoID" TEXT NOT NULL, 41 | "UUID" TEXT NOT NULL, 42 | "userID" TEXT NOT NULL, 43 | "hashedIP" TEXT NOT NULL, 44 | "type" INTEGER NOT NULL 45 | ); 46 | 47 | CREATE TABLE IF NOT EXISTS "casualVotes" ( 48 | "UUID" SERIAL PRIMARY KEY, 49 | "videoID" TEXT NOT NULL, 50 | "service" TEXT NOT NULL, 51 | "userID" TEXT NOT NULL, 52 | "hashedIP" TEXT NOT NULL, 53 | "category" TEXT NOT NULL, 54 | "type" INTEGER NOT NULL, 55 | "timeSubmitted" INTEGER NOT NULL 56 | ); 57 | 58 | COMMIT; 59 | -------------------------------------------------------------------------------- /databases/_private_indexes.sql: -------------------------------------------------------------------------------- 1 | -- sponsorTimes 2 | 3 | CREATE INDEX IF NOT EXISTS "privateDB_sponsorTimes_v4" 4 | ON public."sponsorTimes" USING btree 5 | ("videoID" ASC NULLS LAST, service COLLATE pg_catalog."default" ASC NULLS LAST, "timeSubmitted" ASC NULLS LAST); 6 | 7 | -- votes 8 | 9 | CREATE INDEX IF NOT EXISTS "votes_userID" 10 | ON public.votes USING btree 11 | 
("UUID" COLLATE pg_catalog."default" ASC NULLS LAST) 12 | TABLESPACE pg_default; 13 | 14 | -- categoryVotes 15 | 16 | CREATE INDEX IF NOT EXISTS "categoryVotes_UUID" 17 | ON public."categoryVotes" USING btree 18 | ("UUID" COLLATE pg_catalog."default" ASC NULLS LAST, "userID" COLLATE pg_catalog."default" ASC NULLS LAST, "hashedIP" COLLATE pg_catalog."default" ASC NULLS LAST, category COLLATE pg_catalog."default" ASC NULLS LAST) 19 | TABLESPACE pg_default; 20 | 21 | -- ratings 22 | 23 | CREATE INDEX IF NOT EXISTS "ratings_videoID" 24 | ON public."ratings" USING btree 25 | ("videoID" COLLATE pg_catalog."default" ASC NULLS LAST, service COLLATE pg_catalog."default" ASC NULLS LAST, "userID" COLLATE pg_catalog."default" ASC NULLS LAST, "timeSubmitted" ASC NULLS LAST) 26 | TABLESPACE pg_default; 27 | 28 | -- casualVotes 29 | 30 | CREATE INDEX IF NOT EXISTS "casualVotes_videoID" 31 | ON public."casualVotes" USING btree 32 | ("videoID" COLLATE pg_catalog."default" ASC NULLS LAST, "service" COLLATE pg_catalog."default" ASC NULLS LAST, "userID" COLLATE pg_catalog."default" ASC NULLS LAST) 33 | TABLESPACE pg_default; 34 | 35 | CREATE INDEX IF NOT EXISTS "casualVotes_userID" 36 | ON public."casualVotes" USING btree 37 | ("userID" COLLATE pg_catalog."default" ASC NULLS LAST) 38 | TABLESPACE pg_default; -------------------------------------------------------------------------------- /databases/_upgrade_private_1.sql: -------------------------------------------------------------------------------- 1 | BEGIN TRANSACTION; 2 | 3 | /* for testing the db upgrade, don't remove because it looks empty */ 4 | 5 | /* Add version to config */ 6 | INSERT INTO config (key, value) VALUES('version', 1); 7 | 8 | COMMIT; -------------------------------------------------------------------------------- /databases/_upgrade_private_10.sql: -------------------------------------------------------------------------------- 1 | BEGIN TRANSACTION; 2 | 3 | -- Add primary keys 4 | 5 | ALTER TABLE "votes" ADD 
"originalType" INTEGER NOT NULL default -1; 6 | 7 | UPDATE "config" SET value = 10 WHERE key = 'version'; 8 | 9 | COMMIT; -------------------------------------------------------------------------------- /databases/_upgrade_private_11.sql: -------------------------------------------------------------------------------- 1 | BEGIN TRANSACTION; 2 | 3 | CREATE TABLE IF NOT EXISTS "licenseKeys" ( 4 | "licenseKey" TEXT NOT NULL PRIMARY KEY, 5 | "time" INTEGER NOT NULL, 6 | "type" TEXT NOT NULL 7 | ); 8 | 9 | CREATE TABLE IF NOT EXISTS "oauthLicenseKeys" ( 10 | "licenseKey" TEXT NOT NULL PRIMARY KEY, 11 | "accessToken" TEXT NOT NULL, 12 | "refreshToken" TEXT NOT NULL, 13 | "expiresIn" INTEGER NOT NULL 14 | ); 15 | 16 | UPDATE "config" SET value = 11 WHERE key = 'version'; 17 | 18 | COMMIT; -------------------------------------------------------------------------------- /databases/_upgrade_private_12.sql: -------------------------------------------------------------------------------- 1 | BEGIN TRANSACTION; 2 | 3 | ALTER TABLE "casualVotes" DROP COLUMN "type"; 4 | 5 | UPDATE "config" SET value = 12 WHERE key = 'version'; 6 | 7 | COMMIT; -------------------------------------------------------------------------------- /databases/_upgrade_private_13.sql: -------------------------------------------------------------------------------- 1 | BEGIN TRANSACTION; 2 | 3 | ALTER TABLE "casualVotes" ADD "titleID" INTEGER default 0; 4 | 5 | UPDATE "config" SET value = 13 WHERE key = 'version'; 6 | 7 | COMMIT; -------------------------------------------------------------------------------- /databases/_upgrade_private_2.sql: -------------------------------------------------------------------------------- 1 | BEGIN TRANSACTION; 2 | 3 | CREATE TABLE IF NOT EXISTS "userNameLogs" ( 4 | "userID" TEXT NOT NULL, 5 | "newUserName" TEXT NOT NULL, 6 | "oldUserName" TEXT NOT NULL, 7 | "updatedByAdmin" BOOLEAN NOT NULL, 8 | "updatedAt" INTEGER NOT NULL 9 | ); 10 | 11 | UPDATE "config" SET value = 2 
WHERE key = 'version'; 12 | 13 | COMMIT; -------------------------------------------------------------------------------- /databases/_upgrade_private_3.sql: -------------------------------------------------------------------------------- 1 | BEGIN TRANSACTION; 2 | 3 | ALTER TABLE "sponsorTimes" ADD "service" TEXT NOT NULL default 'YouTube'; 4 | -- UPDATE "sponsorTimes" SET "service" = "YouTube"; 5 | 6 | DROP INDEX IF EXISTS "privateDB_sponsorTimes_videoID"; 7 | 8 | UPDATE "config" SET value = 3 WHERE key = 'version'; 9 | 10 | COMMIT; -------------------------------------------------------------------------------- /databases/_upgrade_private_4.sql: -------------------------------------------------------------------------------- 1 | BEGIN TRANSACTION; 2 | 3 | CREATE TABLE IF NOT EXISTS "ratings" ( 4 | "videoID" TEXT NOT NULL, 5 | "service" TEXT NOT NULL default 'YouTube', 6 | "type" INTEGER NOT NULL, 7 | "userID" TEXT NOT NULL, 8 | "timeSubmitted" INTEGER NOT NULL, 9 | "hashedIP" TEXT NOT NULL 10 | ); 11 | 12 | UPDATE "config" SET value = 4 WHERE key = 'version'; 13 | 14 | COMMIT; -------------------------------------------------------------------------------- /databases/_upgrade_private_5.sql: -------------------------------------------------------------------------------- 1 | BEGIN TRANSACTION; 2 | 3 | CREATE TABLE IF NOT EXISTS "tempVipLog" ( 4 | "issuerUserID" TEXT NOT NULL, 5 | "targetUserID" TEXT NOT NULL, 6 | "enabled" BOOLEAN NOT NULL, 7 | "updatedAt" INTEGER NOT NULL 8 | ); 9 | 10 | UPDATE "config" SET value = 5 WHERE key = 'version'; 11 | 12 | COMMIT; -------------------------------------------------------------------------------- /databases/_upgrade_private_6.sql: -------------------------------------------------------------------------------- 1 | BEGIN TRANSACTION; 2 | 3 | DROP INDEX IF EXISTS "sponsorTimes_hashedIP", "privateDB_sponsorTimes_videoID_v2"; --!sqlite-ignore 4 | 5 | UPDATE "config" SET value = 6 WHERE key = 'version'; 6 | 7 | COMMIT; 
-------------------------------------------------------------------------------- /databases/_upgrade_private_7.sql: -------------------------------------------------------------------------------- 1 | BEGIN TRANSACTION; 2 | 3 | ALTER TABLE "votes" ADD "normalUserID" TEXT NOT NULL default ''; 4 | 5 | UPDATE "config" SET value = 7 WHERE key = 'version'; 6 | 7 | COMMIT; -------------------------------------------------------------------------------- /databases/_upgrade_private_8.sql: -------------------------------------------------------------------------------- 1 | BEGIN TRANSACTION; 2 | 3 | -- Add primary keys 4 | 5 | ALTER TABLE "userNameLogs" ADD "id" SERIAL PRIMARY KEY; --!sqlite-ignore 6 | ALTER TABLE "categoryVotes" ADD "id" SERIAL PRIMARY KEY; --!sqlite-ignore 7 | ALTER TABLE "sponsorTimes" ADD "id" SERIAL PRIMARY KEY; --!sqlite-ignore 8 | ALTER TABLE "config" ADD PRIMARY KEY ("key"); --!sqlite-ignore 9 | ALTER TABLE "ratings" ADD "id" SERIAL PRIMARY KEY; --!sqlite-ignore 10 | ALTER TABLE "tempVipLog" ADD "id" SERIAL PRIMARY KEY; --!sqlite-ignore 11 | ALTER TABLE "votes" ADD "id" SERIAL PRIMARY KEY; --!sqlite-ignore 12 | 13 | UPDATE "config" SET value = 8 WHERE key = 'version'; 14 | 15 | COMMIT; -------------------------------------------------------------------------------- /databases/_upgrade_private_9.sql: -------------------------------------------------------------------------------- 1 | BEGIN TRANSACTION; 2 | 3 | -- Add primary keys 4 | 5 | DROP INDEX IF EXISTS "privateDB_sponsorTimes_v3"; --!sqlite-ignore 6 | 7 | UPDATE "config" SET value = 9 WHERE key = 'version'; 8 | 9 | COMMIT; -------------------------------------------------------------------------------- /databases/_upgrade_sponsorTimes_1.sql: -------------------------------------------------------------------------------- 1 | BEGIN TRANSACTION; 2 | 3 | /* Add incorrectVotes field */ 4 | CREATE TABLE "sqlb_temp_table_1" ( 5 | "videoID" TEXT NOT NULL, 6 | "startTime" REAL NOT NULL, 7 | "endTime" 
REAL NOT NULL, 8 | "votes" INTEGER NOT NULL, 9 | "incorrectVotes" INTEGER NOT NULL default 1, 10 | "UUID" TEXT NOT NULL UNIQUE, 11 | "userID" TEXT NOT NULL, 12 | "timeSubmitted" INTEGER NOT NULL, 13 | "views" INTEGER NOT NULL, 14 | "category" TEXT NOT NULL DEFAULT 'sponsor', 15 | "shadowHidden" INTEGER NOT NULL 16 | ); 17 | INSERT INTO sqlb_temp_table_1 SELECT "videoID","startTime","endTime","votes",'1',"UUID","userID","timeSubmitted","views","category","shadowHidden" FROM "sponsorTimes"; 18 | 19 | DROP TABLE "sponsorTimes"; 20 | ALTER TABLE sqlb_temp_table_1 RENAME TO "sponsorTimes"; 21 | 22 | /* Add version to config */ 23 | INSERT INTO config (key, value) VALUES('version', 1); 24 | 25 | COMMIT; -------------------------------------------------------------------------------- /databases/_upgrade_sponsorTimes_10.sql: -------------------------------------------------------------------------------- 1 | BEGIN TRANSACTION; 2 | 3 | /* Add Hidden field */ 4 | CREATE TABLE "sqlb_temp_table_10" ( 5 | "videoID" TEXT NOT NULL, 6 | "startTime" REAL NOT NULL, 7 | "endTime" REAL NOT NULL, 8 | "votes" INTEGER NOT NULL, 9 | "locked" INTEGER NOT NULL default '0', 10 | "incorrectVotes" INTEGER NOT NULL default '1', 11 | "UUID" TEXT NOT NULL UNIQUE, 12 | "userID" TEXT NOT NULL, 13 | "timeSubmitted" INTEGER NOT NULL, 14 | "views" INTEGER NOT NULL, 15 | "category" TEXT NOT NULL DEFAULT 'sponsor', 16 | "service" TEXT NOT NULL DEFAULT 'YouTube', 17 | "videoDuration" REAL NOT NULL DEFAULT '0', 18 | "hidden" INTEGER NOT NULL DEFAULT '0', 19 | "shadowHidden" INTEGER NOT NULL, 20 | "hashedVideoID" TEXT NOT NULL default '' 21 | ); 22 | 23 | INSERT INTO sqlb_temp_table_10 SELECT "videoID","startTime","endTime","votes","locked","incorrectVotes","UUID","userID","timeSubmitted","views","category","service","videoDuration",0,"shadowHidden","hashedVideoID" FROM "sponsorTimes"; 24 | 25 | DROP TABLE "sponsorTimes"; 26 | ALTER TABLE sqlb_temp_table_10 RENAME TO "sponsorTimes"; 27 | 28 | UPDATE 
"config" SET value = 10 WHERE key = 'version'; 29 | 30 | COMMIT; -------------------------------------------------------------------------------- /databases/_upgrade_sponsorTimes_11.sql: -------------------------------------------------------------------------------- 1 | BEGIN TRANSACTION; 2 | 3 | /* Rename table: noSegments to lockCategories */ 4 | ALTER TABLE "noSegments" RENAME TO "lockCategories"; 5 | 6 | UPDATE "config" SET value = 11 WHERE key = 'version'; 7 | 8 | COMMIT; 9 | -------------------------------------------------------------------------------- /databases/_upgrade_sponsorTimes_12.sql: -------------------------------------------------------------------------------- 1 | BEGIN TRANSACTION; 2 | 3 | /* Add reputation field */ 4 | CREATE TABLE "sqlb_temp_table_12" ( 5 | "videoID" TEXT NOT NULL, 6 | "startTime" REAL NOT NULL, 7 | "endTime" REAL NOT NULL, 8 | "votes" INTEGER NOT NULL, 9 | "locked" INTEGER NOT NULL default '0', 10 | "incorrectVotes" INTEGER NOT NULL default '1', 11 | "UUID" TEXT NOT NULL UNIQUE, 12 | "userID" TEXT NOT NULL, 13 | "timeSubmitted" INTEGER NOT NULL, 14 | "views" INTEGER NOT NULL, 15 | "category" TEXT NOT NULL DEFAULT 'sponsor', 16 | "service" TEXT NOT NULL DEFAULT 'YouTube', 17 | "videoDuration" REAL NOT NULL DEFAULT '0', 18 | "hidden" INTEGER NOT NULL DEFAULT '0', 19 | "reputation" REAL NOT NULL DEFAULT 0, 20 | "shadowHidden" INTEGER NOT NULL, 21 | "hashedVideoID" TEXT NOT NULL default '' 22 | ); 23 | 24 | INSERT INTO sqlb_temp_table_12 SELECT "videoID","startTime","endTime","votes","locked","incorrectVotes","UUID","userID","timeSubmitted","views","category","service","videoDuration","hidden",0,"shadowHidden","hashedVideoID" FROM "sponsorTimes"; 25 | 26 | DROP TABLE "sponsorTimes"; 27 | ALTER TABLE sqlb_temp_table_12 RENAME TO "sponsorTimes"; 28 | 29 | UPDATE "config" SET value = 12 WHERE key = 'version'; 30 | 31 | COMMIT; -------------------------------------------------------------------------------- 
/databases/_upgrade_sponsorTimes_13.sql: -------------------------------------------------------------------------------- 1 | BEGIN TRANSACTION; 2 | 3 | /* Add locked field */ 4 | CREATE TABLE "sqlb_temp_table_13" ( 5 | "userID" TEXT NOT NULL, 6 | "userName" TEXT NOT NULL, 7 | "locked" INTEGER NOT NULL default '0' 8 | ); 9 | 10 | INSERT INTO sqlb_temp_table_13 SELECT "userID", "userName", 0 FROM "userNames"; 11 | 12 | DROP TABLE "userNames"; 13 | ALTER TABLE sqlb_temp_table_13 RENAME TO "userNames"; 14 | 15 | UPDATE "config" SET value = 13 WHERE key = 'version'; 16 | 17 | COMMIT; 18 | -------------------------------------------------------------------------------- /databases/_upgrade_sponsorTimes_14.sql: -------------------------------------------------------------------------------- 1 | BEGIN TRANSACTION; 2 | 3 | CREATE TABLE IF NOT EXISTS "shadowBannedUsers" ( 4 | "userID" TEXT NOT NULL 5 | ); 6 | 7 | UPDATE "config" SET value = 14 WHERE key = 'version'; 8 | 9 | COMMIT; 10 | -------------------------------------------------------------------------------- /databases/_upgrade_sponsorTimes_15.sql: -------------------------------------------------------------------------------- 1 | BEGIN TRANSACTION; 2 | 3 | CREATE TABLE IF NOT EXISTS "unlistedVideos" ( 4 | "videoID" TEXT NOT NULL, 5 | "timeSubmitted" INTEGER NOT NULL 6 | ); 7 | 8 | UPDATE "config" SET value = 15 WHERE key = 'version'; 9 | 10 | COMMIT; 11 | -------------------------------------------------------------------------------- /databases/_upgrade_sponsorTimes_16.sql: -------------------------------------------------------------------------------- 1 | BEGIN TRANSACTION; 2 | 3 | DROP TABLE "unlistedVideos"; 4 | 5 | CREATE TABLE IF NOT EXISTS "unlistedVideos" ( 6 | "videoID" TEXT NOT NULL, 7 | "year" TEXT NOT NULL, 8 | "views" TEXT NOT NULL, 9 | "channelID" TEXT NOT NULL, 10 | "timeSubmitted" INTEGER NOT NULL 11 | ); 12 | 13 | UPDATE "config" SET value = 16 WHERE key = 'version'; 14 | 15 | COMMIT; 16 | 
-------------------------------------------------------------------------------- /databases/_upgrade_sponsorTimes_17.sql: -------------------------------------------------------------------------------- 1 | BEGIN TRANSACTION; 2 | 3 | /* Add reason field */ 4 | CREATE TABLE "sqlb_temp_table_17" ( 5 | "userID" TEXT NOT NULL, 6 | "issueTime" INTEGER NOT NULL, 7 | "issuerUserID" TEXT NOT NULL, 8 | enabled INTEGER NOT NULL, 9 | "reason" TEXT NOT NULL default '' 10 | ); 11 | 12 | INSERT INTO sqlb_temp_table_17 SELECT "userID","issueTime","issuerUserID","enabled", '' FROM "warnings"; 13 | 14 | DROP TABLE warnings; 15 | ALTER TABLE sqlb_temp_table_17 RENAME TO "warnings"; 16 | 17 | UPDATE "config" SET value = 17 WHERE key = 'version'; 18 | 19 | COMMIT; -------------------------------------------------------------------------------- /databases/_upgrade_sponsorTimes_18.sql: -------------------------------------------------------------------------------- 1 | BEGIN TRANSACTION; 2 | 3 | /* Add hash field */ 4 | ALTER TABLE "lockCategories" ADD "hashedVideoID" TEXT NOT NULL default ''; 5 | UPDATE "lockCategories" SET "hashedVideoID" = sha256("videoID"); 6 | 7 | UPDATE "config" SET value = 18 WHERE key = 'version'; 8 | 9 | COMMIT; 10 | -------------------------------------------------------------------------------- /databases/_upgrade_sponsorTimes_19.sql: -------------------------------------------------------------------------------- 1 | BEGIN TRANSACTION; 2 | 3 | /* Add actionType field */ 4 | CREATE TABLE "sqlb_temp_table_19" ( 5 | "videoID" TEXT NOT NULL, 6 | "startTime" REAL NOT NULL, 7 | "endTime" REAL NOT NULL, 8 | "votes" INTEGER NOT NULL, 9 | "locked" INTEGER NOT NULL default '0', 10 | "incorrectVotes" INTEGER NOT NULL default '1', 11 | "UUID" TEXT NOT NULL UNIQUE, 12 | "userID" TEXT NOT NULL, 13 | "timeSubmitted" INTEGER NOT NULL, 14 | "views" INTEGER NOT NULL, 15 | "category" TEXT NOT NULL DEFAULT 'sponsor', 16 | "actionType" TEXT NOT NULL DEFAULT 'skip', 17 | 
"service" TEXT NOT NULL DEFAULT 'YouTube', 18 | "videoDuration" REAL NOT NULL DEFAULT '0', 19 | "hidden" INTEGER NOT NULL DEFAULT '0', 20 | "reputation" REAL NOT NULL DEFAULT 0, 21 | "shadowHidden" INTEGER NOT NULL, 22 | "hashedVideoID" TEXT NOT NULL default '' 23 | ); 24 | 25 | INSERT INTO sqlb_temp_table_19 SELECT "videoID","startTime","endTime","votes","locked","incorrectVotes","UUID","userID","timeSubmitted","views","category",'skip',"service","videoDuration","hidden","reputation","shadowHidden","hashedVideoID" FROM "sponsorTimes"; 26 | 27 | DROP TABLE "sponsorTimes"; 28 | ALTER TABLE sqlb_temp_table_19 RENAME TO "sponsorTimes"; 29 | 30 | UPDATE "config" SET value = 19 WHERE key = 'version'; 31 | 32 | COMMIT; -------------------------------------------------------------------------------- /databases/_upgrade_sponsorTimes_2.sql: -------------------------------------------------------------------------------- 1 | BEGIN TRANSACTION; 2 | 3 | /* Add new table: noSegments */ 4 | CREATE TABLE "noSegments" ( 5 | "videoID" TEXT NOT NULL, 6 | "userID" TEXT NOT NULL, 7 | "category" TEXT NOT NULL 8 | ); 9 | 10 | /* Add version to config */ 11 | UPDATE "config" SET value = 2 WHERE key = 'version'; 12 | 13 | COMMIT; -------------------------------------------------------------------------------- /databases/_upgrade_sponsorTimes_20.sql: -------------------------------------------------------------------------------- 1 | BEGIN TRANSACTION; 2 | 3 | /* Add hash field */ 4 | ALTER TABLE "lockCategories" ADD "reason" TEXT NOT NULL default ''; 5 | 6 | UPDATE "config" SET value = 20 WHERE key = 'version'; 7 | 8 | COMMIT; 9 | -------------------------------------------------------------------------------- /databases/_upgrade_sponsorTimes_21.sql: -------------------------------------------------------------------------------- 1 | BEGIN TRANSACTION; 2 | 3 | CREATE TABLE IF NOT EXISTS "archivedSponsorTimes" ( 4 | "videoID" TEXT NOT NULL, 5 | "startTime" REAL NOT NULL, 6 | "endTime" REAL 
NOT NULL, 7 | "votes" INTEGER NOT NULL, 8 | "locked" INTEGER NOT NULL DEFAULT '0', 9 | "incorrectVotes" INTEGER NOT NULL DEFAULT 1, 10 | "UUID" TEXT NOT NULL UNIQUE, 11 | "userID" TEXT NOT NULL, 12 | "timeSubmitted" INTEGER NOT NULL, 13 | "views" INTEGER NOT NULL, 14 | "category" TEXT NOT NULL DEFAULT 'sponsor', 15 | "service" TEXT NOT NULL DEFAULT 'Youtube', 16 | "actionType" TEXT NOT NULL DEFAULT 'skip', 17 | "videoDuration" INTEGER NOT NULL DEFAULT '0', 18 | "hidden" INTEGER NOT NULL DEFAULT '0', 19 | "reputation" REAL NOT NULL DEFAULT '0', 20 | "shadowHidden" INTEGER NOT NULL, 21 | "hashedVideoID" TEXT NOT NULL DEFAULT '' 22 | ); 23 | 24 | UPDATE "config" SET value = 21 WHERE key = 'version'; 25 | 26 | COMMIT; 27 | -------------------------------------------------------------------------------- /databases/_upgrade_sponsorTimes_22.sql: -------------------------------------------------------------------------------- 1 | BEGIN TRANSACTION; 2 | 3 | /* Add hash field */ 4 | ALTER TABLE "sponsorTimes" ADD "userAgent" TEXT NOT NULL default ''; 5 | 6 | ALTER TABLE "archivedSponsorTimes" ADD "userAgent" TEXT NOT NULL default ''; 7 | 8 | UPDATE "config" SET value = 22 WHERE key = 'version'; 9 | 10 | COMMIT; 11 | -------------------------------------------------------------------------------- /databases/_upgrade_sponsorTimes_23.sql: -------------------------------------------------------------------------------- 1 | BEGIN TRANSACTION; 2 | 3 | DELETE FROM "userNames" WHERE ctid NOT IN ( --!sqlite-ignore 4 | SELECT MIN(ctid) FROM "userNames" --!sqlite-ignore 5 | GROUP BY "userID" --!sqlite-ignore 6 | ); --!sqlite-ignore 7 | 8 | ALTER TABLE "userNames" ADD UNIQUE("userID"); --!sqlite-ignore 9 | 10 | UPDATE "config" SET value = 23 WHERE key = 'version'; 11 | 12 | COMMIT; 13 | -------------------------------------------------------------------------------- /databases/_upgrade_sponsorTimes_24.sql: -------------------------------------------------------------------------------- 
1 | BEGIN TRANSACTION; 2 | 3 | ALTER TABLE "lockCategories" ADD "service" TEXT NOT NULL default 'YouTube'; 4 | 5 | UPDATE "lockCategories" 6 | SET "service" = "sponsorTimes"."service" 7 | FROM "sponsorTimes" 8 | WHERE "lockCategories"."videoID" = "sponsorTimes"."videoID"; 9 | 10 | ALTER TABLE "unlistedVideos" ADD "service" TEXT NOT NULL default 'YouTube'; 11 | 12 | UPDATE "unlistedVideos" 13 | SET "service" = "sponsorTimes"."service" 14 | FROM "sponsorTimes" 15 | WHERE "unlistedVideos"."videoID" = "sponsorTimes"."videoID"; 16 | 17 | DROP INDEX IF EXISTS "noSegments_videoID"; 18 | 19 | UPDATE "config" SET value = 24 WHERE key = 'version'; 20 | 21 | COMMIT; 22 | -------------------------------------------------------------------------------- /databases/_upgrade_sponsorTimes_25.sql: -------------------------------------------------------------------------------- 1 | BEGIN TRANSACTION; 2 | 3 | CREATE TABLE IF NOT EXISTS "videoInfo" ( 4 | "videoID" TEXT PRIMARY KEY NOT NULL, 5 | "channelID" TEXT NOT NULL, 6 | "title" TEXT NOT NULL, 7 | "published" REAL NOT NULL, 8 | "genreUrl" REAL NOT NULL 9 | ); 10 | 11 | UPDATE "config" SET value = 25 WHERE key = 'version'; 12 | 13 | COMMIT; -------------------------------------------------------------------------------- /databases/_upgrade_sponsorTimes_26.sql: -------------------------------------------------------------------------------- 1 | BEGIN TRANSACTION; 2 | 3 | CREATE TABLE "sqlb_temp_table_26" ( 4 | "videoID" TEXT PRIMARY KEY NOT NULL, 5 | "channelID" TEXT NOT NULL, 6 | "title" TEXT NOT NULL, 7 | "published" REAL NOT NULL, 8 | "genreUrl" TEXT NOT NULL 9 | ); 10 | 11 | INSERT INTO sqlb_temp_table_26 SELECT "videoID", "channelID", "title", "published", '' FROM "videoInfo"; 12 | 13 | DROP TABLE "videoInfo"; 14 | ALTER TABLE sqlb_temp_table_26 RENAME TO "videoInfo"; 15 | 16 | UPDATE "config" SET value = 26 WHERE key = 'version'; 17 | 18 | COMMIT; -------------------------------------------------------------------------------- 
/databases/_upgrade_sponsorTimes_27.sql: -------------------------------------------------------------------------------- 1 | BEGIN TRANSACTION; 2 | 3 | ALTER TABLE "sponsorTimes" ADD "description" TEXT NOT NULL default ''; 4 | ALTER TABLE "archivedSponsorTimes" ADD "description" TEXT NOT NULL default ''; 5 | 6 | UPDATE "config" SET value = 27 WHERE key = 'version'; 7 | 8 | COMMIT; -------------------------------------------------------------------------------- /databases/_upgrade_sponsorTimes_28.sql: -------------------------------------------------------------------------------- 1 | BEGIN TRANSACTION; 2 | 3 | CREATE TABLE IF NOT EXISTS "ratings" ( 4 | "videoID" TEXT NOT NULL, 5 | "service" TEXT NOT NULL default 'YouTube', 6 | "type" INTEGER NOT NULL, 7 | "count" INTEGER NOT NULL, 8 | "hashedVideoID" TEXT NOT NULL 9 | ); 10 | 11 | UPDATE "config" SET value = 28 WHERE key = 'version'; 12 | 13 | COMMIT; -------------------------------------------------------------------------------- /databases/_upgrade_sponsorTimes_29.sql: -------------------------------------------------------------------------------- 1 | BEGIN TRANSACTION; 2 | 3 | CREATE TABLE "sqlb_temp_table_29" ( 4 | "videoID" TEXT NOT NULL, 5 | "userID" TEXT NOT NULL, 6 | "actionType" TEXT NOT NULL DEFAULT 'skip', 7 | "category" TEXT NOT NULL, 8 | "hashedVideoID" TEXT NOT NULL default '', 9 | "reason" TEXT NOT NULL default '', 10 | "service" TEXT NOT NULL default 'YouTube' 11 | ); 12 | 13 | INSERT INTO sqlb_temp_table_29 SELECT "videoID","userID",'skip',"category","hashedVideoID","reason","service" FROM "lockCategories"; 14 | INSERT INTO sqlb_temp_table_29 SELECT "videoID","userID",'mute',"category","hashedVideoID","reason","service" FROM "lockCategories"; 15 | 16 | DROP TABLE "lockCategories"; 17 | ALTER TABLE sqlb_temp_table_29 RENAME TO "lockCategories"; 18 | 19 | UPDATE "config" SET value = 29 WHERE key = 'version'; 20 | 21 | COMMIT; 
-------------------------------------------------------------------------------- /databases/_upgrade_sponsorTimes_3.sql: -------------------------------------------------------------------------------- 1 | BEGIN TRANSACTION; 2 | 3 | /* hash upgrade test sha256('vid') = '1ff838dc6ca9680d88455341118157d59a055fe6d0e3870f9c002847bebe4663' */ 4 | /* Add hash field */ 5 | ALTER TABLE "sponsorTimes" ADD "hashedVideoID" TEXT NOT NULL default ''; 6 | UPDATE "sponsorTimes" SET "hashedVideoID" = sha256("videoID"); 7 | 8 | /* Bump version in config */ 9 | UPDATE "config" SET value = 3 WHERE key = 'version'; 10 | 11 | COMMIT; -------------------------------------------------------------------------------- /databases/_upgrade_sponsorTimes_30.sql: -------------------------------------------------------------------------------- 1 | BEGIN TRANSACTION; 2 | 3 | UPDATE "sponsorTimes" SET "actionType" = 'poi' WHERE "category" = 'poi_highlight'; 4 | 5 | UPDATE "config" SET value = 30 WHERE key = 'version'; 6 | 7 | COMMIT; -------------------------------------------------------------------------------- /databases/_upgrade_sponsorTimes_31.sql: -------------------------------------------------------------------------------- 1 | BEGIN TRANSACTION; 2 | 3 | /* START lockCategory migrations 4 | no sponsor migrations 5 | no selfpromo migrations */ 6 | 7 | /* exclusive_access migrations */ 8 | DELETE FROM "lockCategories" WHERE "category" = 'exclusive_access' AND "actionType" != 'full'; 9 | /* delete all full locks on categories without full */ 10 | DELETE FROM "lockCategories" WHERE "actionType" = 'full' AND "category" in ('interaction', 'intro', 'outro', 'preview', 'filler', 'music_offtopic', 'poi_highlight'); 11 | /* delete all non-skip music_offtopic locks */ 12 | DELETE FROM "lockCategories" WHERE "category" = 'music_offtopic' AND "actionType" != 'skip'; 13 | /* convert all poi_highlight to actionType poi */ 14 | UPDATE "lockCategories" SET "actionType" = 'poi' WHERE "category" = 
'poi_highlight' AND "actionType" = 'skip'; 15 | /* delete all non-skip poi_highlight locks */ 16 | DELETE FROM "lockCategories" WHERE "category" = 'poi_highlight' AND "actionType" != 'poi'; 17 | 18 | /* END lockCategory migrations */ 19 | 20 | /* delete all redundant userName entries */ 21 | DELETE FROM "userNames" WHERE "userName" = "userID" AND "locked" = 0; 22 | 23 | UPDATE "config" SET value = 31 WHERE key = 'version'; 24 | 25 | COMMIT; -------------------------------------------------------------------------------- /databases/_upgrade_sponsorTimes_32.sql: -------------------------------------------------------------------------------- 1 | BEGIN TRANSACTION; 2 | 3 | -- Add primary keys 4 | 5 | ALTER TABLE "sponsorTimes" ADD PRIMARY KEY ("UUID"); --!sqlite-ignore 6 | ALTER TABLE "vipUsers" ADD PRIMARY KEY ("userID"); --!sqlite-ignore 7 | ALTER TABLE "userNames" ADD PRIMARY KEY ("userID"); --!sqlite-ignore 8 | ALTER TABLE "categoryVotes" ADD "id" SERIAL PRIMARY KEY; --!sqlite-ignore 9 | ALTER TABLE "lockCategories" ADD "id" SERIAL PRIMARY KEY; --!sqlite-ignore 10 | ALTER TABLE "warnings" ADD PRIMARY KEY ("userID", "issueTime"); --!sqlite-ignore 11 | ALTER TABLE "shadowBannedUsers" ADD PRIMARY KEY ("userID"); --!sqlite-ignore 12 | ALTER TABLE "unlistedVideos" ADD "id" SERIAL PRIMARY KEY; --!sqlite-ignore 13 | ALTER TABLE "config" ADD PRIMARY KEY ("key"); --!sqlite-ignore 14 | ALTER TABLE "archivedSponsorTimes" ADD PRIMARY KEY ("UUID"); --!sqlite-ignore 15 | ALTER TABLE "ratings" ADD "id" SERIAL PRIMARY KEY; --!sqlite-ignore 16 | 17 | UPDATE "config" SET value = 32 WHERE key = 'version'; 18 | 19 | COMMIT; -------------------------------------------------------------------------------- /databases/_upgrade_sponsorTimes_33.sql: -------------------------------------------------------------------------------- 1 | BEGIN TRANSACTION; 2 | 3 | CREATE TABLE IF NOT EXISTS "userFeatures" ( 4 | "userID" TEXT NOT NULL, 5 | "feature" INTEGER NOT NULL, 6 | "issuerUserID" TEXT NOT 
NULL, 7 | "timeSubmitted" INTEGER NOT NULL, 8 | PRIMARY KEY ("userID", "feature") 9 | ); 10 | 11 | UPDATE "config" SET value = 33 WHERE key = 'version'; 12 | 13 | COMMIT; -------------------------------------------------------------------------------- /databases/_upgrade_sponsorTimes_34.sql: -------------------------------------------------------------------------------- 1 | BEGIN TRANSACTION; 2 | 3 | ALTER TABLE "videoInfo" DROP COLUMN "genreUrl"; 4 | 5 | UPDATE "config" SET value = 34 WHERE key = 'version'; 6 | 7 | COMMIT; -------------------------------------------------------------------------------- /databases/_upgrade_sponsorTimes_35.sql: -------------------------------------------------------------------------------- 1 | BEGIN TRANSACTION; 2 | 3 | ALTER TABLE "titleVotes" ADD "verification" INTEGER default 0; 4 | 5 | UPDATE "config" SET value = 35 WHERE key = 'version'; 6 | 7 | COMMIT; -------------------------------------------------------------------------------- /databases/_upgrade_sponsorTimes_36.sql: -------------------------------------------------------------------------------- 1 | BEGIN TRANSACTION; 2 | 3 | ALTER TABLE "warnings" ADD "type" INTEGER default 0; 4 | 5 | UPDATE "config" SET value = 36 WHERE key = 'version'; 6 | 7 | COMMIT; -------------------------------------------------------------------------------- /databases/_upgrade_sponsorTimes_37.sql: -------------------------------------------------------------------------------- 1 | BEGIN TRANSACTION; 2 | 3 | ALTER TABLE "titles" ADD UNIQUE ("videoID", "title"); --!sqlite-ignore 4 | 5 | UPDATE "config" SET value = 37 WHERE key = 'version'; 6 | 7 | COMMIT; -------------------------------------------------------------------------------- /databases/_upgrade_sponsorTimes_38.sql: -------------------------------------------------------------------------------- 1 | BEGIN TRANSACTION; 2 | 3 | UPDATE "titleVotes" SET "shadowHidden" = 1 4 | WHERE "UUID" IN (SELECT "UUID" FROM "titles" INNER JOIN 
"shadowBannedUsers" "bans" ON "titles"."userID" = "bans"."userID"); 5 | 6 | UPDATE "thumbnailVotes" SET "shadowHidden" = 1 7 | WHERE "UUID" IN (SELECT "UUID" FROM "thumbnails" INNER JOIN "shadowBannedUsers" "bans" ON "thumbnails"."userID" = "bans"."userID"); 8 | 9 | UPDATE "config" SET value = 38 WHERE key = 'version'; 10 | 11 | COMMIT; 12 | -------------------------------------------------------------------------------- /databases/_upgrade_sponsorTimes_39.sql: -------------------------------------------------------------------------------- 1 | BEGIN TRANSACTION; 2 | 3 | ALTER TABLE "titleVotes" ADD "downvotes" INTEGER default 0; 4 | ALTER TABLE "titleVotes" ADD "removed" INTEGER default 0; 5 | 6 | ALTER TABLE "thumbnailVotes" ADD "downvotes" INTEGER default 0; 7 | ALTER TABLE "thumbnailVotes" ADD "removed" INTEGER default 0; 8 | 9 | UPDATE "config" SET value = 39 WHERE key = 'version'; 10 | 11 | COMMIT; 12 | -------------------------------------------------------------------------------- /databases/_upgrade_sponsorTimes_4.sql: -------------------------------------------------------------------------------- 1 | BEGIN TRANSACTION; 2 | 3 | /* Create warnings table */ 4 | CREATE TABLE "warnings" ( 5 | "userID" TEXT NOT NULL, 6 | "issueTime" INTEGER NOT NULL, 7 | "issuerUserID" TEXT NOT NULL 8 | ); 9 | 10 | UPDATE "config" SET value = 4 WHERE key = 'version'; 11 | 12 | COMMIT; -------------------------------------------------------------------------------- /databases/_upgrade_sponsorTimes_40.sql: -------------------------------------------------------------------------------- 1 | BEGIN TRANSACTION; 2 | 3 | DROP INDEX IF EXISTS "titles_hashedVideoID"; 4 | DROP INDEX IF EXISTS "thumbnails_hashedVideoID"; 5 | 6 | UPDATE "config" SET value = 40 WHERE key = 'version'; 7 | 8 | COMMIT; 9 | -------------------------------------------------------------------------------- /databases/_upgrade_sponsorTimes_41.sql: 
-------------------------------------------------------------------------------- 1 | BEGIN TRANSACTION; 2 | 3 | ALTER TABLE "titles" ADD "casualMode" INTEGER default 0; 4 | ALTER TABLE "thumbnails" ADD "casualMode" INTEGER default 0; 5 | 6 | UPDATE "config" SET value = 41 WHERE key = 'version'; 7 | 8 | COMMIT; 9 | -------------------------------------------------------------------------------- /databases/_upgrade_sponsorTimes_42.sql: -------------------------------------------------------------------------------- 1 | BEGIN TRANSACTION; 2 | 3 | ALTER TABLE "casualVotes" DROP COLUMN "downvotes"; 4 | 5 | UPDATE "config" SET value = 42 WHERE key = 'version'; 6 | 7 | COMMIT; 8 | -------------------------------------------------------------------------------- /databases/_upgrade_sponsorTimes_43.sql: -------------------------------------------------------------------------------- 1 | BEGIN TRANSACTION; 2 | 3 | ALTER TABLE "casualVotes" ADD "titleID" INTEGER default 0; 4 | 5 | UPDATE "config" SET value = 43 WHERE key = 'version'; 6 | 7 | COMMIT; 8 | -------------------------------------------------------------------------------- /databases/_upgrade_sponsorTimes_44.sql: -------------------------------------------------------------------------------- 1 | BEGIN TRANSACTION; 2 | 3 | ALTER TABLE "titles" ADD "userAgent" TEXT NOT NULL default ''; 4 | ALTER TABLE "thumbnails" ADD "userAgent" TEXT NOT NULL default ''; 5 | 6 | UPDATE "config" SET value = 44 WHERE key = 'version'; 7 | 8 | COMMIT; 9 | -------------------------------------------------------------------------------- /databases/_upgrade_sponsorTimes_5.sql: -------------------------------------------------------------------------------- 1 | BEGIN TRANSACTION; 2 | 3 | /* Add enabled field */ 4 | CREATE TABLE "sqlb_temp_table_5" ( 5 | "userID" TEXT NOT NULL, 6 | "issueTime" INTEGER NOT NULL, 7 | "issuerUserID" TEXT NOT NULL, 8 | enabled INTEGER NOT NULL 9 | ); 10 | INSERT INTO sqlb_temp_table_5 SELECT 
"userID","issueTime","issuerUserID",1 FROM "warnings"; 11 | 12 | DROP TABLE warnings; 13 | ALTER TABLE sqlb_temp_table_5 RENAME TO "warnings";; 14 | 15 | UPDATE "config" SET value = 5 WHERE key = 'version'; 16 | 17 | COMMIT; -------------------------------------------------------------------------------- /databases/_upgrade_sponsorTimes_6.sql: -------------------------------------------------------------------------------- 1 | BEGIN TRANSACTION; 2 | 3 | /* Add 'locked' field */ 4 | CREATE TABLE "sqlb_temp_table_6" ( 5 | "videoID" TEXT NOT NULL, 6 | "startTime" REAL NOT NULL, 7 | "endTime" REAL NOT NULL, 8 | "votes" INTEGER NOT NULL, 9 | "locked" INTEGER NOT NULL default '0', 10 | "incorrectVotes" INTEGER NOT NULL default '1', 11 | "UUID" TEXT NOT NULL UNIQUE, 12 | "userID" TEXT NOT NULL, 13 | "timeSubmitted" INTEGER NOT NULL, 14 | "views" INTEGER NOT NULL, 15 | "category" TEXT NOT NULL DEFAULT 'sponsor', 16 | "shadowHidden" INTEGER NOT NULL, 17 | "hashedVideoID" TEXT NOT NULL default '' 18 | ); 19 | 20 | INSERT INTO sqlb_temp_table_6 SELECT "videoID","startTime","endTime","votes",'0',"incorrectVotes","UUID","userID","timeSubmitted","views","category","shadowHidden","hashedVideoID" FROM "sponsorTimes"; 21 | 22 | DROP TABLE "sponsorTimes"; 23 | ALTER TABLE sqlb_temp_table_6 RENAME TO "sponsorTimes"; 24 | 25 | UPDATE "config" SET value = 6 WHERE key = 'version'; 26 | 27 | COMMIT; -------------------------------------------------------------------------------- /databases/_upgrade_sponsorTimes_7.sql: -------------------------------------------------------------------------------- 1 | BEGIN TRANSACTION; 2 | 3 | /* Add Service field */ 4 | CREATE TABLE "sqlb_temp_table_7" ( 5 | "videoID" TEXT NOT NULL, 6 | "startTime" REAL NOT NULL, 7 | "endTime" REAL NOT NULL, 8 | "votes" INTEGER NOT NULL, 9 | "locked" INTEGER NOT NULL default '0', 10 | "incorrectVotes" INTEGER NOT NULL default '1', 11 | "UUID" TEXT NOT NULL UNIQUE, 12 | "userID" TEXT NOT NULL, 13 | "timeSubmitted" 
INTEGER NOT NULL, 14 | "views" INTEGER NOT NULL, 15 | "category" TEXT NOT NULL DEFAULT 'sponsor', 16 | "service" TEXT NOT NULL DEFAULT 'YouTube', 17 | "shadowHidden" INTEGER NOT NULL, 18 | "hashedVideoID" TEXT NOT NULL default '' 19 | ); 20 | 21 | INSERT INTO sqlb_temp_table_7 SELECT "videoID","startTime","endTime","votes","locked","incorrectVotes","UUID","userID","timeSubmitted","views","category",'YouTube', "shadowHidden","hashedVideoID" FROM "sponsorTimes"; 22 | 23 | DROP TABLE "sponsorTimes"; 24 | ALTER TABLE sqlb_temp_table_7 RENAME TO "sponsorTimes"; 25 | 26 | UPDATE "config" SET value = 7 WHERE key = 'version'; 27 | 28 | COMMIT; -------------------------------------------------------------------------------- /databases/_upgrade_sponsorTimes_8.sql: -------------------------------------------------------------------------------- 1 | BEGIN TRANSACTION; 2 | 3 | /* Add 'videoDuration' field */ 4 | CREATE TABLE "sqlb_temp_table_8" ( 5 | "videoID" TEXT NOT NULL, 6 | "startTime" REAL NOT NULL, 7 | "endTime" REAL NOT NULL, 8 | "votes" INTEGER NOT NULL, 9 | "locked" INTEGER NOT NULL default '0', 10 | "incorrectVotes" INTEGER NOT NULL default '1', 11 | "UUID" TEXT NOT NULL UNIQUE, 12 | "userID" TEXT NOT NULL, 13 | "timeSubmitted" INTEGER NOT NULL, 14 | "views" INTEGER NOT NULL, 15 | "category" TEXT NOT NULL DEFAULT 'sponsor', 16 | "service" TEXT NOT NULL DEFAULT 'YouTube', 17 | "videoDuration" INTEGER NOT NULL DEFAULT '0', 18 | "shadowHidden" INTEGER NOT NULL, 19 | "hashedVideoID" TEXT NOT NULL default '' 20 | ); 21 | 22 | INSERT INTO sqlb_temp_table_8 SELECT "videoID","startTime","endTime","votes","locked","incorrectVotes","UUID","userID","timeSubmitted","views","category","service",'0', "shadowHidden","hashedVideoID" FROM "sponsorTimes"; 23 | 24 | DROP TABLE "sponsorTimes"; 25 | ALTER TABLE sqlb_temp_table_8 RENAME TO "sponsorTimes"; 26 | 27 | UPDATE "config" SET value = 8 WHERE key = 'version'; 28 | 29 | COMMIT; 
BEGIN TRANSACTION;

-- Schema v9: change "sponsorTimes"."videoDuration" from INTEGER to REAL.
-- SQLite cannot alter a column's type in place, so rebuild the table and
-- swap it in under the original name.
CREATE TABLE "sqlb_temp_table_9" (
    "videoID" TEXT NOT NULL,
    "startTime" REAL NOT NULL,
    "endTime" REAL NOT NULL,
    "votes" INTEGER NOT NULL,
    "locked" INTEGER NOT NULL default '0',
    "incorrectVotes" INTEGER NOT NULL default '1',
    "UUID" TEXT NOT NULL UNIQUE,
    "userID" TEXT NOT NULL,
    "timeSubmitted" INTEGER NOT NULL,
    "views" INTEGER NOT NULL,
    "category" TEXT NOT NULL DEFAULT 'sponsor',
    "service" TEXT NOT NULL DEFAULT 'YouTube',
    "videoDuration" REAL NOT NULL DEFAULT '0',
    "shadowHidden" INTEGER NOT NULL,
    "hashedVideoID" TEXT NOT NULL default ''
);

-- NOTE(review): existing "videoDuration" values are reset to '0' here rather
-- than copied over — presumably intentional since the old INTEGER values were
-- truncated, but confirm before reusing this pattern elsewhere.
INSERT INTO sqlb_temp_table_9 SELECT "videoID", "startTime", "endTime", "votes", "locked", "incorrectVotes", "UUID", "userID", "timeSubmitted", "views", "category", "service", '0', "shadowHidden", "hashedVideoID" FROM "sponsorTimes";

DROP TABLE "sponsorTimes";
ALTER TABLE sqlb_temp_table_9 RENAME TO "sponsorTimes";

-- Record the new schema version.
UPDATE "config" SET value = 9 WHERE key = 'version';

COMMIT;
5432:5432 11 | redis: 12 | container_name: redis-ci 13 | image: redis:alpine 14 | ports: 15 | - 6379:6379 -------------------------------------------------------------------------------- /docker/docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: '3' 2 | services: 3 | database: 4 | container_name: database 5 | image: postgres:14 6 | env_file: 7 | - database.env 8 | volumes: 9 | - database-data:/var/lib/postgresql/data 10 | ports: 11 | - 5432:5432 12 | restart: always 13 | redis: 14 | container_name: redis 15 | image: redis:7.0 16 | command: /usr/local/etc/redis/redis.conf 17 | volumes: 18 | - ./redis/redis.conf:/usr/local/etc/redis/redis.conf 19 | ports: 20 | - 32773:6379 21 | sysctls: 22 | - net.core.somaxconn=324000 23 | - net.ipv4.tcp_max_syn_backlog=3240000 24 | restart: always 25 | newleaf: 26 | image: abeltramo/newleaf:latest 27 | container_name: newleaf 28 | restart: always 29 | ports: 30 | - 3241:3000 31 | volumes: 32 | - ./newleaf/configuration.py:/workdir/configuration.py 33 | rsync: 34 | image: mchangrh/rsync:latest 35 | container_name: rsync 36 | restart: always 37 | ports: 38 | - 873:873 39 | volumes: 40 | - ./rsync/rsyncd.conf:/etc/rsyncd.conf 41 | - ./database-export/:/mirror 42 | 43 | volumes: 44 | database-data: 45 | -------------------------------------------------------------------------------- /docker/migrate/database.pgload.txt: -------------------------------------------------------------------------------- 1 | load database 2 | from ./database.db 3 | into postgresql://sponsorblock:pw@127.0.0.1:5432/sponsorTimes 4 | 5 | with quote identifiers; -------------------------------------------------------------------------------- /docker/newleaf/configuration.py: -------------------------------------------------------------------------------- 1 | # ============================== 2 | # You MUST set these settings. 
#!/bin/sh
# Container entrypoint: make sure a config file exists, then start the server.
set -e
echo 'Entrypoint script'
cd /usr/src/app

# If no config.json was mounted, write an empty one so the server starts with
# its built-in defaults.
# BUG FIX: the redirection was garbled ("cat < config.json ... EOF" is not
# valid shell) — restore the here-document that writes "{}" to config.json.
test -e config.json || cat << EOF > config.json
{
}
EOF

node dist/src/index.js
test || echo test failed) && npm start" 5 | } 6 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "sponsor_block_server", 3 | "version": "0.1.0", 4 | "description": "Server that holds the SponsorBlock database", 5 | "main": "src/index.ts", 6 | "scripts": { 7 | "test": "npm run tsc && ts-node test/test.ts", 8 | "cover": "nyc npm test", 9 | "cover:report": "nyc report", 10 | "dev": "nodemon", 11 | "dev:bash": "nodemon -x 'npm test ; npm start'", 12 | "postgres:docker": "docker run --init -it --rm -p 5432:5432 -e POSTGRES_USER=ci_db_user -e POSTGRES_PASSWORD=ci_db_pass postgres:14-alpine", 13 | "redis:docker": "docker run --init -it --rm -p 6379:6379 redis:7-alpine --save '' --appendonly no", 14 | "start": "ts-node src/index.ts", 15 | "tsc": "tsc -p tsconfig.json", 16 | "lint": "eslint src test", 17 | "lint:fix": "eslint src test --fix" 18 | }, 19 | "author": "Ajay Ramachandran", 20 | "license": "AGPL-3.0-only", 21 | "dependencies": { 22 | "axios": "^1.8.4", 23 | "better-sqlite3": "^11.2.1", 24 | "cron": "^2.1.0", 25 | "express": "^4.21.1", 26 | "express-promise-router": "^4.1.1", 27 | "express-rate-limit": "^6.7.0", 28 | "form-data": "^4.0.0", 29 | "lodash": "^4.17.21", 30 | "lru-cache": "^10.2.0", 31 | "lz4-napi": "^2.2.0", 32 | "pg": "^8.8.0", 33 | "rate-limit-redis": "^3.0.1", 34 | "redis": "^4.6.13", 35 | "seedrandom": "^3.0.5" 36 | }, 37 | "devDependencies": { 38 | "@istanbuljs/nyc-config-typescript": "^1.0.2", 39 | "@types/better-sqlite3": "^7.6.2", 40 | "@types/cron": "^2.0.0", 41 | "@types/express": "^4.17.14", 42 | "@types/lodash": "^4.14.189", 43 | "@types/mocha": "^10.0.0", 44 | "@types/node": "^18.11.9", 45 | "@types/pg": "^8.6.5", 46 | "@types/seedrandom": "^3.0.5", 47 | "@types/sinon": "^10.0.13", 48 | "@typescript-eslint/eslint-plugin": "^5.44.0", 49 | "@typescript-eslint/parser": "^5.44.0", 50 | 
"axios-mock-adapter": "^1.21.2", 51 | "eslint": "^8.28.0", 52 | "mocha": "^10.1.0", 53 | "nodemon": "^3.1.9", 54 | "nyc": "^15.1.0", 55 | "sinon": "^14.0.2", 56 | "ts-mock-imports": "^1.3.8", 57 | "ts-node": "^10.9.1", 58 | "typescript": "^4.9.3" 59 | }, 60 | "engines": { 61 | "node": ">=18" 62 | } 63 | } 64 | -------------------------------------------------------------------------------- /src/cronjob/downvoteSegmentArchiveJob.ts: -------------------------------------------------------------------------------- 1 | import { CronJob } from "cron"; 2 | 3 | import { config as serverConfig } from "../config"; 4 | import { Logger } from "../utils/logger"; 5 | import { db } from "../databases/databases"; 6 | import { DBSegment } from "../types/segments.model"; 7 | 8 | const jobConfig = serverConfig?.crons?.downvoteSegmentArchive; 9 | 10 | export const archiveDownvoteSegment = async (dayLimit: number, voteLimit: number, runTime?: number): Promise => { 11 | const timeNow = runTime || new Date().getTime(); 12 | const threshold = dayLimit * 86400000; 13 | 14 | Logger.info(`DownvoteSegmentArchiveJob starts at ${timeNow}`); 15 | try { 16 | // insert into archive sponsorTime 17 | await db.prepare( 18 | "run", 19 | `INSERT INTO "archivedSponsorTimes" 20 | SELECT * 21 | FROM "sponsorTimes" 22 | WHERE "votes" < ? AND (? - "timeSubmitted") > ?`, 23 | [ 24 | voteLimit, 25 | timeNow, 26 | threshold 27 | ] 28 | ) as DBSegment[]; 29 | 30 | } catch (err) { 31 | Logger.error("Execption when insert segment in archivedSponsorTimes"); 32 | Logger.error(err as string); 33 | return 1; 34 | } 35 | 36 | // remove from sponsorTime 37 | try { 38 | await db.prepare( 39 | "run", 40 | 'DELETE FROM "sponsorTimes" WHERE "votes" < ? AND (? 
- "timeSubmitted") > ?', 41 | [ 42 | voteLimit, 43 | timeNow, 44 | threshold 45 | ] 46 | ) as DBSegment[]; 47 | 48 | } catch (err) { 49 | Logger.error("Execption when deleting segment in sponsorTimes"); 50 | Logger.error(err as string); 51 | return 1; 52 | } 53 | 54 | Logger.info("DownvoteSegmentArchiveJob finished"); 55 | return 0; 56 | }; 57 | 58 | const DownvoteSegmentArchiveJob = new CronJob( 59 | jobConfig?.schedule || "0 0 * * * 0", 60 | () => void archiveDownvoteSegment(jobConfig?.timeThresholdInDays, jobConfig?.voteThreshold) 61 | ); 62 | 63 | if (serverConfig?.crons?.enabled && jobConfig && !jobConfig.schedule) { 64 | Logger.error("Invalid cron schedule for downvoteSegmentArchive"); 65 | } 66 | 67 | export default DownvoteSegmentArchiveJob; 68 | -------------------------------------------------------------------------------- /src/cronjob/index.ts: -------------------------------------------------------------------------------- 1 | import { Logger } from "../utils/logger"; 2 | import { config } from "../config"; 3 | import DownvoteSegmentArchiveJob from "./downvoteSegmentArchiveJob"; 4 | 5 | export function startAllCrons (): void { 6 | if (config?.crons?.enabled) { 7 | Logger.info("Crons started"); 8 | 9 | DownvoteSegmentArchiveJob.start(); 10 | } else { 11 | Logger.info("Crons dissabled"); 12 | } 13 | } 14 | -------------------------------------------------------------------------------- /src/databases/IDatabase.ts: -------------------------------------------------------------------------------- 1 | export interface QueryOption { 2 | useReplica?: boolean; 3 | forceReplica?: boolean; 4 | } 5 | 6 | export interface IDatabase { 7 | init(): Promise; 8 | 9 | prepare(type: QueryType, query: string, params?: any[], options?: QueryOption): Promise; 10 | 11 | highLoad(): boolean; 12 | 13 | shouldUseRedisTimeout(): boolean; 14 | } 15 | 16 | export type QueryType = "get" | "all" | "run"; -------------------------------------------------------------------------------- 
/src/index.ts: -------------------------------------------------------------------------------- 1 | import { config } from "./config"; 2 | import { initDb } from "./databases/databases"; 3 | import { createServer } from "./app"; 4 | import { Logger } from "./utils/logger"; 5 | import { startAllCrons } from "./cronjob"; 6 | import { getCommit } from "./utils/getCommit"; 7 | import { connectionPromise } from "./utils/redis"; 8 | 9 | async function init() { 10 | process.on("unhandledRejection", (error: any) => { 11 | // eslint-disable-next-line no-console 12 | console.dir(error?.stack); 13 | }); 14 | 15 | process.on("uncaughtExceptions", (error: any) => { 16 | // eslint-disable-next-line no-console 17 | console.dir(error?.stack); 18 | }); 19 | 20 | try { 21 | await initDb(); 22 | await connectionPromise; 23 | } catch (e) { 24 | Logger.error(`Init Db: ${e}`); 25 | process.exit(1); 26 | } 27 | 28 | // edge case clause for creating compatible .db files, do not enable 29 | if (config.mode === "init-db-and-exit") process.exit(0); 30 | // do not enable init-db-only mode for usage. 31 | (global as any).HEADCOMMIT = config.mode === "development" ? "development" 32 | : config.mode === "test" ? 
"test" 33 | : getCommit() as string; 34 | createServer(() => { 35 | Logger.info(`Server started on port ${config.port}.`); 36 | 37 | // ignite cron job after server created 38 | startAllCrons(); 39 | }).setTimeout(15000); 40 | } 41 | 42 | init().catch((err) => Logger.error(`Index.js: ${err}`)); -------------------------------------------------------------------------------- /src/middleware/apiCsp.ts: -------------------------------------------------------------------------------- 1 | import { NextFunction, Request, Response } from "express"; 2 | 3 | export function apiCspMiddleware(req: Request, res: Response, next: NextFunction): void { 4 | res.header("Content-Security-Policy", "script-src 'none'; object-src 'none'"); 5 | next(); 6 | } -------------------------------------------------------------------------------- /src/middleware/cors.ts: -------------------------------------------------------------------------------- 1 | import { NextFunction, Request, Response } from "express"; 2 | 3 | export function corsMiddleware(req: Request, res: Response, next: NextFunction): void { 4 | res.header("Access-Control-Allow-Origin", "*"); 5 | res.header("Access-Control-Allow-Methods", "GET, POST, OPTIONS, DELETE"); 6 | res.header("Access-Control-Allow-Headers", "Content-Type, If-None-Match, x-client-name"); 7 | next(); 8 | } 9 | -------------------------------------------------------------------------------- /src/middleware/hostHeader.ts: -------------------------------------------------------------------------------- 1 | import { NextFunction, Request, Response } from "express"; 2 | import os from "os"; 3 | 4 | export function hostHeader(req: Request, res: Response, next: NextFunction): void { 5 | res.header("SBSERVER-HOST", os.hostname()); 6 | next(); 7 | } -------------------------------------------------------------------------------- /src/middleware/logger.ts: -------------------------------------------------------------------------------- 1 | import { Logger } from 
"../utils/logger"; 2 | import { NextFunction, Request, Response } from "express"; 3 | 4 | export function loggerMiddleware(req: Request, res: Response, next: NextFunction): void { 5 | Logger.info(`Request received: ${req.method} ${req.url}`); 6 | next(); 7 | } 8 | -------------------------------------------------------------------------------- /src/middleware/requestRateLimit.ts: -------------------------------------------------------------------------------- 1 | import { getIP } from "../utils/getIP"; 2 | import { getHash } from "../utils/getHash"; 3 | import { getHashCache } from "../utils/getHashCache"; 4 | import rateLimit from "express-rate-limit"; 5 | import { RateLimitConfig } from "../types/config.model"; 6 | import { Request, RequestHandler } from "express"; 7 | import { isUserVIP } from "../utils/isUserVIP"; 8 | import { UserID } from "../types/user.model"; 9 | import RedisStore, { RedisReply } from "rate-limit-redis"; 10 | import redis from "../utils/redis"; 11 | import { config } from "../config"; 12 | import { Logger } from "../utils/logger"; 13 | 14 | export function rateLimitMiddleware(limitConfig: RateLimitConfig, getUserID?: (req: Request) => UserID): RequestHandler { 15 | try { 16 | return rateLimit({ 17 | windowMs: limitConfig.windowMs, 18 | max: limitConfig.max, 19 | message: limitConfig.message, 20 | statusCode: limitConfig.statusCode, 21 | legacyHeaders: false, 22 | standardHeaders: false, 23 | keyGenerator: (req) => { 24 | return getHash(getIP(req), 1); 25 | }, 26 | // eslint-disable-next-line @typescript-eslint/no-misused-promises 27 | handler: async (req, res, next) => { 28 | if (getUserID === undefined || !await isUserVIP(await getHashCache(getUserID(req)))) { 29 | return res.status(limitConfig.statusCode).send(limitConfig.message); 30 | } else { 31 | return next(); 32 | } 33 | }, 34 | store: config.redis?.enabled ? 
new RedisStore({ 35 | sendCommand: (...args: string[]) => redis.sendCommand(args).catch((err) => Logger.error(err)) as Promise, 36 | }) : null, 37 | }); 38 | } catch (e) { 39 | Logger.error(`Rate limit error: ${e}`); 40 | return (req, res, next) => next(); 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /src/middleware/userCounter.ts: -------------------------------------------------------------------------------- 1 | import axios from "axios"; 2 | import { Logger } from "../utils/logger"; 3 | import { config } from "../config"; 4 | import { getIP } from "../utils/getIP"; 5 | import { NextFunction, Request, Response } from "express"; 6 | import { Agent } from "http"; 7 | 8 | const httpAgent = new Agent({ keepAlive: true }); 9 | 10 | export function userCounter(req: Request, res: Response, next: NextFunction): void { 11 | if (req.method !== "OPTIONS") { 12 | if (Math.random() < 1 / config.userCounterRatio) { 13 | axios({ 14 | method: "post", 15 | url: `${config.userCounterURL}/api/v1/addIP?hashedIP=${getIP(req)}`, 16 | httpAgent 17 | }).catch(() => /* instanbul skip next */ Logger.debug(`Failing to connect to user counter at: ${config.userCounterURL}`)); 18 | } 19 | } 20 | 21 | next(); 22 | } 23 | -------------------------------------------------------------------------------- /src/routes/addUnlistedVideo.ts: -------------------------------------------------------------------------------- 1 | import { Request, Response } from "express"; 2 | import { db } from "../databases/databases"; 3 | import { getService } from "../utils/getService"; 4 | import { Logger } from "../utils/logger"; 5 | 6 | /** 7 | * Optional API method that will be used temporarily to help collect 8 | * unlisted videos created before 2017 9 | * 10 | * https://support.google.com/youtube/answer/9230970 11 | */ 12 | 13 | export async function addUnlistedVideo(req: Request, res: Response): Promise { 14 | const { 15 | body: { 16 | videoID = null, 17 | 
year = 0, 18 | views = 0, 19 | channelID = "Unknown", 20 | service 21 | } 22 | } = req; 23 | 24 | if (typeof(videoID) !== "string" || videoID.length !== 11) { 25 | return res.status(400).send("Invalid parameters"); 26 | } 27 | 28 | try { 29 | const timeSubmitted = Date.now(); 30 | await db.prepare( 31 | "run", 32 | `INSERT INTO "unlistedVideos" ("videoID", "year", "views", "channelID", "timeSubmitted", "service") values (?, ?, ?, ?, ?, ?)`, 33 | [videoID, year, views, channelID, timeSubmitted, getService(service)] 34 | ); 35 | 36 | return res.sendStatus(200); 37 | } catch (err) { 38 | Logger.error(err as string); 39 | } 40 | 41 | return res.sendStatus(500); 42 | } 43 | -------------------------------------------------------------------------------- /src/routes/addUserAsVIP.ts: -------------------------------------------------------------------------------- 1 | import { getHashCache } from "../utils/getHashCache"; 2 | import { db } from "../databases/databases"; 3 | import { config } from "../config"; 4 | import { Request, Response } from "express"; 5 | import { isUserVIP } from "../utils/isUserVIP"; 6 | import { HashedUserID } from "../types/user.model"; 7 | import { Logger } from "../utils/logger"; 8 | 9 | interface AddUserAsVIPRequest extends Request { 10 | query: { 11 | userID: HashedUserID; 12 | adminUserID: string; 13 | enabled: string; 14 | } 15 | } 16 | 17 | export async function addUserAsVIP(req: AddUserAsVIPRequest, res: Response): Promise { 18 | const { query: { userID, adminUserID } } = req; 19 | 20 | const enabled = req.query?.enabled === "true"; 21 | 22 | if (!userID || !adminUserID) { 23 | // invalid request 24 | return res.sendStatus(400); 25 | } 26 | 27 | // hash the userID 28 | const adminUserIDInput = await getHashCache(adminUserID); 29 | 30 | if (adminUserIDInput !== config.adminUserID) { 31 | // not authorized 32 | return res.sendStatus(403); 33 | } 34 | 35 | // check to see if this user is already a vip 36 | const userIsVIP = await 
isUserVIP(userID); 37 | 38 | try { 39 | if (enabled && !userIsVIP) { 40 | // add them to the vip list 41 | await db.prepare("run", 'INSERT INTO "vipUsers" VALUES(?)', [userID]); 42 | } 43 | 44 | if (!enabled && userIsVIP) { 45 | //remove them from the shadow ban list 46 | await db.prepare("run", 'DELETE FROM "vipUsers" WHERE "userID" = ?', [userID]); 47 | } 48 | 49 | return res.sendStatus(200); 50 | } catch (e) { 51 | Logger.error(e as string); 52 | return res.sendStatus(500); 53 | } 54 | 55 | } 56 | -------------------------------------------------------------------------------- /src/routes/deleteLockCategories.ts: -------------------------------------------------------------------------------- 1 | import { Request, Response } from "express"; 2 | import { isUserVIP } from "../utils/isUserVIP"; 3 | import { getHashCache } from "../utils/getHashCache"; 4 | import { db } from "../databases/databases"; 5 | import { ActionType, Category, Service, VideoID } from "../types/segments.model"; 6 | import { UserID } from "../types/user.model"; 7 | import { getService } from "../utils/getService"; 8 | import { config } from "../config"; 9 | import { Logger } from "../utils/logger"; 10 | 11 | interface DeleteLockCategoriesRequest extends Request { 12 | body: { 13 | categories: Category[]; 14 | service: string; 15 | userID: UserID; 16 | videoID: VideoID; 17 | actionTypes: ActionType[]; 18 | }; 19 | } 20 | 21 | export async function deleteLockCategoriesEndpoint(req: DeleteLockCategoriesRequest, res: Response): Promise { 22 | // Collect user input data 23 | const { 24 | body: { 25 | videoID, 26 | userID, 27 | categories, 28 | service, 29 | actionTypes 30 | } 31 | } = req; 32 | 33 | // Check input data is valid 34 | if (!videoID 35 | || !userID 36 | || !categories 37 | || !Array.isArray(categories) 38 | || categories.length === 0 39 | || actionTypes && !Array.isArray(actionTypes) 40 | || actionTypes.length === 0 41 | ) { 42 | return res.status(400).json({ 43 | message: "Bad 
Format", 44 | }); 45 | } 46 | 47 | // Check if user is VIP 48 | const hashedUserID = await getHashCache(userID); 49 | const userIsVIP = await isUserVIP(hashedUserID); 50 | 51 | if (!userIsVIP) { 52 | return res.status(403).json({ 53 | message: "Must be a VIP to lock videos.", 54 | }); 55 | } 56 | 57 | try { 58 | await deleteLockCategories(videoID, categories, actionTypes, getService(service)); 59 | } catch (e) { 60 | Logger.error(e as string); 61 | return res.status(500); 62 | } 63 | 64 | return res.status(200).json({ message: `Removed lock categories entries for video ${videoID}` }); 65 | } 66 | 67 | export async function deleteLockCategories(videoID: VideoID, categories: Category[], actionTypes: ActionType[], service: Service): Promise { 68 | categories ??= config.categoryList as Category[]; 69 | actionTypes ??= [ActionType.Skip, ActionType.Mute]; 70 | 71 | const arrJoin = (arr: string[]): string => `'${arr.join(`','`)}'`; 72 | const categoryString = arrJoin(categories.filter((v) => !/[^a-z|_|-]/.test(v))); 73 | const actionTypeString = arrJoin(actionTypes.filter((v) => !/[^a-z|_|-]/.test(v))); 74 | 75 | await db.prepare("run", `DELETE FROM "lockCategories" WHERE "videoID" = ? AND "service" = ? 
AND "category" IN (${categoryString}) AND "actionType" IN (${actionTypeString})`, [videoID, service]); 76 | } 77 | -------------------------------------------------------------------------------- /src/routes/generateToken.ts: -------------------------------------------------------------------------------- 1 | import { Request, Response } from "express"; 2 | import { config } from "../config"; 3 | import { createAndSaveToken, TokenType } from "../utils/tokenUtils"; 4 | import { getHashCache } from "../utils/getHashCache"; 5 | 6 | interface GenerateTokenRequest extends Request { 7 | query: { 8 | code: string; 9 | adminUserID?: string; 10 | total?: string; 11 | key?: string; 12 | }, 13 | params: { 14 | type: TokenType; 15 | } 16 | } 17 | 18 | export async function generateTokenRequest(req: GenerateTokenRequest, res: Response): Promise { 19 | const { query: { code, adminUserID, total, key }, params: { type } } = req; 20 | const adminUserIDHash = adminUserID ? (await getHashCache(adminUserID)) : null; 21 | 22 | if (!type || (!code && type === TokenType.patreon)) { 23 | return res.status(400).send("Invalid request"); 24 | } 25 | 26 | if (type === TokenType.free && (!key || Math.abs(Date.now() - parseInt(key)) > 1000 * 60 * 60 * 24)) { 27 | return res.status(400).send("Invalid request"); 28 | } 29 | 30 | if (type === TokenType.patreon 31 | || ([TokenType.local, TokenType.gift].includes(type) && adminUserIDHash === config.adminUserID) 32 | || type === TokenType.free) { 33 | const licenseKeys = await createAndSaveToken(type, code, adminUserIDHash === config.adminUserID ? parseInt(total) : 1); 34 | 35 | /* istanbul ignore else */ 36 | if (licenseKeys) { 37 | if (type === TokenType.patreon) { 38 | return res.status(200).send(` 39 |

40 | Your license key: 41 |

42 |

43 | 44 | ${licenseKeys[0]} 45 | 46 |

47 |

48 | Copy this into the textbox in the other tab 49 |

50 | `); 51 | } else if (type === TokenType.free) { 52 | return res.status(200).send({ 53 | licenseKey: licenseKeys[0] 54 | }); 55 | } else { 56 | return res.status(200).send(licenseKeys.join("
")); 57 | } 58 | } else { 59 | return res.status(401).send(` 60 |

61 | Failed to generate an license key 62 |

63 | `); 64 | } 65 | } else { 66 | return res.sendStatus(403); 67 | } 68 | } -------------------------------------------------------------------------------- /src/routes/getChapterNames.ts: -------------------------------------------------------------------------------- 1 | import { Logger } from "../utils/logger"; 2 | import { Request, Response } from "express"; 3 | import { db } from "../databases/databases"; 4 | import { Postgres } from "../databases/Postgres"; 5 | 6 | export async function getChapterNames(req: Request, res: Response): Promise { 7 | const description = req.query.description as string; 8 | const channelID = req.query.channelID as string; 9 | 10 | if (!description || typeof(description) !== "string" 11 | || !channelID || typeof(channelID) !== "string") { 12 | return res.sendStatus(400); 13 | } 14 | 15 | if (!(db instanceof Postgres)) { 16 | return res.sendStatus(500).json({ 17 | message: "Not supported on this instance" 18 | }); 19 | } 20 | 21 | try { 22 | const descriptions = await db.prepare("all", ` 23 | SELECT "description" 24 | FROM "sponsorTimes" 25 | WHERE ("locked" = 1 OR "votes" >= 0) AND "videoID" IN ( 26 | SELECT "videoID" 27 | FROM "videoInfo" 28 | WHERE "channelID" = ? 29 | ) AND "description" != '' 30 | AND similarity("description", ?) >= 0.1 31 | GROUP BY "description" 32 | ORDER BY SUM("votes"), similarity("description", ?) 
DESC 33 | LIMIT 5;` 34 | , [channelID, description, description]) as { description: string }[]; 35 | 36 | if (descriptions?.length > 0) { 37 | return res.status(200).json(descriptions.map(d => ({ 38 | description: d.description 39 | }))); 40 | } 41 | } catch (err) { 42 | Logger.error(err as string); 43 | return res.sendStatus(500); 44 | } 45 | 46 | return res.status(404).json([]); 47 | } 48 | -------------------------------------------------------------------------------- /src/routes/getConfig.ts: -------------------------------------------------------------------------------- 1 | import { getHashCache } from "../utils/getHashCache"; 2 | import { Request, Response } from "express"; 3 | import { isUserVIP } from "../utils/isUserVIP"; 4 | import { UserID } from "../types/user.model"; 5 | import { Logger } from "../utils/logger"; 6 | import { getServerConfig } from "../utils/serverConfig"; 7 | 8 | export async function getConfigEndpoint(req: Request, res: Response): Promise { 9 | const userID = req.query.userID as string; 10 | const key = req.query.key as string; 11 | 12 | if (!userID || !key) { 13 | // invalid request 14 | return res.sendStatus(400); 15 | } 16 | 17 | // hash the userID 18 | const hashedUserID = await getHashCache(userID as UserID); 19 | const isVIP = (await isUserVIP(hashedUserID)); 20 | 21 | if (!isVIP) { 22 | // not authorized 23 | return res.sendStatus(403); 24 | } 25 | 26 | try { 27 | return res.status(200).json({ 28 | value: await getServerConfig(key) 29 | }); 30 | } catch (e) { 31 | Logger.error(e as string); 32 | 33 | return res.sendStatus(500); 34 | } 35 | } 36 | -------------------------------------------------------------------------------- /src/routes/getDaysSavedFormatted.ts: -------------------------------------------------------------------------------- 1 | import { db } from "../databases/databases"; 2 | import { Request, Response } from "express"; 3 | import { Logger } from "../utils/logger"; 4 | 5 | export async function 
getDaysSavedFormatted(req: Request, res: Response): Promise { 6 | try { 7 | const row = await db.prepare("get", 'SELECT SUM(("endTime" - "startTime") / 60 / 60 / 24 * "views") as "daysSaved" from "sponsorTimes" where "shadowHidden" != 1', []); 8 | 9 | if (row !== undefined) { 10 | //send this result 11 | return res.send({ 12 | daysSaved: row.daysSaved?.toFixed(2) ?? "0", 13 | }); 14 | } else { 15 | return res.send({ 16 | daysSaved: 0 17 | }); 18 | } 19 | } catch (err) { 20 | Logger.error(err as string); 21 | return res.sendStatus(500); 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /src/routes/getFeatureFlag.ts: -------------------------------------------------------------------------------- 1 | import { config } from "../config"; 2 | import { Request, Response } from "express"; 3 | 4 | export function getFeatureFlag(req: Request, res: Response): Response { 5 | const { params: { name } } = req; 6 | 7 | switch (name) { 8 | case "deArrowPaywall": 9 | return res.status(200).json({ 10 | enabled: config.deArrowPaywall, 11 | }); 12 | } 13 | 14 | return res.status(404).json(); 15 | } 16 | -------------------------------------------------------------------------------- /src/routes/getIsUserVIP.ts: -------------------------------------------------------------------------------- 1 | import { Logger } from "../utils/logger"; 2 | import { getHashCache } from "../utils/getHashCache"; 3 | import { isUserVIP } from "../utils/isUserVIP"; 4 | import { Request, Response } from "express"; 5 | import { HashedUserID, UserID } from "../types/user.model"; 6 | 7 | export async function getIsUserVIP(req: Request, res: Response): Promise { 8 | const userID = req.query.userID as UserID; 9 | 10 | if (userID == undefined) { 11 | //invalid request 12 | return res.sendStatus(400); 13 | } 14 | 15 | //hash the userID 16 | const hashedUserID: HashedUserID = await getHashCache(userID); 17 | 18 | try { 19 | const vipState = await 
isUserVIP(hashedUserID); 20 | return res.status(200).json({ 21 | hashedUserID: hashedUserID, 22 | vip: vipState, 23 | }); 24 | } catch (err) /* istanbul ignore next */ { 25 | Logger.error(err as string); 26 | return res.sendStatus(500); 27 | } 28 | } 29 | -------------------------------------------------------------------------------- /src/routes/getLockCategories.ts: -------------------------------------------------------------------------------- 1 | import { db } from "../databases/databases"; 2 | import { Logger } from "../utils/logger"; 3 | import { Request, Response } from "express"; 4 | import { ActionType, Category, VideoID } from "../types/segments.model"; 5 | import { getService } from "../utils/getService"; 6 | import { parseActionTypes } from "../utils/parseParams"; 7 | 8 | export async function getLockCategories(req: Request, res: Response): Promise { 9 | const videoID = req.query.videoID as VideoID; 10 | const service = getService(req.query.service as string); 11 | const actionTypes: ActionType[] = parseActionTypes(req, [ActionType.Skip, ActionType.Mute]); 12 | if (!videoID || !Array.isArray(actionTypes)) { 13 | //invalid request 14 | return res.sendStatus(400); 15 | } 16 | try { 17 | // Get existing lock categories markers 18 | const row = await db.prepare("all", 'SELECT "category", "reason", "actionType" from "lockCategories" where "videoID" = ? AND "service" = ?', [videoID, service]) as {category: Category, reason: string, actionType: ActionType}[]; 19 | const actionTypeMatches = row.filter((lock) => actionTypes.includes(lock.actionType)); 20 | // map categories to array in JS becaues of SQL incompatibilities 21 | const categories = actionTypeMatches.map(item => item.category); 22 | if (categories.length === 0 || !categories[0]) return res.sendStatus(404); 23 | // Get longest lock reason 24 | const reason = actionTypeMatches.map(item => item.reason) 25 | .reduce((a,b) => (a.length > b.length) ? 
a : b); 26 | return res.send({ 27 | reason, 28 | categories, 29 | actionTypes 30 | }); 31 | } catch (err) /* istanbul ignore next */{ 32 | Logger.error(err as string); 33 | return res.sendStatus(500); 34 | } 35 | } 36 | -------------------------------------------------------------------------------- /src/routes/getLockCategoriesByHash.ts: -------------------------------------------------------------------------------- 1 | import { db } from "../databases/databases"; 2 | import { Logger } from "../utils/logger"; 3 | import { Request, Response } from "express"; 4 | import { hashPrefixTester } from "../utils/hashPrefixTester"; 5 | import { ActionType, Category, VideoID, VideoIDHash } from "../types/segments.model"; 6 | import { parseActionTypes } from "../utils/parseParams"; 7 | 8 | interface LockResultByHash { 9 | videoID: VideoID, 10 | hash: VideoIDHash, 11 | reason: string, 12 | categories: Category[] 13 | } 14 | 15 | interface DBLock { 16 | videoID: VideoID, 17 | hash: VideoIDHash, 18 | category: Category, 19 | reason: string, 20 | actionType: ActionType, 21 | } 22 | 23 | const mergeLocks = (source: DBLock[], actionTypes: ActionType[]): LockResultByHash[] => { 24 | const dest: { [videoID: VideoID]: LockResultByHash } = {}; 25 | for (const obj of source) { 26 | if (!actionTypes.includes(obj.actionType)) continue; 27 | // videoID already exists 28 | if (obj.videoID in dest) { 29 | // override longer reason 30 | const destMatch = dest[obj.videoID]; 31 | if (obj.reason?.length > destMatch.reason?.length) destMatch.reason = obj.reason; 32 | // push to categories 33 | destMatch.categories.push(obj.category); 34 | } else { 35 | dest[obj.videoID] = { 36 | videoID: obj.videoID, 37 | hash: obj.hash, 38 | reason: obj.reason, 39 | categories: [obj.category] 40 | }; 41 | } 42 | } 43 | return Object.values(dest); 44 | }; 45 | 46 | export async function getLockCategoriesByHash(req: Request, res: Response): Promise { 47 | let hashPrefix = req.params.prefix as VideoIDHash; 48 | 
const actionTypes: ActionType[] = parseActionTypes(req, [ActionType.Skip, ActionType.Mute]); 49 | if (!Array.isArray(actionTypes)) { 50 | //invalid request 51 | return res.sendStatus(400); 52 | } 53 | 54 | if (!hashPrefixTester(req.params.prefix)) { 55 | return res.status(400).send("Hash prefix does not match format requirements."); // Exit early on faulty prefix 56 | } 57 | hashPrefix = hashPrefix.toLowerCase() as VideoIDHash; 58 | 59 | try { 60 | // Get existing lock categories markers 61 | const lockedRows = await db.prepare("all", 'SELECT "videoID", "hashedVideoID" as "hash", "category", "reason", "actionType" from "lockCategories" where "hashedVideoID" LIKE ?', [`${hashPrefix}%`]) as DBLock[]; 62 | if (lockedRows.length === 0 || !lockedRows[0]) return res.sendStatus(404); 63 | // merge all locks 64 | return res.send(mergeLocks(lockedRows, actionTypes)); 65 | } catch (err) /* istanbul ignore next */ { 66 | Logger.error(err as string); 67 | return res.sendStatus(500); 68 | } 69 | } 70 | -------------------------------------------------------------------------------- /src/routes/getReady.ts: -------------------------------------------------------------------------------- 1 | import { Request, Response } from "express"; 2 | import { Server } from "http"; 3 | import { config } from "../config"; 4 | import { getRedisStats } from "../utils/redis"; 5 | import { Postgres } from "../databases/Postgres"; 6 | import { db } from "../databases/databases"; 7 | 8 | export async function getReady(req: Request, res: Response, server: Server): Promise { 9 | const connections = await new Promise((resolve) => server.getConnections((_, count) => resolve(count))) as number; 10 | 11 | const redisStats = getRedisStats(); 12 | const postgresStats = (db as Postgres).getStats?.(); 13 | 14 | if (!connections 15 | || (connections < config.maxConnections 16 | && (!config.redis || redisStats.activeRequests < config.redis.maxConnections * 0.8) 17 | && (!config.redis || 
redisStats.activeRequests < 1 || redisStats.avgReadTime < config.maxResponseTime 18 | || (redisStats.memoryCacheSize < config.redis.clientCacheSize * 0.8 && redisStats.avgReadTime < config.maxResponseTimeWhileLoadingCache)) 19 | && (!config.postgres || postgresStats.activeRequests < config.postgres.maxActiveRequests * 0.8) 20 | && (!config.postgres || postgresStats.avgReadTime < config.maxResponseTime 21 | || (redisStats.memoryCacheSize < config.redis.clientCacheSize * 0.8 && postgresStats.avgReadTime < config.maxResponseTimeWhileLoadingCache)))) { 22 | return res.sendStatus(200); 23 | } else { 24 | return res.sendStatus(500); 25 | } 26 | } 27 | -------------------------------------------------------------------------------- /src/routes/getSavedTimeForUser.ts: -------------------------------------------------------------------------------- 1 | import { db } from "../databases/databases"; 2 | import { Request, Response } from "express"; 3 | import { getHashCache } from "../utils/getHashCache"; 4 | import { config } from "../config"; 5 | import { Logger } from "../utils/logger"; 6 | 7 | const maxRewardTimePerSegmentInSeconds = config.maxRewardTimePerSegmentInSeconds ?? 86400; 8 | 9 | export async function getSavedTimeForUser(req: Request, res: Response): Promise { 10 | let userID = req.query.userID as string; 11 | 12 | if (userID == undefined) { 13 | //invalid request 14 | return res.sendStatus(400); 15 | } 16 | 17 | //hash the userID 18 | userID = await getHashCache(userID); 19 | 20 | try { 21 | const row = await db.prepare("get", 'SELECT SUM(((CASE WHEN "endTime" - "startTime" > ? THEN ? ELSE "endTime" - "startTime" END) / 60) * "views") as "minutesSaved" FROM "sponsorTimes" WHERE "userID" = ? 
AND "votes" > -1 AND "shadowHidden" != 1 ', [maxRewardTimePerSegmentInSeconds, maxRewardTimePerSegmentInSeconds, userID], { useReplica: true }); 22 | 23 | if (row.minutesSaved != null) { 24 | return res.send({ 25 | timeSaved: row.minutesSaved, 26 | }); 27 | } else { 28 | return res.sendStatus(404); 29 | } 30 | } catch (err) /* istanbul ignore next */ { 31 | Logger.error(`getSavedTimeForUser ${err}`); 32 | return res.sendStatus(500); 33 | } 34 | } 35 | -------------------------------------------------------------------------------- /src/routes/getSegmentID.ts: -------------------------------------------------------------------------------- 1 | import { db } from "../databases/databases"; 2 | import { Request, Response } from "express"; 3 | 4 | export async function getSegmentID(req: Request, res: Response): Promise { 5 | const partialUUID = req.query?.UUID; 6 | const videoID = req.query?.videoID; 7 | 8 | if (!partialUUID || !videoID) { 9 | //invalid request 10 | return res.sendStatus(400); 11 | } 12 | 13 | const data = await db.prepare("get", `SELECT "UUID" from "sponsorTimes" WHERE "UUID" LIKE ? 
AND "videoID" = ?`, [`${partialUUID}%`, videoID]); 14 | 15 | if (data) { 16 | return res.status(200).send(data.UUID); 17 | } else { 18 | return res.sendStatus(404); 19 | } 20 | } 21 | -------------------------------------------------------------------------------- /src/routes/getSegmentInfo.ts: -------------------------------------------------------------------------------- 1 | import { Request, Response } from "express"; 2 | import { db } from "../databases/databases"; 3 | import { DBSegment, SegmentUUID } from "../types/segments.model"; 4 | 5 | const isValidSegmentUUID = (str: string): boolean => /^([a-f0-9]{64}|[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12})/.test(str); 6 | 7 | async function getSegmentFromDBByUUID(UUID: SegmentUUID): Promise { 8 | try { 9 | return await db.prepare("get", `SELECT * FROM "sponsorTimes" WHERE "UUID" = ?`, [UUID]); 10 | } catch (err) /* istanbul ignore next */ { 11 | return null; 12 | } 13 | } 14 | 15 | async function getSegmentsByUUID(UUIDs: SegmentUUID[]): Promise { 16 | const DBSegments: DBSegment[] = []; 17 | for (const UUID of UUIDs) { 18 | // if UUID is invalid, skip 19 | if (!isValidSegmentUUID(UUID)) continue; 20 | DBSegments.push(await getSegmentFromDBByUUID(UUID as SegmentUUID)); 21 | } 22 | return DBSegments; 23 | } 24 | 25 | async function handleGetSegmentInfo(req: Request, res: Response): Promise { 26 | // If using params instead of JSON, only one UUID can be pulled 27 | let UUIDs = req.query.UUIDs 28 | ? JSON.parse(req.query.UUIDs as string) 29 | : req.query.UUID 30 | ? Array.isArray(req.query.UUID) 31 | ? 
req.query.UUID 32 | : [req.query.UUID] 33 | : null; 34 | // deduplicate with set 35 | UUIDs = [ ...new Set(UUIDs)]; 36 | // if more than 10 entries, slice 37 | if (!Array.isArray(UUIDs) || !UUIDs?.length) { 38 | res.status(400).send("UUIDs parameter does not match format requirements."); 39 | return; 40 | } 41 | if (UUIDs.length > 10) UUIDs = UUIDs.slice(0, 10); 42 | const DBSegments = await getSegmentsByUUID(UUIDs); 43 | // all uuids failed lookup 44 | if (!DBSegments?.length) { 45 | res.sendStatus(400); 46 | return; 47 | } 48 | // uuids valid but not found 49 | if (DBSegments[0] === null || DBSegments[0] === undefined) { 50 | res.sendStatus(400); 51 | return; 52 | } 53 | return DBSegments; 54 | } 55 | 56 | async function endpoint(req: Request, res: Response): Promise { 57 | try { 58 | const DBSegments = await handleGetSegmentInfo(req, res); 59 | 60 | // If false, res.send has already been called 61 | if (DBSegments) { 62 | //send result 63 | return res.send(DBSegments); 64 | } 65 | } catch (err) /* istanbul ignore next */ { 66 | if (err instanceof SyntaxError) { // catch JSON.parse error 67 | return res.status(400).send("UUIDs parameter does not match format requirements."); 68 | } else return res.sendStatus(500); 69 | } 70 | } 71 | 72 | export { 73 | getSegmentFromDBByUUID, 74 | getSegmentsByUUID, 75 | handleGetSegmentInfo, 76 | endpoint 77 | }; 78 | -------------------------------------------------------------------------------- /src/routes/getSkipSegmentsByHash.ts: -------------------------------------------------------------------------------- 1 | import { hashPrefixTester } from "../utils/hashPrefixTester"; 2 | import { getSegmentsByHash } from "./getSkipSegments"; 3 | import { Request, Response } from "express"; 4 | import { VideoIDHash } from "../types/segments.model"; 5 | import { Logger } from "../utils/logger"; 6 | import { parseSkipSegments } from "../utils/parseSkipSegments"; 7 | import { getEtag } from "../middleware/etag"; 8 | 9 | export async 
function getSkipSegmentsByHash(req: Request, res: Response): Promise { 10 | let hashPrefix = req.params.prefix as VideoIDHash; 11 | if (!req.params.prefix || !hashPrefixTester(req.params.prefix)) { 12 | return res.status(400).send("Hash prefix does not match format requirements."); // Exit early on faulty prefix 13 | } 14 | hashPrefix = hashPrefix.toLowerCase() as VideoIDHash; 15 | 16 | const parseResult = parseSkipSegments(req); 17 | if (parseResult.errors.length > 0) { 18 | return res.status(400).send(parseResult.errors); 19 | } 20 | const { categories, actionTypes, trimUUIDs, requiredSegments, service } = parseResult; 21 | 22 | // Get all video id's that match hash prefix 23 | const segments = await getSegmentsByHash(req, hashPrefix, categories, actionTypes, trimUUIDs, requiredSegments, service); 24 | 25 | try { 26 | const hashKey = hashPrefix.length === 4 ? "skipSegmentsHash" : "skipSegmentsLargerHash"; 27 | await getEtag(hashKey, hashPrefix, service) 28 | .then(etag => res.set("ETag", etag)) 29 | .catch(/* istanbul ignore next */ () => null); 30 | const output = Object.entries(segments).map(([videoID, data]) => ({ 31 | videoID, 32 | segments: data.segments, 33 | })); 34 | return res.status(output.length === 0 ? 
/**
 * Health/diagnostics endpoint. Times a trivial DB read and a redis increment
 * (each capped at 1s via promiseOrTimeout), then reports uptime, load,
 * connection count and backend stats. With a `value` path param, returns just
 * that single field JSON-stringified.
 */
export async function getStatus(req: Request, res: Response, server: Server): Promise<Response> {
    const startTime = Date.now();
    // value may arrive as an array (repeated param); only the first is used
    let value = req.params.value as string[] | string;
    value = Array.isArray(value) ? value[0] : value;
    // NOTE(review): processTime stays undefined (not -1) if the DB probe times
    // out, so it is then omitted from the JSON response — confirm intended.
    let processTime, redisProcessTime = -1;
    try {
        const dbStartTime = Date.now();
        // DB round-trip probe: read the schema version, recording elapsed time;
        // on timeout, log and fall back to -1 so the endpoint still responds.
        const dbVersion = await promiseOrTimeout(db.prepare("get", "SELECT key, value FROM config where key = ?", ["version"]), 1000)
            .then(e => {
                processTime = Date.now() - dbStartTime;
                return e.value;
            })
            .catch(e => /* istanbul ignore next */ {
                Logger.error(`status: SQL query timed out: ${e}`);
                return -1;
            });
        let statusRequests: unknown = 0;
        const redisStartTime = Date.now();
        // Redis round-trip probe doubling as a hit counter for this endpoint
        const numberRequests = await promiseOrTimeout(redis.increment("statusRequest"), 1000)
            .then(e => {
                redisProcessTime = Date.now() - redisStartTime;
                return e;
            }).catch(e => /* istanbul ignore next */ {
                Logger.error(`status: redis increment timed out ${e}\nload: ${os.loadavg().slice(1)} with ${JSON.stringify(getRedisStats())}\n${JSON.stringify((db as Postgres)?.getStats?.())}`);
                return [-1];
            });
        // increment() resolves to an array; first element is the new count
        statusRequests = numberRequests?.[0];

        const statusValues: Record<string, any> = {
            uptime: process.uptime(),
            commit: (global as any)?.HEADCOMMIT ?? "unknown",
            db: Number(dbVersion),
            startTime,
            processTime,
            redisProcessTime,
            loadavg: os.loadavg().slice(1), // only return 5 & 15 minute load average
            connections: await new Promise((resolve) => server.getConnections((_, count) => resolve(count))),
            statusRequests,
            hostname: os.hostname(),
            // getStats is optional — undefined when the backing DB isn't Postgres
            postgresStats: (db as Postgres)?.getStats?.(),
            postgresPrivateStats: (privateDB as Postgres)?.getStats?.(),
            redisStats: getRedisStats(),
        };
        // single-field requests get the value JSON-stringified (even scalars)
        return value ? res.send(JSON.stringify(statusValues[value])) : res.send(statusValues);
    } catch (err) /* istanbul ignore next */ {
        Logger.error(err as string);
        return res.sendStatus(500);
    }
}
res.send(JSON.stringify(statusValues[value])) : res.send(statusValues); 54 | } catch (err) /* istanbul ignore next */ { 55 | Logger.error(err as string); 56 | return res.sendStatus(500); 57 | } 58 | } 59 | -------------------------------------------------------------------------------- /src/routes/getTopBrandingUsers.ts: -------------------------------------------------------------------------------- 1 | import { db } from "../databases/databases"; 2 | import { Request, Response } from "express"; 3 | import { Logger } from "../utils/logger"; 4 | 5 | async function generateTopUsersStats(sortBy: string) { 6 | const rows = await db.prepare("all", `SELECT COUNT(distinct "titles"."UUID") as "titleCount", COUNT(distinct "thumbnails"."UUID") as "thumbnailCount", COALESCE("userName", "titles"."userID") as "userName" 7 | FROM "titles" 8 | LEFT JOIN "titleVotes" ON "titles"."UUID" = "titleVotes"."UUID" 9 | LEFT JOIN "userNames" ON "titles"."userID"="userNames"."userID" 10 | LEFT JOIN "thumbnails" ON "titles"."userID" = "thumbnails"."userID" 11 | LEFT JOIN "thumbnailVotes" ON "thumbnails"."UUID" = "thumbnailVotes"."UUID" 12 | WHERE "titleVotes"."votes" > -1 AND "titleVotes"."shadowHidden" != 1 13 | GROUP BY COALESCE("userName", "titles"."userID") HAVING SUM("titleVotes"."votes") > 2 OR SUM("thumbnailVotes"."votes") > 2 14 | ORDER BY "${sortBy}" DESC LIMIT 100`, []) as { titleCount: number, thumbnailCount: number, userName: string }[]; 15 | 16 | return rows.map((row) => ({ 17 | userName: row.userName, 18 | titles: row.titleCount, 19 | thumbnails: row.thumbnailCount 20 | })); 21 | } 22 | 23 | export async function getTopBrandingUsers(req: Request, res: Response): Promise { 24 | const sortType = parseInt(req.query.sortType as string); 25 | 26 | let sortBy = ""; 27 | if (sortType == 0) { 28 | sortBy = "titleCount"; 29 | } else if (sortType == 1) { 30 | sortBy = "thumbnailCount"; 31 | } else { 32 | //invalid request 33 | return res.sendStatus(400); 34 | } 35 | 36 | if 
(db.highLoad()) { 37 | return res.status(503).send("Disabled for load reasons"); 38 | } 39 | 40 | try { 41 | const stats = await generateTopUsersStats(sortBy); 42 | 43 | //send this result 44 | return res.send(stats); 45 | } catch (e) { 46 | Logger.error(e as string); 47 | return res.sendStatus(500); 48 | } 49 | } 50 | -------------------------------------------------------------------------------- /src/routes/getUserID.ts: -------------------------------------------------------------------------------- 1 | import { db } from "../databases/databases"; 2 | import { Request, Response } from "express"; 3 | import { UserID } from "../types/user.model"; 4 | import { Logger } from "../utils/logger"; 5 | 6 | function getFuzzyUserID(userName: string): Promise<{userName: string, userID: UserID }[]> { 7 | // escape [_ % \] to avoid ReDOS 8 | userName = userName.replace(/\\/g, "\\\\") 9 | .replace(/_/g, "\\_") 10 | .replace(/%/g, "\\%"); 11 | userName = `%${userName}%`; // add wildcard to username 12 | // LIMIT to reduce overhead | ESCAPE to escape LIKE wildcards 13 | try { 14 | return db.prepare("all", `SELECT "userName", "userID" FROM "userNames" WHERE "userName" 15 | LIKE ? ESCAPE '\\' LIMIT 10`, [userName]); 16 | } catch (err) /* istanbul ignore next */ { 17 | return null; 18 | } 19 | } 20 | 21 | function getExactUserID(userName: string): Promise<{userName: string, userID: UserID }[]> { 22 | try { 23 | return db.prepare("all", `SELECT "userName", "userID" from "userNames" WHERE "userName" = ? LIMIT 10`, [userName]); 24 | } catch (err) /* istanbul ignore next */{ 25 | return null; 26 | } 27 | } 28 | 29 | export async function getUserID(req: Request, res: Response): Promise { 30 | const userName = req.query.username as string; 31 | const exactSearch = req.query.exact 32 | ? 
req.query.exact == "true" 33 | : false as boolean; 34 | 35 | // if not exact and length is 1, also skip 36 | if (userName == undefined || userName.length > 64 || 37 | (!exactSearch && userName.length < 3)) { 38 | // invalid request 39 | return res.sendStatus(400); 40 | } 41 | 42 | try { 43 | const results = exactSearch 44 | ? await getExactUserID(userName) 45 | : await getFuzzyUserID(userName); 46 | 47 | if (results === undefined || results === null) { 48 | /* istanbul ignore next */ 49 | return res.sendStatus(500); 50 | } else if (results.length === 0) { 51 | return res.sendStatus(404); 52 | } else { 53 | return res.send(results); 54 | } 55 | } catch (e) { 56 | Logger.error(e as string); 57 | return res.sendStatus(500); 58 | } 59 | } 60 | -------------------------------------------------------------------------------- /src/routes/getUsername.ts: -------------------------------------------------------------------------------- 1 | import { db } from "../databases/databases"; 2 | import { getHashCache } from "../utils/getHashCache"; 3 | import { Logger } from "../utils/logger"; 4 | import { Request, Response } from "express"; 5 | 6 | export async function getUsername(req: Request, res: Response): Promise { 7 | let userID = req.query.userID as string; 8 | 9 | if (userID == undefined) { 10 | //invalid request 11 | return res.sendStatus(400); 12 | } 13 | 14 | //hash the userID 15 | userID = await getHashCache(userID); 16 | 17 | try { 18 | const row = await db.prepare("get", `SELECT "userName" FROM "userNames" WHERE "userID" = ?`, [userID], { useReplica: true }); 19 | 20 | if (row !== undefined) { 21 | return res.send({ 22 | userName: row.userName, 23 | }); 24 | } else { 25 | //no username yet, just send back the userID 26 | return res.send({ 27 | userName: userID, 28 | }); 29 | } 30 | } catch (err) /* istanbul ignore next */ { 31 | Logger.error(err as string); 32 | return res.sendStatus(500); 33 | } 34 | } 35 | 
-------------------------------------------------------------------------------- /src/routes/getVideoLabelByHash.ts: -------------------------------------------------------------------------------- 1 | import { hashPrefixTester } from "../utils/hashPrefixTester"; 2 | import { getLabelsByHash } from "./getVideoLabel"; 3 | import { Request, Response } from "express"; 4 | import { VideoIDHash, Service } from "../types/segments.model"; 5 | import { getService } from "../utils/getService"; 6 | 7 | export async function getVideoLabelsByHash(req: Request, res: Response): Promise { 8 | let hashPrefix = req.params.prefix as VideoIDHash; 9 | if (!req.params.prefix || !hashPrefixTester(req.params.prefix)) { 10 | return res.status(400).send("Hash prefix does not match format requirements."); // Exit early on faulty prefix 11 | } 12 | hashPrefix = hashPrefix.toLowerCase() as VideoIDHash; 13 | 14 | const checkHasStartSegment = req.query.hasStartSegment === "true"; 15 | 16 | const service: Service = getService(req.query.service, req.body.service); 17 | 18 | // Get all video id's that match hash prefix 19 | const segments = await getLabelsByHash(hashPrefix, service, checkHasStartSegment); 20 | 21 | if (!segments) return res.status(404).json([]); 22 | 23 | const output = Object.entries(segments).map(([videoID, data]) => ({ 24 | videoID, 25 | segments: data.segments, 26 | hasStartSegment: data.hasStartSegment 27 | })); 28 | return res.status(output.length === 0 ? 
404 : 200).json(output); 29 | } 30 | -------------------------------------------------------------------------------- /src/routes/getViewsForUser.ts: -------------------------------------------------------------------------------- 1 | import { db } from "../databases/databases"; 2 | import { Request, Response } from "express"; 3 | import { getHashCache } from "../utils/getHashCache"; 4 | import { Logger } from "../utils/logger"; 5 | 6 | export async function getViewsForUser(req: Request, res: Response): Promise { 7 | let userID = req.query.userID as string; 8 | 9 | if (userID == undefined) { 10 | //invalid request 11 | return res.sendStatus(400); 12 | } 13 | 14 | //hash the userID 15 | userID = await getHashCache(userID); 16 | 17 | try { 18 | const row = await db.prepare("get", `SELECT SUM("views") as "viewCount" FROM "sponsorTimes" WHERE "userID" = ?`, [userID], { useReplica: true }); 19 | 20 | //increase the view count by one 21 | if (row.viewCount != null) { 22 | return res.send({ 23 | viewCount: row.viewCount, 24 | }); 25 | } else { 26 | return res.sendStatus(404); 27 | } 28 | } catch (err) /* istanbul ignore next */ { 29 | Logger.error(err as string); 30 | return res.sendStatus(500); 31 | } 32 | } 33 | -------------------------------------------------------------------------------- /src/routes/oldSubmitSponsorTimes.ts: -------------------------------------------------------------------------------- 1 | import { postSkipSegments } from "./postSkipSegments"; 2 | import { Request, Response } from "express"; 3 | 4 | export function oldSubmitSponsorTimes(req: Request, res: Response): Promise { 5 | req.query.category = "sponsor"; 6 | return postSkipSegments(req, res); 7 | } 8 | -------------------------------------------------------------------------------- /src/routes/postClearCache.ts: -------------------------------------------------------------------------------- 1 | import { Logger } from "../utils/logger"; 2 | import { HashedUserID, UserID } from 
"../types/user.model"; 3 | import { getHashCache } from "../utils/getHashCache"; 4 | import { Request, Response } from "express"; 5 | import { Service, VideoID } from "../types/segments.model"; 6 | import { QueryCacher } from "../utils/queryCacher"; 7 | import { isUserVIP } from "../utils/isUserVIP"; 8 | import { VideoIDHash } from "../types/segments.model"; 9 | import { getService } from "../utils/getService"; 10 | 11 | export async function postClearCache(req: Request, res: Response): Promise { 12 | const videoID = req.query.videoID as VideoID; 13 | const userID = req.query.userID as UserID; 14 | const service = getService(req.query.service as Service); 15 | 16 | const invalidFields = []; 17 | if (typeof videoID !== "string") { 18 | invalidFields.push("videoID"); 19 | } 20 | if (typeof userID !== "string") { 21 | invalidFields.push("userID"); 22 | } 23 | 24 | if (invalidFields.length !== 0) { 25 | // invalid request 26 | const fields = invalidFields.join(", "); 27 | return res.status(400).send(`No valid ${fields} field(s) provided`); 28 | } 29 | 30 | // hash the userID as early as possible 31 | const hashedUserID: HashedUserID = await getHashCache(userID); 32 | // hash videoID 33 | const hashedVideoID: VideoIDHash = await getHashCache(videoID, 1); 34 | 35 | // Ensure user is a VIP 36 | if (!(await isUserVIP(hashedUserID))){ 37 | Logger.warn(`Permission violation: User ${hashedUserID} attempted to clear cache for video ${videoID}.`); 38 | return res.status(403).json({ "message": "Not a VIP" }); 39 | } 40 | 41 | try { 42 | QueryCacher.clearSegmentCache({ 43 | videoID, 44 | hashedVideoID, 45 | service 46 | }); 47 | QueryCacher.clearBrandingCache({ 48 | videoID, 49 | hashedVideoID, 50 | service 51 | }); 52 | return res.status(200).json({ 53 | message: `Cache cleared on video ${videoID}` 54 | }); 55 | } catch(err) /* istanbul ignore next */ { 56 | return res.sendStatus(500); 57 | } 58 | } 59 | 
-------------------------------------------------------------------------------- /src/routes/postPurgeAllSegments.ts: -------------------------------------------------------------------------------- 1 | import { Logger } from "../utils/logger"; 2 | import { getHashCache } from "../utils/getHashCache"; 3 | import { isUserVIP } from "../utils/isUserVIP"; 4 | import { Request, Response } from "express"; 5 | import { HashedUserID, UserID } from "../types/user.model"; 6 | import { Service, VideoID, VideoIDHash } from "../types/segments.model"; 7 | import { db } from "../databases/databases"; 8 | import { QueryCacher } from "../utils/queryCacher"; 9 | 10 | export async function postPurgeAllSegments(req: Request, res: Response): Promise { 11 | const userID = req.body.userID as UserID; 12 | const service = req.body.service as Service ?? Service.YouTube; 13 | const videoID = req.body.videoID as VideoID; 14 | 15 | if (userID == undefined) { 16 | //invalid request 17 | return res.sendStatus(400); 18 | } 19 | 20 | //hash the userID 21 | const hashedUserID: HashedUserID = await getHashCache(userID); 22 | 23 | try { 24 | const vipState = await isUserVIP(hashedUserID); 25 | if (!vipState) { 26 | return res.status(403).json({ 27 | message: "Must be a VIP to perform this action.", 28 | }); 29 | } 30 | 31 | await db.prepare("run", `UPDATE "sponsorTimes" SET "hidden" = 1 WHERE "videoID" = ?`, [videoID]); 32 | 33 | const hashedVideoID: VideoIDHash = await getHashCache(videoID, 1); 34 | QueryCacher.clearSegmentCache({ 35 | videoID, 36 | hashedVideoID, 37 | service 38 | }); 39 | 40 | } catch (err) /* istanbul ignore next */ { 41 | Logger.error(err as string); 42 | return res.sendStatus(500); 43 | } 44 | return res.sendStatus(200); 45 | } 46 | -------------------------------------------------------------------------------- /src/routes/setConfig.ts: -------------------------------------------------------------------------------- 1 | import { getHashCache } from "../utils/getHashCache"; 2 
| import { db } from "../databases/databases"; 3 | import { Request, Response } from "express"; 4 | import { isUserVIP } from "../utils/isUserVIP"; 5 | import { UserID } from "../types/user.model"; 6 | import { Logger } from "../utils/logger"; 7 | 8 | interface SetConfigRequest extends Request { 9 | body: { 10 | userID: UserID; 11 | key: string; 12 | value: string; 13 | } 14 | } 15 | 16 | const allowedConfigs = [ 17 | "old-submitter-block-date", 18 | "max-users-per-minute", 19 | "max-users-per-minute-dearrow" 20 | ]; 21 | 22 | export async function setConfig(req: SetConfigRequest, res: Response): Promise { 23 | const { body: { userID, key, value } } = req; 24 | 25 | if (!userID || !allowedConfigs.includes(key)) { 26 | // invalid request 27 | return res.sendStatus(400); 28 | } 29 | 30 | // hash the userID 31 | const hashedUserID = await getHashCache(userID as UserID); 32 | const isVIP = (await isUserVIP(hashedUserID)); 33 | 34 | if (!isVIP) { 35 | // not authorized 36 | return res.sendStatus(403); 37 | } 38 | 39 | try { 40 | await db.prepare("run", `INSERT INTO "config" ("key", "value") VALUES(?, ?) 
ON CONFLICT ("key") DO UPDATE SET "value" = ?`, [key, value, value]); 41 | 42 | return res.sendStatus(200); 43 | } catch (e) { 44 | Logger.error(e as string); 45 | 46 | return res.sendStatus(500); 47 | } 48 | } 49 | -------------------------------------------------------------------------------- /src/routes/viewedVideoSponsorTime.ts: -------------------------------------------------------------------------------- 1 | import { db } from "../databases/databases"; 2 | import { Request, Response } from "express"; 3 | 4 | export async function viewedVideoSponsorTime(req: Request, res: Response): Promise { 5 | const UUID = req.query?.UUID; 6 | const videoID = req.query?.videoID; 7 | 8 | if (!UUID) { 9 | //invalid request 10 | return res.sendStatus(400); 11 | } 12 | 13 | if (!videoID) { 14 | await db.prepare("run", `UPDATE "sponsorTimes" SET views = views + 1 WHERE "UUID" = ?`, [UUID]); 15 | } else { 16 | await db.prepare("run", `UPDATE "sponsorTimes" SET views = views + 1 WHERE "UUID" LIKE ? AND "videoID" = ?`, [`${UUID}%`, videoID]); 17 | } 18 | 19 | return res.sendStatus(200); 20 | } 21 | -------------------------------------------------------------------------------- /src/routes/youtubeApiProxy.ts: -------------------------------------------------------------------------------- 1 | import { Request, Response } from "express"; 2 | import { YouTubeAPI } from "../utils/youtubeApi"; 3 | 4 | 5 | export async function youtubeApiProxy(req: Request, res: Response): Promise { 6 | if (req.query.key !== "8NpFUCMr2Gq4cy4UrUJPBfGBbRQudhJ8zzex8Gq44RYDywLt3UtbbfDap3KPDbcS") { 7 | return res.send("Invalid key").status(403); 8 | } 9 | 10 | const videoID = req.query.videoID; 11 | if (videoID === undefined || typeof(videoID) !== "string" || videoID.length !== 11) { 12 | return res.status(400).send("Invalid parameters"); 13 | } 14 | 15 | const result = await YouTubeAPI.listVideos(videoID); 16 | if (result.err) { 17 | return res.send("API failure").status(500); 18 | } else { 19 | return 
res.send(result.data).status(200); 20 | } 21 | } -------------------------------------------------------------------------------- /src/types/hash.model.ts: -------------------------------------------------------------------------------- 1 | export type HashedValue = string & { __hashBrand: unknown }; -------------------------------------------------------------------------------- /src/types/innerTubeApi.model.ts: -------------------------------------------------------------------------------- 1 | export interface innerTubeVideoDetails { 2 | "videoId": string, 3 | "title": string, 4 | "lengthSeconds"?: string, // yes, don't ask. 5 | "channelId": string, 6 | "isOwnerViewing": boolean, 7 | "shortDescription": string, 8 | "isCrawlable": boolean, 9 | "thumbnail": { 10 | "thumbnails": [{ 11 | "url": string, 12 | "width": number, 13 | "height": number 14 | } 15 | ] 16 | }, 17 | "allowRatings": boolean, 18 | "viewCount": string, // yes, don't ask 19 | "author": string, 20 | "isPrivate": boolean, 21 | "isUnpluggedCorpus": boolean, 22 | "isLiveContent": boolean, 23 | "publishDate": string 24 | } -------------------------------------------------------------------------------- /src/types/lib.model.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * Better record that will work with branded types 3 | * Keys still don't work properly though and are always string 4 | */ 5 | export type SBRecord = { 6 | [P in string | K]: T; 7 | }; -------------------------------------------------------------------------------- /src/types/ratings.model.ts: -------------------------------------------------------------------------------- 1 | export enum RatingType { 2 | Downvote = 0, 3 | Upvote = 1 4 | } 5 | 6 | export const RatingTypes = [RatingType.Downvote, RatingType.Upvote]; -------------------------------------------------------------------------------- /src/types/user.model.ts: 
import { HashedValue } from "./hash.model";

// Branded string type: a raw (unhashed) user identifier.
export type UserID = string & { __userIDBrand: unknown };
// A UserID that has been run through the hashing pipeline.
export type HashedUserID = UserID & HashedValue;

// Per-user capability flags.
// NOTE(review): these numeric values appear to be persisted (see addFeature
// route) — confirm before renumbering or removing entries.
export enum Feature {
    ChapterSubmitter = 0,
    FillerSubmitter = 1,
    DeArrowTitleSubmitter = 2,
}
string, 56 | "init": string, 57 | "url": string, 58 | "itag": string, 59 | "type": string, 60 | "clen": string, 61 | "lmt": string, 62 | "projectionType": number, 63 | "container": string, 64 | "encoding": string, 65 | "qualityLabel"?: string, 66 | "resolution"?: string 67 | } 68 | ], 69 | "formatStreams": [ 70 | { 71 | "url": string, 72 | "itag": string, 73 | "type": string, 74 | "quality": string, 75 | "container": string, 76 | "encoding": string, 77 | "qualityLabel": string, 78 | "resolution": string, 79 | "size": string 80 | } 81 | ], 82 | "captions": [ 83 | { 84 | "label": string, 85 | "languageCode": string, 86 | "url": string 87 | } 88 | ], 89 | "recommendedVideos": [ 90 | { 91 | "videoId": string, 92 | "title": string, 93 | "videoThumbnails": [ 94 | { 95 | "quality": string, 96 | "url": string, 97 | "width": number, 98 | "height": number 99 | } 100 | ], 101 | "author": string, 102 | "lengthSeconds": number, 103 | "viewCountText": string 104 | } 105 | ] 106 | } 107 | 108 | export interface APIVideoInfo { 109 | err: boolean | string, 110 | data?: APIVideoData 111 | } -------------------------------------------------------------------------------- /src/utils/array.ts: -------------------------------------------------------------------------------- 1 | export function shuffleArray(array: T[]): T[] { 2 | for (let i = array.length - 1; i > 0; i--) { 3 | const j = Math.floor(Math.random() * (i + 1)); 4 | [array[i], array[j]] = [array[j], array[i]]; 5 | } 6 | 7 | return array; 8 | } -------------------------------------------------------------------------------- /src/utils/checkBan.ts: -------------------------------------------------------------------------------- 1 | import { HashedUserID } from "../types/user.model"; 2 | import { db } from "../databases/databases"; 3 | import { Category, HashedIP } from "../types/segments.model"; 4 | import { banUser } from "../routes/shadowBanUser"; 5 | import { config } from "../config"; 6 | import { Logger } from "./logger"; 
7 | 8 | export async function isUserBanned(userID: HashedUserID): Promise { 9 | return (await db.prepare("get", `SELECT 1 FROM "shadowBannedUsers" WHERE "userID" = ? LIMIT 1`, [userID], { useReplica: true })) !== undefined; 10 | } 11 | 12 | export async function isIPBanned(ip: HashedIP): Promise { 13 | return (await db.prepare("get", `SELECT 1 FROM "shadowBannedIPs" WHERE "hashedIP" = ? LIMIT 1`, [ip], { useReplica: true })) !== undefined; 14 | } 15 | 16 | // NOTE: this function will propagate IP bans 17 | export async function checkBanStatus(userID: HashedUserID, ip: HashedIP): Promise { 18 | const [userBanStatus, ipBanStatus] = await Promise.all([isUserBanned(userID), isIPBanned(ip)]); 19 | 20 | if (!userBanStatus && ipBanStatus) { 21 | // Make sure the whole user is banned 22 | banUser(userID, true, true, 1, config.categoryList as Category[], config.deArrowTypes) 23 | .catch((e) => Logger.error(`Error banning user after submitting from a banned IP: ${e}`)); 24 | } 25 | return userBanStatus || ipBanStatus; 26 | } 27 | -------------------------------------------------------------------------------- /src/utils/createMemoryCache.ts: -------------------------------------------------------------------------------- 1 | export function createMemoryCache(memoryFn: (...args: any[]) => void, cacheTimeMs: number): any { 2 | /* istanbul ignore if */ 3 | if (isNaN(cacheTimeMs)) cacheTimeMs = 0; 4 | 5 | // holds the promise results 6 | const cache = new Map(); 7 | // holds the promises that are not fulfilled 8 | const promiseMemory = new Map(); 9 | return (...args: any[]) => { 10 | // create cacheKey by joining arguments as string 11 | const cacheKey = args.join("."); 12 | // check if promising is already running 13 | if (promiseMemory.has(cacheKey)) { 14 | return promiseMemory.get(cacheKey); 15 | } else { 16 | // check if result is in cache 17 | if (cache.has(cacheKey)) { 18 | const cacheItem = cache.get(cacheKey); 19 | const now = Date.now(); 20 | // check if cache is valid 
21 | if (!(cacheItem.cacheTime + cacheTimeMs < now)) { 22 | return Promise.resolve(cacheItem.result); 23 | } 24 | } 25 | // create new promise 26 | const promise = Promise.resolve(memoryFn(...args)); 27 | // store promise reference until fulfilled 28 | promiseMemory.set(cacheKey, promise); 29 | return promise.then(result => { 30 | // store promise result in cache 31 | cache.set(cacheKey, { 32 | result, 33 | cacheTime: Date.now(), 34 | }); 35 | // remove fulfilled promise from memory 36 | promiseMemory.delete(cacheKey); 37 | // return promise result 38 | return result; 39 | }); 40 | } 41 | }; 42 | } 43 | -------------------------------------------------------------------------------- /src/utils/diskCache.ts: -------------------------------------------------------------------------------- 1 | import axios, { AxiosError } from "axios"; 2 | import { Agent } from "http"; 3 | import { config } from "../config"; 4 | import { Logger } from "./logger"; 5 | 6 | const httpAgent = new Agent({ keepAlive: true }); 7 | 8 | class DiskCache { 9 | async set(key: string, value: unknown): Promise { 10 | if (!config.diskCacheURL) return false; 11 | 12 | try { 13 | const result = await axios({ 14 | method: "post", 15 | url: `${config.diskCacheURL}/api/v1/item`, 16 | data: { 17 | key, 18 | value 19 | }, 20 | httpAgent 21 | }); 22 | 23 | return result.status === 200; 24 | } catch (err) { 25 | const response = (err as AxiosError).response; 26 | if (!response || response.status !== 404) { 27 | Logger.error(`DiskCache: Error setting key ${key}: ${err}`); 28 | } 29 | 30 | return false; 31 | } 32 | } 33 | 34 | async get(key: string): Promise { 35 | if (!config.diskCacheURL) return null; 36 | 37 | try { 38 | const result = await axios.get(`${config.diskCacheURL}/api/v1/item?key=${key}`, { timeout: 500 }); 39 | 40 | return result.status === 200 ? 
result.data : null; 41 | } catch (err) { 42 | const response = (err as AxiosError).response; 43 | if (!response || response.status !== 404) { 44 | Logger.error(`DiskCache: Error getting key ${key}: ${err}`); 45 | } 46 | 47 | return null; 48 | } 49 | } 50 | } 51 | 52 | const diskCache = new DiskCache(); 53 | export default diskCache; -------------------------------------------------------------------------------- /src/utils/features.ts: -------------------------------------------------------------------------------- 1 | import { db } from "../databases/databases"; 2 | import { Feature, HashedUserID } from "../types/user.model"; 3 | import { QueryCacher } from "./queryCacher"; 4 | import { userFeatureKey } from "./redisKeys"; 5 | 6 | export async function hasFeature(userID: HashedUserID, feature: Feature): Promise { 7 | return await QueryCacher.get(async () => { 8 | const result = await db.prepare("get", 'SELECT "feature" from "userFeatures" WHERE "userID" = ? AND "feature" = ?', [userID, feature], { useReplica: true }); 9 | return !!result; 10 | }, userFeatureKey(userID, feature)); 11 | } -------------------------------------------------------------------------------- /src/utils/getCWSUsers.ts: -------------------------------------------------------------------------------- 1 | import axios from "axios"; 2 | import { Logger } from "../utils/logger"; 3 | 4 | export const getCWSUsers = (extID: string): Promise => 5 | 6 | axios.post(`https://chrome.google.com/webstore/ajax/detail?pv=20210820&id=${extID}`) 7 | .then(res => res.data.split("\n")[2]) 8 | .then(data => JSON.parse(data)) 9 | .then(data => (data[1][1][0][23]).replaceAll(/,|\+/g,"")) 10 | .then(data => parseInt(data)) 11 | .catch((err) => { 12 | Logger.error(`Error getting chrome users - ${err}`); 13 | return 0; 14 | }); 15 | 16 | /* istanbul ignore next */ 17 | export function getChromeUsers(chromeExtensionUrl: string): Promise { 18 | return axios.get(chromeExtensionUrl) 19 | .then(res => { 20 | const body = 
res.data; 21 | // 2024-02-09 22 | // >20,000 users< 23 | const match = body.match(/>([\d,]+) users { 29 | Logger.debug(`Failing to connect to ${chromeExtensionUrl}`); 30 | return 0; 31 | }); 32 | } -------------------------------------------------------------------------------- /src/utils/getCommit.ts: -------------------------------------------------------------------------------- 1 | import { execSync } from "child_process"; 2 | const gitCommand = "git rev-parse HEAD"; 3 | 4 | export const getCommit = ():string => execSync(gitCommand).toString().trim(); -------------------------------------------------------------------------------- /src/utils/getFormattedTime.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * Converts time in seconds to minutes:seconds 3 | */ 4 | export function getFormattedTime(totalSeconds: number): string { 5 | const minutes = Math.floor(totalSeconds / 60); 6 | const seconds = totalSeconds - minutes * 60; 7 | let secondsDisplay = seconds.toFixed(3); 8 | if (seconds < 10) { 9 | //add a zero 10 | secondsDisplay = `0${secondsDisplay}`; 11 | } 12 | 13 | return `${minutes}:${secondsDisplay}`; 14 | } 15 | -------------------------------------------------------------------------------- /src/utils/getHash.ts: -------------------------------------------------------------------------------- 1 | import crypto from "crypto"; 2 | import { HashedValue } from "../types/hash.model"; 3 | 4 | export function getHash(value: T, times = 5000): T & HashedValue { 5 | if (times <= 0) return "" as T & HashedValue; 6 | 7 | for (let i = 0; i < times; i++) { 8 | const hashCreator = crypto.createHash("sha256"); 9 | value = hashCreator.update(value).digest("hex") as T; 10 | } 11 | 12 | return value as T & HashedValue; 13 | } -------------------------------------------------------------------------------- /src/utils/getHashCache.ts: -------------------------------------------------------------------------------- 1 | import 
redis from "../utils/redis"; 2 | import { shaHashKey } from "../utils/redisKeys"; 3 | import { HashedValue } from "../types/hash.model"; 4 | import { Logger } from "../utils/logger"; 5 | import { getHash } from "../utils/getHash"; 6 | import { config } from "../config"; 7 | 8 | const defaultedHashTimes = 5000; 9 | const cachedHashTimes = defaultedHashTimes - 1; 10 | 11 | export async function getHashCache(value: T, times = defaultedHashTimes): Promise { 12 | if (times === defaultedHashTimes) { 13 | const hashKey = getHash(value, 1); 14 | const result: HashedValue = await getFromRedis(hashKey); 15 | return result as T & HashedValue; 16 | } 17 | return getHash(value, times); 18 | } 19 | 20 | async function getFromRedis(key: HashedValue): Promise { 21 | const redisKey = shaHashKey(key); 22 | 23 | if (!config.redis?.disableHashCache) { 24 | try { 25 | const reply = await redis.get(redisKey); 26 | 27 | if (reply) { 28 | Logger.debug(`Got data from redis: ${reply}`); 29 | return reply as T & HashedValue; 30 | } 31 | } catch (err) /* istanbul ignore next */ { 32 | Logger.error(err as string); 33 | } 34 | } 35 | 36 | // Otherwise, calculate it 37 | const data = getHash(key, cachedHashTimes); 38 | 39 | if (!config.redis?.disableHashCache) { 40 | redis.set(redisKey, data).catch(/* istanbul ignore next */ (err) => Logger.error(err)); 41 | } 42 | 43 | return data as T & HashedValue; 44 | } -------------------------------------------------------------------------------- /src/utils/getIP.ts: -------------------------------------------------------------------------------- 1 | import { config } from "../config"; 2 | import { Request } from "express"; 3 | import { IPAddress } from "../types/segments.model"; 4 | 5 | export function getIP(req: Request): IPAddress { 6 | // if in testing mode, return immediately 7 | if (config.mode === "test") return "127.0.0.1" as IPAddress; 8 | 9 | if (config.behindProxy === true || config.behindProxy === "true") { 10 | config.behindProxy = 
"X-Forwarded-For"; 11 | } 12 | 13 | switch (config.behindProxy as string) { 14 | case "X-Forwarded-For": 15 | return req.headers["x-forwarded-for"] as IPAddress; 16 | case "Cloudflare": 17 | return req.headers["cf-connecting-ip"] as IPAddress; 18 | case "X-Real-IP": 19 | return req.headers["x-real-ip"] as IPAddress; 20 | default: 21 | return req.socket?.remoteAddress as IPAddress; 22 | } 23 | } -------------------------------------------------------------------------------- /src/utils/getService.ts: -------------------------------------------------------------------------------- 1 | import { Service } from "../types/segments.model"; 2 | 3 | export function getService(...value: T[]): Service { 4 | const serviceByName = Object.values(Service).reduce((acc, serviceName) => { 5 | acc[serviceName.toLowerCase()] = serviceName; 6 | 7 | return acc; 8 | }, {} as Record); 9 | 10 | for (const name of value) { 11 | if (name?.trim().toLowerCase() in serviceByName) { 12 | return serviceByName[name.trim().toLowerCase()]; 13 | } 14 | } 15 | 16 | return Service.YouTube; 17 | } 18 | -------------------------------------------------------------------------------- /src/utils/getSubmissionUUID.ts: -------------------------------------------------------------------------------- 1 | import { getHash } from "./getHash"; 2 | import { HashedValue } from "../types/hash.model"; 3 | import { ActionType, VideoID, Service, Category } from "../types/segments.model"; 4 | import { HashedUserID } from "../types/user.model"; 5 | 6 | export function getSubmissionUUID( 7 | videoID: VideoID, 8 | category: Category, 9 | actionType: ActionType, 10 | description: string, 11 | userID: HashedUserID, 12 | startTime: number, 13 | endTime: number, 14 | service: Service 15 | ) : HashedValue { 16 | return `${getHash(`${videoID}${startTime}${endTime}${userID}${description}${category}${actionType}${service}`, 1)}7` as HashedValue; 17 | } 18 | 
-------------------------------------------------------------------------------- /src/utils/getVideoDetails.ts: -------------------------------------------------------------------------------- 1 | import { config } from "../config"; 2 | import { innerTubeVideoDetails } from "../types/innerTubeApi.model"; 3 | import { APIVideoData } from "../types/youtubeApi.model"; 4 | import { YouTubeAPI } from "../utils/youtubeApi"; 5 | import { getPlayerData } from "../utils/innerTubeAPI"; 6 | 7 | export interface videoDetails { 8 | videoId: string, 9 | duration: number, 10 | authorId: string, 11 | authorName: string, 12 | title: string, 13 | published: number, 14 | thumbnails: { 15 | url: string, 16 | width: number, 17 | height: number, 18 | }[] 19 | } 20 | 21 | const convertFromInnerTube = (input: innerTubeVideoDetails): videoDetails => ({ 22 | videoId: input.videoId, 23 | duration: Number(input.lengthSeconds), 24 | authorId: input.channelId, 25 | authorName: input.author, 26 | title: input.title, 27 | published: new Date(input.publishDate).getTime()/1000, 28 | thumbnails: input.thumbnail.thumbnails 29 | }); 30 | 31 | const convertFromNewLeaf = (input: APIVideoData): videoDetails => ({ 32 | videoId: input.videoId, 33 | duration: input.lengthSeconds, 34 | authorId: input.authorId, 35 | authorName: input.author, 36 | title: input.title, 37 | published: input.published, 38 | thumbnails: input.videoThumbnails 39 | }); 40 | 41 | async function newLeafWrapper(videoId: string, ignoreCache: boolean) { 42 | const result = await YouTubeAPI.listVideos(videoId, ignoreCache); 43 | return result?.data ?? 
Promise.reject(); 44 | } 45 | 46 | export function getVideoDetails(videoId: string, ignoreCache = false): Promise { 47 | if (!config.newLeafURLs) { 48 | return getPlayerData(videoId, ignoreCache) 49 | .then(data => convertFromInnerTube(data)) 50 | .catch(() => { 51 | return null; 52 | }); 53 | } 54 | return Promise.any([ 55 | newLeafWrapper(videoId, ignoreCache) 56 | .then(videoData => convertFromNewLeaf(videoData)), 57 | getPlayerData(videoId, ignoreCache) 58 | .then(data => convertFromInnerTube(data)) 59 | ]).catch(() => { 60 | return null; 61 | }); 62 | } -------------------------------------------------------------------------------- /src/utils/hashPrefixTester.ts: -------------------------------------------------------------------------------- 1 | import { config } from "../config"; 2 | 3 | const minimumPrefix = config.minimumPrefix || "3"; 4 | const maximumPrefix = config.maximumPrefix || "32"; // Half the hash. 5 | 6 | const prefixChecker = new RegExp(`^[\\da-f]{${minimumPrefix},${maximumPrefix}}$`, "i"); 7 | 8 | export function hashPrefixTester(prefix: string): boolean { 9 | return prefixChecker.test(prefix); 10 | } 11 | -------------------------------------------------------------------------------- /src/utils/isUserTempVIP.ts: -------------------------------------------------------------------------------- 1 | import redis from "../utils/redis"; 2 | import { tempVIPKey } from "../utils/redisKeys"; 3 | import { HashedUserID } from "../types/user.model"; 4 | import { VideoID } from "../types/segments.model"; 5 | import { Logger } from "./logger"; 6 | import { getVideoDetails } from "./getVideoDetails"; 7 | 8 | export const isUserTempVIP = async (hashedUserID: HashedUserID, videoID: VideoID): Promise => { 9 | const apiVideoDetails = await getVideoDetails(videoID); 10 | const channelID = apiVideoDetails?.authorId; 11 | try { 12 | const reply = await redis.get(tempVIPKey(hashedUserID)); 13 | return reply && reply == channelID; 14 | } catch (e) /* istanbul 
ignore next */ { 15 | Logger.error(e as string); 16 | return false; 17 | } 18 | }; -------------------------------------------------------------------------------- /src/utils/isUserVIP.ts: -------------------------------------------------------------------------------- 1 | import { db } from "../databases/databases"; 2 | import { HashedUserID } from "../types/user.model"; 3 | 4 | export async function isUserVIP(userID: HashedUserID): Promise { 5 | return (await db.prepare("get", `SELECT count(*) as "userCount" FROM "vipUsers" WHERE "userID" = ? LIMIT 1`, 6 | [userID]))?.userCount > 0; 7 | } 8 | -------------------------------------------------------------------------------- /src/utils/logger.ts: -------------------------------------------------------------------------------- 1 | import { config } from "../config"; 2 | 3 | const enum LogLevel { 4 | ERROR = "ERROR", 5 | WARN = "WARN", 6 | INFO = "INFO", 7 | DEBUG = "DEBUG" 8 | } 9 | 10 | const colors = { 11 | Reset: "\x1b[0m", 12 | Bright: "\x1b[1m", 13 | Dim: "\x1b[2m", 14 | Underscore: "\x1b[4m", 15 | Blink: "\x1b[5m", 16 | Reverse: "\x1b[7m", 17 | Hidden: "\x1b[8m", 18 | 19 | FgBlack: "\x1b[30m", 20 | FgRed: "\x1b[31m", 21 | FgGreen: "\x1b[32m", 22 | FgYellow: "\x1b[33m", 23 | FgBlue: "\x1b[34m", 24 | FgMagenta: "\x1b[35m", 25 | FgCyan: "\x1b[36m", 26 | FgWhite: "\x1b[37m", 27 | 28 | BgBlack: "\x1b[40m", 29 | BgRed: "\x1b[41m", 30 | BgGreen: "\x1b[42m", 31 | BgYellow: "\x1b[43m", 32 | BgBlue: "\x1b[44m", 33 | BgMagenta: "\x1b[45m", 34 | BgCyan: "\x1b[46m", 35 | BgWhite: "\x1b[47m", 36 | }; 37 | 38 | 39 | class Logger { 40 | private _settings = { 41 | ERROR: true, 42 | WARN: true, 43 | INFO: false, 44 | DEBUG: false, 45 | }; 46 | 47 | constructor() { 48 | /* istanbul ignore if */ 49 | if (config.mode === "development") { 50 | this._settings.INFO = true; 51 | this._settings.DEBUG = true; 52 | } else if (config.mode === "test") { 53 | this._settings.WARN = false; 54 | } 55 | } 56 | 57 | error(str: string) { 58 | 
    /** Logs at WARN level (yellow). */
    warn(str: string) {
        this.log(LogLevel.WARN, str);
    }
    /** Logs at INFO level (enabled only in development mode). */
    info(str: string) {
        this.log(LogLevel.INFO, str);
    }
    /** Logs at DEBUG level (enabled only in development mode). */
    debug(str: string) {
        this.log(LogLevel.DEBUG, str);
    }

    /**
     * Core sink shared by every level helper: filters by the per-level
     * enable flags, picks a color, aligns the level tag, and writes a
     * timestamped line to the console.
     */
    private log(level: LogLevel, str: string) {
        if (!this._settings[level]) {
            return;
        }

        let color = colors.Bright;
        if (level === LogLevel.ERROR) color = colors.FgRed;
        /* istanbul ignore if */
        if (level === LogLevel.WARN) color = colors.FgYellow;

        let levelStr = level.toString();
        /* istanbul ignore if */
        if (levelStr.length === 4) {
            levelStr += " "; // pad 4-char tags (WARN/INFO) so columns align
        }
        // eslint-disable-next-line no-console
        console.log(colors.Dim, `${levelStr} ${new Date().toISOString()}: `, color, str, colors.Reset);
    }
req.query.trimUUIDs ? (parseInt(req.query.trimUUIDs as string) || null) : null; 20 | const requiredSegments: SegmentUUID[] = parseRequiredSegments(req); 21 | const service: Service = getService(req.query.service, req.body.services); 22 | const errors: string[] = []; 23 | if (!Array.isArray(categories)) errors.push(errorMessage("categories")); 24 | else if (categories.length === 0) errors.push("No valid categories provided."); 25 | 26 | if (!Array.isArray(actionTypes)) errors.push(errorMessage("actionTypes")); 27 | if (!Array.isArray(requiredSegments)) errors.push(errorMessage("requiredSegments")); 28 | // finished parsing 29 | return { 30 | categories, 31 | actionTypes, 32 | trimUUIDs, 33 | requiredSegments, 34 | service, 35 | errors 36 | }; 37 | } -------------------------------------------------------------------------------- /src/utils/promise.ts: -------------------------------------------------------------------------------- 1 | import { Logger } from "./logger"; 2 | 3 | export class PromiseTimeoutError extends Error { 4 | promise?: Promise; 5 | 6 | constructor(promise?: Promise) { 7 | super("Promise timed out"); 8 | 9 | this.promise = promise; 10 | } 11 | } 12 | 13 | export interface PromiseWithState extends Promise { 14 | isResolved: boolean; 15 | isRejected: boolean; 16 | } 17 | 18 | export function promiseOrTimeout(promise: Promise, timeout?: number): Promise { 19 | return Promise.race([timeoutPomise(timeout), promise]); 20 | } 21 | 22 | export function timeoutPomise(timeout?: number): Promise { 23 | return new Promise((resolve, reject) => { 24 | if (timeout) { 25 | setTimeout(() => { 26 | reject(new PromiseTimeoutError()); 27 | }, timeout); 28 | } 29 | }); 30 | } 31 | 32 | export function savePromiseState(promise: Promise): PromiseWithState { 33 | const p = promise as PromiseWithState; 34 | p.isResolved = false; 35 | p.isRejected = false; 36 | 37 | p.then(() => { 38 | p.isResolved = true; 39 | }).catch(() => { 40 | p.isRejected = true; 41 | }); 42 | 43 | 
return p; 44 | } 45 | 46 | /** 47 | * Allows rejection or resolve 48 | * Allows past resolves too, but not past rejections 49 | */ 50 | export function nextFulfilment(promises: PromiseWithState[]): Promise { 51 | return Promise.race(promises.filter((p) => !p.isRejected)); 52 | } 53 | 54 | export function oneOf(promises: Promise[]): Promise { 55 | return new Promise((resolve, reject) => { 56 | let fulfilments = 0; 57 | for (const promise of promises) { 58 | promise.then((result) => { 59 | fulfilments++; 60 | 61 | if (result || fulfilments === promises.length) { 62 | resolve(result); 63 | } 64 | }).catch((err) => { 65 | fulfilments++; 66 | 67 | if (fulfilments === promises.length) { 68 | reject(err); 69 | } else { 70 | Logger.error(`oneOf ignore error (promise): ${err}`); 71 | } 72 | }); 73 | } 74 | }); 75 | } -------------------------------------------------------------------------------- /src/utils/redisLock.ts: -------------------------------------------------------------------------------- 1 | import { config } from "../config"; 2 | import redis from "../utils/redis"; 3 | import { Logger } from "./logger"; 4 | 5 | const defaultTimeout = 20000; 6 | 7 | export type AcquiredLock = { 8 | status: false 9 | } | { 10 | status: true; 11 | unlock: () => void; 12 | }; 13 | 14 | export async function acquireLock(key: string, timeout = defaultTimeout): Promise { 15 | if (!config.redis?.enabled) { 16 | return { 17 | status: true, 18 | unlock: () => void 0 19 | }; 20 | } 21 | 22 | try { 23 | const result = await redis.set(key, "1", { 24 | PX: timeout, 25 | NX: true 26 | }); 27 | 28 | if (result) { 29 | return { 30 | status: true, 31 | unlock: () => void redis.del(key).catch((err) => Logger.error(err)) 32 | }; 33 | } else { 34 | return { 35 | status: false 36 | }; 37 | } 38 | } catch (e) { 39 | Logger.error(e as string); 40 | 41 | // Fallback to allowing 42 | return { 43 | status: true, 44 | unlock: () => void 0 45 | }; 46 | } 47 | 48 | return { 49 | status: false 50 | }; 51 | 
} -------------------------------------------------------------------------------- /src/utils/serverConfig.ts: -------------------------------------------------------------------------------- 1 | import { db } from "../databases/databases"; 2 | 3 | export async function getServerConfig(key: string): Promise { 4 | const row = await db.prepare("get", `SELECT "value" as v FROM "config" WHERE "key" = ?`, [key]); 5 | 6 | return row?.v ?? null; 7 | } -------------------------------------------------------------------------------- /src/utils/userAgent.ts: -------------------------------------------------------------------------------- 1 | export function parseUserAgent(userAgent: string): string { 2 | const ua = userAgent; 3 | 4 | const revanced = ua.match(/RVX\S+|ReVanced\S+/i); 5 | if (revanced) { 6 | return revanced[0]; 7 | } 8 | 9 | if (ua.match(/(com.google.android.youtube\/)|(com.vanced.android.youtube\/)|(^YouTube\/)|(^Dalvik\/)/i)) { 10 | return `Vanced/${ua.match(/.android.youtube\/([^\s]+)/i)?.[1]}`; 11 | } 12 | 13 | if (ua.match(/(mpv_sponsorblock)|(^python-requests)|(^GuzzleHttp\/)|(^PostmanRuntime\/)/i)) { 14 | return ua; 15 | } 16 | 17 | return ""; 18 | } -------------------------------------------------------------------------------- /src/utils/youtubeApi.ts: -------------------------------------------------------------------------------- 1 | import { config } from "../config"; 2 | import { Logger } from "./logger"; 3 | import { APIVideoData, APIVideoInfo } from "../types/youtubeApi.model"; 4 | import DiskCache from "./diskCache"; 5 | import axios from "axios"; 6 | 7 | export class YouTubeAPI { 8 | static async listVideos(videoID: string, ignoreCache = false): Promise { 9 | if (!videoID || videoID.length !== 11 || videoID.includes(".")) { 10 | return { err: "Invalid video ID" }; 11 | } 12 | 13 | const cacheKey = `yt.newleaf.video.${videoID}`; 14 | if (!ignoreCache) { 15 | try { 16 | const data = await DiskCache.get(cacheKey); 17 | 18 | if (data) { 19 | 
Logger.debug(`YouTube API: cache used for video information: ${videoID}`); 20 | return { err: null, data: data as APIVideoData }; 21 | } 22 | } catch (err) { 23 | return { err: err as string | boolean, data: null }; 24 | } 25 | } 26 | 27 | if (!config.newLeafURLs || config.newLeafURLs.length <= 0) return { err: "NewLeaf URL not found", data: null }; 28 | 29 | try { 30 | const result = await axios.get(`${config.newLeafURLs[Math.floor(Math.random() * config.newLeafURLs.length)]}/api/v1/videos/${videoID}`, { 31 | timeout: 3500 32 | }); 33 | 34 | if (result.status === 200) { 35 | const data = result.data; 36 | if (data.error) { 37 | Logger.warn(`NewLeaf API Error for ${videoID}: ${data.error}`); 38 | return { err: data.error, data: null }; 39 | } 40 | const apiResult = data as APIVideoData; 41 | DiskCache.set(cacheKey, apiResult) 42 | .then(() => Logger.debug(`YouTube API: video information cache set for: ${videoID}`)) 43 | .catch((err: any) => Logger.warn(err)); 44 | 45 | return { err: false, data: apiResult }; 46 | } else { 47 | return { err: result.statusText, data: null }; 48 | } 49 | } catch (err) { 50 | return { err: err as string | boolean, data: null }; 51 | } 52 | } 53 | } 54 | 55 | export const getMaxResThumbnail = (videoID: string): string => 56 | `https://dearrow-thumb.ajay.app/api/v1/getThumbnail?videoID=${videoID}&redirectUrl=https://i.ytimg.com/vi/${videoID}/maxresdefault.jpg`; -------------------------------------------------------------------------------- /src/utils/youtubeID.ts: -------------------------------------------------------------------------------- 1 | import { VideoID } from "../types/segments.model"; 2 | 3 | const idRegex = new RegExp(/([0-9A-Za-z_-]{11})/); // group to always be index 1 4 | const exclusiveIdegex = new RegExp(`^${idRegex.source}$`); 5 | // match /c/, /channel/, /@channel, full UUIDs 6 | const negativeRegex = new RegExp(/(\/(channel|c)\/.+)|(\/@.+)|([a-f0-9]{64,65})|(youtube\.com\/clip\/)/); 7 | const urlRegex = new 
RegExp(`(?:v=|/|youtu.be/)${idRegex.source}(?:|/|[?&]t=\\d+s?)>?(?:\\s|$)`); 8 | const negateIdRegex = new RegExp(/(?:[^0-9A-Za-z_-]*?)/); 9 | const looseEndsRegex = new RegExp(`${negateIdRegex.source}${idRegex.source}${negateIdRegex.source}`); 10 | 11 | export const validate = (id: string): boolean => exclusiveIdegex.test(id); 12 | 13 | export const sanitize = (id: string): VideoID | null => { 14 | // first decode URI 15 | id = decodeURIComponent(id); 16 | // strict matching 17 | const strictMatch = id.match(exclusiveIdegex)?.[1]; 18 | const urlMatch = id.match(urlRegex)?.[1]; 19 | // return match, if not negative, return looseMatch 20 | const looseMatch = id.match(looseEndsRegex)?.[1]; 21 | return strictMatch ? (strictMatch as VideoID) 22 | : negativeRegex.test(id) ? null 23 | : urlMatch ? (urlMatch as VideoID) 24 | : looseMatch ? (looseMatch as VideoID) 25 | : null; 26 | }; -------------------------------------------------------------------------------- /test.json: -------------------------------------------------------------------------------- 1 | { 2 | "port": 8080, 3 | "mockPort": 8081, 4 | "globalSalt": "testSalt", 5 | "adminUserID": "4bdfdc9cddf2c7d07a8a87b57bf6d25389fb75d1399674ee0e0938a6a60f4c3b", 6 | "newLeafURLs": ["placeholder"], 7 | "discordReportChannelWebhookURL": "http://127.0.0.1:8081/webhook/ReportChannel", 8 | "discordFirstTimeSubmissionsWebhookURL": "http://127.0.0.1:8081/webhook/FirstTimeSubmissions", 9 | "discordCompletelyIncorrectReportWebhookURL": "http://127.0.0.1:8081/webhook/CompletelyIncorrectReport", 10 | "discordNeuralBlockRejectWebhookURL": "http://127.0.0.1:8081/webhook/NeuralBlockReject", 11 | "neuralBlockURL": "http://127.0.0.1:8081/NeuralBlock", 12 | "userCounterURL": "http://127.0.0.1:8081/UserCounter", 13 | "behindProxy": true, 14 | "db": ":memory:", 15 | "privateDB": ":memory:", 16 | "createDatabaseIfNotExist": true, 17 | "schemaFolder": "./databases", 18 | "dbSchema": "./databases/_sponsorTimes.db.sql", 19 | 
"privateDBSchema": "./databases/_private.db.sql", 20 | "categoryList": ["sponsor", "selfpromo", "exclusive_access", "interaction", "intro", "outro", "preview", "music_offtopic", "filler", "poi_highlight", "chapter"], 21 | "mode": "test", 22 | "readOnly": false, 23 | "webhooks": [ 24 | { 25 | "url": "http://127.0.0.1:8081/CustomWebhook", 26 | "key": "superSecretKey", 27 | "scopes": [ 28 | "vote.up", 29 | "vote.down" 30 | ] 31 | }, { 32 | "url": "http://127.0.0.1:8081/FailedWebhook", 33 | "key": "superSecretKey", 34 | "scopes": [ 35 | "vote.up", 36 | "vote.down" 37 | ] 38 | }, { 39 | "url": "http://127.0.0.1:8099/WrongPort", 40 | "key": "superSecretKey", 41 | "scopes": [ 42 | "vote.up", 43 | "vote.down" 44 | ] 45 | }, { 46 | "url": "http://127.0.0.1:8081/WarningWebhook", 47 | "key": "superSecretKey", 48 | "scopes": [ 49 | "warning" 50 | ] 51 | } 52 | ], 53 | "hoursAfterWarningExpires": 24, 54 | "rateLimit": { 55 | "vote": { 56 | "windowMs": 900000, 57 | "max": 20, 58 | "message": "Too many votes, please try again later", 59 | "statusCode": 429 60 | }, 61 | "view": { 62 | "windowMs": 900000, 63 | "max": 20, 64 | "statusCode": 200 65 | } 66 | }, 67 | "patreon": { 68 | "clientId": "testClientID", 69 | "clientSecret": "testClientSecret", 70 | "redirectUri": "http://127.0.0.1/fake/callback" 71 | }, 72 | "minReputationToSubmitFiller": -1, 73 | "minUserIDLength": 0 74 | } 75 | -------------------------------------------------------------------------------- /test/case_boilerplate.txt: -------------------------------------------------------------------------------- 1 | import { db } from "../../src/databases/databases"; 2 | import assert from "assert"; 3 | import { client } from "../utils/httpClient"; 4 | import { genUsers, User } from "../utils/genUser"; 5 | import { insertSegment, insertVip } from "../utils/queryGen"; 6 | 7 | const endpoint = "/api/endpoint"; 8 | 9 | const postTestEndpoint = () => client({ 10 | method: "POST", 11 | url: endpoint, 12 | data: { 13 | } 14 | 
}); 15 | 16 | const cases = [ 17 | "firstCase", 18 | "secondCase", 19 | "thirdCase" 20 | ]; 21 | const users = genUsers("endpoint", cases); 22 | const vipUser = genUser("endpoint", "vip"); -------------------------------------------------------------------------------- /test/cases/addFeatures.ts: -------------------------------------------------------------------------------- 1 | import assert from "assert"; 2 | import { db } from "../../src/databases/databases"; 3 | import { Feature } from "../../src/types/user.model"; 4 | import { hasFeature } from "../../src/utils/features"; 5 | import { client } from "../utils/httpClient"; 6 | import { grantFeature, insertVip } from "../utils/queryGen"; 7 | import { User, genUser, genUsers } from "../utils/genUser"; 8 | 9 | const endpoint = "/api/feature"; 10 | 11 | const postAddFeatures = (userID: string, adminUserID: string, feature: Feature, enabled: boolean) => client({ 12 | method: "POST", 13 | url: endpoint, 14 | data: { 15 | userID, 16 | feature, 17 | enabled: String(enabled), 18 | adminUserID 19 | } 20 | }); 21 | 22 | const cases = [ 23 | "grant", 24 | "remove", 25 | "update" 26 | ]; 27 | const users = genUsers("addFeatures", cases); 28 | const vipUser = genUser("addFeatures", "vip"); 29 | 30 | const testedFeature = Feature.ChapterSubmitter; 31 | const validFeatures = [testedFeature]; 32 | 33 | const updateValidateFeature = (user: User, feature: Feature, grant: boolean, issuer: User): Promise => 34 | postAddFeatures(user.pubID, issuer.privID, feature, grant) 35 | .then(res => assert.strictEqual(res.status, 200)) // ensure request was successful 36 | .then(() => hasFeature(user.pubID, feature)) 37 | .then(result => assert.strictEqual(result, grant)); // ensure user has new feature 38 | 39 | describe("addFeatures", () => { 40 | before(async () => { 41 | await insertVip(db, vipUser.pubID); 42 | await grantFeature(db, users["remove"].pubID, testedFeature, vipUser.pubID); 43 | await grantFeature(db, users["update"].pubID, 
testedFeature, vipUser.pubID); 44 | }); 45 | 46 | it("can add features", (done) => { 47 | for (const feature of validFeatures) { 48 | updateValidateFeature(users["grant"], feature, true, vipUser) 49 | .catch(err => done(err)); 50 | } 51 | done(); 52 | }); 53 | 54 | it("can remove features", () => updateValidateFeature(users["remove"], testedFeature, false, vipUser)); 55 | 56 | it("can update features", () => updateValidateFeature(users["update"], testedFeature, true, vipUser)); 57 | }); -------------------------------------------------------------------------------- /test/cases/dbUpgrade.ts: -------------------------------------------------------------------------------- 1 | import assert from "assert"; 2 | 3 | import { db, privateDB } from "../../src/databases/databases"; 4 | 5 | describe("dbUpgrade", () => { 6 | it("Should update the database version when starting the application", async () => { 7 | const dbVersion = (await db.prepare("get", "SELECT key, value FROM config where key = ?", ["version"])).value; 8 | const privateVersion = (await privateDB.prepare("get", "SELECT key, value FROM config where key = ?", ["version"])).value; 9 | 10 | assert( 11 | dbVersion >= 1 && privateVersion >= 1, 12 | `Versions are not at least 1. 
db is ${dbVersion}, private is ${privateVersion}`); 13 | }); 14 | }); 15 | -------------------------------------------------------------------------------- /test/cases/eTag.ts: -------------------------------------------------------------------------------- 1 | import assert from "assert"; 2 | import { client } from "../utils/httpClient"; 3 | import redis from "../../src/utils/redis"; 4 | import { config } from "../../src/config"; 5 | import { genRandom } from "../utils/getRandom"; 6 | 7 | const validateEtag = (expected: string, actual: string): boolean => { 8 | const [actualHashType, actualHashKey, actualService] = actual.split(";"); 9 | const [expectedHashType, expectedHashKey, expectedService] = expected.split(";"); 10 | return (actualHashType === expectedHashType) && (actualHashKey === expectedHashKey) && (actualService === expectedService); 11 | }; 12 | 13 | describe("eTag", () => { 14 | before(function() { 15 | if (!config.redis?.enabled) this.skip(); 16 | }); 17 | 18 | const endpoint = "/etag"; 19 | it("Should reject weak etag", () => { 20 | const etagKey = `W/test-etag-${genRandom()}`; 21 | return client.get(endpoint, { headers: { "If-None-Match": etagKey } }) 22 | .then(res => assert.strictEqual(res.status, 404)); 23 | }); 24 | }); 25 | 26 | describe("304 etag validation", () => { 27 | before(function() { 28 | if (!config.redis?.enabled) this.skip(); 29 | }); 30 | 31 | const endpoint = "/etag"; 32 | for (const hashType of ["skipSegments", "skipSegmentsHash", "videoLabel", "videoLabelHash"]) { 33 | it(`${hashType} etag should return 304`, () => { 34 | const etagKey = `"${hashType};${genRandom};YouTube;${Date.now()}"`; 35 | return redis.setEx(etagKey, 8400, "test").then(() => 36 | client.get(endpoint, { headers: { "If-None-Match": etagKey } }).then(res => { 37 | assert.strictEqual(res.status, 304); 38 | const etag = res.headers?.etag ?? 
""; 39 | assert.ok(validateEtag(etagKey, etag)); 40 | }) 41 | ); 42 | }); 43 | } 44 | 45 | it(`other etag type should not return 304`, () => { 46 | const etagKey = `"invalidHashType;${genRandom};YouTube;${Date.now()}"`; 47 | return client.get(endpoint, { headers: { "If-None-Match": etagKey } }).then(res => { 48 | assert.strictEqual(res.status, 404); 49 | }); 50 | }); 51 | 52 | it(`outdated etag type should not return 304`, () => { 53 | const etagKey = `"skipSegments;${genRandom};YouTube;5000"`; 54 | return client.get(endpoint, { headers: { "If-None-Match": etagKey } }).then(res => { 55 | assert.strictEqual(res.status, 404); 56 | }); 57 | }); 58 | }); -------------------------------------------------------------------------------- /test/cases/environment.ts: -------------------------------------------------------------------------------- 1 | import assert from "assert"; 2 | import { config } from "../../src/config"; 3 | 4 | describe("environment", () => { 5 | it("minUserIDLength should be < 10", () => { 6 | assert(config.minUserIDLength < 10); 7 | }); 8 | it("nodeJS major version should be >= 16", () => { 9 | const [major] = process.versions.node.split(".").map(i => parseInt(i)); 10 | assert(major >= 16); 11 | }); 12 | }); 13 | -------------------------------------------------------------------------------- /test/cases/getChapterNames.ts: -------------------------------------------------------------------------------- 1 | import assert from "assert"; 2 | import { db } from "../../src/databases/databases"; 3 | import { Postgres } from "../../src/databases/Postgres"; 4 | import { client } from "../utils/httpClient"; 5 | import { partialDeepEquals } from "../utils/partialDeepEquals"; 6 | import { insertChapter } from "../utils/segmentQueryGen"; 7 | import { genRandomValue } from "../utils/getRandom"; 8 | import { insertVideoInfo } from "../utils/queryGen"; 9 | 10 | describe("getChapterNames", function () { 11 | const endpoint = "/api/chapterNames"; 12 | 13 | const 
chapterNamesVid1 = genRandomValue("video", "getChapterNames"); 14 | const chapterChannelID = genRandomValue("channel", "getChapterNames"); 15 | const chapterNames = [ 16 | "Weird name", 17 | "A different one", 18 | "Something else", 19 | "Weirder name", 20 | ]; 21 | 22 | const nameSearch = (query: string, expected: string | null, expectedResults: number): Promise => { 23 | const expectedData = [{ 24 | description: expected 25 | }]; 26 | return client.get(`${endpoint}?description=${query}&channelID=${chapterChannelID}`) 27 | .then(res => { 28 | assert.strictEqual(res.status, expectedResults == 0 ? 404 : 200); 29 | assert.strictEqual(res.data.length, expectedResults); 30 | if (expected != null) assert.ok(partialDeepEquals(res.data, expectedData)); 31 | }); 32 | }; 33 | 34 | before(async function() { 35 | if (!(db instanceof Postgres)) this.skip(); // only works with Postgres 36 | await insertChapter(db, chapterNames[0], { videoID: chapterNamesVid1, startTime: 60, endTime: 80 }); 37 | await insertChapter(db, chapterNames[1], { videoID: chapterNamesVid1, startTime: 70, endTime: 75 }); 38 | await insertChapter(db, chapterNames[2], { videoID: chapterNamesVid1, startTime: 71, endTime: 76 }); 39 | await insertChapter(db, chapterNames[3], { videoID: chapterNamesVid1, startTime: 72, endTime: 77 }); 40 | 41 | await insertVideoInfo(db, chapterNamesVid1, chapterChannelID); 42 | }); 43 | 44 | it("Search for 'weird' (2 results)", () => nameSearch("weird", chapterNames[0], 2)); 45 | it("Search for 'different' (1 result)", () => nameSearch("different", chapterNames[1], 1)); 46 | it("Search for 'something' (1 result)", () => nameSearch("something", chapterNames[2], 1)); 47 | it("Search for 'unrelated' (0 result)", () => nameSearch("unrelated", null, 0)); 48 | }); 49 | -------------------------------------------------------------------------------- /test/cases/getDaysSavedFormatted.ts: -------------------------------------------------------------------------------- 1 | import assert 
from "assert"; 2 | import { client } from "../utils/httpClient"; 3 | import sinon from "sinon"; 4 | import { db } from "../../src/databases/databases"; 5 | 6 | const endpoint = "/api/getDaysSavedFormatted"; 7 | 8 | describe("getDaysSavedFormatted", () => { 9 | it("can get days saved", async () => { 10 | const result = await client({ url: endpoint }); 11 | assert.ok(result.data.daysSaved >= 0); 12 | }); 13 | 14 | it("returns 0 days saved if no segments", async () => { 15 | const stub = sinon.stub(db, "prepare").resolves(undefined); 16 | const result = await client({ url: endpoint }); 17 | assert.ok(result.data.daysSaved >= 0); 18 | stub.restore(); 19 | }); 20 | 21 | it("returns days saved to 2 fixed points", async () => { 22 | const stub = sinon.stub(db, "prepare").resolves({ daysSaved: 1.23456789 }); 23 | const result = await client({ url: endpoint }); 24 | assert.strictEqual(result.data.daysSaved, "1.23"); 25 | stub.restore(); 26 | }); 27 | }); -------------------------------------------------------------------------------- /test/cases/getHash.ts: -------------------------------------------------------------------------------- 1 | import { getHash } from "../../src/utils/getHash"; 2 | import { notStrictEqual, strictEqual } from "assert"; 3 | 4 | describe("getHash", () => { 5 | it("Should not output the input string", () => { 6 | notStrictEqual(getHash("test"), "test"); 7 | notStrictEqual(getHash("test", -1), "test"); 8 | notStrictEqual(getHash("test", 0), "test"); 9 | notStrictEqual(getHash("test", null), "test"); 10 | }); 11 | 12 | it("Should return a hashed value", () => { 13 | strictEqual(getHash("test"), "2f327ef967ade1ebf4319163f7debbda9cc17bb0c8c834b00b30ca1cf1c256ee"); 14 | }); 15 | 16 | it("Should be able to output the same has the DB upgrade script will output", () => { 17 | strictEqual(getHash("vid", 1), "1ff838dc6ca9680d88455341118157d59a055fe6d0e3870f9c002847bebe4663"); 18 | }); 19 | 20 | it("Should take a variable number of passes", () => { 21 | 
strictEqual(getHash("test", 1), "9f86d081884c7d659a2feaa0c55ad015a3bf4f1b2b0b822cd15d6c15b0f00a08"); 22 | strictEqual(getHash("test", 2), "7b3d979ca8330a94fa7e9e1b466d8b99e0bcdea1ec90596c0dcc8d7ef6b4300c"); 23 | strictEqual(getHash("test", 3), "5b24f7aa99f1e1da5698a4f91ae0f4b45651a1b625c61ed669dd25ff5b937972"); 24 | }); 25 | 26 | it("Should default to 5000 passes", () => { 27 | strictEqual(getHash("test"), getHash("test", 5000)); 28 | }); 29 | 30 | it("Should not take a negative number of passes", () => { 31 | strictEqual(getHash("test", -1), ""); 32 | }); 33 | }); 34 | -------------------------------------------------------------------------------- /test/cases/getHashCache.ts: -------------------------------------------------------------------------------- 1 | import { config } from "../../src/config"; 2 | import { getHashCache } from "../../src/utils/getHashCache"; 3 | import { shaHashKey } from "../../src/utils/redisKeys"; 4 | import { getHash } from "../../src/utils/getHash"; 5 | import redis from "../../src/utils/redis"; 6 | import assert from "assert"; 7 | import { setTimeout } from "timers/promises"; 8 | import { genRandom } from "../utils/getRandom"; 9 | 10 | const rand1Hash = genRandom(24); 11 | const rand1Hash_Key = getHash(rand1Hash, 1); 12 | const rand1Hash_Result = getHash(rand1Hash); 13 | 14 | describe("getHashCache test", function() { 15 | before(function() { 16 | if (!config.redis?.enabled) this.skip(); 17 | }); 18 | it("Should set hashKey and be able to retreive", (done) => { 19 | const redisKey = shaHashKey(rand1Hash_Key); 20 | getHashCache(rand1Hash) 21 | .then(() => setTimeout(50)) // add timeout for redis to complete async 22 | .then(() => redis.get(redisKey)) 23 | .then(result => { 24 | assert.strictEqual(result, rand1Hash_Result); 25 | done(); 26 | }) 27 | .catch(err => done(err === undefined ? 
"no set value" : err)); 28 | }).timeout(5000); 29 | }); -------------------------------------------------------------------------------- /test/cases/getIsUserVIP.ts: -------------------------------------------------------------------------------- 1 | import { db } from "../../src/databases/databases"; 2 | import { genUsers, User } from "../utils/genUser"; 3 | import { client } from "../utils/httpClient"; 4 | import assert from "assert"; 5 | import { insertVip } from "../utils/queryGen"; 6 | 7 | const endpoint = "/api/isUserVIP"; 8 | const vipUserRequest = (userID: string) => client.get(endpoint, { params: { userID } }); 9 | const checkVipStatus = (user: User, expected: boolean) => 10 | vipUserRequest(user.privID) 11 | .then(res => { 12 | assert.strictEqual(res.status, 200); 13 | assert.strictEqual(res.data.vip, expected); 14 | }); 15 | 16 | const cases = [ 17 | "vip", 18 | "normal", 19 | ]; 20 | const users = genUsers("endpoint", cases); 21 | 22 | describe("getIsUserVIP", () => { 23 | before(async () => { 24 | await insertVip(db, users["vip"].pubID); 25 | }); 26 | 27 | // status checks 28 | it("Should be able to get a 200", () => 29 | vipUserRequest(users["vip"].privID) 30 | .then(res => assert.strictEqual(res.status, 200)) 31 | ); 32 | 33 | 34 | it("Should get a 400 if no userID", () => 35 | client.get(endpoint) 36 | .then(res => assert.strictEqual(res.status, 400, "response should be 400")) 37 | ); 38 | 39 | // user checks 40 | it("Should say a VIP is a VIP", () => checkVipStatus(users["vip"], true)); 41 | it("Should say a normal user is not a VIP", () => checkVipStatus(users["normal"], false)); 42 | }); 43 | -------------------------------------------------------------------------------- /test/cases/getSavedTimeForUser.ts: -------------------------------------------------------------------------------- 1 | import { db } from "../../src/databases/databases"; 2 | import { getHash } from "../../src/utils/getHash"; 3 | import { deepStrictEqual } from "assert"; 4 | 
import { client } from "../utils/httpClient"; 5 | import assert from "assert"; 6 | 7 | // helpers 8 | const endpoint = "/api/getSavedTimeForUser"; 9 | const getSavedTimeForUser = (userID: string) => client({ 10 | url: endpoint, 11 | params: { userID } 12 | }); 13 | 14 | describe("getSavedTimeForUser", () => { 15 | const user1 = "getSavedTimeForUser1"; 16 | const user2 = "getSavedTimeforUser2"; 17 | const [ start, end, views ] = [1, 11, 50]; 18 | 19 | before(async () => { 20 | const startOfQuery = 'INSERT INTO "sponsorTimes" ("videoID", "startTime", "endTime", "votes", "UUID", "userID", "timeSubmitted", "views", "shadowHidden") VALUES'; 21 | await db.prepare("run", `${startOfQuery}(?, ?, ?, ?, ?, ?, ?, ?, ?)`, 22 | ["getSavedTimeForUser", start, end, 2, "getSavedTimeUUID0", getHash(user1), 0, views, 0]); 23 | return; 24 | }); 25 | it("Should be able to get a saved time", (done) => { 26 | getSavedTimeForUser(user1) 27 | .then(res => { 28 | // (end-start)*minute * views 29 | const savedMinutes = ((end-start)/60) * views; 30 | const expected = { 31 | timeSaved: savedMinutes 32 | }; 33 | deepStrictEqual(res.data, expected); 34 | done(); 35 | }) 36 | .catch((err) => done(err)); 37 | }); 38 | it("Should return 404 if no submissions", (done) => { 39 | getSavedTimeForUser(user2) 40 | .then(res => { 41 | assert.strictEqual(res.status, 404); 42 | done(); 43 | }) 44 | .catch((err) => done(err)); 45 | }); 46 | it("Should return 400 if no userID", (done) => { 47 | client({ url: endpoint }) 48 | .then(res => { 49 | assert.strictEqual(res.status, 400); 50 | done(); 51 | }) 52 | .catch((err) => done(err)); 53 | }); 54 | }); 55 | -------------------------------------------------------------------------------- /test/cases/getSearchSegments4xx.ts: -------------------------------------------------------------------------------- 1 | import { client } from "../utils/httpClient"; 2 | import assert from "assert"; 3 | 4 | describe("getSearchSegments 4xx", () => { 5 | const endpoint = 
"/api/searchSegments"; 6 | 7 | it("Should return 400 if no videoID", (done) => { 8 | client.get(endpoint, { params: {} }) 9 | .then(res => { 10 | assert.strictEqual(res.status, 400); 11 | const data = res.data; 12 | assert.strictEqual(data, "videoID not specified"); 13 | done(); 14 | }) 15 | .catch(err => done(err)); 16 | }); 17 | 18 | it("Should return 400 if invalid categories", (done) => { 19 | client.get(endpoint, { params: { videoID: "nullVideo", categories: 3 } }) 20 | .then(res => { 21 | assert.strictEqual(res.status, 400); 22 | const data = res.data; 23 | assert.strictEqual(data, "Categories parameter does not match format requirements."); 24 | done(); 25 | }) 26 | .catch(err => done(err)); 27 | }); 28 | 29 | it("Should return 400 if invalid actionTypes", (done) => { 30 | client.get(endpoint, { params: { videoID: "nullVideo", actionTypes: 3 } }) 31 | .then(res => { 32 | assert.strictEqual(res.status, 400); 33 | const data = res.data; 34 | assert.strictEqual(data, "actionTypes parameter does not match format requirements."); 35 | done(); 36 | }) 37 | .catch(err => done(err)); 38 | }); 39 | 40 | it("Should return 404 if no segments", (done) => { 41 | client.get(endpoint, { params: { videoID: "nullVideo", actionType: "chapter" } }) 42 | .then(res => { 43 | assert.strictEqual(res.status, 404); 44 | done(); 45 | }) 46 | .catch(err => done(err)); 47 | }); 48 | }); 49 | -------------------------------------------------------------------------------- /test/cases/getService.ts: -------------------------------------------------------------------------------- 1 | import { getService } from "../../src/utils/getService"; 2 | import { Service } from "../../src/types/segments.model"; 3 | import assert from "assert"; 4 | 5 | describe("getService", () => { 6 | it("Should return youtube if not match", () => { 7 | assert.strictEqual(getService(), Service.YouTube); 8 | assert.strictEqual(getService(""), Service.YouTube); 9 | assert.strictEqual(getService("test", "not exist"), 
Service.YouTube); 10 | assert.strictEqual(getService(null, null), Service.YouTube); 11 | assert.strictEqual(getService(undefined, undefined), Service.YouTube); 12 | assert.strictEqual(getService(undefined), Service.YouTube); 13 | }); 14 | 15 | it("Should return Youtube", () => { 16 | assert.strictEqual(getService("youtube"), Service.YouTube); 17 | assert.strictEqual(getService(" Youtube "), Service.YouTube); 18 | assert.strictEqual(getService(" YouTube "), Service.YouTube); 19 | assert.strictEqual(getService(undefined, " YouTube "), Service.YouTube); 20 | }); 21 | 22 | it("Should return PeerTube", () => { 23 | assert.strictEqual(getService("PeerTube"), Service.PeerTube); 24 | assert.strictEqual(getService(" PeerTube "), Service.PeerTube); 25 | assert.strictEqual(getService(" peertube "), Service.PeerTube); 26 | assert.strictEqual(getService(undefined, " PeerTube "), Service.PeerTube); 27 | }); 28 | }); 29 | -------------------------------------------------------------------------------- /test/cases/getSubmissionUUID.ts: -------------------------------------------------------------------------------- 1 | import { getSubmissionUUID } from "../../src/utils/getSubmissionUUID"; 2 | import assert from "assert"; 3 | import { ActionType, VideoID, Service, Category } from "../../src/types/segments.model"; 4 | import { HashedUserID } from "../../src/types/user.model"; 5 | import { getHash } from "../../src/utils/getHash"; 6 | import { HashedValue } from "../../src/types/hash.model"; 7 | import { genAnonUser } from "../utils/genUser"; 8 | import { genRandomValue } from "../utils/getRandom"; 9 | 10 | function testHash (segment: segment, version: number): HashedValue { 11 | const manualHash = getHash(Object.values(segment).join(""), 1) as HashedValue; 12 | const generatedHash = getSubmissionUUID(segment.videoID, segment.category, segment.actionType, segment.description, segment.userID, segment.startTime, segment.endTime, segment.service); 13 | assert.strictEqual(version, 
Number(generatedHash.at(-1)), "version should match passed in version"); 14 | assert.strictEqual(`${manualHash}${version}`, generatedHash); 15 | return generatedHash; 16 | } 17 | 18 | interface segment { 19 | videoID: VideoID, 20 | startTime: number, 21 | endTime: number, 22 | userID: HashedUserID, 23 | description: string, 24 | category: Category, 25 | actionType: ActionType, 26 | service: Service 27 | } 28 | 29 | const version = 7; 30 | 31 | describe("getSubmissionUUID", () => { 32 | it("Should return the hashed value identical to manually generated value", () => { 33 | const segment: segment = { 34 | videoID: "video001" as VideoID, 35 | startTime: 13.33337, 36 | endTime: 42.000001, 37 | userID: "testuser001" as HashedUserID, 38 | description: "", 39 | category: "sponsor" as Category, 40 | actionType: "skip" as ActionType, 41 | service: Service.YouTube 42 | }; 43 | const testedHash = testHash(segment, version); 44 | // test against baked hash 45 | assert.strictEqual(testedHash, "2a473bca993dd84d8c2f6a4785989b20948dfe0c12c00f6f143bbda9ed561dca7"); 46 | }); 47 | it ("Should return identical hash for randomly generated values", () => { 48 | const user = genAnonUser(); 49 | const segment: segment = { 50 | videoID: genRandomValue("video", "getUUID") as VideoID, 51 | startTime: Math.random()*1000, 52 | endTime: Math.random()*500, 53 | userID: user.pubID, 54 | description: genRandomValue("description", "getUUID"), 55 | category: "sponsor" as Category, 56 | actionType: "skip" as ActionType, 57 | service: Service.YouTube 58 | }; 59 | testHash(segment, version); 60 | }); 61 | }); 62 | -------------------------------------------------------------------------------- /test/cases/getTotalStats.ts: -------------------------------------------------------------------------------- 1 | import assert from "assert"; 2 | import { client } from "../utils/httpClient"; 3 | 4 | const endpoint = "/api/getTotalStats"; 5 | 6 | describe("getTotalStats", () => { 7 | it("Can get total stats", 
async () => { 8 | const result = await client({ url: endpoint }); 9 | const data = result.data; 10 | assert.strictEqual(data.userCount, 0, "User count should default false"); 11 | assert.ok(data.activeUsers >= 0); 12 | assert.ok(data.apiUsers >= 0); 13 | assert.ok(data.viewCount >= 0); 14 | assert.ok(data.totalSubmissions >= 0); 15 | assert.ok(data.minutesSaved >= 0); 16 | }); 17 | 18 | it("Can get total stats without contributing users", async () => { 19 | const result = await client({ url: `${endpoint}?countContributingUsers=false` }); 20 | const data = result.data; 21 | assert.strictEqual(data.userCount, 0); 22 | assert.ok(data.activeUsers >= 0); 23 | assert.ok(data.apiUsers >= 0); 24 | assert.ok(data.viewCount >= 0); 25 | assert.ok(data.totalSubmissions >= 0); 26 | assert.ok(data.minutesSaved >= 0); 27 | }); 28 | 29 | it("Can get total stats with contributing users", async () => { 30 | const result = await client({ url: `${endpoint}?countContributingUsers=true` }); 31 | const data = result.data; 32 | assert.ok(data.userCount >= 0); 33 | assert.ok(data.activeUsers >= 0); 34 | assert.ok(data.apiUsers >= 0); 35 | assert.ok(data.viewCount >= 0); 36 | assert.ok(data.totalSubmissions >= 0); 37 | assert.ok(data.minutesSaved >= 0); 38 | }); 39 | }); -------------------------------------------------------------------------------- /test/cases/getUsername.ts: -------------------------------------------------------------------------------- 1 | import { getHash } from "../../src/utils/getHash"; 2 | import { client } from "../utils/httpClient"; 3 | import assert from "assert"; 4 | import { insertSegment } from "../utils/segmentQueryGen"; 5 | import { db } from "../../src/databases/databases"; 6 | import { HashedUserID } from "../../src/types/user.model"; 7 | 8 | // helpers 9 | const getUsername = (userID: string) => client({ 10 | url: "/api/getUsername", 11 | params: { userID } 12 | }); 13 | 14 | const postSetUserName = (userID: string, username: string) => client({ 15 | 
method: "POST", 16 | url: "/api/setUsername", 17 | params: { 18 | userID, 19 | username, 20 | } 21 | }); 22 | 23 | const userOnePrivate = "getUsername_0"; 24 | const userOnePublic = getHash(userOnePrivate); 25 | const userOneUsername = "getUsername_username"; 26 | 27 | describe("getUsername test", function() { 28 | before(async () => { 29 | await insertSegment(db, { userID: userOnePublic as HashedUserID }); 30 | }); 31 | 32 | it("Should get back publicUserID if not set", (done) => { 33 | getUsername(userOnePrivate) 34 | .then(result => { 35 | assert.strictEqual(result.data.userName, userOnePublic); 36 | done(); 37 | }) 38 | .catch(err => done(err)); 39 | }); 40 | it("Should be able to get username after setting", (done) => { 41 | postSetUserName(userOnePrivate, userOneUsername) 42 | .then(async () => { 43 | const result = await getUsername(userOnePrivate); 44 | const actual = result.data.userName; 45 | assert.strictEqual(actual, userOneUsername); 46 | done(); 47 | }) 48 | .catch(err => done(err)); 49 | }); 50 | it("Should return 400 if no userID provided", (done) => { 51 | client({ 52 | url: "/api/getUsername" 53 | }) 54 | .then(res => { 55 | assert.strictEqual(res.status, 400); 56 | done(); 57 | }) 58 | .catch(err => done(err)); 59 | }); 60 | }); 61 | -------------------------------------------------------------------------------- /test/cases/getViewsForUser.ts: -------------------------------------------------------------------------------- 1 | import { db } from "../../src/databases/databases"; 2 | import { client } from "../utils/httpClient"; 3 | import assert from "assert"; 4 | import { genUsers, User } from "../utils/genUser"; 5 | import { insertSegment } from "../utils/segmentQueryGen"; 6 | 7 | // helpers 8 | const endpoint = "/api/getViewsForUser"; 9 | const getViewsForUser = (userID: string) => client({ 10 | url: endpoint, 11 | params: { userID } 12 | }); 13 | 14 | const cases = [ 15 | "u-1", 16 | "u-2", 17 | "u-3" 18 | ]; 19 | const users = 
genUsers("getViewUser", cases); 20 | 21 | // set views for users 22 | users["u-1"].info["views1"] = 30; 23 | users["u-1"].info["views2"] = 0; 24 | users["u-1"].info["views"] = users["u-1"].info.views1 + users["u-1"].info.views2; 25 | users["u-2"].info["views"] = 0; 26 | users["u-3"].info["views"] = 0; 27 | 28 | const checkUserViews = (user: User) => 29 | getViewsForUser(user.privID) 30 | .then(result => { 31 | assert.strictEqual(result.status, 200); 32 | assert.strictEqual(result.data.viewCount, user.info.views); 33 | }); 34 | 35 | describe("getViewsForUser", function() { 36 | before(() => { 37 | // add views for users 38 | insertSegment(db, { userID: users["u-1"].pubID, views: users["u-1"].info.views1 }); 39 | insertSegment(db, { userID: users["u-1"].pubID, views: users["u-1"].info.views2 }); 40 | insertSegment(db, { userID: users["u-2"].pubID, views: users["u-2"].info.views }); 41 | }); 42 | it("Should get back views for user one", () => 43 | checkUserViews(users["u-1"]) 44 | ); 45 | it("Should get back views for user two", () => 46 | checkUserViews(users["u-2"]) 47 | ); 48 | it("Should get 404 if no submissions", () => 49 | getViewsForUser(users["u-3"].pubID) 50 | .then(result => assert.strictEqual(result.status, 404)) 51 | ); 52 | it("Should return 400 if no userID provided", () => 53 | client({ url: endpoint }) 54 | .then(res => assert.strictEqual(res.status, 400)) 55 | ); 56 | }); -------------------------------------------------------------------------------- /test/cases/highLoad.ts: -------------------------------------------------------------------------------- 1 | import sinon from "sinon"; 2 | import { db } from "../../src/databases/databases"; 3 | import assert from "assert"; 4 | import { client } from "../utils/httpClient"; 5 | client.defaults.validateStatus = (status) => status < 600; 6 | 7 | describe("High load test", () => { 8 | before(() => { 9 | sinon.stub(db, "highLoad").returns(true); 10 | }); 11 | 12 | after(() => { 13 | sinon.restore(); 14 | 
import { config } from "../../src/config";
import assert from "assert";
import { YouTubeAPI } from "../../src/utils/youtubeApi";
import * as innerTube from "../../src/utils/innerTubeAPI";
import { partialDeepEquals } from "../utils/partialDeepEquals";
import { getVideoDetails } from "../../src/utils/getVideoDetails";

// Well-known public test video used as a stable fixture for metadata lookups.
const videoID = "BaW_jenozKc";
const expectedInnerTube = { // partial type of innerTubeVideoDetails
    videoId: videoID,
    title: "youtube-dl test video \"'/\\ä↭𝕐",
    lengthSeconds: "10",
    channelId: "UCLqxVugv74EIW3VWh2NOa3Q",
    isOwnerViewing: false,
    isCrawlable: true,
    allowRatings: true,
    author: "Philipp Hagemeister",
    isPrivate: false,
    isUnpluggedCorpus: false,
    isLiveContent: false
};
// View count observed at the time this test was written; the live count can
// only grow, so the assertion below uses >=.
const currentViews = 49816;

// NOTE(review): xdescribe keeps this suite skipped — these tests hit the live
// YouTube/innerTube and NewLeaf services, so they are network-dependent and
// not suitable for CI. Re-enable manually when validating the API wrappers.
xdescribe("innertube API test", function() {
    it("should be able to get innerTube details", async () => {
        const result = await innerTube.getPlayerData(videoID, true);
        // Only the fields listed in expectedInnerTube are compared.
        assert.ok(partialDeepEquals(result, expectedInnerTube));
    });
    it("Should have more views than current", async () => {
        const result = await innerTube.getPlayerData(videoID, true);
        assert.ok(Number(result.viewCount) >= currentViews);
    });
    it("Should have equivalent response from NewLeaf", async function () {
        // Skip when no real NewLeaf instance is configured (test config uses "placeholder").
        if (!config.newLeafURLs || config.newLeafURLs.length <= 0 || config.newLeafURLs[0] == "placeholder") this.skip();
        const itResponse = await innerTube.getPlayerData(videoID, true);
        const newLeafResponse = await YouTubeAPI.listVideos(videoID, true);
        // validate videoID
        assert.strictEqual(itResponse.videoId, videoID);
        assert.strictEqual(newLeafResponse.data?.videoId, videoID);
        // validate description
        assert.strictEqual(itResponse.shortDescription, newLeafResponse.data?.description);
        // validate authorId
        assert.strictEqual(itResponse.channelId, newLeafResponse.data?.authorId);
    });
    it("Should return data from generic endpoint", async function () {
        const videoDetail = await getVideoDetails(videoID);
        assert.ok(videoDetail);
    });
    it("Should not fail when getting data for private video", async function () {
        // Private videos must still yield a usable (non-throwing) result.
        const privateVideoId = "ZuibAax0VD8";
        const videoDetail = await getVideoDetails(privateVideoId);
        assert.ok(videoDetail);
    });
});
assert.strictEqual(res.status, 200); 15 | const row = await db.prepare("get", `SELECT "startTime", "endTime", "category" FROM "sponsorTimes" WHERE "videoID" = ?`, [videoID1]); 16 | const expected = { 17 | startTime: 1, 18 | endTime: 10, 19 | category: "sponsor" 20 | }; 21 | assert.ok(partialDeepEquals(row, expected)); 22 | done(); 23 | }) 24 | .catch(err => done(err)); 25 | }); 26 | 27 | it("Should be able to submit a time (POST)", (done) => { 28 | client({ 29 | url: endpoint, 30 | params: { videoID: videoID2, startTime: 1, endTime: 11, userID }, 31 | method: "post", 32 | }) 33 | .then(async res => { 34 | assert.strictEqual(res.status, 200); 35 | const row = await db.prepare("get", `SELECT "startTime", "endTime", "category" FROM "sponsorTimes" WHERE "videoID" = ?`, [videoID2]); 36 | const expected = { 37 | startTime: 1, 38 | endTime: 11, 39 | category: "sponsor" 40 | }; 41 | assert.ok(partialDeepEquals(row, expected)); 42 | done(); 43 | }) 44 | .catch(err => done(err)); 45 | }); 46 | 47 | it("Should return 400 for missing video", (done) => { 48 | client.get(endpoint, { params: { startTime: 1, endTime: 10, userID } }) 49 | .then(res => { 50 | assert.strictEqual(res.status, 400); 51 | done(); 52 | }) 53 | .catch(err => done(err)); 54 | }); 55 | 56 | it("Should return 400 for missing userID", (done) => { 57 | client.get(endpoint, { params: { videoID: videoID1, startTime: 1, endTime: 10 } }) 58 | .then(res => { 59 | assert.strictEqual(res.status, 400); 60 | done(); 61 | }) 62 | .catch(err => done(err)); 63 | }); 64 | }); 65 | -------------------------------------------------------------------------------- /test/cases/postClearCache.ts: -------------------------------------------------------------------------------- 1 | import { db } from "../../src/databases/databases"; 2 | import assert from "assert"; 3 | import { client } from "../utils/httpClient"; 4 | import { genUsers, User } from "../utils/genUser"; 5 | import { insertSegment, insertVip } from 
"../utils/queryGen"; 6 | 7 | const endpoint = "/api/clearCache"; 8 | const postClearCache = (user: User, videoID: string) => client({ method: "post", url: endpoint, params: { userID: user.privID, videoID } }); 9 | 10 | const cases = [ 11 | "vip", 12 | "normal", 13 | ]; 14 | const users = genUsers("postClearCache", cases); 15 | 16 | describe("postClearCache", () => { 17 | before(async () => { 18 | await insertVip(db, users["vip"].pubID); 19 | await insertSegment(db, { videoID: "clear-test" }); 20 | }); 21 | 22 | it("Should be able to clear cache for existing video", () => 23 | postClearCache(users["vip"], "clear-test") 24 | .then(res => assert.strictEqual(res.status, 200)) 25 | ); 26 | 27 | it("Should be able to clear cache for nonexistent video", () => 28 | postClearCache(users["vip"], "dne-video") 29 | .then(res => assert.strictEqual(res.status, 200)) 30 | ); 31 | 32 | it("Should get 403 as non-vip", () => 33 | postClearCache(users["normal"], "clear-test") 34 | .then(res => assert.strictEqual(res.status, 403)) 35 | ); 36 | 37 | it("Should give 400 with missing videoID", () => 38 | client.post(endpoint, { params: { userID: users["vip"].privID } }) 39 | .then(res => assert.strictEqual(res.status, 400)) 40 | ); 41 | 42 | it("Should give 400 with missing userID", () => 43 | client.post(endpoint, { params: { videoID: "clear-test" } }) 44 | .then(res => assert.strictEqual(res.status, 400)) 45 | ); 46 | }); 47 | -------------------------------------------------------------------------------- /test/cases/postSkipSegments400Stub.ts: -------------------------------------------------------------------------------- 1 | import assert from "assert"; 2 | import { postSkipSegmentParam } from "./postSkipSegments"; 3 | import { config } from "../../src/config"; 4 | import sinon from "sinon"; 5 | 6 | const videoID = "postSkipSegments-404-video"; 7 | 8 | describe("postSkipSegments 400 - stubbed config", () => { 9 | const USERID_LIMIT = 30; 10 | before(() => { 11 | sinon.stub(config, 
"minUserIDLength").value(USERID_LIMIT); 12 | }); 13 | after(() => { 14 | sinon.restore(); 15 | }); 16 | 17 | it("Should return 400 if userID is too short", (done) => { 18 | const userID = "a".repeat(USERID_LIMIT - 10); 19 | postSkipSegmentParam({ 20 | videoID, 21 | startTime: 1, 22 | endTime: 5, 23 | category: "sponsor", 24 | userID 25 | }) 26 | .then(res => { 27 | assert.strictEqual(res.status, 400); 28 | done(); 29 | }) 30 | .catch(err => done(err)); 31 | }); 32 | }); 33 | -------------------------------------------------------------------------------- /test/cases/postSkipSegmentsLocked.ts: -------------------------------------------------------------------------------- 1 | import assert from "assert"; 2 | import { postSkipSegmentJSON } from "./postSkipSegments"; 3 | import { getHash } from "../../src/utils/getHash"; 4 | import { db } from "../../src/databases/databases"; 5 | 6 | describe("postSkipSegments - LockedVideos", () => { 7 | const userIDOne = "postSkip-DurationUserOne"; 8 | const VIPLockUser = "VIPUser-lockCategories"; 9 | const videoID = "lockedVideo"; 10 | const userID = userIDOne; 11 | 12 | before(() => { 13 | const insertLockCategoriesQuery = `INSERT INTO "lockCategories" ("userID", "videoID", "category", "reason") VALUES(?, ?, ?, ?)`; 14 | db.prepare("run", insertLockCategoriesQuery, [getHash(VIPLockUser), videoID, "sponsor", "Custom Reason"]); 15 | db.prepare("run", insertLockCategoriesQuery, [getHash(VIPLockUser), videoID, "intro", ""]); 16 | }); 17 | 18 | it("Should return 403 and custom reason for submiting in lockedCategory", (done) => { 19 | postSkipSegmentJSON({ 20 | userID, 21 | videoID, 22 | segments: [{ 23 | segment: [1, 10], 24 | category: "sponsor", 25 | }], 26 | }) 27 | .then(res => { 28 | assert.strictEqual(res.status, 403); 29 | assert.match(res.data, /Reason: /); 30 | assert.match(res.data, /Custom Reason/); 31 | done(); 32 | }) 33 | .catch(err => done(err)); 34 | }); 35 | 36 | it("Should return not be 403 when submitting with 
locked category but unlocked actionType", (done) => { 37 | postSkipSegmentJSON({ 38 | userID, 39 | videoID, 40 | segments: [{ 41 | segment: [1, 10], 42 | category: "sponsor", 43 | actionType: "mute" 44 | }], 45 | }) 46 | .then(res => { 47 | assert.strictEqual(res.status, 200); 48 | done(); 49 | }) 50 | .catch(err => done(err)); 51 | }); 52 | 53 | it("Should return 403 for submiting in lockedCategory", (done) => { 54 | postSkipSegmentJSON({ 55 | userID, 56 | videoID, 57 | segments: [{ 58 | segment: [1, 10], 59 | category: "intro", 60 | }], 61 | }) 62 | .then(res => { 63 | assert.strictEqual(res.status, 403); 64 | assert.doesNotMatch(res.data, /Lock reason: /); 65 | assert.doesNotMatch(res.data, /Custom Reason/); 66 | done(); 67 | }) 68 | .catch(err => done(err)); 69 | }); 70 | }); -------------------------------------------------------------------------------- /test/cases/postSkipSegmentsShadowban.ts: -------------------------------------------------------------------------------- 1 | import assert from "assert"; 2 | import { postSkipSegmentParam } from "./postSkipSegments"; 3 | import { getHash } from "../../src/utils/getHash"; 4 | import { db } from "../../src/databases/databases"; 5 | import { ImportMock } from "ts-mock-imports"; 6 | import * as YouTubeAPIModule from "../../src/utils/youtubeApi"; 7 | import { YouTubeApiMock } from "../mocks/youtubeMock"; 8 | 9 | const mockManager = ImportMock.mockStaticClass(YouTubeAPIModule, "YouTubeAPI"); 10 | const sinonStub = mockManager.mock("listVideos"); 11 | sinonStub.callsFake(YouTubeApiMock.listVideos); 12 | 13 | describe("postSkipSegments - shadowban", () => { 14 | const banUser01 = "postSkip-banUser01"; 15 | const banUser01Hash = getHash(banUser01); 16 | 17 | const shadowBanVideoID1 = "postSkipBan1"; 18 | const shadowBanVideoID2 = "postSkipBan2"; 19 | 20 | const queryDatabaseShadowhidden = (videoID: string) => db.prepare("get", `SELECT "startTime", "endTime", "shadowHidden", "userID" FROM "sponsorTimes" WHERE 
"videoID" = ?`, [videoID]); 21 | 22 | before(() => { 23 | db.prepare("run", `INSERT INTO "shadowBannedUsers" ("userID") VALUES(?)`, [banUser01Hash]); 24 | }); 25 | 26 | it("Should automatically shadowban segments if user is banned", (done) => { 27 | const videoID = shadowBanVideoID1; 28 | postSkipSegmentParam({ 29 | videoID, 30 | startTime: 0, 31 | endTime: 10, 32 | category: "sponsor", 33 | userID: banUser01 34 | }) 35 | .then(async res => { 36 | assert.strictEqual(res.status, 200); 37 | const row = await queryDatabaseShadowhidden(videoID); 38 | const expected = { 39 | startTime: 0, 40 | endTime: 10, 41 | shadowHidden: 1, 42 | userID: banUser01Hash 43 | }; 44 | assert.deepStrictEqual(row, expected); 45 | done(); 46 | }) 47 | .catch(err => done(err)); 48 | }); 49 | 50 | it("Should not add full segments to database if user if shadowbanned", (done) => { 51 | const videoID = shadowBanVideoID2; 52 | postSkipSegmentParam({ 53 | videoID, 54 | startTime: 0, 55 | endTime: 0, 56 | category: "sponsor", 57 | actionType: "full", 58 | userID: banUser01 59 | }) 60 | .then(async res => { 61 | assert.strictEqual(res.status, 200); 62 | const row = await queryDatabaseShadowhidden(videoID); 63 | assert.strictEqual(row, undefined); 64 | done(); 65 | }) 66 | .catch(err => done(err)); 67 | }); 68 | }); -------------------------------------------------------------------------------- /test/cases/redisTest.ts: -------------------------------------------------------------------------------- 1 | import { config } from "../../src/config"; 2 | import redis from "../../src/utils/redis"; 3 | import assert from "assert"; 4 | import { genRandom } from "../utils/getRandom"; 5 | 6 | const randKey1 = genRandom(); 7 | const randValue1 = genRandom(); 8 | const randKey2 = genRandom(16); 9 | const randKey3 = genRandom(); 10 | const randValue3 = genRandom(); 11 | 12 | const redisGetCheck = (key: string, expected: string | null, done: Mocha.Done): Promise => 13 | redis.get(key) 14 | .then(res => { 15 | 
assert.strictEqual(res, expected); 16 | done(); 17 | }).catch(err => done(err)); 18 | 19 | describe("redis test", function() { 20 | before(async function() { 21 | if (!config.redis?.enabled) this.skip(); 22 | await redis.set(randKey1, randValue1); 23 | }); 24 | it("Should get stored value", (done) => { 25 | redisGetCheck(randKey1, randValue1, done); 26 | }); 27 | it("Should not be able to get not stored value", (done) => { 28 | redis.get(randKey2) 29 | .then(res => { 30 | if (res) done("Value should not be found"); 31 | done(); 32 | }).catch(err => done(err)); 33 | }); 34 | it("Should be able to delete stored value", (done) => { 35 | redis.del(randKey1) 36 | .catch(err => done(err)) 37 | .then(() => redisGetCheck(randKey1, null, done)); 38 | }); 39 | it("Should be able to set expiring value", (done) => { 40 | redis.setEx(randKey3, 8400, randValue3) 41 | .catch(err => done(err)) 42 | .then(() => redisGetCheck(randKey3, randValue3, done)); 43 | }); 44 | it("Should continue when undefined value is fetched", (done) => { 45 | const undefkey = `undefined.${genRandom()}`; 46 | redis.get(undefkey) 47 | .then(result => { 48 | assert.ok(!result); // result should be falsy 49 | done(); 50 | }); 51 | }); 52 | }); -------------------------------------------------------------------------------- /test/cases/shadowBanUser4xx.ts: -------------------------------------------------------------------------------- 1 | import { db } from "../../src/databases/databases"; 2 | import { getHash } from "../../src/utils/getHash"; 3 | import assert from "assert"; 4 | import { client } from "../utils/httpClient"; 5 | 6 | const endpoint = "/api/shadowBanUser"; 7 | 8 | const postShadowBan = (params: Record) => client({ 9 | method: "POST", 10 | url: endpoint, 11 | params 12 | }); 13 | 14 | describe("shadowBanUser 4xx", () => { 15 | const VIPuserID = "shadow-ban-4xx-vip"; 16 | 17 | before(async () => { 18 | await db.prepare("run", `INSERT INTO "vipUsers" ("userID") VALUES(?)`, [getHash(VIPuserID)]); 
19 | }); 20 | 21 | it("Should return 400 if no adminUserID", (done) => { 22 | const userID = "shadowBanned"; 23 | postShadowBan({ userID }) 24 | .then(res => { 25 | assert.strictEqual(res.status, 400); 26 | done(); 27 | }) 28 | .catch(err => done(err)); 29 | }); 30 | 31 | it("Should return 400 if no userID", (done) => { 32 | postShadowBan({ adminUserID: VIPuserID }) 33 | .then(res => { 34 | assert.strictEqual(res.status, 400); 35 | done(); 36 | }) 37 | .catch(err => done(err)); 38 | }); 39 | 40 | it("Should return 403 if not authorized", (done) => { 41 | postShadowBan({ adminUserID: "notVIPUserID", userID: "shadowBanned" }) 42 | .then(res => { 43 | assert.strictEqual(res.status, 403); 44 | done(); 45 | }) 46 | .catch(err => done(err)); 47 | }); 48 | }); 49 | -------------------------------------------------------------------------------- /test/cases/userAgentTest.ts: -------------------------------------------------------------------------------- 1 | import assert from "assert"; 2 | import { parseUserAgent } from "../../src/utils/userAgent"; 3 | 4 | describe("userAgent", () => { 5 | it ("Works for Vanced package", () => { 6 | assert.strictEqual("Vanced/1521081792", parseUserAgent("com.vanced.android.youtube/1521081792 (Linux; U; Android 10)")); 7 | }); 8 | 9 | it ("Works for Android package (root)", () => { 10 | assert.strictEqual("Vanced/1521081792", parseUserAgent("com.google.android.youtube/1521081792 (Linux; U; Android 10)")); 11 | }); 12 | 13 | it ("Works MPV", () => { 14 | assert.strictEqual("mpv_sponsorblock/1.0 (https://github.com/po5/mpv_sponsorblock)", parseUserAgent("mpv_sponsorblock/1.0 (https://github.com/po5/mpv_sponsorblock)")); 15 | }); 16 | 17 | it ("Blank for anything else", () => { 18 | assert.strictEqual("", parseUserAgent("Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/92.0.4515.107 Safari/537.36")); 19 | }); 20 | }); -------------------------------------------------------------------------------- 
/test/cases/userCounter.ts: -------------------------------------------------------------------------------- 1 | import axios from "axios"; 2 | import assert from "assert"; 3 | import { config } from "../../src/config"; 4 | import { getHash } from "../../src/utils/getHash"; 5 | import { client } from "../utils/httpClient"; 6 | 7 | describe("userCounter", () => { 8 | it("Should return 200", function () { 9 | if (!config.userCounterURL) this.skip(); // skip if no userCounterURL is set 10 | return axios.request({ 11 | method: "POST", 12 | baseURL: config.userCounterURL, 13 | url: "/api/v1/addIP", 14 | params: { 15 | hashedIP: getHash("127.0.0.1",1) 16 | } 17 | }).then(res => assert.strictEqual(res.status, 200)); 18 | }); 19 | it("Should not incremeent counter on OPTIONS", function () { 20 | /* cannot spy test */ 21 | if (!config.userCounterURL) this.skip(); // skip if no userCounterURL is set 22 | //const spy = sinon.spy(UserCounter); 23 | return client({ method: "OPTIONS", url: "/api/status" }) 24 | .then(() => client({ method: "GET", url: "/api/status" })); 25 | //assert.strictEqual(spy.callCount, 1); 26 | }); 27 | }); -------------------------------------------------------------------------------- /test/mocks.ts: -------------------------------------------------------------------------------- 1 | import express from "express"; 2 | import { config } from "../src/config"; 3 | import { Server } from "http"; 4 | import { UserCounter } from "./mocks/UserCounter"; 5 | 6 | const app = express(); 7 | 8 | app.post("/webhook/ReportChannel", (req, res) => { 9 | res.sendStatus(200); 10 | }); 11 | 12 | app.post("/webhook/FirstTimeSubmissions", (req, res) => { 13 | res.sendStatus(200); 14 | }); 15 | 16 | app.post("/webhook/WarningWebhook", (req, res) => { 17 | res.sendStatus(200); 18 | }); 19 | 20 | app.post("/webhook/CompletelyIncorrectReport", (req, res) => { 21 | res.sendStatus(200); 22 | }); 23 | 24 | // Testing NeuralBlock 25 | app.post("/webhook/NeuralBlockReject", (req, 
res) => { 26 | res.sendStatus(200); 27 | }); 28 | 29 | app.get("/NeuralBlock/api/checkSponsorSegments", (req, res) => { 30 | if (req.query.vid === "LevkAjUE6d4") { 31 | res.json({ 32 | probabilities: [0.69], 33 | }); 34 | return; 35 | } 36 | res.sendStatus(500); 37 | }); 38 | 39 | //getSponsorSegments is no longer being used for automod 40 | app.get("/NeuralBlock/api/getSponsorSegments", (req, res) => { 41 | if (req.query.vid === "LevkAjUE6d4") { 42 | res.json({ 43 | sponsorSegments: [[0.47, 7.549], [264.023, 317.293]], 44 | }); 45 | return; 46 | } 47 | res.sendStatus(500); 48 | }); 49 | 50 | // Testing webhooks 51 | app.post("/CustomWebhook", (req, res) => { 52 | res.sendStatus(200); 53 | }); 54 | 55 | // mocks 56 | app.use("/UserCounter", UserCounter); 57 | 58 | export function createMockServer(callback: () => void): Server { 59 | return app.listen(config.mockPort, callback); 60 | } 61 | -------------------------------------------------------------------------------- /test/mocks/UserCounter.ts: -------------------------------------------------------------------------------- 1 | import { Router } from "express"; 2 | export const UserCounter = Router(); 3 | 4 | UserCounter.post("/api/v1/addIP", (req, res) => { 5 | res.sendStatus(200); 6 | }); 7 | UserCounter.get("/api/v1/userCount", (req, res) => { 8 | res.send({ 9 | userCount: 100 10 | }); 11 | }); -------------------------------------------------------------------------------- /test/mocks/gumroadMock.ts: -------------------------------------------------------------------------------- 1 | export const licenseSuccess = { 2 | success: true, 3 | uses: 4, 4 | purchase: {} 5 | }; 6 | 7 | export const licenseFail = { 8 | success: false, 9 | message: "That license does not exist for the provided product." 
10 | }; 11 | 12 | 13 | const subCode = (length = 8) => { 14 | const characters = "ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789"; 15 | let result = ""; 16 | for (let i = 0; i < length; i++) { 17 | result += characters[(Math.floor(Math.random() * characters.length))]; 18 | } 19 | return result; 20 | }; 21 | 22 | export const generateLicense = (): string => `${subCode()}-${subCode()}-${subCode()}-${subCode()}`; 23 | -------------------------------------------------------------------------------- /test/mocks/mockExpressRequest.ts: -------------------------------------------------------------------------------- 1 | const nullStub = (): any => null; 2 | 3 | export const createRequest = (options: any) => ({ 4 | app: {}, 5 | baseUrl: "", 6 | body: {}, 7 | cookies: {}, 8 | fresh: true, 9 | headers: {}, 10 | hostname: "example.com", 11 | ip: "", 12 | ips: [], 13 | method: "GET", 14 | originalUrl: "/", 15 | params: {}, 16 | path: "/", 17 | protocol: "https", 18 | query: {}, 19 | route: {}, 20 | secure: true, 21 | signedCookies: {}, 22 | stale: false, 23 | subdomains: [], 24 | xhr: true, 25 | accepts: nullStub(), 26 | acceptsCharsets: nullStub(), 27 | acceptsEncodings: nullStub(), 28 | acceptsLanguages: nullStub(), 29 | get: nullStub(), 30 | is: nullStub(), 31 | range: nullStub(), 32 | ...options 33 | }); 34 | -------------------------------------------------------------------------------- /test/mocks/patreonMock.ts: -------------------------------------------------------------------------------- 1 | export const activeIdentity = { 2 | data: {}, 3 | links: {}, 4 | included: [ 5 | { 6 | attributes: { 7 | is_monthly: true, 8 | currently_entitled_amount_cents: 100, 9 | patron_status: "active_patron", 10 | }, 11 | id: "id", 12 | type: "campaign" 13 | } 14 | ], 15 | }; 16 | 17 | export const invalidIdentity = { 18 | data: {}, 19 | links: {}, 20 | included: [{}], 21 | }; 22 | 23 | export const formerIdentitySucceed = { 24 | data: {}, 25 | links: {}, 26 | included: [ 27 | { 28 | attributes: 
{ 29 | is_monthly: true, 30 | campaign_lifetime_support_cents: 500, 31 | patron_status: "former_patron", 32 | }, 33 | id: "id", 34 | type: "campaign" 35 | } 36 | ], 37 | }; 38 | 39 | export const formerIdentityFail = { 40 | data: {}, 41 | links: {}, 42 | included: [ 43 | { 44 | attributes: { 45 | is_monthly: true, 46 | campaign_lifetime_support_cents: 1, 47 | patron_status: "former_patron", 48 | }, 49 | id: "id", 50 | type: "campaign" 51 | } 52 | ], 53 | }; 54 | 55 | export const fakeOauth = { 56 | access_token: "test_access_token", 57 | refresh_token: "test_refresh_token", 58 | expires_in: 3600, 59 | }; -------------------------------------------------------------------------------- /test/test.ts: -------------------------------------------------------------------------------- 1 | import Mocha from "mocha"; 2 | import fs from "fs"; 3 | import path from "path"; 4 | import { config } from "../src/config"; 5 | import { createServer } from "../src/app"; 6 | import { createMockServer } from "./mocks"; 7 | import { Logger } from "../src/utils/logger"; 8 | import { initDb } from "../src/databases/databases"; 9 | import { ImportMock } from "ts-mock-imports"; 10 | import * as rateLimitMiddlewareModule from "../src/middleware/requestRateLimit"; 11 | import rateLimit from "express-rate-limit"; 12 | import redis from "../src/utils/redis"; 13 | import { resetRedis, resetPostgres } from "./utils/reset"; 14 | 15 | async function init() { 16 | ImportMock.mockFunction(rateLimitMiddlewareModule, "rateLimitMiddleware", rateLimit({ 17 | skip: () => true 18 | })); 19 | 20 | // delete old test database 21 | if (fs.existsSync(config.db)) fs.unlinkSync(config.db); 22 | if (fs.existsSync(config.privateDB)) fs.unlinkSync(config.privateDB); 23 | if (config?.redis?.enabled) await resetRedis(); 24 | if (config?.postgres) await resetPostgres(); 25 | 26 | await initDb(); 27 | 28 | const dbMode = config.postgres ? 
"postgres" 29 | : "sqlite"; 30 | Logger.info(`Database Mode: ${dbMode}`); 31 | 32 | // set commit at headCommit 33 | (global as any).HEADCOMMIT = "test"; 34 | 35 | // Instantiate a Mocha instance. 36 | const mocha = new Mocha(); 37 | 38 | const testDirs = ["./test/cases"]; 39 | 40 | // Add each .ts file to the mocha instance 41 | testDirs.forEach(testDir => { 42 | fs.readdirSync(testDir) 43 | .filter((file) => 44 | // Only keep the .ts files 45 | file.slice(-3) === ".ts" 46 | ) 47 | .forEach(function(file) { 48 | mocha.addFile( 49 | path.join(testDir, file) 50 | ); 51 | }); 52 | }); 53 | 54 | const mockServer = createMockServer(() => { 55 | Logger.info("Started mock HTTP Server"); 56 | const server = createServer(() => { 57 | Logger.info("Started main HTTP server"); 58 | // Run the tests. 59 | mocha.run((failures) => { 60 | mockServer.close(); 61 | server.close(); 62 | redis.quit(); 63 | process.exitCode = failures ? 1 : 0; // exit with non-zero status if there were failures 64 | process.exit(); 65 | }); 66 | }); 67 | }); 68 | } 69 | 70 | init(); -------------------------------------------------------------------------------- /test/utils/genUser.ts: -------------------------------------------------------------------------------- 1 | import { genRandom } from "./getRandom"; 2 | import { UserID, HashedUserID } from "../../src/types/user.model"; 3 | import { getHash } from "../../src/utils/getHash"; 4 | 5 | type info = Record 6 | 7 | export interface User { 8 | privID: UserID, 9 | pubID: HashedUserID 10 | info: info 11 | } 12 | export type userArray = Record 13 | 14 | export interface UsernameUser extends User { 15 | username: string 16 | } 17 | export type usernameUserArray = Record 18 | 19 | export const genUser = (fnname: string, testcase: string, info: info = {}): User => { 20 | const privID = `${fnname}-${testcase}-${genRandom(2)}` as UserID; 21 | const pubID = getHash(privID); 22 | return { privID, pubID, info }; 23 | }; 24 | 25 | export const genAnonUser = 
(info: info = {}): User => { 26 | const privID = `user-${genRandom()}` as UserID; 27 | const pubID = getHash(privID); 28 | return { privID, pubID, info }; 29 | }; 30 | 31 | export const genUsers = (fnname: string, testcase: string[]): userArray => { 32 | const users: userArray = {}; 33 | for (const tc of testcase) 34 | users[tc] = genUser(fnname, tc); 35 | return users; 36 | }; 37 | 38 | export const genUsersUsername = (fnname: string, case_usernames: Map): usernameUserArray => { 39 | const cases = Array.from(case_usernames.keys()); 40 | const users = genUsers(fnname, cases); 41 | case_usernames.forEach((username, tc) => (users[tc] as UsernameUser).username = username); 42 | return users as usernameUserArray; 43 | }; -------------------------------------------------------------------------------- /test/utils/getRandom.ts: -------------------------------------------------------------------------------- 1 | import crypto from "crypto"; 2 | 3 | export const genRandom = (bytes=8): string => crypto.pseudoRandomBytes(bytes).toString("hex"); 4 | 5 | export const genRandomValue = (prefix: string, identifier: string, bytes=8): string => `${prefix}-${identifier}-${genRandom(bytes)}`; 6 | export const multiGenRandomValue = (prefix: string, identifier: string, count: number, bytes=8): string[] => { 7 | const arr: string[] = []; 8 | for (let i = 0; i < count; i++) arr.push(genRandomValue(prefix, identifier, bytes)); 9 | return arr; 10 | }; -------------------------------------------------------------------------------- /test/utils/httpClient.ts: -------------------------------------------------------------------------------- 1 | import { config } from "../../src/config"; 2 | import axios, { AxiosRequestConfig } from "axios"; 3 | 4 | const defaultConfig: AxiosRequestConfig = { 5 | baseURL: `http://localhost:${config.port}`, 6 | validateStatus: (status) => status < 500 7 | }; 8 | 9 | export const client = axios.create(defaultConfig); 10 | 
-------------------------------------------------------------------------------- /test/utils/reset.ts: -------------------------------------------------------------------------------- 1 | // drop postgres tables 2 | // reset redis cache 3 | import { config } from "../../src/config"; 4 | import { createClient } from "redis"; 5 | import { Pool } from "pg"; 6 | import { Logger } from "../../src/utils/logger"; 7 | 8 | export async function resetRedis() { 9 | if (config?.redis?.enabled && config.mode === "test") { 10 | const client = createClient(config.redis); 11 | await client.connect(); 12 | await client.flushAll(); 13 | } 14 | } 15 | export async function resetPostgres() { 16 | if (process.env.TEST_POSTGRES && config.mode == "test" && config.postgres) { 17 | const pool = new Pool({ ...config.postgres }); 18 | await pool.query(`DROP DATABASE IF EXISTS "sponsorTimes"`); 19 | await pool.query(`DROP DATABASE IF EXISTS "privateDB"`); 20 | await pool.end().catch(err => Logger.error(`closing db (postgres): ${err}`)); 21 | } 22 | } --------------------------------------------------------------------------------