├── .all-contributorsrc
├── .dockerignore
├── .env
├── .github
│   ├── FUNDING.yml
│   └── workflows
│       └── nodejs.yml
├── .gitignore
├── .gitlab-ci.yml
├── .husky
│   └── pre-commit
├── .run
│   └── Template Jest.run.xml
├── .vscode
│   ├── launch.json
│   └── settings.json
├── Dockerfile
├── LICENSE
├── README.md
├── db-migrations
│   ├── 0001-area-sorting.js
│   ├── 0002-create-history-indexes.js
│   ├── 0003-date-climbed-to-date.js
│   ├── 0004-unique-user-climb-date-style.js
│   ├── 0005-area-sorting.js
│   ├── 0006-capitalize-tick-style-and-attempt-type.js
│   ├── 0007-tick-style-nullification.js
│   ├── 0008-fix-swapped-tick-style.js
│   └── 0009-fix-some-bouldering-ticks.js
├── docker-compose.override.yml
├── docker-compose.yml
├── documentation
│   ├── layers.md
│   ├── testing.md
│   └── tick_logic.md
├── export.sh
├── hacks
│   └── countries
│       ├── tmp-list.json
│       └── transform.cjs
├── jest.config.cjs
├── keyfile
├── kubernetes
│   └── export-cronjob.yml
├── migrate-db.sh
├── mongo-clean.sh
├── mongo_setup.sh
├── package.json
├── refresh-db.sh
├── scripts
│   └── upload-tiles.sh
├── seed-db.sh
├── src
│   ├── Config.ts
│   ├── GradeUtils.ts
│   ├── __tests__
│   │   ├── areas.ts
│   │   ├── bulkImport.test.ts
│   │   ├── gradeUtils.ts
│   │   ├── history.ts
│   │   ├── import-example.json
│   │   ├── media.e2e.ts
│   │   ├── organizations.ts
│   │   ├── ticks.ts
│   │   └── user.e2e.ts
│   ├── auth
│   │   ├── index.ts
│   │   ├── local-dev
│   │   │   ├── middleware.ts
│   │   │   └── permissions.ts
│   │   ├── middleware.ts
│   │   ├── permissions.ts
│   │   ├── rules.ts
│   │   └── util.ts
│   ├── data
│   │   └── countries-with-lnglat.json
│   ├── db
│   │   ├── AreaSchema.ts
│   │   ├── AreaTypes.ts
│   │   ├── BulkImportTypes.ts
│   │   ├── ChangeEventType.ts
│   │   ├── ChangeLogSchema.ts
│   │   ├── ChangeLogType.ts
│   │   ├── ClimbHistorySchema.ts
│   │   ├── ClimbHistoryType.ts
│   │   ├── ClimbSchema.ts
│   │   ├── ClimbTypes.ts
│   │   ├── MediaObjectSchema.ts
│   │   ├── MediaObjectTypes.ts
│   │   ├── OrganizationSchema.ts
│   │   ├── OrganizationTypes.ts
│   │   ├── PostSchema.ts
│   │   ├── PostTypes.ts
│   │   ├── TagSchema.ts
│   │   ├── TagTypes.ts
│   │   ├── TickSchema.ts
│   │   ├── TickTypes.ts
│   │   ├── UserSchema.ts
│   │   ├── UserTypes.ts
│   │   ├── XMediaSchema.ts
│   │   ├── XMediaTypes.ts
│   │   ├── edit
│   │   │   └── streamListener.ts
│   │   ├── export
│   │   │   ├── Typesense
│   │   │   │   ├── Client.ts
│   │   │   │   ├── Typesense.ts
│   │   │   │   ├── TypesenseSchemas.ts
│   │   │   │   ├── Utils.ts
│   │   │   │   └── transformers.ts
│   │   │   ├── common
│   │   │   │   ├── index.ts
│   │   │   │   └── processor.ts
│   │   │   ├── json
│   │   │   │   ├── area.resolver.test.ts
│   │   │   │   ├── area.resolver.ts
│   │   │   │   ├── async-file.processor.test.ts
│   │   │   │   ├── async-file.processor.ts
│   │   │   │   └── index.ts
│   │   │   └── queries
│   │   │       ├── defaults.ts
│   │   │       ├── get-all-areas.ts
│   │   │       ├── get-all-climbs.ts
│   │   │       └── index.ts
│   │   ├── import
│   │   │   ├── ClimbTransformer.ts
│   │   │   ├── __tests__
│   │   │   │   └── climb-data.json
│   │   │   └── usa
│   │   │       ├── AreaTransformer.ts
│   │   │       ├── AreaTree.ts
│   │   │       ├── LinkClimbsWithCrags.ts
│   │   │       ├── SeedState.ts
│   │   │       ├── USADay0Seed.ts
│   │   │       ├── __tests__
│   │   │       │   ├── Tree.test.ts
│   │   │       │   └── Utils.test.ts
│   │   │       └── us-states.ts
│   │   ├── index.ts
│   │   └── utils
│   │       ├── Aggregate.ts
│   │       ├── __tests__
│   │       │   └── Aggregate.test.ts
│   │       ├── index.ts
│   │       └── jobs
│   │           ├── AddCountriesJob.ts
│   │           ├── CragUpdater.ts
│   │           ├── MapTiles
│   │           │   ├── exportCmd.ts
│   │           │   └── init.ts
│   │           ├── TreeUpdaters
│   │           │   └── updateAllAreas.ts
│   │           ├── UpdateStatsJob.ts
│   │           └── migration
│   │               ├── CreateMediaMetaCollection.ts
│   │               ├── CreateUsersCollection.ts
│   │               └── SirvClient.ts
│   ├── geo-utils.ts
│   ├── graphql
│   │   ├── area
│   │   │   ├── AreaMutations.ts
│   │   │   ├── AreaQueries.ts
│   │   │   └── index.ts
│   │   ├── climb
│   │   │   ├── ClimbMutations.ts
│   │   │   └── index.ts
│   │   ├── common
│   │   │   ├── DateScalar.ts
│   │   │   ├── MuuidScalar.ts
│   │   │   ├── index.ts
│   │   │   ├── resolvers.ts
│   │   │   └── typeDef.ts
│   │   ├── history
│   │   │   ├── HistoryFieldResolvers.ts
│   │   │   ├── HistoryQueries.ts
│   │   │   └── index.ts
│   │   ├── media
│   │   │   ├── MediaResolvers.ts
│   │   │   ├── index.ts
│   │   │   ├── mutations.ts
│   │   │   └── queries.ts
│   │   ├── organization
│   │   │   ├── OrganizationMutations.ts
│   │   │   ├── OrganizationQueries.ts
│   │   │   └── index.ts
│   │   ├── resolvers.ts
│   │   ├── schema
│   │   │   ├── Area.gql
│   │   │   ├── AreaEdit.gql
│   │   │   ├── Climb.gql
│   │   │   ├── ClimbEdit.gql
│   │   │   ├── History.gql
│   │   │   ├── Media.gql
│   │   │   ├── Organization.gql
│   │   │   ├── OrganizationEdit.gql
│   │   │   ├── README.md
│   │   │   ├── Tag.gql
│   │   │   ├── Tick.gql
│   │   │   └── User.gql
│   │   ├── tag
│   │   │   ├── TagResolvers.ts
│   │   │   ├── index.ts
│   │   │   └── queries.ts
│   │   ├── tick
│   │   │   ├── TickImportTypeDef.ts
│   │   │   ├── TickMutations.ts
│   │   │   ├── TickQueries.ts
│   │   │   ├── TickResolvers.ts
│   │   │   └── index.ts
│   │   └── user
│   │       ├── UserMutations.ts
│   │       ├── UserQueries.ts
│   │       ├── UserResolvers.ts
│   │       └── index.ts
│   ├── logger.ts
│   ├── main.ts
│   ├── model
│   │   ├── AreaDataSource.ts
│   │   ├── AreaHistoryDatasource.ts
│   │   ├── BulkImportDataSource.ts
│   │   ├── ChangeLogDataSource.ts
│   │   ├── ClimbDataSource.ts
│   │   ├── ExperimentalUserDataSource.ts
│   │   ├── MediaDataSource.ts
│   │   ├── MutableAreaDataSource.ts
│   │   ├── MutableClimbDataSource.ts
│   │   ├── MutableMediaDataSource.ts
│   │   ├── MutableOrganizationDataSource.ts
│   │   ├── OrganizationDataSource.ts
│   │   ├── OrganizationHistoryDatasource.ts
│   │   ├── PostDataSource.ts
│   │   ├── TickDataSource.ts
│   │   ├── UserDataSource.ts
│   │   ├── XMediaDataSource.ts
│   │   └── __tests__
│   │       ├── AreaDataSource.test.ts
│   │       ├── AreaHistoryDataSource.ts
│   │       ├── AreaUtils.ts
│   │       ├── BulkDataSource.test.ts
│   │       ├── ChangeLogDS.ts
│   │       ├── MediaDataSource.ts
│   │       ├── MutableAreaDataSource.test.ts
│   │       ├── MutableClimbDataSource.ts
│   │       ├── MutableOrganizationDataSource.ts
│   │       ├── UserDataSource.ts
│   │       ├── tickValidation.ts
│   │       ├── ticks.ts
│   │       └── updateAreas.ts
│   ├── server.ts
│   ├── types.ts
│   └── utils
│       ├── helpers.ts
│       ├── inMemoryDB.ts
│       ├── sanitize.ts
│       └── testUtils.ts
├── tsconfig.json
├── tsconfig.release.json
└── yarn.lock
/.dockerignore:
--------------------------------------------------------------------------------
1 | node_modules
2 | build
3 | docker-compose.yml
4 | Dockerfile
5 | jest.*
6 | README.md
7 | mongo-clean.sh
8 | *.log
9 | .*
10 | !src
11 | !package.json
12 | !tsconfig.*
13 | !refresh-db.sh
14 | !yarn.lock
15 |
16 |
--------------------------------------------------------------------------------
/.env:
--------------------------------------------------------------------------------
1 | MONGO_SCHEME=mongodb
2 | MONGO_INITDB_ROOT_USERNAME=admin
3 | MONGO_INITDB_ROOT_PASSWORD=0nBelay!
4 | MONGO_SERVICE=localhost:27017
5 | MONGO_AUTHDB=admin
6 | MONGO_DBNAME=openbeta
7 | MONGO_TLS=false
8 | MONGO_READ_PREFERENCE=primary
9 | MONGO_REPLICA_SET_NAME=rs0
10 | CONTENT_BASEDIR=./tmp
11 | DEPLOYMENT_ENV=development
12 | CDN_URL=https://storage.googleapis.com/openbeta-staging
13 |
14 | # Typesense
15 | TYPESENSE_NODE=typesense-01.openbeta.io
16 | TYPESENSE_API_KEY_RW=ask_us_on_Discord
17 |
18 | # Auth0
19 | AUTH0_DOMAIN=https://dev-fmjy7n5n.us.auth0.com
20 | AUTH0_KID=uciP2tJdJ4BKWoz73Fmln
21 |
22 | MAPTILES_WORKING_DIR=./maptiles
23 |
--------------------------------------------------------------------------------
/.github/FUNDING.yml:
--------------------------------------------------------------------------------
1 | # These are supported funding model platforms
2 |
3 | open_collective: openbeta
4 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Logs
2 | logs
3 | *.log
4 | npm-debug.log*
5 | yarn-debug.log*
6 | yarn-error.log*
7 | lerna-debug.log*
8 | .DS_Store
9 |
10 | # Diagnostic reports (https://nodejs.org/api/report.html)
11 | report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
12 |
13 | # Runtime data
14 | pids
15 | *.pid
16 | *.seed
17 | *.pid.lock
18 | *.gz
19 | *.geojson
20 |
21 | # Directory for instrumented libs generated by jscoverage/JSCover
22 | lib-cov
23 |
24 | # Coverage directory used by tools like istanbul
25 | coverage
26 | *.lcov
27 |
28 | # nyc test coverage
29 | .nyc_output
30 |
31 | # Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
32 | .grunt
33 |
34 | # Bower dependency directory (https://bower.io/)
35 | bower_components
36 |
37 | # node-waf configuration
38 | .lock-wscript
39 |
40 | # Compiled binary addons (https://nodejs.org/api/addons.html)
41 | build/Release
42 |
43 | # Dependency directories
44 | node_modules/
45 | jspm_packages/
46 |
47 | # TypeScript v1 declaration files
48 | typings/
49 |
50 | # TypeScript cache
51 | *.tsbuildinfo
52 |
53 | # Optional npm cache directory
54 | .npm
55 |
56 | # Optional eslint cache
57 | .eslintcache
58 |
59 | # Microbundle cache
60 | .rpt2_cache/
61 | .rts2_cache_cjs/
62 | .rts2_cache_es/
63 | .rts2_cache_umd/
64 |
65 | # Optional REPL history
66 | .node_repl_history
67 |
68 | # Output of 'npm pack'
69 | *.tgz
70 |
71 | # Yarn Integrity file
72 | .yarn-integrity
73 |
74 | # dotenv environment variables file
75 | .env.test
76 | .env
77 |
78 | # parcel-bundler cache (https://parceljs.org/)
79 | .cache
80 |
81 | # Next.js build output
82 | .next
83 |
84 | # Nuxt.js build / generate output
85 | .nuxt
86 | dist
87 |
88 | # Gatsby files
89 | .cache/
90 | # Comment in the public line if your project uses Gatsby and *not* Next.js
91 | # https://nextjs.org/blog/next-9-1#public-directory-support
92 | # public
93 |
94 | # vuepress build output
95 | .vuepress/dist
96 |
97 | # Serverless directories
98 | .serverless/
99 |
100 | # FuseBox cache
101 | .fusebox/
102 |
103 | # DynamoDB Local files
104 | .dynamodb/
105 |
106 | # TernJS port file
107 | .tern-port
108 |
109 | build
110 | tmp
111 | .env.local
112 |
113 | # asdf
114 | .tool-versions
115 |
116 | # Intellij and VSCode
117 | .idea/
118 | *.iml
119 | .vscode/settings.json
120 |
121 | /export/
122 | /openbeta-export/
123 |
--------------------------------------------------------------------------------
/.gitlab-ci.yml:
--------------------------------------------------------------------------------
1 | image: registry.gitlab.com/openbeta/openbeta-nodejs-docker:16.3
2 |
3 | variables:
4 | GIT_DEPTH: 1
5 | DOCKER_DRIVER: overlay2
6 | DOCKER_TLS_CERTDIR: ""
7 | DOCKER_HOST: tcp://docker:2375
8 | IMAGE_LATEST: $CI_REGISTRY_IMAGE:latest
9 | IMAGE_CURRENT: $CI_REGISTRY_IMAGE:$CI_COMMIT_SHORT_SHA
10 | IMAGE_STAGING: $CI_REGISTRY_IMAGE:staging
11 | docker-build-staging:
12 | stage: build
13 | only: # Only run for these branches
14 | - staging
15 |
16 | tags:
17 | - docker
18 |
19 | image: docker:20.10.10
20 |
21 | services:
22 | - docker:20.10.10-dind
23 |
24 | before_script:
25 | - docker login -u $CI_REGISTRY_USER -p $CI_REGISTRY_PASSWORD $CI_REGISTRY
26 |
27 | script:
28 | - docker pull $IMAGE_STAGING || true
29 | - docker build --cache-from $IMAGE_STAGING --tag $IMAGE_CURRENT --tag $IMAGE_STAGING .
30 | - docker push $IMAGE_CURRENT
31 | - docker push $IMAGE_STAGING
32 |
33 | docker-build:
34 | stage: build
35 | only: # Only run for these branches
36 | - develop
37 | - main
38 |
39 | tags:
40 | - docker
41 |
42 | image: docker:20.10.10
43 |
44 | services:
45 | - docker:20.10.10-dind
46 |
47 | before_script:
48 | - docker login -u $CI_REGISTRY_USER -p $CI_REGISTRY_PASSWORD $CI_REGISTRY
49 |
50 | script:
51 | - docker pull $IMAGE_LATEST || true
52 | - docker build --cache-from $IMAGE_LATEST --tag $IMAGE_CURRENT --tag $IMAGE_LATEST .
53 | - docker push $IMAGE_CURRENT
54 | - docker push $IMAGE_LATEST
55 |
56 | deploy:
57 | stage: deploy
58 | tags:
59 | - docker
60 | only: # Only run for these branches
61 | - develop
62 | - main
63 | needs:
64 | - job: docker-build
65 | artifacts: true
66 | image:
67 | name: bitnami/git:latest
68 |
69 | before_script:
70 | - mkdir -p ~/.bin
71 | - curl -s "https://raw.githubusercontent.com/kubernetes-sigs/kustomize/master/hack/install_kustomize.sh" | bash -s -- ~/.bin
72 | - 'which ssh-agent || ( apt-get update -y && apt-get install openssh-client -y )'
73 | - eval $(ssh-agent -s)
74 | - echo "$SSH_PRIVATE_KEY" | tr -d '\r' | ssh-add - > /dev/null
75 | - mkdir -p ~/.ssh
76 | - ssh-keyscan gitlab.com >> ~/.ssh/known_hosts
77 | - chmod 644 ~/.ssh/known_hosts
78 |
79 | script:
80 | - git clone --quiet --depth 1 --branch main git@gitlab.com:openbeta/devops/graphql-api.git
81 | - cd graphql-api
82 | - ~/.bin/kustomize edit set image $IMAGE_CURRENT
83 | - git diff
84 | - if [[ ! -n $(git status --porcelain) ]]; then echo "No new changes. Skipping deployment."; exit 0; fi
85 | - git config --global user.name $GITLAB_USER_NAME
86 | - git config --global user.email $GITLAB_USER_EMAIL
87 | - git commit -am "Update deployment. Code=${CI_COMMIT_SHORT_SHA}"
88 | - git push
89 |
--------------------------------------------------------------------------------
/.husky/pre-commit:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env sh
2 | . "$(dirname -- "$0")/_/husky.sh"
3 |
4 | yarn lint
5 | yarn test
6 |
--------------------------------------------------------------------------------
/.run/Template Jest.run.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
--------------------------------------------------------------------------------
/.vscode/launch.json:
--------------------------------------------------------------------------------
1 | {
2 | // Use IntelliSense to learn about possible attributes.
3 | // Hover to view descriptions of existing attributes.
4 | // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
5 | "version": "0.2.0",
6 | "configurations": [
7 | {
8 | "type": "node",
9 | "request": "launch",
10 | "name": "Generate map tiles",
11 | "program": "${workspaceFolder}/src/db/utils/jobs/MapTiles/exportCmd.ts",
12 | "preLaunchTask": "tsc: build - tsconfig.json",
13 | "outFiles": [
14 | "${workspaceFolder}/build/**/*.js"
15 | ],
16 | "skipFiles": [
17 | "/**"
18 | ]
19 | },
20 | {
21 | "type": "node",
22 | "request": "launch",
23 | "name": "Typesense push",
24 | "program": "${workspaceFolder}/src/db/export/Typesense/Typesense.ts",
25 | "preLaunchTask": "tsc: build - tsconfig.json",
26 | "outFiles": [
27 | "${workspaceFolder}/build/**/*.js"
28 | ],
29 | "skipFiles": [
30 | "/**"
31 | ]
32 | },
33 | {
34 | "type": "node",
35 | "request": "launch",
36 | "name": "Update stats",
37 | "program": "${workspaceFolder}/src/db/utils/jobs/UpdateStatsJob.ts",
38 | "preLaunchTask": "tsc: build - tsconfig.json",
39 | "outFiles": [
40 | "${workspaceFolder}/build/**/*.js"
41 | ],
42 | "skipFiles": [
43 | "/**"
44 | ]
45 | },
46 | {
47 | "type": "node",
48 | "request": "launch",
49 | "name": "Launch API server",
50 | "skipFiles": [
51 | "/**"
52 | ],
53 | "program": "${workspaceFolder}/src/main.ts",
54 | "preLaunchTask": "tsc: build - tsconfig.json",
55 | "outFiles": [
56 | "${workspaceFolder}/build/**/*.js"
57 | ],
58 | "console": "integratedTerminal"
59 | },
60 | {
61 | "type": "node",
62 | "request": "launch",
63 | "name": "Launch API Server (serve-dev)",
64 | "skipFiles": [
65 | "/**"
66 | ],
67 | "program": "${workspaceFolder}/src/main.ts",
68 | "preLaunchTask": "tsc: build - tsconfig.json",
69 | "outFiles": [
70 | "${workspaceFolder}/build/**/*.js"
71 | ],
72 | "runtimeExecutable": "yarn",
73 | "runtimeArgs": [
74 | "run",
75 | "serve-dev"
76 | ],
77 | "console": "integratedTerminal"
78 | },
79 | {
80 | "name": "Debug Jest Tests",
81 | "type": "node",
82 | "request": "launch",
83 | "env": {
84 | "NODE_OPTIONS": "--experimental-vm-modules"
85 | },
86 | "runtimeArgs": [
87 | "--inspect-brk",
88 | "${workspaceRoot}/node_modules/.bin/jest",
89 | "--runInBand",
90 | "history"
91 | ],
92 | "console": "integratedTerminal",
93 | "internalConsoleOptions": "neverOpen"
94 | },
95 | {
96 | "type": "node",
97 | "name": "vscode-jest-tests.v2",
98 | "request": "launch",
99 | "env": {
100 | "NODE_OPTIONS": "--experimental-vm-modules"
101 | },
102 | "args": [
103 | "${workspaceRoot}/node_modules/.bin/jest",
104 | "--runInBand",
105 | "--watchAll=false",
106 | "--testNamePattern",
107 | "${jest.testNamePattern}",
108 | "--runTestsByPath",
109 | "${jest.testFile}"
110 | ],
111 | "console": "integratedTerminal",
112 | "internalConsoleOptions": "neverOpen"
113 | }
114 | ]
115 | }
--------------------------------------------------------------------------------
/.vscode/settings.json:
--------------------------------------------------------------------------------
1 | {
2 | "git.ignoreLimitWarning": true,
3 | "[typescriptreact]": {
4 | "editor.formatOnType": true,
5 | "editor.formatOnSave": true,
6 | "editor.defaultFormatter": "standard.vscode-standard"
7 | },
8 | "[typescript]": {
9 | "editor.formatOnSave": true,
10 | "editor.defaultFormatter": "standard.vscode-standard"
11 | },
12 | "standard.enable": true,
13 | "standard.autoFixOnSave": true,
14 | "standard.engine": "ts-standard",
15 | "standard.treatErrorsAsWarnings": true,
16 | "javascript.format.enable": false,
17 | "javascript.format.semicolons": "remove",
18 | "typescript.format.enable": false,
19 | "prettier.enable": false,
20 | "editor.defaultFormatter": "standard.vscode-standard"
21 | }
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM registry.gitlab.com/openbeta/openbeta-nodejs-docker:18
2 |
3 | ENV APP_DIR=/apps/openbeta-graphql
4 |
5 | WORKDIR ${APP_DIR}
6 | EXPOSE 4000
7 |
8 | RUN mkdir -p ${APP_DIR}
9 |
10 | COPY . *.env ./
11 |
12 |
13 | RUN yarn install --no-progress && \
14 | yarn build-release
15 |
16 | CMD node --experimental-json-modules build/main.js
17 |
--------------------------------------------------------------------------------
/db-migrations/0001-area-sorting.js:
--------------------------------------------------------------------------------
1 | /**
2 | * Issue: 221
3 | */
4 |
5 | const rs = db.areas.updateMany({}, { $rename: { 'metadata.left_right_index': 'metadata.leftRightIndex' } })
6 |
7 | printjson(rs)
8 |
--------------------------------------------------------------------------------
/db-migrations/0002-create-history-indexes.js:
--------------------------------------------------------------------------------
1 | /**
2 | * Issue: 287
3 | */
4 |
5 | db.change_logs.createIndex({ createdAt: -1 })
6 | db.change_logs.createIndex({ 'changes.fullDocument.metadata.area_id': 1, 'changes.kind': 1 })
7 | db.change_logs.createIndex({ 'changes.kind': 1 })
--------------------------------------------------------------------------------
/db-migrations/0003-date-climbed-to-date.js:
--------------------------------------------------------------------------------
1 | /**
2 | * https://github.com/OpenBeta/openbeta-graphql/pull/301
3 | **/
4 |
5 | const rs = db.ticks.updateMany(
6 | {
7 | dateClimbed: { $exists: true }
8 | },
9 | [{
10 | $set: {
11 | dateClimbed: {
12 | $dateFromString: {
13 | dateString: '$dateClimbed',
14 | // We want to ascribe an hour of day to the climb, so it shows
15 | // up on the correct day when displayed in the user's timezone.
16 | // Most climbs are in the US, MT time is a good first approximation.
17 | timezone: 'America/Denver'
18 | }
19 | }
20 | }
21 | }]
22 | )
23 |
24 | printjson(rs)
25 |
--------------------------------------------------------------------------------
/db-migrations/0004-unique-user-climb-date-style.js:
--------------------------------------------------------------------------------
1 | /**
2 | * https://github.com/OpenBeta/open-tacos/issues/631
3 | **/
4 |
5 | rs1 = db.ticks.createIndex({ userId: -1 })
6 | rs2 = db.ticks.createIndex({ userId: -1, climbId: -1 })
7 | rs3 = db.ticks.dropIndex({ climbId: 1, dateClimbed: 1, style: 1, userId: 1, source: 1 })
8 |
9 | printjson(rs1)
10 | printjson(rs2)
11 | printjson(rs3)
12 |
--------------------------------------------------------------------------------
/db-migrations/0005-area-sorting.js:
--------------------------------------------------------------------------------
1 | /**
2 | * Issue: 375
3 | */
4 |
5 | db.areas.dropIndexes('metadata.leftRightIndex_1')
6 |
--------------------------------------------------------------------------------
/db-migrations/0006-capitalize-tick-style-and-attempt-type.js:
--------------------------------------------------------------------------------
1 | // This migration will update all tick styles and attemptTypes with incorrect capitalization
2 | const tickCapitalizationRs = db.ticks.updateMany(
3 | {
4 | $or: [
5 | { style: 'lead' },
6 | { style: 'follow' },
7 | { style: 'tr' },
8 | { style: 'top_rope' },
9 | { style: 'solo' },
10 | { style: 'aid' },
11 | { style: 'boulder' },
12 | { attemptType: 'onsight' },
13 | { attemptType: 'redpoint' },
14 | { attemptType: 'flash' },
15 | { attemptType: 'pinkpoint' },
16 | { attemptType: 'send' },
17 | { attemptType: 'attempt' },
18 | { attemptType: 'frenchfree' },
19 | { attemptType: 'repeat' }
20 | ]
21 | },
22 | [
23 | {
24 | $set: {
25 | style: {
26 | $switch: {
27 | branches: [
28 | { case: { $eq: ['$style', 'lead'] }, then: 'Lead' },
29 | { case: { $eq: ['$style', 'follow'] }, then: 'Follow' },
30 | { case: { $eq: ['$style', 'tr'] }, then: 'TR' },
31 | { case: { $eq: ['$style', 'top_rope'] }, then: 'TR' },
32 | { case: { $eq: ['$style', 'solo'] }, then: 'Solo' },
33 | { case: { $eq: ['$style', 'aid'] }, then: 'Aid' },
34 | { case: { $eq: ['$style', 'boulder'] }, then: 'Boulder' }
35 | ],
36 | default: '$style'
37 | }
38 | },
39 | attemptType: {
40 | $switch: {
41 | branches: [
42 | { case: { $eq: ['$attemptType', 'redpoint'] }, then: 'Redpoint' },
43 | { case: { $eq: ['$attemptType', 'onsight'] }, then: 'Onsight' },
44 | { case: { $eq: ['$attemptType', 'flash'] }, then: 'Flash' },
45 | { case: { $eq: ['$attemptType', 'pinkpoint'] }, then: 'Pinkpoint' },
46 | { case: { $eq: ['$attemptType', 'send'] }, then: 'Send' },
47 | { case: { $eq: ['$attemptType', 'attempt'] }, then: 'Attempt' },
48 | { case: { $eq: ['$attemptType', 'frenchfree'] }, then: 'Frenchfree' },
49 | { case: { $eq: ['$attemptType', 'repeat'] }, then: 'Repeat' }
50 | ],
51 | default: '$attemptType'
52 | }
53 | }
54 | }
55 | }
56 | ]
57 | );
58 |
59 | printjson(tickCapitalizationRs);
--------------------------------------------------------------------------------
/db-migrations/0007-tick-style-nullification.js:
--------------------------------------------------------------------------------
1 | // This migration will update all ticks that have values of "N/A" for style or attemptType by unsetting those fields
2 | const tickStyleNullificationRs = db.ticks.updateMany(
3 | {
4 | style: "N/A"
5 | },
6 | {
7 | $unset: { style: "" }
8 | }
9 | );
10 |
11 | const tickAttemptTypeNullificationRs = db.ticks.updateMany(
12 | {
13 | attemptType: "N/A"
14 | },
15 | {
16 | $unset: { attemptType: "" }
17 | }
18 | );
19 |
20 | printjson(tickStyleNullificationRs);
21 | printjson(tickAttemptTypeNullificationRs);
--------------------------------------------------------------------------------
/db-migrations/0008-fix-swapped-tick-style.js:
--------------------------------------------------------------------------------
1 | // This migration will fix ticks where attemptType has a value that belongs in style instead.
2 | const attemptTypeToStyleRs = db.ticks.updateMany(
3 | {
4 | $or: [
5 | { attemptType: 'Lead' },
6 | { attemptType: 'TR' },
7 | { attemptType: 'Follow' },
8 | { attemptType: 'Solo' },
9 | { attemptType: 'Aid' },
10 | { attemptType: 'Boulder' }
11 | ]
12 | },
13 | [
14 | {
15 | $set: {
16 | style: {
17 | $switch: {
18 | branches: [
19 | { case: { $eq: ['$attemptType', 'Lead'] }, then: 'Lead' },
20 | { case: { $eq: ['$attemptType', 'TR'] }, then: 'TR' },
21 | { case: { $eq: ['$attemptType', 'Follow'] }, then: 'Follow' },
22 | { case: { $eq: ['$attemptType', 'Solo'] }, then: 'Solo' },
23 | { case: { $eq: ['$attemptType', 'Aid'] }, then: 'Aid' },
24 | { case: { $eq: ['$attemptType', 'Boulder'] }, then: 'Boulder' }
25 | ],
26 | default: '$style'
27 | }
28 | }
29 | }
30 | }
31 | ]
32 | );
33 |
34 | // Now nullify the attemptType field since we've moved that value to style
35 | const nullifyAttemptTypeRs = db.ticks.updateMany(
36 | {
37 | $or: [
38 | { attemptType: 'Lead' },
39 | { attemptType: 'TR' },
40 | { attemptType: 'Follow' },
41 | { attemptType: 'Solo' },
42 | { attemptType: 'Aid' },
43 | { attemptType: 'Boulder' }
44 | ]
45 | },
46 | {
47 | $unset: { attemptType: "" }
48 | }
49 | );
50 |
51 | printjson(attemptTypeToStyleRs);
52 | printjson(nullifyAttemptTypeRs);
--------------------------------------------------------------------------------
/db-migrations/0009-fix-some-bouldering-ticks.js:
--------------------------------------------------------------------------------
1 | // This migration will fix ticks where the style was set to a value that belongs in attemptType instead, specifically for boulders
2 | // this is likely specific to one user, but it can be run for all ticks that may be affected.
3 |
4 | // Move the value of style to attemptType (and capitalize it)
5 | const incorrectBoulderTick1Rs = db.ticks.updateMany(
6 | {
7 | $or: [
8 | { style: 'send' },
9 | { style: 'attempt' },
10 | { style: 'flash' }
11 | ]
12 | },
13 | [
14 | {
15 | $set: {
16 | attemptType: {
17 | $switch: {
18 | branches: [
19 | { case: { $eq: ['$style', 'send'] }, then: 'Send' },
20 | { case: { $eq: ['$style', 'attempt'] }, then: 'Attempt' },
21 | { case: { $eq: ['$style', 'flash'] }, then: 'Flash' }
22 | ]
23 | }
24 | }
25 | }
26 | }
27 | ]
28 | );
29 |
30 | // Set style to 'Boulder'
31 | const incorrectBoulderTick2Rs = db.ticks.updateMany(
32 | {
33 | $or: [
34 | { style: 'send' },
35 | { style: 'attempt' },
36 | { style: 'flash' }
37 | ]
38 | },
39 | [
40 | {
41 | $set: { style: 'Boulder' }
42 | }
43 | ]
44 | );
45 |
46 | printjson(incorrectBoulderTick1Rs);
47 | printjson(incorrectBoulderTick2Rs);
--------------------------------------------------------------------------------
/docker-compose.override.yml:
--------------------------------------------------------------------------------
1 | version: '3.8'
2 | services:
3 | mongo-express:
4 | image: mongo-express
5 | restart: always
6 | env_file:
7 | - .env
8 | ports:
9 | - 8081:8081
10 | environment:
11 | ME_CONFIG_MONGODB_URL: mongodb://${MONGO_INITDB_ROOT_USERNAME}:${MONGO_INITDB_ROOT_PASSWORD}@mongo_opentacos:27017/opentacos?authSource=admin&replicaSet=rs0
12 | depends_on:
13 | - mongo_opentacos
14 | - mongosetup
15 | volumes:
16 | opentacos_mongodb_data:
17 |
--------------------------------------------------------------------------------
/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: '3.8'
2 | services:
3 | mongo_opentacos:
4 | hostname: mongodb
5 | image: mongo:5
6 | env_file:
7 | - .env
8 | ports:
9 | - 27017:27017
10 | volumes:
11 | - opentacos_mongodb_data:/data/db
12 | - ./:/opt/keyfile/
13 | healthcheck:
14 | test: test $$(echo "rs.initiate().ok || rs.status().ok" | mongo -u $${MONGO_INITDB_ROOT_USERNAME} -p $${MONGO_INITDB_ROOT_PASSWORD} --quiet) -eq 1
15 | interval: 10s
16 | start_period: 30s
17 | entrypoint:
18 | - bash
19 | - -c
20 | - |
21 | cp /opt/keyfile/keyfile /data/db/replica.key
22 | chmod 400 /data/db/replica.key
23 | chown 999:999 /data/db/replica.key
24 | exec docker-entrypoint.sh $$@
25 | command: "mongod --bind_ip_all --replSet rs0 --keyFile /data/db/replica.key"
26 | mongosetup:
27 | image: mongo:5
28 | depends_on:
29 | - mongo_opentacos
30 | env_file:
31 | - .env
32 | volumes:
33 | - .:/scripts
34 | restart: "no"
35 | entrypoint: [ "bash", "/scripts/mongo_setup.sh" ]
36 | volumes:
37 | opentacos_mongodb_data:
38 |
--------------------------------------------------------------------------------
/documentation/layers.md:
--------------------------------------------------------------------------------
1 | # Layers
2 | *Updated 2023-06-15*
3 |
4 | ## Overview
5 | One way to think of our backend is as three layers wrapping the raw data sitting in MongoDB.
6 | 1. GraphQL
7 | 2. Datasources
8 | 3. Models
9 |
10 | Incoming data (API requests) passes through GraphQL > Datasource > Model, and the resulting data exits in reverse order, from Model > Datasource > GraphQL. A minimal sketch at the end of this page shows how a query travels through the three layers.
11 |
12 | When you change our data model, e.g. adding a new field to a climb object, you should expect to update each of the three layers as well.
13 |
14 | ## GraphQL
15 | The outermost GraphQL layer that receives API calls. Our big integration tests (see [Testing](testing.md)) call this layer.
16 |
17 | Code is in `src/graphql/`.
18 |
19 | ## Datasources
20 | The middle Mongoose datasource objects that expose commands to the GraphQL resolvers. Mongoose is our MongoDB Node.js ORM. Our small integration tests exercise the stack from this layer down.
21 |
22 | Code is in `src/model/`.
23 |
24 | ## Models
25 | The inner Mongoose models/schemas that represent how data is stored in the MongoDB.
26 |
27 | Code is in `src/db/`.
28 |
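29 | ## Example
30 | Below is a minimal sketch of how a single query travels through the three layers. The names are illustrative (loosely modeled on `AreaDataSource` and `AreaSchema`), not a verbatim excerpt from the codebase:
31 | 
32 | ```ts
33 | import mongoose from 'mongoose'
34 | 
35 | // Model layer (src/db/): a Mongoose schema describing how areas are stored in MongoDB.
36 | const AreaSchema = new mongoose.Schema({
37 |   area_name: { type: String, required: true },
38 |   metadata: { type: mongoose.Schema.Types.Mixed }
39 | })
40 | const AreaModel = mongoose.model('areas', AreaSchema)
41 | 
42 | // Datasource layer (src/model/): wraps Mongoose commands for the resolvers.
43 | class AreaDataSource {
44 |   async findOneAreaByUUID (uuid: string): Promise<any> {
45 |     return await AreaModel.findOne({ 'metadata.area_id': uuid }).lean()
46 |   }
47 | }
48 | 
49 | // GraphQL layer (src/graphql/): the resolver only delegates downward.
50 | const resolvers = {
51 |   Query: {
52 |     area: async (_: unknown, { uuid }: { uuid: string }, { dataSources }: any) =>
53 |       await dataSources.areas.findOneAreaByUUID(uuid)
54 |   }
55 | }
56 | ```
57 | 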
--------------------------------------------------------------------------------
/documentation/testing.md:
--------------------------------------------------------------------------------
1 | # Testing
2 | *Updated 2023-06-15*
3 |
4 | ## Overview
5 | There are currently two broad classes of tests in this repo: Big integration tests and small ones. Both sets are integration tests because they validate long chains of functionality as opposed to single classes or functions (unit tests).
6 |
7 | The big set is called "big" because it is truly end-to-end. It posts GraphQL queries and checks their output, which is literally what the API does in production. The small set skips the GraphQL layer (you might want to read more about layers [here](layers.md)) and instead calls datasource functions directly.
8 |
9 | ## Big Integration Tests
10 | These tests mock up a GraphQL backend, and make HTTP calls to it. Since these tests are so realistic, they are immensely protective, illustrative and confidence-building. Open-tacos developers can cut and paste the GraphQL queries in these tests and use them to build the frontend.
11 |
12 | These tests are stored in `src/__tests__/`. The setup code is in `src/utils/testUtils.ts`. Note how most of the code is oriented around setting up and tearing down a GraphQL server and an in-memory Mongo DB.
13 |
14 | We rely on `mongodb-memory-server` (a Node package) for the in-memory MongoDB. Because it runs in memory, it is lightweight and easy to set up during `beforeAll`. Early on, we were hampered by the fact that the standard Mongo server that `mongodb-memory-server` offers doesn't support Mongo transactions, which we use extensively. This is why we wrote small integration tests, which rely on a local instance of MongoDB. However, in 2021, the package started to offer an in-memory replica set which does support Mongo transactions. Since then, we've been able to write big integration tests against a replica set that supports everything we need. A skeletal example appears at the end of this page.
15 |
16 |
17 | ## Small Integration Tests
18 | These essentially test datasource functions. E.g. the key line in such a test could be `await users.createOrUpdateUserProfile(updater, input)` ([source](../src/model/__tests__/UserDataSource.ts)). This tests the `createOrUpdateUserProfile` function of the `user` datasource. Datasources sit one layer below the GraphQL layer (another plug to read [Layers](layers.md)). In `src/graphql/resolvers.ts`, you can see how the GraphQL layer calls datasource functions to resolve entities in the queries.
19 |
20 | Other than their inability to test how the GraphQL layer resolves queries, the main shortcoming of these tests is their poor portability. To use them, you need to set up a local MongoDB for the tests to read from and write to. This is why the main [README](../README.md) page gets developers to spin up a Docker instance and edit the `/etc/hosts` mongod mappings.
21 |
22 | In general, we should phase these out in favor of big integration tests. In case you need to debug them or, god forbid, write new ones, they reside in `src/model/__tests__/`.
23 |
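24 | ## Example
25 | For orientation, here is a skeletal big integration test. The helper names (`setUpServer`, `queryAPI`) and the shape of their return values are illustrative of what `src/utils/testUtils.ts` provides; check that file for the real signatures:
26 | 
27 | ```ts
28 | // NOTE: illustrative sketch; see src/utils/testUtils.ts for the actual helpers.
29 | import { setUpServer, queryAPI } from '../utils/testUtils.js'
30 | 
31 | describe('areas API', () => {
32 |   let testEnv: any
33 | 
34 |   beforeAll(async () => {
35 |     // Boots an Apollo server backed by an in-memory Mongo replica set,
36 |     // so transactions work without a local MongoDB install.
37 |     testEnv = await setUpServer()
38 |   })
39 | 
40 |   afterAll(async () => {
41 |     await testEnv.server.stop()
42 |     await testEnv.inMemoryDB.close()
43 |   })
44 | 
45 |   it('retrieves an area by uuid', async () => {
46 |     const response = await queryAPI({
47 |       app: testEnv.app,
48 |       query: 'query ($uuid: ID) { area (uuid: $uuid) { areaName uuid } }',
49 |       variables: { uuid: 'an-area-uuid' }
50 |     })
51 |     expect(response.statusCode).toBe(200)
52 |     expect(response.body.data.area).toBeDefined()
53 |   })
54 | })
55 | ```
56 | 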
--------------------------------------------------------------------------------
/export.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | if [ -z "${GITHUB_ACCESS_TOKEN}" ]
4 | then
5 | echo "GITHUB_ACCESS_TOKEN not defined."
6 | exit 1
7 | fi
8 |
9 | echo "cloning openbeta-export repository"
10 | git clone --depth 1 --branch production https://ob-bot-user:${GITHUB_ACCESS_TOKEN}@github.com/OpenBeta/openbeta-export || exit 1
11 | cd openbeta-export
12 | git config user.name "db-export-bot"
13 | git config user.email "db-export-bot@noreply"
14 | cd ..
15 |
16 | echo "start exporting database..."
17 | yarn export:json:full --output openbeta-export
18 |
19 | echo "... finished export. Committing data..."
20 |
21 | cd openbeta-export || exit 1
22 | git add -A
23 | git commit -am "export openbeta data"
24 | git push origin production
25 |
--------------------------------------------------------------------------------
/hacks/countries/transform.cjs:
--------------------------------------------------------------------------------
1 | const list = require('./tmp-list.json')
2 | const _ = require('underscore')
3 |
4 | const l = list.map(entry => ({
5 | alpha3: entry['Alpha-3 code'],
6 | lnglat: [entry['Longitude (average)'], entry['Latitude (average)']]
7 | }))
8 |
9 | const obj = _.indexBy(l, 'alpha3')
10 | console.log(JSON.stringify(obj, null, 2))
11 |
--------------------------------------------------------------------------------
/jest.config.cjs:
--------------------------------------------------------------------------------
1 | module.exports = {
2 | testTimeout: 2 * 60 * 1000,
3 | moduleNameMapper: {
4 | '^(\\.{1,2}/.*)\\.js$': '$1'
5 | },
6 | extensionsToTreatAsEsm: ['.ts'],
7 | transform: {
8 | '^.+\\.(mt|t|cj|j)s$': [
9 | 'ts-jest',
10 | {
11 | useESM: true
12 | }
13 | ]
14 | },
15 | testEnvironment: 'node',
16 | testMatch: [
17 | '<rootDir>/**/__tests__/*.ts'
18 | ]
19 | }
20 |
--------------------------------------------------------------------------------
/keyfile:
--------------------------------------------------------------------------------
1 | EE5d34CFwGOHs2YvZZJ7b/ki0flbGG+zKoYKEvwho8TXIq5bQM1FXg7a0zZPFcYX
2 | kCGdNSldTqdYpjtBxKBqId7t6kFs5S3XKfC7BMC5xm7VDIGkDY+xbwg+zivKAsF5
3 | 8HW7h5oibt9e3lQQVugtA+MdXoeH7eZbWckjVKQ26/odsc0zVV9dAsjjbmpCDRBq
4 | zQvTrP38urQajy9LCEITeToQrgbKkoovygKivWXVsNn4iFqwNpch/LLYihKi09tm
5 | eq1/RRHEIkDJz7cHx/FY2pXnzd6VbOSP59JCGR57wOlEmy5ZRCUjKXMCbE+mb5VS
6 | v3T2EllZKqyBSDUkg5ZkHHgiZ4XuzFJp6SQZMlX1hDo/+If+s8TyYClACr5aXqN5
7 | 0+qNl1pTd+eipsvpGSy1seh6MBy2hgd91ft1asYqZ87TACSvJmPy43PV1hmfgDQ1
8 | W/SLYciy+W1RW2fIkkukjRTEeDWlPDxmYvXdZCImYBJeE/Wf3OPKn46pJZ+gG2cx
9 | iapfRDILIiqUQNuieE6ArvSGIbEnuLGgZH5ktCZmnlTsL32TzL8hskhAUgzgEpKn
10 | qegE7U5W5ACLm6KuxyfvvrcfOuxYAdYlJq+nzaUqkMPkkGissBSYiJv17qdVpptC
11 | piAFYx7UB+03h6n3e3DXaVNVo9dbOHAXT4BLTHdCT9b5UUUzCk6ebfGx08rmSBcY
12 | fLniVXdVNGjaUE6sAEKftdvn53cS5EzYkpxHt4DXFFktR6V+KKpuud3wf7GyerEL
13 | uB6llOjl9o2I+SUKg87tPLl7p4YRpg6Q00Naxgf+e1SkBdf4LBuFwW8yWwtTs58a
14 | //pw2+ZB3rnZIxql4i/y6KutBtZKH00t5Wpl+pl1OogoGEy/B+mNi/1trdCmaWbW
15 | +VNp17ljBIfPQlhIibgvDrY1XVCuX6um+tfobZQI1Klc6cC+Bh+2OeEKgp6Imq4H
16 | 031FyeuyBZ/06DirIDS98B1alXWT
17 |
--------------------------------------------------------------------------------
/kubernetes/export-cronjob.yml:
--------------------------------------------------------------------------------
1 | apiVersion: batch/v1
2 | kind: CronJob
3 | metadata:
4 | name: export-db-prod
5 | spec:
6 | schedule: "@daily"
7 | concurrencyPolicy: Forbid
8 | successfulJobsHistoryLimit: 1
9 | jobTemplate:
10 | spec:
11 | backoffLimit: 0
12 | template:
13 | spec:
14 | containers:
15 | - name: openbeta-graph-api
16 | image: vnguyen/openbeta-graph-api:latest
17 | imagePullPolicy: Always
18 | command: ["yarn", "export-prod"]
19 | envFrom:
20 | - configMapRef:
21 | name: graph-api-prod
22 | resources:
23 | requests:
24 | memory: 128Mi
25 | restartPolicy: Never
26 |
--------------------------------------------------------------------------------
/migrate-db.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | if [ -z "$1" ]
4 | then
5 | echo "Missing migration file"
6 | exit 1
7 | fi
8 |
9 | . .env
10 |
11 | connStr="${MONGO_SCHEME}://${MONGO_INITDB_ROOT_USERNAME}:${MONGO_INITDB_ROOT_PASSWORD}@${MONGO_SERVICE}/${MONGO_DBNAME}?authSource=${MONGO_AUTHDB}&tls=${MONGO_TLS}&replicaSet=${MONGO_REPLICA_SET_NAME}"
12 |
13 | mongo "$connStr" $1
14 |
--------------------------------------------------------------------------------
/mongo-clean.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | docker compose down
4 |
5 | # MongoDb container won't run initializing scripts if there's
6 | # already a data volume.
7 | docker volume rm openbeta-graphql_opentacos_mongodb_data --force
8 |
9 | docker compose up -d
10 |
--------------------------------------------------------------------------------
/mongo_setup.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | echo "sleeping for 10 seconds"
3 | sleep 10
4 |
5 | echo mongo_setup.sh time now: `date +"%T" `
6 | mongosh --username "${MONGO_INITDB_ROOT_USERNAME}" --password "${MONGO_INITDB_ROOT_PASSWORD}" --host mongodb:27017 <<EOF
7 | rs.initiate({ _id: "rs0", members: [ { _id: 0, host: "mongodb:27017" } ] })
8 | EOF
9 | 
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
8 | "license": "AGPL-3.0-or-later",
9 | "devDependencies": {
10 | "@types/auth0": "^3.3.2",
11 | "@types/jest": "^29.4.0",
12 | "@types/node": "^18.13.0",
13 | "@types/supertest": "^2.0.12",
14 | "@types/underscore": "^1.11.4",
15 | "cross-env": "^7.0.3",
16 | "husky": "^8.0.1",
17 | "jest": "^29.7.0",
18 | "jest-extended": "^4.0.2",
19 | "mongodb-memory-server": "^10.1.2",
20 | "nock": "^13.3.0",
21 | "supertest": "^6.3.3",
22 | "ts-jest": "^29.2.5",
23 | "ts-standard": "^12.0.0",
24 | "typescript": "4.9.5",
25 | "wait-for-expect": "^3.0.2"
26 | },
27 | "dependencies": {
28 | "@apollo/server": "^4.11.2",
29 | "@babel/runtime": "^7.17.2",
30 | "@google-cloud/storage": "^6.9.5",
31 | "@graphql-tools/schema": "^8.3.1",
32 | "@openbeta/sandbag": "^0.0.51",
33 | "@turf/area": "^6.5.0",
34 | "@turf/bbox": "^6.5.0",
35 | "@turf/bbox-polygon": "^6.5.0",
36 | "@turf/circle": "^6.5.0",
37 | "@turf/convex": "^6.5.0",
38 | "@turf/helpers": "^6.5.0",
39 | "@types/uuid": "^8.3.3",
40 | "apollo-datasource-mongodb": "^0.6.0",
41 | "auth0": "^3.4.0",
42 | "axios": "^1.3.6",
43 | "body-parser": "^1.20.2",
44 | "cors": "^2.8.5",
45 | "date-fns": "^2.30.0",
46 | "dot-object": "^2.1.4",
47 | "dotenv": "^16.4.4",
48 | "express": "^4.18.2",
49 | "glob": "^10.2.2",
50 | "graphql": "^16.9.0",
51 | "graphql-middleware": "^6.1.31",
52 | "graphql-shield": "^7.5.0",
53 | "graphql-tag": "^2.12.6",
54 | "graphql-type-json": "^0.3.2",
55 | "i18n-iso-countries": "^7.5.0",
56 | "immer": "^9.0.15",
57 | "jsonwebtoken": "^8.5.1",
58 | "jwks-rsa": "^2.1.4",
59 | "mongoose": "^7.8.3",
60 | "mongoose-lean-virtuals": "^1.0.0",
61 | "node-fetch": "2",
62 | "p-limit": "^4.0.0",
63 | "pino": "^9.5.0",
64 | "pino-logflare": "^0.4.2",
65 | "sanitize-html": "^2.7.2",
66 | "sharp": "^0.32.0",
67 | "typesense": "^1.8.2",
68 | "underscore": "^1.13.2",
69 | "uuid": "^8.3.2",
70 | "uuid-mongodb": "^2.6.0",
71 | "yup": "^1.1.1"
72 | },
73 | "scripts": {
74 | "lint": "yarn ts-standard",
75 | "fix": "yarn ts-standard --fix",
76 | "test": "cross-env NODE_OPTIONS=\"--experimental-vm-modules\" jest --runInBand",
77 | "build": "tsc -p tsconfig.json",
78 | "build-release": "tsc -p tsconfig.release.json",
79 | "clean": "tsc -b --clean && rm -rf build/*",
80 | "serve": "yarn build && node --experimental-json-modules build/main.js",
81 | "serve-dev": "echo \"🚨 LOCAL_DEV_BYPASS_AUTH enabled 🚨\" && LOCAL_DEV_BYPASS_AUTH=true yarn serve",
82 | "seed-db": "./seed-db.sh",
83 | "add-countries": "yarn build && node build/db/utils/jobs/AddCountriesJob.js",
84 | "update-stats": "yarn build && node build/db/utils/jobs/UpdateStatsJob.js",
85 | "update-climb-search": "tsc ; node build/db/export/Typesense/Typesense.js --climbs",
86 | "update-area-search": "tsc ; node build/db/export/Typesense/Typesense.js --areas",
87 | "update-search": "tsc ; node build/db/export/Typesense/Typesense.js --areas --climbs",
88 | "export:json:full": "yarn build && node build/db/export/json/index.js",
89 | "export-prod": "./export.sh",
90 | "prepare": "husky install",
91 | "import-users": "yarn build && node build/db/utils/jobs/migration/CreateUsersCollection.js",
92 | "maptiles:export-db": "node build/db/utils/jobs/MapTiles/exportCmd.js",
93 | "maptiles:upload": "./scripts/upload-tiles.sh",
94 | "maptiles:full": "yarn build && yarn maptiles:export-db && yarn maptiles:upload"
95 | },
96 | "standard": {
97 | "plugins": [
98 | "html"
99 | ],
100 | "parser": "babel-eslint"
101 | },
102 | "ts-standard": {
103 | "ignore": [
104 | "build",
105 | "hacks",
106 | "**/*.test.ts",
107 | "db-migrations"
108 | ]
109 | },
110 | "type": "module",
111 | "engines": {
112 | "node": ">=18.20.0 <19"
113 | }
114 | }
115 |
--------------------------------------------------------------------------------
/refresh-db.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | # Download seed files and start seeding
3 | # Syntax: ./refresh-db.sh [full]
4 | # Specify 'full' to download the entire data set.
5 |
6 | rm -rf tmp
7 | mkdir tmp
8 | cd tmp
9 |
10 | GITHUB="https://raw.githubusercontent.com/OpenBeta/climbing-data/main"
11 | wget --content-disposition \
12 | ${GITHUB}/openbeta-routes-westcoast.zip
13 |
14 | if [[ "$1" == "full" ]];
15 | then
16 |
17 | wget --content-disposition \
18 | ${GITHUB}/openbeta-routes-mountains2.zip
19 |
20 | wget --content-disposition \
21 | ${GITHUB}/openbeta-routes-mountains1.zip
22 |
23 | wget --content-disposition \
24 | ${GITHUB}/openbeta-routes-ca.zip
25 |
26 | wget --content-disposition \
27 | ${GITHUB}/openbeta-routes-northeast.zip
28 |
29 | wget --content-disposition \
30 | ${GITHUB}/openbeta-routes-southeast.zip
31 |
32 | wget --content-disposition \
33 | ${GITHUB}/openbeta-routes-midwest.zip
34 | fi
35 |
36 | unzip '*.zip'
37 |
38 | cd ..
39 | export CONTENT_BASEDIR=./tmp
40 |
41 | echo "NODE_OPTIONS=${NODE_OPTIONS}"
42 | yarn seed-usa
43 |
44 | yarn add-countries
45 |
--------------------------------------------------------------------------------
/scripts/upload-tiles.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | # 1. Generate pmtiles tiles from geojson exports
3 | # 2. Upload to S3-compatible storage
4 | # See also https://github.com/felt/tippecanoe
5 |
6 | SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )
7 |
8 | set -a
9 | . ${SCRIPT_DIR}/../.env 2> /dev/null
10 | . ${SCRIPT_DIR}/../.env.local 2> /dev/null
11 | set +a
12 |
13 | # Define Cloudflare-R2 backend for rclone
14 | S3_DEST=':s3,provider=Cloudflare,no_check_bucket=true,env_auth=true,acl=private:maptiles'
15 |
16 | echo "------ Generating crags tiles file ------"
17 | tippecanoe --force -o ${MAPTILES_WORKING_DIR}/crags.pmtiles \
18 | -l crags -n "Crags" \
19 | --coalesce-densest-as-needed \
20 | -z11 ${MAPTILES_WORKING_DIR}/crags.*.geojson
21 |
22 | echo "**Uploading to remote storage"
23 | rclone copy ${MAPTILES_WORKING_DIR}/crags.pmtiles ${S3_DEST}
24 |
25 | echo "------ Generating crag group tiles file ------"
26 | tippecanoe --force -o ${MAPTILES_WORKING_DIR}/areas.pmtiles \
27 | -l areas -n "Areas" \
28 | --drop-densest-as-needed \
29 | -z8 ${MAPTILES_WORKING_DIR}/areas.geojson
30 |
31 | echo "**Uploading to remote storage"
32 | rclone copy ${MAPTILES_WORKING_DIR}/areas.pmtiles ${S3_DEST}
33 |
34 | exit $?
35 |
--------------------------------------------------------------------------------
/seed-db.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | #
3 | # Rebuild your local database with a copy of OpenBeta staging database.
4 | #
5 | # To keep running time short, the script only downloads the remote
6 | # database dump file once. Specify 'download' argument to force download.
7 | #
8 | # Syntax:
9 | #   ./seed-db.sh [download]
10 |
11 | FILE_NAME="openbeta-stg-db.tar.gz"
12 | REMOTE_FILE="https://storage.googleapis.com/openbeta-dev-dbs/$FILE_NAME"
13 |
14 | if [[ ! -f ${FILE_NAME} || ${1} == "download" ]]; then
15 | echo "Downloading db file(s)..."
16 | wget --content-disposition $REMOTE_FILE
17 | fi
18 |
19 | rm -rf ./db-dumps/staging/openbeta
20 |
21 | tar xzf $FILE_NAME
22 |
23 | . .env
24 |
25 | connStr="${MONGO_SCHEME}://${MONGO_INITDB_ROOT_USERNAME}:${MONGO_INITDB_ROOT_PASSWORD}@${MONGO_SERVICE}/${MONGO_DBNAME}?authSource=${MONGO_AUTHDB}&tls=${MONGO_TLS}&replicaSet=${MONGO_REPLICA_SET_NAME}"
26 |
27 | mongorestore --uri "$connStr" -d=${MONGO_DBNAME} --gzip --drop ./db-dumps/staging/openbeta
28 |
29 |
--------------------------------------------------------------------------------
/src/Config.ts:
--------------------------------------------------------------------------------
1 | import { config } from 'dotenv'
2 |
3 | config({ path: '.env.local' })
4 | config() // initialize dotenv
5 |
6 | const checkAndPrintWarning = (name: string, value?: string): string => {
7 | if (value == null) {
8 | throw new Error(`## Error: '${name}' not defined ##`)
9 | }
10 | return value
11 | }
12 |
13 | type DeploymentType = 'production' | 'staging'
14 | interface ConfigType {
15 | DEPLOYMENT_ENV: DeploymentType
16 | TYPESENSE_NODE: string
17 | TYPESENSE_API_KEY_RW: string
18 | }
19 | // Todo: add other props in .env
20 | const Config: ConfigType = {
21 | DEPLOYMENT_ENV: checkAndPrintWarning('DEPLOYMENT_ENV', process.env.DEPLOYMENT_ENV) as DeploymentType,
22 | TYPESENSE_NODE: checkAndPrintWarning('TYPESENSE_NODE', process.env.TYPESENSE_NODE),
23 | TYPESENSE_API_KEY_RW: checkAndPrintWarning('TYPESENSE_API_KEY_RW', process.env.TYPESENSE_API_KEY_RW)
24 | }
25 |
26 | export default Config
27 |
--------------------------------------------------------------------------------
/src/__tests__/import-example.json:
--------------------------------------------------------------------------------
1 | {
2 | "areas": [
3 | {
4 | "areaName": "Utah",
5 | "countryCode": "us",
6 | "children": [
7 | {
8 | "areaName": "Southeast Utah",
9 | "children": [
10 | {
11 | "areaName": "Indian Creek",
12 | "description": "Indian Creek is a crack climbing mecca in the southeastern region of Utah, USA. Located within the [Bears Ears National Monument](https://en.wikipedia.org/wiki/Bears_Ears_National_Monument).",
13 | "lng": -109.5724044642857,
14 | "lat": 38.069429035714286,
15 | "children": [
16 | {
17 | "areaName": "Supercrack Buttress",
18 | "gradeContext": "US",
19 | "description": "",
20 | "lng": -109.54552,
21 | "lat": 38.03635,
22 | "bbox": [
23 | -109.54609091005857,
24 | 38.03590033981814,
25 | -109.54494908994141,
26 | 38.03679966018186
27 | ],
28 | "climbs": [
29 | {
30 | "name": "The Key Flake",
31 | "grade": "5.10",
32 | "fa": "unknown",
33 | "disciplines": {
34 | "trad": true
35 | },
36 | "safety": "UNSPECIFIED",
37 | "lng": -109.54552,
38 | "lat": 38.03635,
39 | "leftRightIndex": 1,
40 | "description": "Cool off-width that requires off-width and face skills.",
41 | "protection": "Anchors hidden up top. Need 80m to make it all the way down.",
42 | "location": "Opposite keyhole flake. Obvious right leaning offwidth that starts atop 20 ft boulder."
43 | },
44 | {
45 | "name": "Incredible Hand Crack",
46 | "grade": "5.10",
47 | "fa": "Rich Perch, John Bragg, Doug Snively, and Anne Tarver, 1978",
48 | "disciplines": {
49 | "trad": true
50 | },
51 | "leftRightIndex": 2,
52 | "description": "Route starts at the top of the trail from the parking lot to Supercrack Buttress.",
53 | "protection": "Cams from 2-2.5\". Heavy on 2.5\" (#2 Camalot)",
54 | "pitches": [
55 | {
56 | "pitchNumber": 1,
57 | "grade": "5.10",
58 | "length": 100,
59 | "boltsCount": 0,
60 | "description": "A classic hand crack that widens slightly towards the top. Requires a range of cam sizes. Sustained and excellent quality."
61 | },
62 | {
63 | "pitchNumber": 2,
64 | "grade": "5.9",
65 | "length": 30,
66 | "description": "Easier climbing with good protection. Features a mix of crack sizes. Shorter than the first pitch but equally enjoyable."
67 | }
68 | ]
69 | }
70 | ]
71 | }
72 | ]
73 | }
74 | ]
75 | }
76 | ]
77 | }
78 | ]
79 | }
--------------------------------------------------------------------------------
/src/auth/index.ts:
--------------------------------------------------------------------------------
1 | import permissions from './permissions.js'
2 | import { createContext } from './middleware.js'
3 | export { permissions, createContext }
4 |
--------------------------------------------------------------------------------
/src/auth/local-dev/middleware.ts:
--------------------------------------------------------------------------------
1 | import muuid, { MUUID } from 'uuid-mongodb'
2 | import { AuthUserType } from '../../types.js'
3 | import { logger } from '../../logger.js'
4 |
5 | const testUUID: MUUID = muuid.v4()
6 |
7 | /*
8 | * This file is a mod of src/auth/middleware.ts and is used when starting the server via `yarn serve-dev`
9 | * It bypasses the authentication for local development
10 | */
11 | export const localDevBypassAuthContext = async ({ req }): Promise<{ user: AuthUserType }> => {
12 | const user: AuthUserType = {
13 | roles: ['user_admin', 'org_admin', 'editor'],
14 | uuid: testUUID,
15 | isBuilder: false
16 | }
17 | logger.info(`The user.roles for this session is: ${user.roles.toString()}`)
18 | return { user }
19 | }
20 |
--------------------------------------------------------------------------------
/src/auth/local-dev/permissions.ts:
--------------------------------------------------------------------------------
1 | /*
2 | * This file is a mod of src/auth/permissions.ts and is used when starting the server via `yarn serve-dev`
3 | * It bypasses the authorization for local development and allows all queries and mutations
4 | */
5 | import { shield, allow } from 'graphql-shield'
6 |
7 | const localDevBypassAuthPermissions = shield({
8 | Query: {
9 | '*': allow
10 | },
11 | Mutation: {
12 | '*': allow
13 | }
14 | }, {
15 | allowExternalErrors: true,
16 | fallbackRule: allow
17 | })
18 |
19 | export default localDevBypassAuthPermissions
20 |
--------------------------------------------------------------------------------
/src/auth/middleware.ts:
--------------------------------------------------------------------------------
1 | import muid from 'uuid-mongodb'
2 | import { Request } from 'express'
3 | import { AuthUserType } from '../types.js'
4 | import { verifyJWT } from './util.js'
5 | import { logger } from '../logger.js'
6 |
7 | export interface CustomContext {
8 | user: AuthUserType
9 | token?: string
10 | }
11 |
12 | const EMPTY_USER: AuthUserType = {
13 | isBuilder: false,
14 | roles: [],
15 | uuid: undefined
16 | }
17 |
18 | /**
19 | * Create a middleware context for Apollo server
20 | */
21 | export const createContext = async ({ req }: { req: Request }): Promise<CustomContext> => {
22 | try {
23 | return await validateTokenAndExtractUser(req)
24 | } catch (e) {
25 | logger.error(`Can't validate token and extract user ${e.toString() as string}`)
26 | throw new Error('An unexpected error has occurred. Please notify us at support@openbeta.io.')
27 | }
28 | }
29 |
30 | async function validateTokenAndExtractUser (req: Request): Promise<CustomContext> {
31 | const { headers } = req
32 | // eslint-disable-next-line @typescript-eslint/dot-notation
33 | const authHeader = String(headers?.['authorization'] ?? '')
34 | if (authHeader.startsWith('Bearer ')) {
35 | const token = authHeader.substring(7, authHeader.length).trim()
36 | try {
37 | const payload = await verifyJWT(token)
38 | return {
39 | user: {
40 | isBuilder: payload?.scope?.includes('builder:default') ?? false,
41 | roles: payload?.['https://tacos.openbeta.io/roles'] ?? [],
42 | uuid: payload?.['https://tacos.openbeta.io/uuid'] != null ? muid.from(payload['https://tacos.openbeta.io/uuid']) : undefined
43 | },
44 | token
45 | }
46 | } catch (e) {
47 | logger.error(`Can't verify JWT token ${e.toString() as string}`)
48 | throw new Error("Unauthorized. Can't verify JWT token")
49 | }
50 | }
51 |
52 | return {
53 | user: EMPTY_USER
54 | }
55 | }
56 |
--------------------------------------------------------------------------------
/src/auth/permissions.ts:
--------------------------------------------------------------------------------
1 | import { allow, and, or, shield } from 'graphql-shield'
2 | import { isEditor, isMediaOwner, isOwner, isUserAdmin, isValidEmail } from './rules.js'
3 |
4 | const permissions = shield({
5 | Query: {
6 | '*': allow
7 | },
8 | Mutation: {
9 | addOrganization: isUserAdmin,
10 | setDestinationFlag: isEditor,
11 | removeArea: isEditor,
12 | addArea: isEditor,
13 | updateArea: isEditor,
14 | updateClimbs: isEditor,
15 | deleteClimbs: isEditor,
16 | bulkImportAreas: isEditor,
17 | updateUserProfile: and(isOwner, isValidEmail),
18 | addEntityTag: or(isMediaOwner, isUserAdmin),
19 | removeEntityTag: or(isMediaOwner, isUserAdmin),
20 | addMediaObjects: or(isOwner),
21 | deleteMediaObject: or(isMediaOwner, isUserAdmin)
22 | }
23 | },
24 | {
25 | allowExternalErrors: true,
26 | fallbackRule: allow
27 | })
28 |
29 | export default permissions
30 |
--------------------------------------------------------------------------------
/src/auth/rules.ts:
--------------------------------------------------------------------------------
1 | import { inputRule, rule } from 'graphql-shield'
2 |
3 | import MediaDataSource from '../model/MutableMediaDataSource.js'
4 | import { MediaObjectGQLInput } from '../db/MediaObjectTypes.js'
5 |
6 | export const isEditor = rule()(async (parent, args, ctx, info) => {
7 | return _hasUserUuid(ctx) && ctx.user.roles.includes('editor')
8 | })
9 |
10 | export const isUserAdmin = rule()(async (parent, args, ctx, info) => {
11 | return _hasUserUuid(ctx) && ctx.user.roles.includes('user_admin')
12 | })
13 |
14 | /**
15 | * True when JWT payload 'uuid' is the same as `input.userUuid`.
16 | *
17 | * If input is an array, check every element of input.
18 | */
19 | export const isOwner = rule()(async (parent, args, ctx, info) => {
20 | if (!_hasUserUuid(ctx)) return false
21 | if (Array.isArray(args.input)) {
22 | return (args.input as MediaObjectGQLInput[]).every(
23 | ({ userUuid }) => ctx.user.uuid.toUUID().toString() === userUuid)
24 | }
25 | return ctx.user.uuid.toUUID().toString() === args.input.userUuid
26 | })
27 |
28 | /**
29 | * True when the media identified by input.mediaId has the same owner uuid as the JWT payload uuid.
30 | */
31 | export const isMediaOwner = rule()(async (parent, args, ctx, info): Promise<boolean> => {
32 | const hasUserUuid = _hasUserUuid(ctx)
33 | const isMediaOwner = await MediaDataSource.getInstance().isMediaOwner(ctx.user.uuid, args.input?.mediaId)
34 | return hasUserUuid && isMediaOwner
35 | })
36 |
37 | export const isBuilderServiceAccount = rule()(async (parent, args, ctx: Context, info) => {
38 | return _hasUserUuid(ctx) && ctx.user.isBuilder
39 | })
40 |
41 | export const isValidEmail = inputRule()(
42 | (yup) =>
43 | yup.object({
44 | email: yup.string().email('Please provide a valid email')
45 | }),
46 | { abortEarly: false }
47 | )
48 |
49 | interface Context {
50 | user: {
51 | uuid?: string
52 | isBuilder: boolean
53 | }
54 | }
55 |
56 | const _hasUserUuid = (ctx: Context): boolean => ctx.user.uuid != null
57 |
--------------------------------------------------------------------------------
/src/auth/util.ts:
--------------------------------------------------------------------------------
1 | import jwksClient from 'jwks-rsa'
2 | import jwt from 'jsonwebtoken'
3 |
4 | import { checkVar } from '../db/index.js'
5 |
6 | const auth0Domain = checkVar('AUTH0_DOMAIN')
7 | const auth0Kid = checkVar('AUTH0_KID')
8 |
9 | const client = jwksClient({
10 | jwksUri: `${auth0Domain}/.well-known/jwks.json`
11 | })
12 |
13 | export const verifyJWT = async (token): Promise<any> => {
14 | const key = await client.getSigningKey(auth0Kid)
15 | return jwt.verify(token, key.getPublicKey())
16 | }
17 |
--------------------------------------------------------------------------------
/src/db/BulkImportTypes.ts:
--------------------------------------------------------------------------------
1 | import { AreaType } from './AreaTypes.js'
2 | import { ClimbType, DisciplineType, SafetyType } from './ClimbTypes.js'
3 | import { MUUID } from 'uuid-mongodb'
4 | import { ExperimentalAuthorType } from './UserTypes.js'
5 |
6 | export interface BulkImportResultType {
7 | addedAreas: AreaType[]
8 | updatedAreas: AreaType[]
9 | addedOrUpdatedClimbs: ClimbType[]
10 | }
11 |
12 | export interface BulkImportInputType {
13 | areas: BulkImportAreaInputType[]
14 | }
15 |
16 | export interface BulkImportAreaInputType {
17 | uuid?: MUUID
18 | areaName?: string
19 | description?: string
20 | countryCode?: string
21 | gradeContext?: string
22 | leftRightIndex?: number
23 | lng?: number
24 | lat?: number
25 | bbox?: [number, number, number, number]
26 | children?: BulkImportAreaInputType[]
27 | climbs?: BulkImportClimbInputType[]
28 | }
29 |
30 | export interface BulkImportClimbInputType {
31 | uuid?: MUUID
32 | name?: string
33 | grade: string
34 | disciplines: DisciplineType
35 | safety?: SafetyType
36 | lng?: number
37 | lat?: number
38 | leftRightIndex?: number
39 | description?: string
40 | location?: string
41 | protection?: string
42 | fa?: string
43 | length?: number
44 | boltsCount?: number
45 | experimentalAuthor?: ExperimentalAuthorType
46 | pitches?: BulkImportPitchesInputType[]
47 | }
48 |
49 | export interface BulkImportPitchesInputType {
50 | id?: MUUID
51 | pitchNumber: number
52 | grade: string
53 | disciplines?: DisciplineType
54 | description?: string
55 | length?: number
56 | boltsCount?: number
57 | }
58 |
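A hypothetical payload conforming to these input types; all names, coordinates, and grades are made up:

import { BulkImportInputType } from './BulkImportTypes.js'

const payload: BulkImportInputType = {
  areas: [{
    areaName: 'Example Crag', // placeholder
    countryCode: 'US',
    lng: -118.25,
    lat: 36.59,
    climbs: [{
      name: 'Example Route', // placeholder
      grade: '5.10a',
      disciplines: { sport: true }, // assumes the other DisciplineType flags are optional
      leftRightIndex: 0
    }]
  }]
}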
--------------------------------------------------------------------------------
/src/db/ChangeEventType.ts:
--------------------------------------------------------------------------------
1 | import { ResumeToken } from 'mongodb'
2 |
3 | import { AreaType } from './AreaTypes'
4 | import { ClimbType } from './ClimbTypes'
5 |
6 | export default interface ChangeEventType<FDocumentType> {
7 | _id: ResumeToken
8 | dbOp: string
9 | fullDocument: FDocumentType
10 | }
11 | export type AreaChangeType = ChangeEventType<AreaType>
12 | export type ClimbChangeType = ChangeEventType<ClimbType>
13 | export type SupportedChangeTypes = AreaChangeType | ClimbChangeType
14 |
15 | export type TrackableTypes = (AreaType & WithDiscriminator) | (ClimbType & WithDiscriminator)
16 | export interface WithDiscriminator {
17 | kind: string
18 | }
19 |
--------------------------------------------------------------------------------
/src/db/ChangeLogSchema.ts:
--------------------------------------------------------------------------------
1 | import mongoose from 'mongoose'
2 |
3 | import { ChangeLogType, SupportedCollectionTypes } from './ChangeLogType.js'
4 | import { OperationType } from './AreaTypes.js'
5 |
6 | const { Schema, connection } = mongoose
7 |
8 | const ChangeLogSchema = new Schema<ChangeLogType<SupportedCollectionTypes>>({
9 | editedBy: {
10 | type: 'object',
11 | value: { type: 'Buffer' },
12 | required: true,
13 | unique: false,
14 | index: true
15 | },
16 | operation: {
17 | type: Schema.Types.Mixed,
18 | enum: Object.values(OperationType),
19 | required: true
20 | },
21 | changes: [{ type: Schema.Types.Mixed }]
22 | }, { timestamps: { createdAt: true, updatedAt: false } })
23 |
24 | ChangeLogSchema.index({ createdAt: -1 })
25 | ChangeLogSchema.index({ 'changes.fullDocument.metadata.area_id': 1, 'changes.kind': 1 })
26 | ChangeLogSchema.index({ 'changes.kind': 1 })
27 |
28 | export const getChangeLogModel = (): mongoose.Model<ChangeLogType<SupportedCollectionTypes>> => {
29 | return connection.model('change_logs', ChangeLogSchema)
30 | }
31 |
--------------------------------------------------------------------------------
/src/db/ChangeLogType.ts:
--------------------------------------------------------------------------------
1 | import mongoose from 'mongoose'
2 | import { MUUID } from 'uuid-mongodb'
3 |
4 | import { OperationType as AreaOpType, AreaType } from './AreaTypes.js'
5 | import { ClimbEditOperationType, ClimbType } from './ClimbTypes.js'
6 | import { OperationType as OrganizationOpType, OrganizationType } from './OrganizationTypes.js'
7 |
8 | export type DBOperation = 'insert' | 'update' | 'delete'
9 | export enum DocumentKind {
10 | areas = 'areas',
11 | climbs = 'climbs',
12 | organizations = 'organizations'
13 | }
14 |
15 | export interface ChangeLogType<T = SupportedCollectionTypes> {
16 | _id: mongoose.Types.ObjectId
17 | editedBy: MUUID
18 | operation: OpType
19 | changes: Array<BaseChangeRecordType<T>>
20 | }
21 |
22 | // DIY since ResumeToken is defined as unknown in mongo TS
23 | export interface ResumeToken {
24 | _data: string
25 | }
26 |
27 | export interface UpdateDescription {
28 | updatedFields?: string[]
29 | removedFields?: string[]
30 | truncatedArrays?: any[]
31 | }
32 | export interface BaseChangeRecordType<FullDocumentType = SupportedCollectionTypes> {
33 | _id: ResumeToken
34 | dbOp: DBOperation
35 | fullDocument: FullDocumentType
36 | updateDescription: UpdateDescription
37 | kind: DocumentKind
38 | }
39 |
40 | export type OpType = AreaOpType | ClimbEditOperationType | OrganizationOpType
41 |
42 | export interface ChangeRecordMetadataType {
43 | /** The UUID of the user to whom this change of the document is attributed */
44 | user: MUUID
45 | operation: OpType
46 | /**
47 | * We identify history entries in the audit trail by assigning each an ObjectID.
48 | **/
49 | historyId: mongoose.Types.ObjectId
50 | prevHistoryId?: mongoose.Types.ObjectId
51 | seq: number
52 | }
53 |
54 | export interface WithDiscriminator {
55 | kind: DocumentKind
56 | }
57 |
58 | export type AreaChangeLogType = ChangeLogType<AreaType>
59 | export type AreaChangeRecordType = BaseChangeRecordType<AreaType>
60 |
61 | export type ClimbChangeLogType = ChangeLogType<ClimbType>
62 | export type OrganizationChangeLogType = ChangeLogType<OrganizationType>
63 |
64 | export type SupportedCollectionTypes =
65 | | AreaType & WithDiscriminator
66 | | ClimbType & WithDiscriminator
67 | | OrganizationType & WithDiscriminator
68 |
69 | export interface GetHistoryInputFilterType {
70 | uuidList: string[]
71 | userUuid: string
72 | fromDate: Date
73 | toDate: Date
74 | }
75 |
76 | export interface GetAreaHistoryInputFilterType {
77 | areaId: string
78 | }
79 |
80 | export interface GetOrganizationHistoryInputFilterType {
81 | orgId: MUUID
82 | }
83 |
--------------------------------------------------------------------------------
/src/db/ClimbHistorySchema.ts:
--------------------------------------------------------------------------------
1 | import mongoose from 'mongoose'
2 | import muuid from 'uuid-mongodb'
3 | import { ClimbSchema } from './ClimbSchema.js'
4 | import { AreaSchema } from './AreaSchema.js'
5 | import ClimbHistoryType, { AreaHistoryType } from './ClimbHistoryType.js'
6 | import ChangeEventType from './ChangeEventType.js'
7 | import { ClimbType } from './ClimbTypes'
8 | import { AreaType } from './AreaTypes'
9 |
10 | const { Schema } = mongoose
11 |
12 | const schemaOptions = {
13 | timestamps: { createdAt: true },
14 | _id: false
15 | }
16 |
17 | const ClimbChangeEventSchema = new mongoose.Schema<ChangeEventType<ClimbType>>({
18 | _id: {
19 | _data: Object
20 | },
21 | dbOp: String,
22 | fullDocument: ClimbSchema
23 | }, schemaOptions)
24 |
25 | ClimbChangeEventSchema.index({ _id: 1 }, { unique: true })
26 |
27 | const AreaChangeEventSchema = new mongoose.Schema<ChangeEventType<AreaType>>({
28 | _id: {
29 | _data: Object
30 | },
31 | dbOp: String,
32 | fullDocument: AreaSchema
33 | }, schemaOptions)
34 |
35 | AreaChangeEventSchema.index({ _id: 1 }, { unique: true })
36 |
37 | export const ClimbHistorySchema = new Schema<ClimbHistoryType>({
38 | uid: {
39 | type: 'object',
40 | value: { type: 'Buffer' },
41 | default: () => muuid.v4()
42 | },
43 | actionType: {
44 | type: String
45 | },
46 | change: ClimbChangeEventSchema
47 | }, {
48 | _id: true,
49 | writeConcern: {
50 | w: 'majority',
51 | j: true,
52 | wtimeout: 5000
53 | }
54 | })
55 |
56 | export const AreaHistorySchema = new Schema<AreaHistoryType>({
57 | uid: {
58 | type: 'object',
59 | value: { type: 'Buffer' },
60 | default: () => muuid.v4()
61 | },
62 | actionType: {
63 | type: String
64 | },
65 | change: AreaChangeEventSchema
66 | }, {
67 | _id: true,
68 | writeConcern: {
69 | w: 'majority',
70 | j: true,
71 | wtimeout: 5000
72 | }
73 | })
74 |
75 | export const getClimbHistoryModel = (): mongoose.Model<ClimbHistoryType> => {
76 | return mongoose.model('climb_history', ClimbHistorySchema)
77 | }
78 |
79 | export const getAreaHistoryModel = (): mongoose.Model<AreaHistoryType> => {
80 | return mongoose.model('area_history', AreaHistorySchema)
81 | }
82 |
--------------------------------------------------------------------------------
/src/db/ClimbHistoryType.ts:
--------------------------------------------------------------------------------
1 | import { Document } from 'mongodb'
2 | import { MUUID } from 'uuid-mongodb'
3 | import ChangeEventType from './ChangeEventType'
4 | import { ClimbType } from './ClimbTypes'
5 | import { AreaType } from './AreaTypes'
6 |
7 | export default interface ClimbHistoryType extends Document {
8 | uid: MUUID
9 | actionType: ActionType
10 | change: ChangeEventType<ClimbType>
11 | }
12 |
13 | export interface AreaHistoryType extends Document {
14 | uid: MUUID
15 | actionType: ActionType
16 | change: ChangeEventType<AreaType>
17 | }
18 |
19 | export type ActionType = 'update' | 'add' | 'delete'
20 |
--------------------------------------------------------------------------------
/src/db/MediaObjectSchema.ts:
--------------------------------------------------------------------------------
1 | import mongoose from 'mongoose'
2 | import { MediaObject, EntityTag } from './MediaObjectTypes.js'
3 | import { PointSchema } from './ClimbSchema.js'
4 | import { MUUID } from 'uuid-mongodb'
5 |
6 | const { Schema } = mongoose
7 |
8 | const UUID_TYPE = {
9 | type: 'object', value: { type: 'Buffer' }
10 | }
11 |
12 | const muuidTransform = (v: MUUID): string => {
13 | return v.toUUID().toString()
14 | }
15 |
16 | const EntitySchema = new Schema<EntityTag>({
17 | targetId: { ...UUID_TYPE, index: true, transform: muuidTransform },
18 | climbName: { type: Schema.Types.String },
19 | areaName: { type: Schema.Types.String, required: true },
20 | type: { type: Schema.Types.Number, required: true },
21 | ancestors: { type: Schema.Types.String, required: true, index: true },
22 | lnglat: {
23 | type: PointSchema,
24 | index: '2dsphere',
25 | required: false
26 | },
27 | topoData: { type: Schema.Types.Mixed }
28 | }, { _id: true, toObject: { versionKey: false } })
29 |
30 | const schema = new Schema<MediaObject>({
31 | userUuid: { ...UUID_TYPE, index: true, transform: (v: any) => v.toUUID().toString() },
32 | mediaUrl: { type: Schema.Types.String, unique: true, index: true },
33 | width: { type: Schema.Types.Number, required: true },
34 | height: { type: Schema.Types.Number, required: true },
35 | size: { type: Schema.Types.Number, required: true },
36 | format: { type: Schema.Types.String, required: true },
37 | entityTags: [EntitySchema]
38 | }, { _id: true, timestamps: true, toJSON: { versionKey: false }, toObject: { versionKey: false } })
39 |
40 | /**
41 | * Additional indices
42 | */
43 | schema.index({
44 | /**
45 | * For filtering media objects with/without tags
46 | */
47 | entityTags: 1,
48 | /**
49 | * For sorting media objects by insertion order
50 | */
51 | createdAt: -1 // descending: most recent first
52 | })
53 |
54 | /**
55 | * For checking media ownership
56 | */
57 | schema.index({
58 | _id: 1,
59 | userUuid: 1
60 | })
61 |
62 | /**
63 | * For removeEntityTag update query
64 | */
65 | schema.index({
66 | _id: 1,
67 | 'entityTags._id': 1
68 | })
69 |
70 | /**
71 | * For the addEntityTag() update query, to check whether an entity tag already exists.
72 | */
73 | schema.index({
74 | _id: 1,
75 | 'entityTags.targetId': 1
76 | })
77 |
78 | /**
79 | * Get media object model
80 | * @returns the MediaObject model
81 | */
82 | export const getMediaObjectModel = (): mongoose.Model<MediaObject> => {
83 | return mongoose.model('media_objects', schema)
84 | }
85 |
--------------------------------------------------------------------------------
/src/db/MediaObjectTypes.ts:
--------------------------------------------------------------------------------
1 | import mongoose from 'mongoose'
2 | import { MUUID } from 'uuid-mongodb'
3 | import { Point } from '@turf/helpers'
4 |
5 | export type ImageFormatType = 'jpeg' | 'png' | 'webp' | 'avif'
6 |
7 | export interface MediaObject {
8 | _id: mongoose.Types.ObjectId
9 | userUuid: MUUID
10 | mediaUrl: string
11 | width: number
12 | height: number
13 | format: ImageFormatType
14 | createdAt: Date
15 | size: number
16 | entityTags?: EntityTag[]
17 | }
18 |
19 | export interface EntityTag {
20 | _id: mongoose.Types.ObjectId
21 | targetId: MUUID
22 | type: number
23 | ancestors: string
24 | climbName?: string
25 | areaName: string
26 | lnglat?: Point
27 | topoData?: object
28 | }
29 |
30 | export interface MediaByUsers {
31 | username: string
32 | userUuid: MUUID
33 | mediaWithTags: MediaObject[]
34 | }
35 | export interface MediaForFeedInput {
36 | uuidStr?: string
37 | maxUsers?: number
38 | maxFiles?: number
39 | includesNoEntityTags?: boolean
40 | }
41 |
42 | export interface TagByUser {
43 | username?: string
44 | userUuid: MUUID
45 | total: number
46 | }
47 |
48 | export interface AllTimeTagStats {
49 | totalMediaWithTags: number
50 | byUsers: TagByUser[]
51 | }
52 | export interface TagsLeaderboardType {
53 | allTime: AllTimeTagStats
54 | }
55 |
56 | /**
57 | * For creating a new Media object doc
58 | */
59 | export type NewMediaObjectDoc = Omit<MediaObject, '_id' | 'createdAt'>
60 |
61 | /**
62 | * GQL input type for getting paginated media for an "Entity", which is either a user, an area, or a climb.
63 | * The userUuid is omitted from the Area and Climb versions of this type, which are defined below
64 | * as AreaMediaQueryInput and ClimbMediaQueryInput
65 | * @param maxFiles - the maximum number of media files to return
66 | * @param first - the number of media files to return
67 | * @param after - the cursor to start from
68 | */
69 | export interface EntityMediaGQLQueryInput {
70 | maxFiles?: number
71 | first?: number
72 | after?: string
73 | }
74 |
75 | export type UserMediaQueryInput = EntityMediaGQLQueryInput & {
76 | userUuid: MUUID
77 | }
78 |
79 | export type AreaMediaQueryInput = EntityMediaGQLQueryInput & {
80 | areaUuid: MUUID
81 | }
82 |
83 | export type ClimbMediaQueryInput = EntityMediaGQLQueryInput & {
84 | climbUuid: MUUID
85 | }
86 |
87 | /**
88 | * GQL user input type for remove tag mutation
89 | */
90 | export interface EntityTagDeleteGQLInput {
91 | mediaId: string
92 | tagId: string
93 | }
94 |
95 | /**
96 | * Formal input type for remove tag function
97 | */
98 | export interface EntityTagDeleteInput {
99 | mediaId: mongoose.Types.ObjectId
100 | tagId: mongoose.Types.ObjectId
101 | }
102 |
103 | /**
104 | * GQL user input type for add media mutation
105 | */
106 | export type MediaObjectGQLInput = Pick<MediaObject, 'mediaUrl' | 'width' | 'height' | 'format' | 'size'> & {
107 | userUuid: string
108 | entityTag?: Omit<AddEntityTagGQLInput, 'mediaId'>
109 | }
110 |
111 | /**
112 | * GQL user input for addEntityTag mutation
113 | */
114 | export interface AddEntityTagGQLInput {
115 | mediaId: string
116 | entityId: string
117 | entityType: number
118 | topoData?: object
119 | }
120 |
121 | /**
122 | * Formal input type for addEntityTag function
123 | */
124 | export type AddTagEntityInput = Pick<AddEntityTagGQLInput, 'entityType' | 'topoData'> & {
125 | mediaId: mongoose.Types.ObjectId
126 | entityUuid: MUUID
127 | }
128 |
129 | export interface UserMedia {
130 | userUuid: string
131 | mediaConnection: {
132 | edges: MediaEdge[]
133 | pageInfo: {
134 | hasNextPage: boolean
135 | totalItems: number
136 | endCursor: string | null
137 | }
138 | }
139 | }
140 |
141 | export type AreaMedia = Omit<UserMedia, 'userUuid'> & {
142 | areaUuid: string
143 | }
144 |
145 | export type ClimbMedia = Omit<UserMedia, 'userUuid'> & {
146 | climbUuid: string
147 | }
148 |
149 | interface MediaEdge {
150 | node: MediaObject
151 | cursor: string
152 | }
153 |
154 | export interface DeleteMediaGQLInput {
155 | mediaId: string
156 | }
157 |
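An illustrative `MediaObjectGQLInput` literal, assuming the Pick-ed media fields shown above; all values are placeholders:

import { MediaObjectGQLInput } from './MediaObjectTypes.js'

const input: MediaObjectGQLInput = {
  userUuid: 'b9282bbc-a721-4233-a5dc-5d5ec4783046', // placeholder uuid string
  mediaUrl: '/u/b9282bbc/boulder-topo.jpeg', // placeholder path
  width: 1600,
  height: 1200,
  size: 245678, // bytes
  format: 'jpeg'
}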
--------------------------------------------------------------------------------
/src/db/OrganizationSchema.ts:
--------------------------------------------------------------------------------
1 | import mongoose from 'mongoose'
2 | import muuid from 'uuid-mongodb'
3 |
4 | import { OrganizationType, OrgType, IOrganizationContent, OperationType } from './OrganizationTypes.js'
5 | import { ChangeRecordMetadataType } from './ChangeLogType.js'
6 |
7 | const { Schema, connection } = mongoose
8 |
9 | const ChangeRecordMetadata = new Schema<ChangeRecordMetadataType>({
10 | user: {
11 | type: 'object',
12 | value: { type: 'Buffer' },
13 | required: true
14 | },
15 | historyId: { type: Schema.Types.ObjectId, ref: 'change_logs' },
16 | operation: {
17 | type: Schema.Types.Mixed,
18 | enum: Object.values(OperationType),
19 | required: true
20 | },
21 | seq: { type: Number, required: true, default: 0 }
22 | }, { _id: false, timestamps: false })
23 |
24 | const ContentSchema = new Schema<IOrganizationContent>({
25 | website: { type: Schema.Types.String },
26 | email: { type: Schema.Types.String },
27 | donationLink: { type: Schema.Types.String },
28 | instagramLink: { type: Schema.Types.String },
29 | facebookLink: { type: Schema.Types.String },
30 | hardwareReportLink: { type: Schema.Types.String },
31 | description: { type: Schema.Types.String }
32 | }, { _id: false })
33 |
34 | export const OrganizationSchema = new Schema<OrganizationType>({
35 | orgId: {
36 | type: 'object',
37 | value: { type: 'Buffer' },
38 | default: () => muuid.v4(),
39 | required: true,
40 | unique: true,
41 | index: true
42 | },
43 | displayName: { type: String, required: true, index: true },
44 | orgType: {
45 | type: Schema.Types.Mixed,
46 | enum: Object.values(OrgType),
47 | required: true
48 | },
49 | associatedAreaIds: [{ type: 'Buffer' }],
50 | excludedAreaIds: [{ type: 'Buffer' }],
51 | content: ContentSchema,
52 | _change: ChangeRecordMetadata,
53 | _deleting: { type: Date },
54 | updatedBy: {
55 | type: 'object',
56 | value: { type: 'Buffer' }
57 | },
58 | createdBy: {
59 | type: 'object',
60 | value: { type: 'Buffer' }
61 | }
62 | }, { timestamps: true })
63 |
64 | OrganizationSchema.index({ _deleting: 1 }, { expireAfterSeconds: 0 })
65 |
66 | export const createOrganizationModel = (name: string = 'organizations'): mongoose.Model<OrganizationType> => {
67 | return connection.model(name, OrganizationSchema)
68 | }
69 |
70 | export const getOrganizationModel = (name: string = 'organizations'): mongoose.Model<OrganizationType> =>
71 | connection.model(name, OrganizationSchema)
72 |
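A sketch of how the TTL index above is typically used: setting `_deleting` to a timestamp schedules the document for removal once that time passes (with `expireAfterSeconds: 0`, mongo's TTL monitor expires the doc at the indexed date itself). The model call below is illustrative:

import { getOrganizationModel } from './OrganizationSchema.js'

// Soft-delete: the TTL monitor removes the doc shortly after `_deleting` passes.
await getOrganizationModel().updateOne(
  { displayName: 'Example Climbing Coalition' }, // placeholder filter
  { $set: { _deleting: new Date() } }
)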
--------------------------------------------------------------------------------
/src/db/PostSchema.ts:
--------------------------------------------------------------------------------
1 | import mongoose from 'mongoose'
2 | import mongooseLeanVirtuals from 'mongoose-lean-virtuals'
3 | import muuid from 'uuid-mongodb'
4 | import { PostType } from './PostTypes.js'
5 | import { XMediaSchema } from './XMediaSchema.js'
6 |
7 | const { Schema } = mongoose
8 |
9 | const PostSchema = new Schema<PostType>({
10 | userId: {
11 | type: 'object',
12 | value: { type: 'Buffer' },
13 | default: () => muuid.v4(),
14 | required: true,
15 | unique: false,
16 | index: true
17 | },
18 | xMedia: {
19 | type: [XMediaSchema],
20 | required: true
21 | },
22 | description: { type: String }
23 | }, {
24 | _id: true,
25 | strictQuery: 'throw',
26 | toObject: {
27 | virtuals: true
28 | },
29 | toJSON: { virtuals: true }
30 | })
31 |
32 | PostSchema.plugin(mongooseLeanVirtuals)
33 |
34 | export const getPostModel = (name: string = 'post'): mongoose.Model<PostType> => {
35 | return mongoose.model(name, PostSchema)
36 | }
37 |
--------------------------------------------------------------------------------
/src/db/PostTypes.ts:
--------------------------------------------------------------------------------
1 | import mongoose from 'mongoose'
2 | import { MUUID } from 'uuid-mongodb'
3 | import { XMediaType } from './XMediaTypes'
4 | export interface PostType {
5 | userId: MUUID
6 | xMedia: XMediaType[]
7 | description?: string
8 | }
9 |
10 | export interface AddPostInputType {
11 | photoUrls: string[]
12 | userId: string
13 | description?: string
14 | mediaType: number
15 | }
16 |
17 | export interface RemovePostInputType {
18 | postId: mongoose.Types.ObjectId
19 | }
20 |
21 | export interface GetPostsInputType {
22 | postIds: mongoose.Types.ObjectId[]
23 | }
24 |
--------------------------------------------------------------------------------
/src/db/TagSchema.ts:
--------------------------------------------------------------------------------
1 | import mongoose from 'mongoose'
2 | import mongooseLeanVirtuals from 'mongoose-lean-virtuals'
3 | import muuid from 'uuid-mongodb'
4 |
5 | import { TagType, RefModelType } from './TagTypes.js'
6 |
7 | const { Schema } = mongoose
8 |
9 | const TagSchema = new Schema<TagType>({
10 | mediaUrl: {
11 | type: Schema.Types.String,
12 | required: true
13 | },
14 | mediaUuid: {
15 | type: 'object',
16 | value: { type: 'Buffer' },
17 | default: () => muuid.v4(),
18 | required: true,
19 | unique: false,
20 | index: true
21 | },
22 | destinationId: {
23 | type: Schema.Types.Mixed,
24 | value: { type: 'Buffer' },
25 | required: true,
26 | refPath: 'onModel'
27 | },
28 | destinationType: {
29 | type: Number,
30 | required: true
31 | },
32 | onModel: {
33 | type: String,
34 | required: true,
35 | enum: Object.values(RefModelType)
36 | }
37 | }, {
38 | _id: true,
39 | strictQuery: 'throw',
40 | toObject: {
41 | virtuals: true
42 | },
43 | toJSON: { virtuals: true }
44 | })
45 |
46 | TagSchema.virtual('climb', {
47 | ref: 'climbs',
48 | localField: 'destinationId',
49 | foreignField: '_id',
50 | justOne: true
51 | })
52 |
53 | TagSchema.virtual('area', {
54 | ref: 'areas',
55 | localField: 'destinationId',
56 | foreignField: 'metadata.area_id',
57 | justOne: true
58 | })
59 |
60 | TagSchema.plugin(mongooseLeanVirtuals)
61 | TagSchema.index({ mediaUuid: 1, destinationId: 1 }, { unique: true })
62 |
63 | export const getTagModel = (name: string = 'tags'): mongoose.Model<TagType> => {
64 | return mongoose.model(name, TagSchema)
65 | }
66 |
--------------------------------------------------------------------------------
/src/db/TagTypes.ts:
--------------------------------------------------------------------------------
1 | import mongoose from 'mongoose'
2 | import { MUUID } from 'uuid-mongodb'
3 |
4 | export interface TagType {
5 | _id?: mongoose.Types.ObjectId
6 | mediaUrl: string
7 | mediaUuid: MUUID
8 | destinationId: MUUID
9 | destinationType: number
10 | onModel: RefModelType
11 | }
12 |
13 | export enum RefModelType {
14 | climbs = 'climbs',
15 | areas = 'areas'
16 | }
17 |
18 | export interface RemoveTagInputType {
19 | tagId: mongoose.Types.ObjectId
20 | }
21 |
22 | export interface GetTagsInputType {
23 | tagIds: mongoose.Types.ObjectId[]
24 | }
25 |
--------------------------------------------------------------------------------
/src/db/TickSchema.ts:
--------------------------------------------------------------------------------
1 | import mongoose from 'mongoose'
2 | import { TickSource, TickType } from './TickTypes'
3 |
4 | const { Schema } = mongoose
5 |
6 | /**
7 | * Tick Schema
8 | *
9 | * The tick schema defines how ticks are stored and serialized in the mongo database.
10 | * See TickTypes.ts for the TypeScript interface that defines these types as they
11 | * are used within the application. Documents retrieved through this schema should come out as
12 | * TickType objects.
13 | */
14 | export const TickSchema = new Schema<TickType>({
15 | name: { type: Schema.Types.String, required: true, index: true },
16 | notes: { type: Schema.Types.String, required: false },
17 | climbId: { type: Schema.Types.String, required: true, index: true },
18 | userId: { type: Schema.Types.String, required: true, index: true },
19 | style: { type: Schema.Types.String, enum: ['Lead', 'Solo', 'TR', 'Follow', 'Aid', 'Boulder'], required: false },
20 | attemptType: { type: Schema.Types.String, enum: ['Onsight', 'Flash', 'Pinkpoint', 'Frenchfree', 'Redpoint', 'Send', 'Attempt', 'Repeat'], required: false, index: true },
21 | dateClimbed: { type: Schema.Types.Date },
22 | grade: { type: Schema.Types.String, required: false, index: true },
23 | // Bear in mind that these enum types must be kept in sync with the TickSource enum
24 | source: { type: Schema.Types.String, enum: ['MP', 'OB'] as TickSource[], required: true, index: true }
25 | })
26 |
27 | TickSchema.index({ userId: 1 }) // for ticksByUser()
28 | TickSchema.index({ userId: 1, climbId: 1 }) // for ticksByUserIdAndClimb()
29 |
30 | export const getTickModel = (name: string = 'ticks'): mongoose.Model<TickType> => {
31 | return mongoose.model(name, TickSchema)
32 | }
33 |
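A usage sketch for the tick model; the ids and values are placeholders, and it assumes an open mongoose connection plus top-level await:

import { getTickModel } from './TickSchema.js'

const tick = await getTickModel().create({
  name: 'Midnight Lightning',
  climbId: 'c3359a7e-6ba2-4b5d-9f15-f12f1b48ce28', // placeholder climb uuid
  userId: 'b9282bbc-a721-4233-a5dc-5d5ec4783046', // placeholder user uuid
  style: 'Boulder',
  attemptType: 'Send',
  dateClimbed: new Date('2023-06-01'),
  grade: 'V8',
  source: 'OB'
})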
--------------------------------------------------------------------------------
/src/db/UserSchema.ts:
--------------------------------------------------------------------------------
1 | import mongoose from 'mongoose'
2 | import muuid from 'uuid-mongodb'
3 |
4 | import { ExperimentalUserType, User, UsernameInfo } from './UserTypes.js'
5 |
6 | const { Schema } = mongoose
7 |
8 | export const ExperimentalUserSchema = new Schema<ExperimentalUserType>({
9 | _id: {
10 | type: 'object',
11 | value: { type: 'Buffer' },
12 | default: () => muuid.v4()
13 | },
14 | displayName: { type: Schema.Types.String, required: true, index: true },
15 | url: { type: Schema.Types.String, required: true, index: true }
16 | }, {
17 | _id: false,
18 | timestamps: true
19 | })
20 |
21 | /**
22 | * Temporary model used to capture user profile during bulk import of Canada data.
23 | * Use the standard User model instead.
24 | */
25 | export const getExperimentalUserModel = (): mongoose.Model<ExperimentalUserType> => {
26 | return mongoose.model('exp_users', ExperimentalUserSchema)
27 | }
28 |
29 | const UsernameSchema = new Schema<UsernameInfo>({
30 | username: { type: Schema.Types.String, required: true },
31 | canonicalName: { type: Schema.Types.String, required: true }
32 | }, {
33 | _id: false,
34 | timestamps: {
35 | updatedAt: true,
36 | createdAt: false
37 | }
38 | })
39 |
40 | export const UserSchema = new Schema<User>({
41 | _id: {
42 | type: 'object',
43 | value: { type: 'Buffer' }
44 | },
45 | email: { type: Schema.Types.String },
46 | emailVerified: { type: Schema.Types.Boolean },
47 | displayName: { type: Schema.Types.String },
48 | bio: { type: Schema.Types.String },
49 | website: { type: Schema.Types.String },
50 | avatar: { type: Schema.Types.String },
51 | usernameInfo: { type: UsernameSchema, required: false },
52 | createdBy: {
53 | type: 'object',
54 | value: { type: 'Buffer' }
55 | },
56 | updatedBy: {
57 | type: 'object',
58 | value: { type: 'Buffer' }
59 | }
60 | }, {
61 | _id: false,
62 | timestamps: true
63 | })
64 |
65 | /**
66 | * For sorting by most recent
67 | */
68 | UserSchema.index({ createdAt: -1 })
69 | UserSchema.index({ 'usernameInfo.canonicalName': 1 }, { sparse: true, unique: true })
70 | UserSchema.index({ 'usernameInfo.username': 1 }, { sparse: true, unique: true })
71 |
72 | export const getUserModel = (): mongoose.Model<User> => {
73 | return mongoose.model('users', UserSchema)
74 | }
75 |
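A lookup sketch that the sparse unique indexes above are meant to serve, assuming `canonicalName` stores a lowercased form of the username:

import { getUserModel } from './UserSchema.js'

// Find a user by the canonical (case-insensitive) form of their username.
const user = await getUserModel()
  .findOne({ 'usernameInfo.canonicalName': 'johndoe' }) // placeholder name
  .lean()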
--------------------------------------------------------------------------------
/src/db/UserTypes.ts:
--------------------------------------------------------------------------------
1 | import { MUUID } from 'uuid-mongodb'
2 | import { UserMedia } from './MediaObjectTypes.js'
3 | export interface ExperimentalUserType {
4 | _id: MUUID
5 | displayName: string
6 | nickname: string
7 | url: string
8 | createdAt: Date
9 | updatedAt: Date
10 | }
11 |
12 | export interface ExperimentalAuthorType {
13 | displayName: string
14 | url: string
15 | }
16 |
17 | export interface UsernameInfo {
18 | username: string
19 | canonicalName: string
20 | updatedAt: Date
21 | }
22 | export interface User {
23 | _id: MUUID
24 | email?: string
25 | emailVerified?: boolean
26 | displayName?: string
27 | usernameInfo?: UsernameInfo
28 | website?: string
29 | bio?: string
30 | avatar?: string
31 | createdAt: Date
32 | updatedAt: Date
33 | createdBy: MUUID
34 | updatedBy?: MUUID
35 | }
36 |
37 | export interface UpdateProfileGQLInput {
38 | username?: string
39 | userUuid: string
40 | displayName?: string
41 | bio?: string
42 | website?: string
43 | email?: string
44 | avatar?: string
45 | }
46 |
47 | export interface UsernameGQLInput {
48 | username: string
49 | }
50 |
51 | export interface UserIdGQLInput {
52 | userUuid: string
53 | }
54 |
55 | export interface GetUsernameReturn {
56 | _id: MUUID
57 | username: string
58 | updatedAt: Date
59 | }
60 |
61 | export type UserPublicProfile = Pick<User, '_id' | 'displayName' | 'bio' | 'website' | 'avatar'> & {
62 | username: string
63 | }
64 |
65 | export interface UserPublicPage {
66 | profile: UserPublicProfile
67 | media: UserMedia
68 | }
69 |
--------------------------------------------------------------------------------
/src/db/XMediaSchema.ts:
--------------------------------------------------------------------------------
1 | import mongoose from 'mongoose'
2 | import mongooseLeanVirtuals from 'mongoose-lean-virtuals'
3 | import muuid from 'uuid-mongodb'
4 |
5 | import { XMediaType } from './XMediaTypes.js'
6 |
7 | const { Schema } = mongoose
8 |
9 | export const XMediaSchema = new Schema<XMediaType>({
10 | userId: {
11 | type: 'object',
12 | value: { type: 'Buffer' },
13 | default: () => muuid.v4(),
14 | required: true,
15 | unique: false,
16 | index: true
17 | },
18 | mediaType: {
19 | type: Schema.Types.Number,
20 | required: true
21 | },
22 | mediaUrl: {
23 | type: Schema.Types.String,
24 | required: true
25 | },
26 | tagIds: {
27 | type: [Schema.Types.ObjectId],
28 | required: true
29 | }
30 | }, {
31 | _id: true,
32 | strictQuery: 'throw',
33 | toObject: {
34 | virtuals: true
35 | },
36 | toJSON: { virtuals: true }
37 | })
38 |
39 | XMediaSchema.plugin(mongooseLeanVirtuals)
40 |
41 | /**
42 | * @deprecated Superseded by MediaObjects
43 | * @param name
44 | * @returns
45 | */
46 | export const getXMediaModel = (name: string = 'xmedia'): mongoose.Model<XMediaType> => {
47 | return mongoose.model(name, XMediaSchema)
48 | }
49 |
--------------------------------------------------------------------------------
/src/db/XMediaTypes.ts:
--------------------------------------------------------------------------------
1 | import mongoose from 'mongoose'
2 | import { MUUID } from 'uuid-mongodb'
3 |
4 | export interface XMediaType {
5 | _id?: mongoose.Types.ObjectId
6 | userId: MUUID
7 | mediaType: number // 0: photo
8 | mediaUrl: string
9 | tagIds?: mongoose.Types.ObjectId[]
10 | }
11 |
12 | export interface RemoveXMediaInputType {
13 | xMediaId: mongoose.Types.ObjectId
14 | }
15 |
16 | export interface GetXMediaInputType {
17 | xMediaIds: mongoose.Types.ObjectId[]
18 | }
19 |
--------------------------------------------------------------------------------
/src/db/export/Typesense/Client.ts:
--------------------------------------------------------------------------------
1 | import Typesense, { Client } from 'typesense'
2 |
3 | import { areaSchema, climbSchema } from './TypesenseSchemas.js'
4 | import { mongoAreaToTypeSense, mongoClimbToTypeSense } from './transformers.js'
5 | import { logger } from '../../../logger.js'
6 | import { AreaType } from '../../AreaTypes.js'
7 | import { DBOperation } from '../../ChangeLogType.js'
8 | import Config from '../../../Config.js'
9 | import { ClimbExtType, ClimbType } from '../../ClimbTypes.js'
10 | import MutableAreaDataSource from '../../../model/MutableAreaDataSource.js'
11 |
12 | /**
13 | * Return a Typesense client.
14 | * See https://typesense.org/docs/0.23.1/api/
15 | * @returns Typesense Client object
16 | */
17 | export default function typesense (): Client | undefined {
18 | const client = new Typesense.Client({
19 | nodes: [
20 | {
21 | host: Config.TYPESENSE_NODE,
22 | port: 443,
23 | protocol: 'https'
24 | }
25 | ],
26 | apiKey: Config.TYPESENSE_API_KEY_RW,
27 | numRetries: 3, // A total of 4 tries (1 original try + 3 retries)
28 | connectionTimeoutSeconds: 120, // Set a longer timeout for large imports
29 | logLevel: 'info'
30 | })
31 | return client
32 | }
33 |
34 | /**
35 | * Update/remove a record in Area index
36 | * @param area
37 | * @param op
38 | */
39 | export const updateAreaIndex = async (area: AreaType | null, op: DBOperation): Promise<void> => {
40 | if (area == null) return
41 | try {
42 | if (Config.DEPLOYMENT_ENV !== 'production') {
43 | return
44 | }
45 | switch (op) {
46 | case 'insert':
47 | case 'update':
48 | await typesense()?.collections(areaSchema.name).documents().upsert(mongoAreaToTypeSense(area))
49 | break
50 | case 'delete':
51 | await typesense()?.collections(areaSchema.name).documents(area.metadata.area_id.toUUID().toString()).delete()
52 | break
53 | }
54 | } catch (e) {
55 | logger.error({ exception: e.toString() }, 'Can\'t update Typesense Area index: ' + area.area_name)
56 | }
57 | }
58 |
59 | /**
60 | * Update/remove a record in Climb index
61 | * @param climb
62 | * @param op
63 | */
64 | export const updateClimbIndex = async (climb: ClimbType | null, op: DBOperation): Promise<void> => {
65 | if (climb == null) return
66 | try {
67 | if (Config.DEPLOYMENT_ENV !== 'production') {
68 | return
69 | }
70 |
71 | // Look up additional attrs required by Climb index in Typesense.
72 | const { pathTokens, ancestors } = await MutableAreaDataSource.getInstance().findOneAreaByUUID(climb.metadata.areaRef)
73 |
74 | const climbExt: ClimbExtType = {
75 | ...climb,
76 | pathTokens,
77 | ancestors
78 | }
79 |
80 | switch (op) {
81 | case 'insert':
82 | case 'update':
83 | await typesense()?.collections(climbSchema.name).documents().upsert(mongoClimbToTypeSense(climbExt))
84 | break
85 | case 'delete':
86 | await typesense()?.collections(climbSchema.name).documents(climb._id.toUUID().toString()).delete()
87 | break
88 | }
89 | } catch (e) {
90 | logger.error({ exception: e.toString() }, 'Can\'t update Typesense Climb index: ' + climb.name)
91 | }
92 | }
93 |
--------------------------------------------------------------------------------
/src/db/export/Typesense/Typesense.ts:
--------------------------------------------------------------------------------
1 | import { Client } from 'typesense'
2 |
3 | import typesenseClient from './Client.js'
4 | import { connectDB, gracefulExit } from '../../index.js'
5 | import { ClimbExtType } from '../../ClimbTypes.js'
6 | import { logger } from '../../../logger.js'
7 | import { areaSchema, AreaTypeSenseItem, climbSchema, ClimbTypeSenseItem } from './TypesenseSchemas.js'
8 | import { CollectionCreateSchema } from 'typesense/lib/Typesense/Collections.js'
9 | import { AreaType } from '../../AreaTypes.js'
10 | import { mongoAreaToTypeSense, mongoClimbToTypeSense } from './transformers.js'
11 | import { processMongoCollection } from '../common/index.js'
12 | import { getAllAreas, getAllClimbs } from '../queries/index.js'
13 |
14 | /**
15 | * For a given collection that might exist in typesense, drop it (if it exists)
16 | * and then create it again with the set schema.
17 | * This keeps schema up to date, and pre-empts duplicates.
18 | */
19 | async function checkCollection (
20 | client: Client,
21 | schema: CollectionCreateSchema
22 | ): Promise<void> {
23 | try {
24 | // Delete if the collection already exists from a previous run
25 | await client.collections(schema.name).delete()
26 | logger.info(`dropped ${schema.name} collection from typesense`)
27 | } catch (error) {
28 | logger.error(error)
29 | }
30 |
31 | // Create a collection matching the specified schema
32 | try {
33 | await client.collections().create(schema)
34 | logger.info(`created ${schema.name} typesense collection`)
35 | } catch (error) {
36 | logger.error(error)
37 | await gracefulExit()
38 | }
39 | }
40 |
41 | async function uploadChunk (client: Client, schema: CollectionCreateSchema, chunk: Object[]): Promise<void> {
42 | // Chunk entries may not exceed chunkSize
43 | if (chunk.length === 0) return
44 |
45 | try {
46 | logger.info(`pushing ${chunk.length} documents to typesense`)
47 | // This is safe enough. If anyone's gonna pass a non-object type then
48 | // they haven't been paying attention
49 | await client.collections(schema.name).documents().import(chunk, { action: 'upsert' })
50 | } catch (e) {
51 | logger.error(e)
52 | }
53 | }
54 |
55 | async function updateClimbTypesense (client: Client): Promise<void> {
56 | await processMongoCollection({
57 | preProcess: async () => await checkCollection(client, climbSchema),
58 | converter: mongoClimbToTypeSense,
59 | dataGenerator: getAllClimbs,
60 | processChunk: async (chunk) => await uploadChunk(client, climbSchema, chunk)
61 | })
62 | }
63 |
64 | async function updateAreaTypesense (client: Client): Promise<void> {
65 | await processMongoCollection({
66 | preProcess: async () => await checkCollection(client, areaSchema),
67 | converter: mongoAreaToTypeSense,
68 | dataGenerator: getAllAreas,
69 | processChunk: async (chunk) => await uploadChunk(client, areaSchema, chunk)
70 | })
71 | }
72 |
73 | async function onDBConnected (): Promise<void> {
74 | const node = process.env.TYPESENSE_NODE ?? ''
75 | const apiKey = process.env.TYPESENSE_API_KEY_RW ?? ''
76 |
77 | if (node === '' || apiKey === '') {
78 | logger.error('Missing env keys')
79 | await gracefulExit(1)
80 | }
81 |
82 | const typesense = typesenseClient()
83 | if (typesense == null) {
84 | process.exit(1)
85 | }
86 |
87 | logger.info('Start pushing data to TypeSense')
88 |
89 | if (process.argv.includes('--climbs')) {
90 | // Update climb data in typesense
91 | await updateClimbTypesense(typesense)
92 | logger.info('Climbs pushed to typesense')
93 | }
94 |
95 | if (process.argv.includes('--areas')) {
96 | // Update area data in typesense
97 | await updateAreaTypesense(typesense)
98 | logger.info('areas pushed to typesense')
99 | }
100 |
101 | await gracefulExit()
102 | }
103 |
104 | void connectDB(onDBConnected)
105 |
--------------------------------------------------------------------------------
/src/db/export/Typesense/TypesenseSchemas.ts:
--------------------------------------------------------------------------------
1 | import { CollectionCreateSchema } from 'typesense/lib/Typesense/Collections'
2 |
3 | export interface ClimbTypeSenseItem {
4 | climbUUID: string
5 | climbName: string
6 | climbDesc: string
7 | fa: string
8 | areaNames: string[]
9 | disciplines: string[]
10 | grade?: string // Todo: switch to grade context
11 | safety: string
12 | cragLatLng?: [number, number]
13 | }
14 |
15 | /**
16 | * Typesense schema for climbs, includes most data that someone might search with,
17 | * as well as some metadata to help trim the set based on context
18 | */
19 | export const climbSchema: CollectionCreateSchema = {
20 | name: 'climbs',
21 | fields: [
22 | {
23 | name: 'climbName',
24 | type: 'string' as const,
25 | facet: false
26 | },
27 | {
28 | name: 'climbDesc',
29 | type: 'string' as const,
30 | facet: false
31 | },
32 | {
33 | name: 'fa',
34 | type: 'string' as const,
35 | facet: false
36 | },
37 | {
38 | name: 'disciplines',
39 | type: 'string[]' as const,
40 | facet: true
41 | },
42 | {
43 | name: 'areaNames',
44 | type: 'string[]' as const,
45 | facet: false
46 | },
47 | {
48 | name: 'climbUUID',
49 | type: 'string' as const,
50 | index: false,
51 | optional: true
52 | },
53 | {
54 | name: 'grade',
55 | type: 'string' as const,
56 | index: false,
57 | optional: true
58 | },
59 | {
60 | name: 'safety',
61 | type: 'string' as const,
62 | index: false,
63 | optional: true
64 | },
65 | {
66 | name: 'cragLatLng',
67 | type: 'geopoint' as const,
68 | index: true
69 | }
70 | ],
71 | token_separators: ['(', ')', '-', '.']
72 | // TBD: need to have better tie-breakers (star/popularity ratings)
73 | // default_sorting_field: 'climb_name'
74 | }
75 |
76 | export interface AreaTypeSenseItem {
77 | id: string
78 | name: string
79 | pathTokens: string[]
80 | areaUUID: string
81 | areaLatLng?: [number, number]
82 | leaf: boolean
83 | isDestination: boolean
84 | totalClimbs: number
85 | density: number
86 | }
87 |
88 | /**
89 | * Typesense schema for areas. Areas are slightly easier to index than climbs.
90 | */
91 | export const areaSchema: CollectionCreateSchema = {
92 | name: 'areas',
93 | fields: [
94 | {
95 | name: 'name',
96 | type: 'string' as const,
97 | facet: false
98 | },
99 | {
100 | // Ancestor area names of this area
101 | name: 'pathTokens',
102 | type: 'string[]' as const,
103 | facet: false
104 | },
105 | {
106 | name: 'areaUUID',
107 | type: 'string' as const,
108 | index: false,
109 | optional: true
110 | },
111 | {
112 | name: 'totalClimbs',
113 | type: 'int32' as const,
114 | facet: false
115 | },
116 | {
117 | name: 'density',
118 | type: 'float' as const,
119 | facet: false
120 | },
121 | {
122 | name: 'isDestination',
123 | type: 'bool' as const,
124 | index: true
125 | },
126 | {
127 | name: 'leaf',
128 | type: 'bool' as const,
129 | index: true
130 | },
131 | {
132 | name: 'areaLatLng',
133 | type: 'geopoint' as const,
134 | index: true
135 | }
136 | ],
137 | token_separators: ['(', ')', '-', '.']
138 | }
139 |
--------------------------------------------------------------------------------
/src/db/export/Typesense/Utils.ts:
--------------------------------------------------------------------------------
1 | import { Point } from '@turf/helpers'
2 |
3 | import { DisciplineType } from '../../ClimbTypes'
4 |
5 | export interface IFlatClimbTypes {
6 | typeSport: boolean
7 | typeTrad: boolean
8 | typeTR: boolean
9 | typeBouldering: boolean
10 | typeDeepWaterSolo: boolean
11 | typeMixed: boolean
12 | typeAlpine: boolean
13 | typeSnow: boolean
14 | typeIce: boolean
15 | typeAid: boolean
16 | }
17 |
18 | export const flattenDisciplines = (type: DisciplineType): IFlatClimbTypes => {
19 | return {
20 | typeSport: type?.sport ?? false,
21 | typeTrad: type?.trad ?? false,
22 | typeTR: type?.tr ?? false,
23 | typeBouldering: type?.bouldering ?? false,
24 | typeDeepWaterSolo: type?.deepwatersolo ?? false,
25 | typeMixed: type?.mixed ?? false,
26 | typeAlpine: type?.alpine ?? false,
27 | typeSnow: type?.snow ?? false,
28 | typeIce: type?.ice ?? false,
29 | typeAid: type?.aid ?? false
30 | }
31 | }
32 |
33 | export const disciplinesToArray = (type: DisciplineType): string[] => {
34 | const z: string[] = []
35 | for (const property in type) {
36 | if (type[property] as boolean) {
37 | z.push(property)
38 | }
39 | }
40 | return z
41 | }
42 |
43 | /**
44 | * Convert mongo db geo point type to [lat,lng] for typesense geo search
45 | * @param geoPoint
46 | * @returns
47 | */
48 | export const geoToLatLng = (geoPoint?: Point): [number, number] | undefined => {
49 | if (geoPoint == null) {
50 | return undefined
51 | }
52 | const { coordinates } = geoPoint
53 | return [coordinates[1], coordinates[0]]
54 | }
55 |
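Behavior sketch for the two helpers above; the discipline object is illustrative and assumes the remaining DisciplineType flags are optional:

import { disciplinesToArray, geoToLatLng } from './Utils.js'

disciplinesToArray({ sport: true, trad: false, bouldering: true })
// -> ['sport', 'bouldering']  (only truthy flags are kept)

geoToLatLng({ type: 'Point', coordinates: [-118.25, 36.59] })
// -> [36.59, -118.25]  (mongo stores [lng, lat]; typesense expects [lat, lng])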
--------------------------------------------------------------------------------
/src/db/export/Typesense/transformers.ts:
--------------------------------------------------------------------------------
1 | import { AreaTypeSenseItem, ClimbTypeSenseItem } from './TypesenseSchemas.js'
2 | import { AreaType } from '../../AreaTypes.js'
3 | import { disciplinesToArray, geoToLatLng } from './Utils.js'
4 | import { ClimbExtType, SafetyType } from '../../ClimbTypes.js'
5 |
6 | /**
7 | * Convert an Area object to a Typesense object
8 | * @param doc AreaType
9 | */
10 |
11 | export function mongoAreaToTypeSense (doc: AreaType): AreaTypeSenseItem {
12 | return {
13 | id: doc.metadata.area_id.toUUID().toString(),
14 | areaUUID: doc.metadata.area_id.toUUID().toString(),
15 | name: doc.area_name ?? '',
16 | pathTokens: doc.pathTokens,
17 | areaLatLng: geoToLatLng(doc.metadata.lnglat),
18 | leaf: doc.metadata.leaf,
19 | isDestination: doc.metadata.isDestination,
20 | totalClimbs: doc.totalClimbs,
21 | density: doc.density
22 | }
23 | }
24 |
25 | /**
26 | * Convert a Climb object to a Typesense object
27 | * @param doc Climb type
28 | */
29 | export function mongoClimbToTypeSense (doc: ClimbExtType): ClimbTypeSenseItem {
30 | return {
31 | climbUUID: doc._id.toUUID().toString(),
32 | climbName: doc.name,
33 | climbDesc: doc.content?.description ?? '',
34 | fa: doc.fa ?? '',
35 | areaNames: doc.pathTokens,
36 | disciplines: disciplinesToArray(doc.type),
37 | grade: doc?.yds ?? '',
38 | safety: doc?.safety ?? SafetyType.UNSPECIFIED.toString(),
39 | cragLatLng: geoToLatLng(doc.metadata.lnglat)
40 | }
41 | }
42 |
--------------------------------------------------------------------------------
/src/db/export/common/index.ts:
--------------------------------------------------------------------------------
1 | export {
2 | MongoCollectionProcessorOptions,
3 | processMongoCollection
4 | } from './processor.js'
5 |
--------------------------------------------------------------------------------
/src/db/export/common/processor.ts:
--------------------------------------------------------------------------------
1 | export type Processor<T> = (data: T[], chunkCount: number) => Promise<void>
2 |
3 | export interface MongoCollectionProcessorOptions<SourceDataType, ChunkType> {
4 | /**
5 | * A callback that is called before the data is processed.
6 | * Use it to do any setup that needs to be done before the data is processed, e.g.
7 | * creating a new Typesense collection, or deleting an existing one.
8 | */
9 | preProcess?: () => Promise<void>
10 | /**
11 | * A converter function that converts the data from the source format to the
12 | * target format.
13 | * It is called per chunk, so it should be fast.
14 | * @param data The data to convert
15 | * @returns The converted data
16 | */
17 | converter: (data: SourceDataType) => ChunkType
18 | /**
19 | * A generator function that yields chunks of data.
20 | * Common queries can be found in src/db/export/queries/
21 | * @returns A generator that yields chunks of data
22 | */
23 | dataGenerator: () => AsyncGenerator<SourceDataType[]>
24 | /**
25 | * A function that is called for every batch of data
26 | * after it has been converted.
27 | * Use it to upload the data to some external service or database.
28 | * @param chunk the chunk of data to process
29 | */
30 | processChunk: Processor<ChunkType>
31 | }
32 |
33 | /**
34 | * Uses the provided data generator, converters and processors to process
35 | * data from the database and upload it to an external service provided by the processor.
36 | *
37 | * ChunkType just needs to be any Object type that conforms to whatever
38 | * schema this method is supposed to be satisfying.
39 | */
40 | export async function processMongoCollection<SourceDataType, ChunkType> (
41 | options: MongoCollectionProcessorOptions<SourceDataType, ChunkType>
42 | ): Promise<void> {
43 | // start by completely refreshing this collection. (Delete and stand back up)
44 | await options.preProcess?.()
45 |
46 | let chunkCount = 0
47 | for await (const chunk of options.dataGenerator()) {
48 | // upload the chunk as an array of translated objects
49 | await options.processChunk(chunk.map(options.converter), chunkCount++)
50 | }
51 | }
52 |
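A self-contained toy wiring of `processMongoCollection`; the generator and converter below are illustrative stand-ins for the real queries and transformers, and the snippet assumes top-level await:

import { processMongoCollection } from './processor.js'

async function * toyChunks (): AsyncGenerator<string[]> {
  yield ['a', 'b']
  yield ['c']
}

await processMongoCollection<string, string>({
  dataGenerator: toyChunks,
  converter: (s) => s.toUpperCase(),
  processChunk: async (chunk, i) => { console.log(i, chunk) } // 0 ['A','B'], then 1 ['C']
})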
--------------------------------------------------------------------------------
/src/db/export/json/area.resolver.test.ts:
--------------------------------------------------------------------------------
1 | import { resolveAreaFileName, resolveAreaSubPath } from './area.resolver'
2 | import path from 'path'
3 |
4 | describe('area resolvers', () => {
5 | describe('area name resolver', () => {
6 | const testCases = [
7 | { name: 'should trim whitespace', input: ' test ', expected: 'test' },
8 | { name: 'should lowercase', input: 'TEST', expected: 'test' },
9 | { name: 'should replace spaces with underscores', input: 'test test', expected: 'test_test' },
10 | { name: 'should replace special characters', input: 'test!@#$%^&*()_+{}:"<>?[]\';,./', expected: 'test' },
11 | {
12 | name: 'should replace multiple spaces with single underscore',
13 | input: ' test test ',
14 | expected: 'test_test'
15 | },
16 | { name: 'should return unknown for undefined', input: undefined, expected: 'unknown' },
17 | { name: 'should return unknown for empty string', input: '', expected: 'unknown' },
18 | { name: 'should return unknown for whitespace', input: ' ', expected: 'unknown' },
19 | {
20 | name: 'acceptance test',
21 | input: '(Home Crag) Boulders a.k.a. Sherriff Boulders 12',
22 | expected: 'home_crag_boulders_aka_sherriff_boulders_12'
23 | }
24 | ]
25 |
26 | function assertNameResolver (areaName: string | undefined, expected: string) {
27 | expect(resolveAreaFileName({ area_name: areaName })).toBe(expected)
28 | }
29 |
30 | testCases.forEach(testCase => {
31 | it(testCase.name, () => {
32 | assertNameResolver(testCase.input, testCase.expected)
33 | })
34 | })
35 | })
36 |
37 | describe('area sub path resolver', () => {
38 | const testCases = [
39 | { name: 'should return current path for empty array', input: [], expected: '.' },
40 | { name: 'should return path for single element', input: ['test'], expected: 'test' },
41 | { name: 'should return path for multiple elements', input: ['test', 'test2'], expected: path.join('test', 'test2') },
42 | { name: 'should ignore slashes in names', input: ['test/', 'test2\\'], expected: path.join('test', 'test2') }
43 | ]
44 |
45 | function assertSubPathResolver (path: string[], expected: string) {
46 | expect(resolveAreaSubPath({ pathTokens: path })).toBe(expected)
47 | }
48 |
49 | testCases.forEach(testCase => {
50 | it(testCase.name, () => {
51 | assertSubPathResolver(testCase.input, testCase.expected)
52 | })
53 | })
54 | })
55 | })
56 |
--------------------------------------------------------------------------------
/src/db/export/json/area.resolver.ts:
--------------------------------------------------------------------------------
1 | import { AreaType } from '../../AreaTypes.js'
2 | import path from 'path'
3 |
4 | export function resolveAreaFileName (area: Partial): string {
5 | const name = normalizeName(area.area_name)
6 | if (name === undefined || name === '') { return 'unknown' } else { return name }
7 | }
8 |
9 | export function resolveAreaSubPath (area: Partial): string {
10 | const paths: string[] = area.pathTokens?.map(normalizeName)
11 | .map(token => token ?? '')
12 | .filter(token => token !== '') ?? []
13 | return path.join(...paths)
14 | }
15 |
16 | function normalizeName (name?: string): string | undefined {
17 | return name?.trim()
18 | .toLowerCase()
19 | .replace(/[^a-zA-Z0-9 -]/g, '')
20 | .replace(/\s\s+/g, ' ')
21 | .replace(/ /g, '_')
22 | }
23 |
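Behavior sketch for the resolvers above; the first expected output follows from the acceptance test earlier in this directory, and the joined path assumes a posix separator:

import { resolveAreaFileName, resolveAreaSubPath } from './area.resolver.js'

resolveAreaFileName({ area_name: '(Home Crag) Boulders a.k.a. Sherriff Boulders 12' })
// -> 'home_crag_boulders_aka_sherriff_boulders_12'

resolveAreaSubPath({ pathTokens: ['USA', 'Yosemite Valley'] })
// -> 'usa/yosemite_valley'  (path.join of the normalized tokens)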
--------------------------------------------------------------------------------
/src/db/export/json/async-file.processor.test.ts:
--------------------------------------------------------------------------------
1 | import { asyncFileProcessor, Writer } from './async-file.processor'
2 | import path from 'path'
3 |
4 | interface TestType { name: string, path?: string[] }
5 |
6 | describe('file processor', () => {
7 | const writer = jest.fn(async (_data, _path) => await Promise.resolve())
8 | const testData: TestType[] = [{ name: 'test', path: ['one', 'two'] }, { name: 'test2' }]
9 | const testPath = 'testPath'
10 |
11 | function assertWriterCalledFor (data: TestType) {
12 | expect(writer).toHaveBeenCalledWith(JSON.stringify(data), path.resolve(testPath, ...data.path ?? '', `${data.name}.json`))
13 | }
14 |
15 | function createProcessor (w: Writer = writer) {
16 | return asyncFileProcessor({
17 | basePath: testPath,
18 | fileNameResolver: (data: TestType) => data.name,
19 | subPathResolver: (data: TestType) => data.path?.join(path.sep) ?? '',
20 | writer: w
21 | })
22 | }
23 |
24 | function withFailedWriteOn (failingData: { name: string }) {
25 | return async (data, path) => {
26 | if (data === JSON.stringify(failingData)) {
27 | return await Promise.reject('error')
28 | }
29 | return await writer(data, path)
30 | }
31 | }
32 |
33 | it('should write the correct data to a file', async () => {
34 | const processor = createProcessor()
35 |
36 | await processor(testData, 2)
37 |
38 | assertWriterCalledFor(testData[0])
39 | assertWriterCalledFor(testData[1])
40 | })
41 |
42 | it('should continue batch processing on error', async () => {
43 | const processor = createProcessor(withFailedWriteOn(testData[0]))
44 |
45 | await expect(processor(testData, 0)).rejects.toContain('Failed to write 1/2 files')
46 |
47 | assertWriterCalledFor(testData[1])
48 | })
49 | })
50 |
--------------------------------------------------------------------------------
/src/db/export/json/async-file.processor.ts:
--------------------------------------------------------------------------------
1 | import { promises } from 'fs'
2 | import { Processor } from '../common/processor.js'
3 | import path, { dirname } from 'path'
4 | import { logger } from '../../../logger.js'
5 |
6 | export type Writer = (data: string, path: string) => Promise<void>
7 | export type PathResolver<T> = (data: T) => string
8 |
9 | export interface FileProcessorOptions<T> {
10 | basePath: string
11 | subPathResolver?: PathResolver<T>
12 | fileNameResolver: PathResolver<T>
13 | writer?: Writer
14 | }
15 |
16 | export function asyncFileProcessor<T> ({
17 | writer = async (data, path) => await promises.writeFile(path, data, 'utf-8'),
18 | ...options
19 | }: FileProcessorOptions<T>): Processor<T> {
20 | return async (data: T[]): Promise<void> => {
21 | return await Promise.allSettled(data.map(async (item) => {
22 | const filePath = resolveFilePath(item, options)
23 | logger.info(`saving to file ${filePath}`)
24 | await promises.mkdir(dirname(filePath), { recursive: true })
25 | return await writer(JSON.stringify(item), filePath)
26 | })).then(async results => {
27 | const errorCount = results.filter(result => result.status === 'rejected').length
28 | const errors = joinErrors(results, data, options)
29 |
30 | if (errorCount > 0) { throw new Error(`Failed to write ${errorCount}/${results.length} files: ${errors}`) } else { return await Promise.resolve() }
31 | })
32 | }
33 | }
34 |
35 | function resolveFilePath<T> (item: T, {
36 | basePath,
37 | fileNameResolver,
38 | subPathResolver
39 | }: { basePath: string, fileNameResolver: PathResolver<T>, subPathResolver?: PathResolver<T> }): string {
40 | if (subPathResolver != null) {
41 | basePath = path.join(basePath, subPathResolver(item))
42 | }
43 | return path.resolve(basePath, `${fileNameResolver(item)}.json`)
44 | }
45 |
46 | function joinErrors<T> (results: Array<PromiseSettledResult<void>>, data: T[], options: Omit<FileProcessorOptions<T>, 'writer'>): string {
47 | return results.map(extractError(data, options))
48 | .filter(error => error !== undefined)
49 | .join(', ')
50 | }
51 |
52 | function extractError<T> (data: T[], options: Omit<FileProcessorOptions<T>, 'writer'>) {
53 | return (result: PromiseSettledResult<void>, index: number) => {
54 | if (result.status === 'rejected') { return `${resolveFilePath(data[index], options)} (${result.reason as string})` } else { return undefined }
55 | }
56 | }
57 |
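A wiring sketch combining this processor with the area resolvers; the output directory is an assumption, and the json export entry point below does essentially this:

import { asyncFileProcessor } from './async-file.processor.js'
import { resolveAreaFileName, resolveAreaSubPath } from './area.resolver.js'
import { AreaType } from '../../AreaTypes.js'

// Writes each area to <basePath>/<pathTokens...>/<area_name>.json
const processor = asyncFileProcessor<AreaType>({
  basePath: '/tmp/openbeta-export', // assumed output directory
  fileNameResolver: resolveAreaFileName,
  subPathResolver: resolveAreaSubPath
})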
--------------------------------------------------------------------------------
/src/db/export/json/index.ts:
--------------------------------------------------------------------------------
1 | import { connectDB, gracefulExit } from '../../index.js'
2 | import { logger } from '../../../logger.js'
3 | import { processMongoCollection, Processor } from '../common/processor.js'
4 | import { getAllAreas } from '../queries/get-all-areas.js'
5 | import { AreaType } from '../../AreaTypes.js'
6 |
7 | import { asyncFileProcessor } from './async-file.processor.js'
8 | import { fileURLToPath } from 'url'
9 | import path, { dirname } from 'path'
10 | import fs from 'fs'
11 | import { resolveAreaFileName, resolveAreaSubPath } from './area.resolver.js'
12 |
13 | const filename = fileURLToPath(import.meta.url)
14 | const workingDirectory = dirname(filename)
15 |
16 | export interface JsonExportOptions {
17 | /**
18 | * A function that processes an outputted chunk of data and writes it somewhere.
19 | * @param data the data emitted from the database
20 | */
21 | processor: Processor<AreaType>