├── .all-contributorsrc ├── .dockerignore ├── .env ├── .github ├── FUNDING.yml └── workflows │ └── nodejs.yml ├── .gitignore ├── .gitlab-ci.yml ├── .husky └── pre-commit ├── .run └── Template Jest.run.xml ├── .vscode ├── launch.json └── settings.json ├── Dockerfile ├── LICENSE ├── README.md ├── db-migrations ├── 0001-area-sorting.js ├── 0002-create-history-indexes.js ├── 0003-date-climbed-to-date.js ├── 0004-unique-user-climb-date-style.js ├── 0005-area-sorting.js ├── 0006-capitalize-tick-style-and-attempt-type.js ├── 0007-tick-style-nullification.js ├── 0008-fix-swapped-tick-style.js └── 0009-fix-some-bouldering-ticks.js ├── docker-compose.override.yml ├── docker-compose.yml ├── documentation ├── layers.md ├── testing.md └── tick_logic.md ├── export.sh ├── hacks └── countries │ ├── tmp-list.json │ └── transform.cjs ├── jest.config.cjs ├── keyfile ├── kubernetes └── export-cronjob.yml ├── migrate-db.sh ├── mongo-clean.sh ├── mongo_setup.sh ├── package.json ├── refresh-db.sh ├── scripts └── upload-tiles.sh ├── seed-db.sh ├── src ├── Config.ts ├── GradeUtils.ts ├── __tests__ │ ├── areas.ts │ ├── bulkImport.test.ts │ ├── gradeUtils.ts │ ├── history.ts │ ├── import-example.json │ ├── media.e2e.ts │ ├── organizations.ts │ ├── ticks.ts │ └── user.e2e.ts ├── auth │ ├── index.ts │ ├── local-dev │ │ ├── middleware.ts │ │ └── permissions.ts │ ├── middleware.ts │ ├── permissions.ts │ ├── rules.ts │ └── util.ts ├── data │ └── countries-with-lnglat.json ├── db │ ├── AreaSchema.ts │ ├── AreaTypes.ts │ ├── BulkImportTypes.ts │ ├── ChangeEventType.ts │ ├── ChangeLogSchema.ts │ ├── ChangeLogType.ts │ ├── ClimbHistorySchema.ts │ ├── ClimbHistoryType.ts │ ├── ClimbSchema.ts │ ├── ClimbTypes.ts │ ├── MediaObjectSchema.ts │ ├── MediaObjectTypes.ts │ ├── OrganizationSchema.ts │ ├── OrganizationTypes.ts │ ├── PostSchema.ts │ ├── PostTypes.ts │ ├── TagSchema.ts │ ├── TagTypes.ts │ ├── TickSchema.ts │ ├── TickTypes.ts │ ├── UserSchema.ts │ ├── UserTypes.ts │ ├── XMediaSchema.ts │ ├── XMediaTypes.ts │ ├── edit │ │ └── streamListener.ts │ ├── export │ │ ├── Typesense │ │ │ ├── Client.ts │ │ │ ├── Typesense.ts │ │ │ ├── TypesenseSchemas.ts │ │ │ ├── Utils.ts │ │ │ └── transformers.ts │ │ ├── common │ │ │ ├── index.ts │ │ │ └── processor.ts │ │ ├── json │ │ │ ├── area.resolver.test.ts │ │ │ ├── area.resolver.ts │ │ │ ├── async-file.processor.test.ts │ │ │ ├── async-file.processor.ts │ │ │ └── index.ts │ │ └── queries │ │ │ ├── defaults.ts │ │ │ ├── get-all-areas.ts │ │ │ ├── get-all-climbs.ts │ │ │ └── index.ts │ ├── import │ │ ├── ClimbTransformer.ts │ │ ├── __tests__ │ │ │ └── climb-data.json │ │ └── usa │ │ │ ├── AreaTransformer.ts │ │ │ ├── AreaTree.ts │ │ │ ├── LinkClimbsWithCrags.ts │ │ │ ├── SeedState.ts │ │ │ ├── USADay0Seed.ts │ │ │ ├── __tests__ │ │ │ ├── Tree.test.ts │ │ │ └── Utils.test.ts │ │ │ └── us-states.ts │ ├── index.ts │ └── utils │ │ ├── Aggregate.ts │ │ ├── __tests__ │ │ └── Aggregate.test.ts │ │ ├── index.ts │ │ └── jobs │ │ ├── AddCountriesJob.ts │ │ ├── CragUpdater.ts │ │ ├── MapTiles │ │ ├── exportCmd.ts │ │ └── init.ts │ │ ├── TreeUpdaters │ │ └── updateAllAreas.ts │ │ ├── UpdateStatsJob.ts │ │ └── migration │ │ ├── CreateMediaMetaCollection.ts │ │ ├── CreateUsersCollection.ts │ │ └── SirvClient.ts ├── geo-utils.ts ├── graphql │ ├── area │ │ ├── AreaMutations.ts │ │ ├── AreaQueries.ts │ │ └── index.ts │ ├── climb │ │ ├── ClimbMutations.ts │ │ └── index.ts │ ├── common │ │ ├── DateScalar.ts │ │ ├── MuuidScalar.ts │ │ ├── index.ts │ │ ├── resolvers.ts │ │ └── typeDef.ts │ ├── history │ │ ├── 
HistoryFieldResolvers.ts │ │ ├── HistoryQueries.ts │ │ └── index.ts │ ├── media │ │ ├── MediaResolvers.ts │ │ ├── index.ts │ │ ├── mutations.ts │ │ └── queries.ts │ ├── organization │ │ ├── OrganizationMutations.ts │ │ ├── OrganizationQueries.ts │ │ └── index.ts │ ├── resolvers.ts │ ├── schema │ │ ├── Area.gql │ │ ├── AreaEdit.gql │ │ ├── Climb.gql │ │ ├── ClimbEdit.gql │ │ ├── History.gql │ │ ├── Media.gql │ │ ├── Organization.gql │ │ ├── OrganizationEdit.gql │ │ ├── README.md │ │ ├── Tag.gql │ │ ├── Tick.gql │ │ └── User.gql │ ├── tag │ │ ├── TagResolvers.ts │ │ ├── index.ts │ │ └── queries.ts │ ├── tick │ │ ├── TickImportTypeDef.ts │ │ ├── TickMutations.ts │ │ ├── TickQueries.ts │ │ ├── TickResolvers.ts │ │ └── index.ts │ └── user │ │ ├── UserMutations.ts │ │ ├── UserQueries.ts │ │ ├── UserResolvers.ts │ │ └── index.ts ├── logger.ts ├── main.ts ├── model │ ├── AreaDataSource.ts │ ├── AreaHistoryDatasource.ts │ ├── BulkImportDataSource.ts │ ├── ChangeLogDataSource.ts │ ├── ClimbDataSource.ts │ ├── ExperimentalUserDataSource.ts │ ├── MediaDataSource.ts │ ├── MutableAreaDataSource.ts │ ├── MutableClimbDataSource.ts │ ├── MutableMediaDataSource.ts │ ├── MutableOrganizationDataSource.ts │ ├── OrganizationDataSource.ts │ ├── OrganizationHistoryDatasource.ts │ ├── PostDataSource.ts │ ├── TickDataSource.ts │ ├── UserDataSource.ts │ ├── XMediaDataSource.ts │ └── __tests__ │ │ ├── AreaDataSource.test.ts │ │ ├── AreaHistoryDataSource.ts │ │ ├── AreaUtils.ts │ │ ├── BulkDataSource.test.ts │ │ ├── ChangeLogDS.ts │ │ ├── MediaDataSource.ts │ │ ├── MutableAreaDataSource.test.ts │ │ ├── MutableClimbDataSource.ts │ │ ├── MutableOrganizationDataSource.ts │ │ ├── UserDataSource.ts │ │ ├── tickValidation.ts │ │ ├── ticks.ts │ │ └── updateAreas.ts ├── server.ts ├── types.ts └── utils │ ├── helpers.ts │ ├── inMemoryDB.ts │ ├── sanitize.ts │ └── testUtils.ts ├── tsconfig.json ├── tsconfig.release.json └── yarn.lock /.dockerignore: -------------------------------------------------------------------------------- 1 | node_modules 2 | build 3 | docker-compose.yml 4 | Dockerfile 5 | jest.* 6 | README.md 7 | mongo-clean.sh 8 | *.log 9 | .* 10 | !src 11 | !package.json 12 | !tsconfig.* 13 | !refresh-db.sh 14 | !yarn.lock 15 | 16 | -------------------------------------------------------------------------------- /.env: -------------------------------------------------------------------------------- 1 | MONGO_SCHEME=mongodb 2 | MONGO_INITDB_ROOT_USERNAME=admin 3 | MONGO_INITDB_ROOT_PASSWORD=0nBelay! 
4 | MONGO_SERVICE=localhost:27017 5 | MONGO_AUTHDB=admin 6 | MONGO_DBNAME=openbeta 7 | MONGO_TLS=false 8 | MONGO_READ_PREFERENCE=primary 9 | MONGO_REPLICA_SET_NAME=rs0 10 | CONTENT_BASEDIR=./tmp 11 | DEPLOYMENT_ENV=development 12 | CDN_URL=https://storage.googleapis.com/openbeta-staging 13 | 14 | # Typesense 15 | TYPESENSE_NODE=typesense-01.openbeta.io 16 | TYPESENSE_API_KEY_RW=ask_us_on_Discord 17 | 18 | # Auth0 19 | AUTH0_DOMAIN=https://dev-fmjy7n5n.us.auth0.com 20 | AUTH0_KID=uciP2tJdJ4BKWoz73Fmln 21 | 22 | MAPTILES_WORKING_DIR=./maptiles 23 | -------------------------------------------------------------------------------- /.github/FUNDING.yml: -------------------------------------------------------------------------------- 1 | # These are supported funding model platforms 2 | 3 | open_collective: openbeta 4 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Logs 2 | logs 3 | *.log 4 | npm-debug.log* 5 | yarn-debug.log* 6 | yarn-error.log* 7 | lerna-debug.log* 8 | .DS_Store 9 | 10 | # Diagnostic reports (https://nodejs.org/api/report.html) 11 | report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json 12 | 13 | # Runtime data 14 | pids 15 | *.pid 16 | *.seed 17 | *.pid.lock 18 | *.gz 19 | *.geojson 20 | 21 | # Directory for instrumented libs generated by jscoverage/JSCover 22 | lib-cov 23 | 24 | # Coverage directory used by tools like istanbul 25 | coverage 26 | *.lcov 27 | 28 | # nyc test coverage 29 | .nyc_output 30 | 31 | # Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files) 32 | .grunt 33 | 34 | # Bower dependency directory (https://bower.io/) 35 | bower_components 36 | 37 | # node-waf configuration 38 | .lock-wscript 39 | 40 | # Compiled binary addons (https://nodejs.org/api/addons.html) 41 | build/Release 42 | 43 | # Dependency directories 44 | node_modules/ 45 | jspm_packages/ 46 | 47 | # TypeScript v1 declaration files 48 | typings/ 49 | 50 | # TypeScript cache 51 | *.tsbuildinfo 52 | 53 | # Optional npm cache directory 54 | .npm 55 | 56 | # Optional eslint cache 57 | .eslintcache 58 | 59 | # Microbundle cache 60 | .rpt2_cache/ 61 | .rts2_cache_cjs/ 62 | .rts2_cache_es/ 63 | .rts2_cache_umd/ 64 | 65 | # Optional REPL history 66 | .node_repl_history 67 | 68 | # Output of 'npm pack' 69 | *.tgz 70 | 71 | # Yarn Integrity file 72 | .yarn-integrity 73 | 74 | # dotenv environment variables file 75 | .env.test 76 | .env 77 | 78 | # parcel-bundler cache (https://parceljs.org/) 79 | .cache 80 | 81 | # Next.js build output 82 | .next 83 | 84 | # Nuxt.js build / generate output 85 | .nuxt 86 | dist 87 | 88 | # Gatsby files 89 | .cache/ 90 | # Comment in the public line in if your project uses Gatsby and *not* Next.js 91 | # https://nextjs.org/blog/next-9-1#public-directory-support 92 | # public 93 | 94 | # vuepress build output 95 | .vuepress/dist 96 | 97 | # Serverless directories 98 | .serverless/ 99 | 100 | # FuseBox cache 101 | .fusebox/ 102 | 103 | # DynamoDB Local files 104 | .dynamodb/ 105 | 106 | # TernJS port file 107 | .tern-port 108 | 109 | build 110 | tmp 111 | .env.local 112 | 113 | # asdf 114 | .tool-versions 115 | 116 | # Intellij and VSCode 117 | .idea/ 118 | *.iml 119 | .vscode/settings.json 120 | 121 | /export/ 122 | /openbeta-export/ 123 | -------------------------------------------------------------------------------- /.gitlab-ci.yml: -------------------------------------------------------------------------------- 1 | image: 
registry.gitlab.com/openbeta/openbeta-nodejs-docker:16.3 2 | 3 | variables: 4 | GIT_DEPTH: 1 5 | DOCKER_DRIVER: overlay2 6 | DOCKER_TLS_CERTDIR: "" 7 | DOCKER_HOST: tcp://docker:2375 8 | IMAGE_LATEST: $CI_REGISTRY_IMAGE:latest 9 | IMAGE_CURRENT: $CI_REGISTRY_IMAGE:$CI_COMMIT_SHORT_SHA 10 | 11 | docker-build-staging: 12 | stage: build 13 | only: # Only run for these branches 14 | - staging 15 | 16 | tags: 17 | - docker 18 | 19 | image: docker:20.10.10 20 | 21 | services: 22 | - docker:20.10.10-dind 23 | 24 | before_script: 25 | - docker login -u $CI_REGISTRY_USER -p $CI_REGISTRY_PASSWORD $CI_REGISTRY 26 | 27 | script: 28 | - docker pull $IMAGE_STAGING || true 29 | - docker build --cache-from $IMAGE_STAGING --tag $IMAGE_CURRENT --tag $IMAGE_STAGING . 30 | - docker push $IMAGE_CURRENT 31 | - docker push $IMAGE_STAGING 32 | 33 | docker-build: 34 | stage: build 35 | only: # Only run for these branches 36 | - develop 37 | - main 38 | 39 | tags: 40 | - docker 41 | 42 | image: docker:20.10.10 43 | 44 | services: 45 | - docker:20.10.10-dind 46 | 47 | before_script: 48 | - docker login -u $CI_REGISTRY_USER -p $CI_REGISTRY_PASSWORD $CI_REGISTRY 49 | 50 | script: 51 | - docker pull $IMAGE_LATEST || true 52 | - docker build --cache-from $IMAGE_LATEST --tag $IMAGE_CURRENT --tag $IMAGE_LATEST . 53 | - docker push $IMAGE_CURRENT 54 | - docker push $IMAGE_LATEST 55 | 56 | deploy: 57 | stage: deploy 58 | tags: 59 | - docker 60 | only: # Only run for these branches 61 | - develop 62 | - main 63 | needs: 64 | - job: docker-build 65 | artifacts: true 66 | image: 67 | name: bitnami/git:latest 68 | 69 | before_script: 70 | - mkdir -p ~/.bin 71 | - curl -s "https://raw.githubusercontent.com/kubernetes-sigs/kustomize/master/hack/install_kustomize.sh" | bash -s -- ~/.bin 72 | - 'which ssh-agent || ( apt-get update -y && apt-get install openssh-client -y )' 73 | - eval $(ssh-agent -s) 74 | - echo "$SSH_PRIVATE_KEY" | tr -d '\r' | ssh-add - > /dev/null 75 | - mkdir -p ~/.ssh 76 | - ssh-keyscan gitlab.com >> ~/.ssh/known_hosts 77 | - chmod 644 ~/.ssh/known_hosts 78 | 79 | script: 80 | - git clone --quiet --depth 1 --branch main git@gitlab.com:openbeta/devops/graphql-api.git 81 | - cd graphql-api 82 | - ~/.bin/kustomize edit set image $IMAGE_CURRENT 83 | - git diff 84 | - if [[ ! -n $(git status --porcelain) ]]; then echo "No new changes. Skipping deployment."; exit 0; fi 85 | - git config --global user.name $GITLAB_USER_NAME 86 | - git config --global user.email $GITLAB_USER_EMAIL 87 | - git commit -am "Update deployment. Code=${CI_COMMIT_SHORT_SHA}" 88 | - git push 89 | -------------------------------------------------------------------------------- /.husky/pre-commit: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env sh 2 | . "$(dirname -- "$0")/_/husky.sh" 3 | 4 | yarn lint 5 | yarn test 6 | -------------------------------------------------------------------------------- /.run/Template Jest.run.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | -------------------------------------------------------------------------------- /.vscode/launch.json: -------------------------------------------------------------------------------- 1 | { 2 | // Use IntelliSense to learn about possible attributes. 3 | // Hover to view descriptions of existing attributes. 
4 | // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387 5 | "version": "0.2.0", 6 | "configurations": [ 7 | { 8 | "type": "node", 9 | "request": "launch", 10 | "name": "Generate map tiles", 11 | "program": "${workspaceFolder}/src/db/utils/jobs/MapTiles/exportCmd.ts", 12 | "preLaunchTask": "tsc: build - tsconfig.json", 13 | "outFiles": [ 14 | "${workspaceFolder}/build/**/*.js" 15 | ], 16 | "skipFiles": [ 17 | "<node_internals>/**" 18 | ] 19 | }, 20 | { 21 | "type": "node", 22 | "request": "launch", 23 | "name": "Typesense push", 24 | "program": "${workspaceFolder}/src/db/export/Typesense/Typesense.ts", 25 | "preLaunchTask": "tsc: build - tsconfig.json", 26 | "outFiles": [ 27 | "${workspaceFolder}/build/**/*.js" 28 | ], 29 | "skipFiles": [ 30 | "<node_internals>/**" 31 | ] 32 | }, 33 | { 34 | "type": "node", 35 | "request": "launch", 36 | "name": "Update stats", 37 | "program": "${workspaceFolder}/src/db/utils/jobs/UpdateStatsJob.ts", 38 | "preLaunchTask": "tsc: build - tsconfig.json", 39 | "outFiles": [ 40 | "${workspaceFolder}/build/**/*.js" 41 | ], 42 | "skipFiles": [ 43 | "<node_internals>/**" 44 | ] 45 | }, 46 | { 47 | "type": "node", 48 | "request": "launch", 49 | "name": "Launch API server", 50 | "skipFiles": [ 51 | "<node_internals>/**" 52 | ], 53 | "program": "${workspaceFolder}/src/main.ts", 54 | "preLaunchTask": "tsc: build - tsconfig.json", 55 | "outFiles": [ 56 | "${workspaceFolder}/build/**/*.js" 57 | ], 58 | "console": "integratedTerminal" 59 | }, 60 | { 61 | "type": "node", 62 | "request": "launch", 63 | "name": "Launch API Server (serve-dev)", 64 | "skipFiles": [ 65 | "<node_internals>/**" 66 | ], 67 | "program": "${workspaceFolder}/src/main.ts", 68 | "preLaunchTask": "tsc: build - tsconfig.json", 69 | "outFiles": [ 70 | "${workspaceFolder}/build/**/*.js" 71 | ], 72 | "runtimeExecutable": "yarn", 73 | "runtimeArgs": [ 74 | "run", 75 | "serve-dev" 76 | ], 77 | "console": "integratedTerminal" 78 | }, 79 | { 80 | "name": "Debug Jest Tests", 81 | "type": "node", 82 | "request": "launch", 83 | "env": { 84 | "NODE_OPTIONS": "--experimental-vm-modules" 85 | }, 86 | "runtimeArgs": [ 87 | "--inspect-brk", 88 | "${workspaceRoot}/node_modules/.bin/jest", 89 | "--runInBand", 90 | "history" 91 | ], 92 | "console": "integratedTerminal", 93 | "internalConsoleOptions": "neverOpen" 94 | }, 95 | { 96 | "type": "node", 97 | "name": "vscode-jest-tests.v2", 98 | "request": "launch", 99 | "env": { 100 | "NODE_OPTIONS": "--experimental-vm-modules" 101 | }, 102 | "args": [ 103 | "${workspaceRoot}/node_modules/.bin/jest", 104 | "--runInBand", 105 | "--watchAll=false", 106 | "--testNamePattern", 107 | "${jest.testNamePattern}", 108 | "--runTestsByPath", 109 | "${jest.testFile}" 110 | ], 111 | "console": "integratedTerminal", 112 | "internalConsoleOptions": "neverOpen" 113 | } 114 | ] 115 | } -------------------------------------------------------------------------------- /.vscode/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "git.ignoreLimitWarning": true, 3 | "[typescriptreact]": { 4 | "editor.formatOnType": true, 5 | "editor.formatOnSave": true, 6 | "editor.defaultFormatter": "standard.vscode-standard" 7 | }, 8 | "[typescript]": { 9 | "editor.formatOnSave": true, 10 | "editor.defaultFormatter": "standard.vscode-standard" 11 | }, 12 | "standard.enable": true, 13 | "standard.autoFixOnSave": true, 14 | "standard.engine": "ts-standard", 15 | "standard.treatErrorsAsWarnings": true, 16 | "javascript.format.enable": false, 17 | "javascript.format.semicolons": "remove", 18 | 
"typescript.format.enable": false, 19 | "prettier.enable": false, 20 | "editor.defaultFormatter": "standard.vscode-standard" 21 | } -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM registry.gitlab.com/openbeta/openbeta-nodejs-docker:18 2 | 3 | ENV APP_DIR=/apps/openbeta-graphql 4 | 5 | WORKDIR ${APP_DIR} 6 | EXPOSE 4000 7 | 8 | RUN mkdir -p ${APP_DIR} 9 | 10 | COPY . *.env ./ 11 | 12 | 13 | RUN yarn install --no-progress && \ 14 | yarn build-release 15 | 16 | CMD node --experimental-json-modules build/main.js 17 | -------------------------------------------------------------------------------- /db-migrations/0001-area-sorting.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Issue: 221 3 | */ 4 | 5 | const rs = db.areas.updateMany({}, { $rename: { 'metadata.left_right_index': 'metadata.leftRightIndex' } }) 6 | 7 | printjson(rs) 8 | -------------------------------------------------------------------------------- /db-migrations/0002-create-history-indexes.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Issue: 287 3 | */ 4 | 5 | db.change_logs.createIndex({ createdAt: -1 }) 6 | db.change_logs.createIndex({ 'changes.fullDocument.metadata.area_id': 1, 'changes.kind': 1 }) 7 | db.change_logs.createIndex({ 'changes.kind': 1 }) -------------------------------------------------------------------------------- /db-migrations/0003-date-climbed-to-date.js: -------------------------------------------------------------------------------- 1 | /** 2 | * https://github.com/OpenBeta/openbeta-graphql/pull/301 3 | **/ 4 | 5 | const rs = db.ticks.updateMany( 6 | { 7 | dateClimbed: { $exists: true } 8 | }, 9 | [{ 10 | $set: { 11 | dateClimbed: { 12 | $dateFromString: { 13 | dateString: '$dateClimbed', 14 | // We want to ascribe an hour of day to the climb, so it shows 15 | // up on the correct day when displayed in the user's timezone. 16 | // Most climbs are in the US, MT time is a good first approximation. 
17 | timezone: 'America/Denver' 18 | } 19 | } 20 | } 21 | }] 22 | ) 23 | 24 | printjson(rs) 25 | -------------------------------------------------------------------------------- /db-migrations/0004-unique-user-climb-date-style.js: -------------------------------------------------------------------------------- 1 | /** 2 | * https://github.com/OpenBeta/open-tacos/issues/631 3 | **/ 4 | 5 | rs1 = db.ticks.createIndex({ userId: -1 }) 6 | rs2 = db.ticks.createIndex({ userId: -1, climbId: -1 }) 7 | rs3 = db.ticks.dropIndex({ climbId: 1, dateClimbed: 1, style: 1, userId: 1, source: 1 }) 8 | 9 | printjson(rs1) 10 | printjson(rs2) 11 | printjson(rs3) 12 | -------------------------------------------------------------------------------- /db-migrations/0005-area-sorting.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Issue: 375 3 | */ 4 | 5 | db.areas.dropIndexes('metadata.leftRightIndex_1') 6 | -------------------------------------------------------------------------------- /db-migrations/0006-capitalize-tick-style-and-attempt-type.js: -------------------------------------------------------------------------------- 1 | // This migration will update all tick styles and attemptTypes with incorrect capitalization 2 | const tickCapitalizationRs = db.ticks.updateMany( 3 | { 4 | $or: [ 5 | { style: 'lead' }, 6 | { style: 'follow' }, 7 | { style: 'tr' }, 8 | { style: 'top_rope' }, 9 | { style: 'solo' }, 10 | { style: 'aid' }, 11 | { style: 'boulder' }, 12 | { attemptType: 'onsight' }, 13 | { attemptType: 'redpoint' }, 14 | { attemptType: 'flash' }, 15 | { attemptType: 'pinkpoint' }, 16 | { attemptType: 'send' }, 17 | { attemptType: 'attempt' }, 18 | { attemptType: 'frenchfree' }, 19 | { attemptType: 'repeat' } 20 | ] 21 | }, 22 | [ 23 | { 24 | $set: { 25 | style: { 26 | $switch: { 27 | branches: [ 28 | { case: { $eq: ['$style', 'lead'] }, then: 'Lead' }, 29 | { case: { $eq: ['$style', 'follow'] }, then: 'Follow' }, 30 | { case: { $eq: ['$style', 'tr'] }, then: 'TR' }, 31 | { case: { $eq: ['$style', 'top_rope'] }, then: 'TR' }, 32 | { case: { $eq: ['$style', 'solo'] }, then: 'Solo' }, 33 | { case: { $eq: ['$style', 'aid'] }, then: 'Aid' }, 34 | { case: { $eq: ['$style', 'boulder'] }, then: 'Boulder' } 35 | ], 36 | default: '$style' 37 | } 38 | }, 39 | attemptType: { 40 | $switch: { 41 | branches: [ 42 | { case: { $eq: ['$attemptType', 'redpoint'] }, then: 'Redpoint' }, 43 | { case: { $eq: ['$attemptType', 'onsight'] }, then: 'Onsight' }, 44 | { case: { $eq: ['$attemptType', 'flash'] }, then: 'Flash' }, 45 | { case: { $eq: ['$attemptType', 'pinkpoint'] }, then: 'Pinkpoint' }, 46 | { case: { $eq: ['$attemptType', 'send'] }, then: 'Send' }, 47 | { case: { $eq: ['$attemptType', 'attempt'] }, then: 'Attempt' }, 48 | { case: { $eq: ['$attemptType', 'frenchfree'] }, then: 'Frenchfree' }, 49 | { case: { $eq: ['$attemptType', 'repeat'] }, then: 'Repeat' } 50 | ], 51 | default: '$attemptType' 52 | } 53 | } 54 | }, 55 | } 56 | ] 57 | ); 58 | 59 | printjson(tickCapitalizationRs); -------------------------------------------------------------------------------- /db-migrations/0007-tick-style-nullification.js: -------------------------------------------------------------------------------- 1 | // This migration will update all ticks that have values of "N/A" for style or attemptType by unsetting those fields 2 | const tickStyleNullificationRs = db.ticks.updateMany( 3 | { 4 | style: "N/A" 5 | }, 6 | { 7 | $unset: { style: "" } 8 | } 9 | ); 10 | 11 | const 
tickAttemptTypeNullificationRs = db.ticks.updateMany( 12 | { 13 | attemptType: "N/A" 14 | }, 15 | { 16 | $unset: { attemptType: "" } 17 | } 18 | ); 19 | 20 | printjson(tickStyleNullificationRs); 21 | printjson(tickAttemptTypeNullificationRs); -------------------------------------------------------------------------------- /db-migrations/0008-fix-swapped-tick-style.js: -------------------------------------------------------------------------------- 1 | // This migration will fix ticks where attemptType has a value that belongs in style instead. 2 | const attemptTypeToStyleRs = db.ticks.updateMany( 3 | { 4 | $or: [ 5 | { attemptType: 'Lead' }, 6 | { attemptType: 'TR' }, 7 | { attemptType: 'Follow' }, 8 | { attemptType: 'Solo' }, 9 | { attemptType: 'Aid' }, 10 | { attemptType: 'Boulder' } 11 | ] 12 | }, 13 | [ 14 | { 15 | $set: { 16 | style: { 17 | $switch: { 18 | branches: [ 19 | { case: { $eq: ['$attemptType', 'Lead'] }, then: 'Lead' }, 20 | { case: { $eq: ['$attemptType', 'TR'] }, then: 'TR' }, 21 | { case: { $eq: ['$attemptType', 'Follow'] }, then: 'Follow' }, 22 | { case: { $eq: ['$attemptType', 'Solo'] }, then: 'Solo' }, 23 | { case: { $eq: ['$attemptType', 'Aid'] }, then: 'Aid' }, 24 | { case: { $eq: ['$attemptType', 'Boulder'] }, then: 'Boulder' } 25 | ], 26 | default: '$style' 27 | } 28 | } 29 | } 30 | } 31 | ] 32 | ); 33 | 34 | // Now nullify the attemptType field since we've moved that value to style 35 | const nullifyAttemptTypeRs = db.ticks.updateMany( 36 | { 37 | $or: [ 38 | { attemptType: 'Lead' }, 39 | { attemptType: 'TR' }, 40 | { attemptType: 'Follow' }, 41 | { attemptType: 'Solo' }, 42 | { attemptType: 'Aid' }, 43 | { attemptType: 'Boulder' } 44 | ] 45 | }, 46 | { 47 | $unset: { attemptType: "" } 48 | } 49 | ); 50 | 51 | printjson(attemptTypeToStyleRs); 52 | printjson(nullifyAttemptTypeRs); -------------------------------------------------------------------------------- /db-migrations/0009-fix-some-bouldering-ticks.js: -------------------------------------------------------------------------------- 1 | // This migration will fix ticks where the style was set to a value that belongs in attemptType instead, specifically for boulders. 2 | // This is likely specific to one user, but it can be run for all ticks that may be affected. 
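// Ordering note: both updates below filter on the same lowercase style values
// ('send', 'attempt', 'flash'), so the style value must be copied into attemptType
// first; only after that copy is style overwritten with 'Boulder'.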
3 | 4 | // Move the value of style to attemptType (and capitalize it) 5 | const incorrectBoulderTick1Rs = db.ticks.updateMany( 6 | { 7 | $or: [ 8 | { style: 'send' }, 9 | { style: 'attempt' }, 10 | { style: 'flash' } 11 | ] 12 | }, 13 | [ 14 | { 15 | $set: { 16 | attemptType: { 17 | $switch: { 18 | branches: [ 19 | { case: { $eq: ['$style', 'send'] }, then: 'Send' }, 20 | { case: { $eq: ['$style', 'attempt'] }, then: 'Attempt' }, 21 | { case: { $eq: ['$style', 'flash'] }, then: 'Flash' } 22 | ] 23 | } 24 | } 25 | } 26 | } 27 | ] 28 | ); 29 | 30 | // Set style to 'Boulder' 31 | const incorrectBoulderTick2Rs = db.ticks.updateMany( 32 | { 33 | $or: [ 34 | { style: 'send' }, 35 | { style: 'attempt' }, 36 | { style: 'flash' } 37 | ] 38 | }, 39 | [ 40 | { 41 | $set: { style: 'Boulder' } 42 | } 43 | ] 44 | ); 45 | 46 | printjson(incorrectBoulderTick1Rs); 47 | printjson(incorrectBoulderTick2Rs); -------------------------------------------------------------------------------- /docker-compose.override.yml: -------------------------------------------------------------------------------- 1 | version: '3.8' 2 | services: 3 | mongo-express: 4 | image: mongo-express 5 | restart: always 6 | env_file: 7 | - .env 8 | ports: 9 | - 8081:8081 10 | environment: 11 | ME_CONFIG_MONGODB_URL: mongodb://${MONGO_INITDB_ROOT_USERNAME}:${MONGO_INITDB_ROOT_PASSWORD}@mongo_opentacos:27017/opentacos?authSource=admin&replicaSet=rs0 12 | depends_on: 13 | - mongo_opentacos 14 | - mongosetup 15 | volumes: 16 | opentacos_mongodb_data: 17 | -------------------------------------------------------------------------------- /docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: '3.8' 2 | services: 3 | mongo_opentacos: 4 | hostname: mongodb 5 | image: mongo:5 6 | env_file: 7 | - .env 8 | ports: 9 | - 27017:27017 10 | volumes: 11 | - opentacos_mongodb_data:/data/db 12 | - ./:/opt/keyfile/ 13 | healthcheck: 14 | test: test $$(echo "rs.initiate().ok || rs.status().ok" | mongo -u $${MONGO_INITDB_ROOT_USERNAME} -p $${MONGO_INITDB_ROOT_PASSWORD} --quiet) -eq 1 15 | interval: 10s 16 | start_period: 30s 17 | entrypoint: 18 | - bash 19 | - -c 20 | - | 21 | cp /opt/keyfile/keyfile /data/db/replica.key 22 | chmod 400 /data/db/replica.key 23 | chown 999:999 /data/db/replica.key 24 | exec docker-entrypoint.sh $$@ 25 | command: "mongod --bind_ip_all --replSet rs0 --keyFile /data/db/replica.key" 26 | mongosetup: 27 | image: mongo:5 28 | depends_on: 29 | - mongo_opentacos 30 | env_file: 31 | - .env 32 | volumes: 33 | - .:/scripts 34 | restart: "no" 35 | entrypoint: [ "bash", "/scripts/mongo_setup.sh" ] 36 | volumes: 37 | opentacos_mongodb_data: 38 | -------------------------------------------------------------------------------- /documentation/layers.md: -------------------------------------------------------------------------------- 1 | # Layers 2 | *Updated 2023-06-15* 3 | 4 | ## Overview 5 | One way to think of our backend is as three layers wrapping the raw data sitting in MongoDB: 6 | 1. GraphQL 7 | 2. Datasources 8 | 3. Models 9 | 10 | Incoming data (API requests) passes through GraphQL > Datasource > Model, and the resulting data exits in reverse order from Model > Datasource > GraphQL. 11 | 12 | When you change our data model, e.g. adding a new field to a climb object, you should expect to update each of the three layers as well. 13 | 
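To make that flow concrete, here is a small, self-contained sketch of the three layers. It is purely illustrative — the names below are made up for this example; the real code lives in `src/graphql/`, `src/model/`, and `src/db/`.

```ts
import mongoose from 'mongoose'

// Model layer (src/db/): a Mongoose schema describing how documents are stored.
const ClimbSchema = new mongoose.Schema({ name: String, grade: String })
const ClimbModel = mongoose.model('climbs', ClimbSchema)

// Datasource layer (src/model/): wraps the model and exposes commands to resolvers.
class ClimbDataSource {
  async findClimbByName (name: string): Promise<unknown> {
    return await ClimbModel.findOne({ name }).lean()
  }
}

// GraphQL layer (src/graphql/): a resolver receives the API call and delegates
// to the datasource; the result travels back out the same way.
const resolvers = {
  Query: {
    climb: async (
      _: unknown,
      args: { name: string },
      ctx: { dataSources: { climbs: ClimbDataSource } }
    ) => await ctx.dataSources.climbs.findClimbByName(args.name)
  }
}

export default resolvers
```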
14 | ## GraphQL 15 | The outermost GraphQL layer that receives API calls. Our big integration tests (see [Testing](testing.md)) call this layer. 16 | 17 | Code is in `src/graphql/`. 18 | 19 | ## Datasources 20 | The middle Mongoose datastore objects that expose commands to the GraphQL resolvers. Mongoose is our MongoDB NodeJS ORM. Our small integration tests test from this layer down. 21 | 22 | Code is in `src/model/`. 23 | 24 | ## Models 25 | The inner Mongoose models/schemas that represent how data is stored in MongoDB. 26 | 27 | Code is in `src/db/`. 28 | -------------------------------------------------------------------------------- /documentation/testing.md: -------------------------------------------------------------------------------- 1 | # Testing 2 | *Updated 2023-06-15* 3 | 4 | ## Overview 5 | There are currently two broad classes of tests in this repo: big integration tests and small ones. Both sets are integration tests because they validate long chains of functionality, as opposed to single classes or functions (unit tests). 6 | 7 | The big set is called "big" because it is truly end-to-end. It posts GraphQL queries and checks their output, which is literally what the API does in production. The small set skips the GraphQL layer (you might want to read more about layers [here](layers.md)) and instead calls datasource functions directly. 8 | 9 | ## Big Integration Tests 10 | These tests mock up a GraphQL backend and make HTTP calls to it. Since these tests are so realistic, they are immensely protective, illustrative and confidence-building. Open-tacos developers can cut and paste the GraphQL queries in these tests and use them to build the frontend. 11 | 12 | These tests are stored in `src/__tests__/`. The setup code is in `src/utils/testUtils.ts`. Note how most of the code is oriented around setting up and tearing down a GraphQL server and an in-memory MongoDB. 13 | 14 | We rely on `mongodb-memory-server` (a node package) for the in-memory MongoDB. By running it in memory, it is lightweight and easily set up during `beforeAll`. Early on, we were hampered by the fact that the standard Mongo server that `mongodb-memory-server` offers doesn't support Mongo transactions, which we use extensively. This is why we wrote small integration tests, which rely on a local instance of MongoDB. However, in 2021, the package started to offer an in-memory replica set which does support Mongo transactions. Since then, we've been able to write big integration tests that set up a replica set supporting everything we need to do. 15 | 16 | 17 | ## Small Integration Tests 18 | These essentially test datasource functions. E.g., the key line in such a test could be `await users.createOrUpdateUserProfile(updater, input)` ([Source](../src/model/__tests__/UserDataSource.ts)). This tests the `createOrUpdateUserProfile` function of the `user` datasource. Datasources sit one layer below the GraphQL layer (another plug to read [Layers](layers.md)). In `src/graphql/resolvers.ts`, you can see how the GraphQL layer calls datasource functions to resolve entities in the queries. 19 | 20 | Other than their inability to test how the GraphQL layer resolves queries, the main shortcoming of these tests is their poor portability. To use them, you need to set up MongoDB locally for the tests to read and write from. This is why the main [README](../README.md) page gets developers to spin up a Docker instance and edit `/etc/hosts` mongod mappings. 21 | 22 | In general, we should phase these out in favor of big integration tests. 
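For orientation, the overall shape of such a test is roughly the following — a self-contained sketch in which `UserModel` and `createOrUpdateUserProfile` are stand-ins, not the repo's real API:

```ts
import mongoose from 'mongoose'
import { MongoMemoryReplSet } from 'mongodb-memory-server'

// Stand-in for a datasource command such as users.createOrUpdateUserProfile().
const UserModel = mongoose.model('users', new mongoose.Schema({ username: String }))
async function createOrUpdateUserProfile (username: string): Promise<void> {
  await UserModel.updateOne({ username }, { username }, { upsert: true })
}

describe('user datasource (illustrative)', () => {
  let replSet: MongoMemoryReplSet

  beforeAll(async () => {
    // An in-memory replica set, so code paths that use transactions still work.
    replSet = await MongoMemoryReplSet.create({ replSet: { count: 1 } })
    await mongoose.connect(replSet.getUri())
  })

  afterAll(async () => {
    await mongoose.disconnect()
    await replSet.stop()
  })

  it('upserts a user profile', async () => {
    await createOrUpdateUserProfile('climber1')
    expect(await UserModel.countDocuments({ username: 'climber1' })).toBe(1)
  })
})
```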
In case you need to debug them or, god forbid, write new ones, they reside in `src/model/__tests__/`. 23 | -------------------------------------------------------------------------------- /export.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | if [ -z ${GITHUB_ACCESS_TOKEN} ] 4 | then 5 | echo "GITHUB_ACCESS_TOKEN not defined." 6 | exit 1 7 | fi 8 | 9 | echo "cloning openbeta-export repository" 10 | git clone --depth 1 --branch production https://ob-bot-user:${GITHUB_ACCESS_TOKEN}@github.com/OpenBeta/openbeta-export || exit 1 11 | cd openbeta-export 12 | git config user.name "db-export-bot" 13 | git config user.email "db-export-bot@noreply" 14 | cd .. 15 | 16 | echo "start exporting database..." 17 | yarn export:json:full --output openbeta-export 18 | 19 | echo "... finished export. Committing data..." 20 | 21 | cd openbeta-export || exit 1 22 | git add -A 23 | git commit -am "export openbeta data" 24 | git push origin production 25 | -------------------------------------------------------------------------------- /hacks/countries/transform.cjs: -------------------------------------------------------------------------------- 1 | const list = require('./tmp-list.json') 2 | const _ = require('underscore') 3 | 4 | const l = list.map(entry => ({ 5 | alpha3: entry['Alpha-3 code'], 6 | lnglat: [entry['Longitude (average)'], entry['Latitude (average)']] 7 | })) 8 | 9 | const obj = _.indexBy(l, 'alpha3') 10 | console.log(JSON.stringify(obj, null, 2)) 11 | -------------------------------------------------------------------------------- /jest.config.cjs: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | testTimeout: 2 * 60 * 1000, 3 | moduleNameMapper: { 4 | '^(\\.{1,2}/.*)\\.js$': '$1' 5 | }, 6 | extensionsToTreatAsEsm: ['.ts'], 7 | transform: { 8 | '^.+\\.(mt|t|cj|j)s$': [ 9 | 'ts-jest', 10 | { 11 | useESM: true 12 | } 13 | ] 14 | }, 15 | testEnvironment: 'node', 16 | testMatch: [ 17 | '/**/__tests__/*.ts' 18 | ] 19 | } 20 | -------------------------------------------------------------------------------- /keyfile: -------------------------------------------------------------------------------- 1 | EE5d34CFwGOHs2YvZZJ7b/ki0flbGG+zKoYKEvwho8TXIq5bQM1FXg7a0zZPFcYX 2 | kCGdNSldTqdYpjtBxKBqId7t6kFs5S3XKfC7BMC5xm7VDIGkDY+xbwg+zivKAsF5 3 | 8HW7h5oibt9e3lQQVugtA+MdXoeH7eZbWckjVKQ26/odsc0zVV9dAsjjbmpCDRBq 4 | zQvTrP38urQajy9LCEITeToQrgbKkoovygKivWXVsNn4iFqwNpch/LLYihKi09tm 5 | eq1/RRHEIkDJz7cHx/FY2pXnzd6VbOSP59JCGR57wOlEmy5ZRCUjKXMCbE+mb5VS 6 | v3T2EllZKqyBSDUkg5ZkHHgiZ4XuzFJp6SQZMlX1hDo/+If+s8TyYClACr5aXqN5 7 | 0+qNl1pTd+eipsvpGSy1seh6MBy2hgd91ft1asYqZ87TACSvJmPy43PV1hmfgDQ1 8 | W/SLYciy+W1RW2fIkkukjRTEeDWlPDxmYvXdZCImYBJeE/Wf3OPKn46pJZ+gG2cx 9 | iapfRDILIiqUQNuieE6ArvSGIbEnuLGgZH5ktCZmnlTsL32TzL8hskhAUgzgEpKn 10 | qegE7U5W5ACLm6KuxyfvvrcfOuxYAdYlJq+nzaUqkMPkkGissBSYiJv17qdVpptC 11 | piAFYx7UB+03h6n3e3DXaVNVo9dbOHAXT4BLTHdCT9b5UUUzCk6ebfGx08rmSBcY 12 | fLniVXdVNGjaUE6sAEKftdvn53cS5EzYkpxHt4DXFFktR6V+KKpuud3wf7GyerEL 13 | uB6llOjl9o2I+SUKg87tPLl7p4YRpg6Q00Naxgf+e1SkBdf4LBuFwW8yWwtTs58a 14 | //pw2+ZB3rnZIxql4i/y6KutBtZKH00t5Wpl+pl1OogoGEy/B+mNi/1trdCmaWbW 15 | +VNp17ljBIfPQlhIibgvDrY1XVCuX6um+tfobZQI1Klc6cC+Bh+2OeEKgp6Imq4H 16 | 031FyeuyBZ/06DirIDS98B1alXWT 17 | -------------------------------------------------------------------------------- /kubernetes/export-cronjob.yml: -------------------------------------------------------------------------------- 1 | apiVersion: batch/v1 2 | kind: 
CronJob 3 | metadata: 4 | name: export-db-prod 5 | spec: 6 | schedule: "@daily" 7 | concurrencyPolicy: Forbid 8 | successfulJobsHistoryLimit: 1 9 | jobTemplate: 10 | spec: 11 | backoffLimit: 0 12 | template: 13 | spec: 14 | containers: 15 | - name: openbeta-graph-api 16 | image: vnguyen/openbeta-graph-api:latest 17 | imagePullPolicy: Always 18 | command: ["yarn", "export-prod"] 19 | envFrom: 20 | - configMapRef: 21 | name: graph-api-prod 22 | resources: 23 | requests: 24 | memory: 128Mi 25 | restartPolicy: Never 26 | -------------------------------------------------------------------------------- /migrate-db.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | if [ -z $1 ] 4 | then 5 | echo "Missing migration file" 6 | exit 1 7 | fi 8 | 9 | . .env 10 | 11 | connStr="${MONGO_SCHEME}://${MONGO_INITDB_ROOT_USERNAME}:${MONGO_INITDB_ROOT_PASSWORD}@${MONGO_SERVICE}/${MONGO_DBNAME}?authSource=${MONGO_AUTHDB}&tls=${MONGO_TLS}&replicaSet=${MONGO_REPLICA_SET_NAME}" 12 | 13 | mongo "$connStr" $1 14 | -------------------------------------------------------------------------------- /mongo-clean.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | docker compose down 4 | 5 | # MongoDb container won't run initializing scripts if there's 6 | # already a data volume. 7 | docker volume rm openbeta-graphql_opentacos_mongodb_data --force 8 | 9 | docker compose up -d 10 | -------------------------------------------------------------------------------- /mongo_setup.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | echo "sleeping for 10 seconds" 3 | sleep 10 4 | 5 | echo mongo_setup.sh time now: `date +"%T" ` 6 | mongosh --username "${MONGO_INITDB_ROOT_USERNAME}" --password "${MONGO_INITDB_ROOT_PASSWORD}" --host mongodb:27017 <", 8 | "license": "AGPL-3.0-or-later", 9 | "devDependencies": { 10 | "@types/auth0": "^3.3.2", 11 | "@types/jest": "^29.4.0", 12 | "@types/node": "^18.13.0", 13 | "@types/supertest": "^2.0.12", 14 | "@types/underscore": "^1.11.4", 15 | "cross-env": "^7.0.3", 16 | "husky": "^8.0.1", 17 | "jest": "^29.7.0", 18 | "jest-extended": "^4.0.2", 19 | "mongodb-memory-server": "^10.1.2", 20 | "nock": "^13.3.0", 21 | "supertest": "^6.3.3", 22 | "ts-jest": "^29.2.5", 23 | "ts-standard": "^12.0.0", 24 | "typescript": "4.9.5", 25 | "wait-for-expect": "^3.0.2" 26 | }, 27 | "dependencies": { 28 | "@apollo/server": "^4.11.2", 29 | "@babel/runtime": "^7.17.2", 30 | "@google-cloud/storage": "^6.9.5", 31 | "@graphql-tools/schema": "^8.3.1", 32 | "@openbeta/sandbag": "^0.0.51", 33 | "@turf/area": "^6.5.0", 34 | "@turf/bbox": "^6.5.0", 35 | "@turf/bbox-polygon": "^6.5.0", 36 | "@turf/circle": "^6.5.0", 37 | "@turf/convex": "^6.5.0", 38 | "@turf/helpers": "^6.5.0", 39 | "@types/uuid": "^8.3.3", 40 | "apollo-datasource-mongodb": "^0.6.0", 41 | "auth0": "^3.4.0", 42 | "axios": "^1.3.6", 43 | "body-parser": "^1.20.2", 44 | "cors": "^2.8.5", 45 | "date-fns": "^2.30.0", 46 | "dot-object": "^2.1.4", 47 | "dotenv": "^16.4.4", 48 | "express": "^4.18.2", 49 | "glob": "^10.2.2", 50 | "graphql": "^16.9.0", 51 | "graphql-middleware": "^6.1.31", 52 | "graphql-shield": "^7.5.0", 53 | "graphql-tag": "^2.12.6", 54 | "graphql-type-json": "^0.3.2", 55 | "i18n-iso-countries": "^7.5.0", 56 | "immer": "^9.0.15", 57 | "jsonwebtoken": "^8.5.1", 58 | "jwks-rsa": "^2.1.4", 59 | "mongoose": "^7.8.3", 60 | "mongoose-lean-virtuals": "^1.0.0", 61 | "node-fetch": "2", 62 | 
"p-limit": "^4.0.0", 63 | "pino": "^9.5.0", 64 | "pino-logflare": "^0.4.2", 65 | "sanitize-html": "^2.7.2", 66 | "sharp": "^0.32.0", 67 | "typesense": "^1.8.2", 68 | "underscore": "^1.13.2", 69 | "uuid": "^8.3.2", 70 | "uuid-mongodb": "^2.6.0", 71 | "yup": "^1.1.1" 72 | }, 73 | "scripts": { 74 | "lint": "yarn ts-standard", 75 | "fix": "yarn ts-standard --fix", 76 | "test": "cross-env NODE_OPTIONS=\"--experimental-vm-modules\" jest --runInBand", 77 | "build": "tsc -p tsconfig.json", 78 | "build-release": "tsc -p tsconfig.release.json", 79 | "clean": "tsc -b --clean && rm -rf build/*", 80 | "serve": "yarn build && node --experimental-json-modules build/main.js", 81 | "serve-dev": "echo \"🚨 LOCAL_DEV_BYPASS_AUTH enabled 🚨\" && LOCAL_DEV_BYPASS_AUTH=true yarn serve", 82 | "seed-db": "./seed-db.sh", 83 | "add-countries": "yarn build && node build/db/utils/jobs/AddCountriesJob.js", 84 | "update-stats": "yarn build && node build/db/utils/jobs/UpdateStatsJob.js", 85 | "update-climb-search": "tsc ; node build/db/export/Typesense/Typesense.js --climbs", 86 | "update-area-search": "tsc ; node build/db/export/Typesense/Typesense.js --areas", 87 | "update-search": "tsc ; node build/db/export/Typesense/Typesense.js --areas --climbs", 88 | "export:json:full": "yarn build && node build/db/export/json/index.js", 89 | "export-prod": "./export.sh", 90 | "prepare": "husky install", 91 | "import-users": "yarn build && node build/db/utils/jobs/migration/CreateUsersCollection.js", 92 | "maptiles:export-db": "node build/db/utils/jobs/MapTiles/exportCmd.js", 93 | "maptiles:upload": "./scripts/upload-tiles.sh", 94 | "maptiles:full": "yarn build && yarn maptiles:export-db && yarn maptiles:upload" 95 | }, 96 | "standard": { 97 | "plugins": [ 98 | "html" 99 | ], 100 | "parser": "babel-eslint" 101 | }, 102 | "ts-standard": { 103 | "ignore": [ 104 | "build", 105 | "hacks", 106 | "**/*.test.ts", 107 | "db-migrations" 108 | ] 109 | }, 110 | "type": "module", 111 | "engines": { 112 | "node": ">=18.20.0 <19" 113 | } 114 | } 115 | -------------------------------------------------------------------------------- /refresh-db.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # Download seed files and start seeding 3 | # Syntax: ./refresh-db.sh [full] 4 | # Specify 'full' to download the entire data set. 5 | 6 | rm -rf tmp 7 | mkdir tmp 8 | cd tmp 9 | 10 | GITHUB="https://raw.githubusercontent.com/OpenBeta/climbing-data/main" 11 | wget --content-disposition \ 12 | ${GITHUB}/openbeta-routes-westcoast.zip 13 | 14 | if [[ "$1" == "full" ]]; 15 | then 16 | 17 | wget --content-disposition \ 18 | ${GITHUB}/openbeta-routes-mountains2.zip 19 | 20 | wget --content-disposition \ 21 | ${GITHUB}/openbeta-routes-mountains1.zip 22 | 23 | wget --content-disposition \ 24 | ${GITHUB}/openbeta-routes-ca.zip 25 | 26 | wget --content-disposition \ 27 | ${GITHUB}/openbeta-routes-northeast.zip 28 | 29 | wget --content-disposition \ 30 | ${GITHUB}/openbeta-routes-southeast.zip 31 | 32 | wget --content-disposition \ 33 | ${GITHUB}/openbeta-routes-midwest.zip 34 | fi 35 | 36 | unzip '*.zip' 37 | 38 | cd .. 39 | export CONTENT_BASEDIR=./tmp 40 | 41 | echo "NODE_OPTIONS=${NODE_OPTIONS}" 42 | yarn seed-usa 43 | 44 | yarn add-countries 45 | -------------------------------------------------------------------------------- /scripts/upload-tiles.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # 1. Generate pmtiles tiles from geojson exports 3 | # 2. 
Upload to S3-compatible storage 4 | # See also https://github.com/felt/tippecanoe 5 | 6 | SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd ) 7 | 8 | set -a 9 | . ${SCRIPT_DIR}/../.env 2> /dev/null 10 | . ${SCRIPT_DIR}/../.env.local 2> /dev/null 11 | set +a 12 | 13 | # Define Cloudflare-R2 backend for rclone 14 | S3_DEST=':s3,provider=Cloudflare,no_check_bucket=true,env_auth=true,acl=private:maptiles' 15 | 16 | echo "------ Generating crags tiles file ------" 17 | tippecanoe --force -o ${MAPTILES_WORKING_DIR}/crags.pmtiles \ 18 | -l crags -n "Crags" \ 19 | --coalesce-densest-as-needed \ 20 | -z11 ${MAPTILES_WORKING_DIR}/crags.*.geojson 21 | 22 | echo "**Uploading to remote storage" 23 | rclone copy ${MAPTILES_WORKING_DIR}/crags.pmtiles ${S3_DEST} 24 | 25 | echo "------ Generating crag group tiles file ------" 26 | tippecanoe --force -o ${MAPTILES_WORKING_DIR}/areas.pmtiles \ 27 | -l areas -n "Areas" \ 28 | --drop-densest-as-needed \ 29 | -z8 ${MAPTILES_WORKING_DIR}/areas.geojson 30 | 31 | echo "**Uploading to remote storage" 32 | rclone copy ${MAPTILES_WORKING_DIR}/areas.pmtiles ${S3_DEST} 33 | 34 | exit $? 35 | -------------------------------------------------------------------------------- /seed-db.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # Rebuild your local database with a copy of the OpenBeta staging database. 3 | # 4 | # To keep running time short, the script only downloads the remote 5 | # database dump file once. Specify 'download' argument to force download. 6 | # 7 | # Syntax: 8 | # ./seed-db.sh [download] 9 | # 10 | 11 | FILE_NAME="openbeta-stg-db.tar.gz" 12 | REMOTE_FILE="https://storage.googleapis.com/openbeta-dev-dbs/$FILE_NAME" 13 | 14 | if [[ ! -f ${FILE_NAME} || ${1} == "download" ]]; then 15 | echo "Downloading db file(s)..." 16 | wget --content-disposition $REMOTE_FILE 17 | fi 18 | 19 | rm -rf ./db-dumps/staging/openbeta 20 | 21 | tar xzf $FILE_NAME 22 | 23 | . 
.env 24 | 25 | connStr="${MONGO_SCHEME}://${MONGO_INITDB_ROOT_USERNAME}:${MONGO_INITDB_ROOT_PASSWORD}@${MONGO_SERVICE}/${MONGO_DBNAME}?authSource=${MONGO_AUTHDB}&tls=${MONGO_TLS}&replicaSet=${MONGO_REPLICA_SET_NAME}" 26 | 27 | mongorestore --uri "$connStr" -d=${MONGO_DBNAME} --gzip --drop ./db-dumps/staging/openbeta 28 | 29 | -------------------------------------------------------------------------------- /src/Config.ts: -------------------------------------------------------------------------------- 1 | import { config } from 'dotenv' 2 | 3 | config({ path: '.env.local' }) 4 | config() // initialize dotenv 5 | 6 | const checkAndPrintWarning = (name: string, value?: string): string => { 7 | if (value == null) { 8 | throw new Error(`## Error: '${name}' not defined ##`) 9 | } 10 | return value 11 | } 12 | 13 | type DeploymentType = 'production' | 'staging' 14 | interface ConfigType { 15 | DEPLOYMENT_ENV: DeploymentType 16 | TYPESENSE_NODE: string 17 | TYPESENSE_API_KEY_RW: string 18 | } 19 | // Todo: add other props in .env 20 | const Config: ConfigType = { 21 | DEPLOYMENT_ENV: checkAndPrintWarning('DEPLOYMENT_ENV', process.env.DEPLOYMENT_ENV) as DeploymentType, 22 | TYPESENSE_NODE: checkAndPrintWarning('TYPESENSE_NODE', process.env.TYPESENSE_NODE), 23 | TYPESENSE_API_KEY_RW: checkAndPrintWarning('TYPESENSE_API_KEY_RW', process.env.TYPESENSE_API_KEY_RW) 24 | } 25 | 26 | export default Config 27 | -------------------------------------------------------------------------------- /src/__tests__/import-example.json: -------------------------------------------------------------------------------- 1 | { 2 | "areas": [ 3 | { 4 | "areaName": "Utah", 5 | "countryCode": "us", 6 | "children": [ 7 | { 8 | "areaName": "Southeast Utah", 9 | "children": [ 10 | { 11 | "areaName": "Indian Creek", 12 | "description": "Indian Creek is a crack climbing mecca in the southeastern region of Utah, USA. Located within the [Bears Ears National Monument](https://en.wikipedia.org/wiki/Bears_Ears_National_Monument).", 13 | "lng": -109.5724044642857, 14 | "lat": 38.069429035714286, 15 | "children": [ 16 | { 17 | "areaName": "Supercrack Buttress", 18 | "gradeContext": "US", 19 | "description": "", 20 | "lng": -109.54552, 21 | "lat": 38.03635, 22 | "bbox": [ 23 | -109.54609091005857, 24 | 38.03590033981814, 25 | -109.54494908994141, 26 | 38.03679966018186 27 | ], 28 | "climbs": [ 29 | { 30 | "name": "The Key Flake", 31 | "grade": "5.10", 32 | "fa": "unknown", 33 | "disciplines": { 34 | "trad": true 35 | }, 36 | "safety": "UNSPECIFIED", 37 | "lng": -109.54552, 38 | "lat": 38.03635, 39 | "leftRightIndex": 1, 40 | "description": "Cool off-width that requires off-width and face skills.", 41 | "protection": "Anchors hidden up top. Need 80m to make it all the way down.", 42 | "location": "Opposite keyhole flake. Obvious right leaning offwidth that starts atop 20 ft boulder." 43 | }, 44 | { 45 | "name": "Incredible Hand Crack", 46 | "grade": "5.10", 47 | "fa": "Rich Perch, John Bragg, Doug Snively, and Anne Tarver, 1978", 48 | "disciplines": { 49 | "trad": true 50 | }, 51 | "leftRightIndex": 2, 52 | "description": "Route starts at the top of the trail from the parking lot to Supercrack Buttress.", 53 | "protection": "Cams from 2-2.5\". Heavy on 2.5\" (#2 Camalot)", 54 | "pitches": [ 55 | { 56 | "pitchNumber": 1, 57 | "grade": "5.10", 58 | "length": 100, 59 | "boltsCount": 0, 60 | "description": "A classic hand crack that widens slightly towards the top. Requires a range of cam sizes. Sustained and excellent quality." 
61 | }, 62 | { 63 | "pitchNumber": 2, 64 | "grade": "5.9", 65 | "length": 30, 66 | "description": "Easier climbing with good protection. Features a mix of crack sizes. Shorter than the first pitch but equally enjoyable." 67 | } 68 | ] 69 | } 70 | ] 71 | } 72 | ] 73 | } 74 | ] 75 | } 76 | ] 77 | } 78 | ] 79 | } -------------------------------------------------------------------------------- /src/auth/index.ts: -------------------------------------------------------------------------------- 1 | import permissions from './permissions.js' 2 | import { createContext } from './middleware.js' 3 | export { permissions, createContext } 4 | -------------------------------------------------------------------------------- /src/auth/local-dev/middleware.ts: -------------------------------------------------------------------------------- 1 | import muuid, { MUUID } from 'uuid-mongodb' 2 | import { AuthUserType } from '../../types.js' 3 | import { logger } from '../../logger.js' 4 | 5 | const testUUID: MUUID = muuid.v4() 6 | 7 | /* 8 | * This file is a mod of src/auth/middleware.ts and is used when starting the server via `yarn serve-dev` 9 | * It bypasses the authentication for local development 10 | */ 11 | export const localDevBypassAuthContext = async ({ req }): Promise<any> => { 12 | const user: AuthUserType = { 13 | roles: ['user_admin', 'org_admin', 'editor'], 14 | uuid: testUUID, 15 | isBuilder: false 16 | } 17 | logger.info(`The user.roles for this session is: ${user.roles.toString()}`) 18 | return { user } 19 | } 20 | -------------------------------------------------------------------------------- /src/auth/local-dev/permissions.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * This file is a mod of src/auth/permissions.ts and is used when starting the server via `yarn serve-dev` 3 | * It bypasses the authorization for local development and allows all queries and mutations 4 | */ 5 | import { shield, allow } from 'graphql-shield' 6 | 7 | const localDevBypassAuthPermissions = shield({ 8 | Query: { 9 | '*': allow 10 | }, 11 | Mutation: { 12 | '*': allow 13 | } 14 | }, { 15 | allowExternalErrors: true, 16 | fallbackRule: allow 17 | }) 18 | 19 | export default localDevBypassAuthPermissions 20 | -------------------------------------------------------------------------------- /src/auth/middleware.ts: -------------------------------------------------------------------------------- 1 | import muid from 'uuid-mongodb' 2 | import { Request } from 'express' 3 | import { AuthUserType } from '../types.js' 4 | import { verifyJWT } from './util.js' 5 | import { logger } from '../logger.js' 6 | 7 | export interface CustomContext { 8 | user: AuthUserType 9 | token?: string 10 | } 11 | 12 | const EMPTY_USER: AuthUserType = { 13 | isBuilder: false, 14 | roles: [], 15 | uuid: undefined 16 | } 17 | 18 | /** 19 | * Create a middleware context for Apollo server 20 | */ 21 | export const createContext = async ({ req }: { req: Request }): Promise<CustomContext> => { 22 | try { 23 | return await validateTokenAndExtractUser(req) 24 | } catch (e) { 25 | logger.error(`Can't validate token and extract user ${e.toString() as string}`) 26 | throw new Error('An unexpected error has occurred. Please notify us at support@openbeta.io.') 27 | } 28 | } 29 | 30 | async function validateTokenAndExtractUser (req: Request): Promise<CustomContext> { 31 | const { headers } = req 32 | // eslint-disable-next-line @typescript-eslint/dot-notation 33 | const authHeader = String(headers?.['authorization'] ?? 
'') 34 | if (authHeader.startsWith('Bearer ')) { 35 | const token = authHeader.substring(7, authHeader.length).trim() 36 | try { 37 | const payload = await verifyJWT(token) 38 | return { 39 | user: { 40 | isBuilder: payload?.scope?.includes('builder:default') ?? false, 41 | roles: payload?.['https://tacos.openbeta.io/roles'] ?? [], 42 | uuid: payload?.['https://tacos.openbeta.io/uuid'] != null ? muid.from(payload['https://tacos.openbeta.io/uuid']) : undefined 43 | }, 44 | token 45 | } 46 | } catch (e) { 47 | logger.error(`Can't verify JWT token ${e.toString() as string}`) 48 | throw new Error("Unauthorized. Can't verify JWT token") 49 | } 50 | } 51 | 52 | return { 53 | user: EMPTY_USER 54 | } 55 | } 56 | -------------------------------------------------------------------------------- /src/auth/permissions.ts: -------------------------------------------------------------------------------- 1 | import { allow, and, or, shield } from 'graphql-shield' 2 | import { isEditor, isMediaOwner, isOwner, isUserAdmin, isValidEmail } from './rules.js' 3 | 4 | const permissions = shield({ 5 | Query: { 6 | '*': allow 7 | }, 8 | Mutation: { 9 | addOrganization: isUserAdmin, 10 | setDestinationFlag: isEditor, 11 | removeArea: isEditor, 12 | addArea: isEditor, 13 | updateArea: isEditor, 14 | updateClimbs: isEditor, 15 | deleteClimbs: isEditor, 16 | bulkImportAreas: isEditor, 17 | updateUserProfile: and(isOwner, isValidEmail), 18 | addEntityTag: or(isMediaOwner, isUserAdmin), 19 | removeEntityTag: or(isMediaOwner, isUserAdmin), 20 | addMediaObjects: or(isOwner), 21 | deleteMediaObject: or(isMediaOwner, isUserAdmin) 22 | } 23 | }, 24 | { 25 | allowExternalErrors: true, 26 | fallbackRule: allow 27 | }) 28 | 29 | export default permissions 30 | -------------------------------------------------------------------------------- /src/auth/rules.ts: -------------------------------------------------------------------------------- 1 | import { inputRule, rule } from 'graphql-shield' 2 | 3 | import MediaDataSource from '../model/MutableMediaDataSource.js' 4 | import { MediaObjectGQLInput } from '../db/MediaObjectTypes.js' 5 | 6 | export const isEditor = rule()(async (parent, args, ctx, info) => { 7 | return _hasUserUuid(ctx) && ctx.user.roles.includes('editor') 8 | }) 9 | 10 | export const isUserAdmin = rule()(async (parent, args, ctx, info) => { 11 | return _hasUserUuid(ctx) && ctx.user.roles.includes('user_admin') 12 | }) 13 | 14 | /** 15 | * True when JWT payload 'uuid' is the same as `input.userUuid`. 16 | * 17 | * If input is an array, check every element of input. 18 | */ 19 | export const isOwner = rule()(async (parent, args, ctx, info) => { 20 | if (!_hasUserUuid(ctx)) return false 21 | if (Array.isArray(args.input)) { 22 | return (args.input as MediaObjectGQLInput[]).every( 23 | ({ userUuid }) => ctx.user.uuid.toUUID().toString() === userUuid) 24 | } 25 | return ctx.user.uuid.toUUID().toString() === args.input.userUuid 26 | }) 27 | 28 | /** 29 | * True when the media identified by input.mediaId has the same owner uuid as the JWT payload uuid. 
30 | */ 31 | export const isMediaOwner = rule()(async (parent, args, ctx, info): Promise<boolean> => { 32 | const hasUserUuid = _hasUserUuid(ctx) 33 | const isMediaOwner = await MediaDataSource.getInstance().isMediaOwner(ctx.user.uuid, args.input?.mediaId) 34 | return hasUserUuid && isMediaOwner 35 | }) 36 | 37 | export const isBuilderServiceAccount = rule()(async (parent, args, ctx: Context, info) => { 38 | return _hasUserUuid(ctx) && ctx.user.isBuilder 39 | }) 40 | 41 | export const isValidEmail = inputRule()( 42 | (yup) => 43 | yup.object({ 44 | email: yup.string().email('Please provide a valid email') 45 | }), 46 | { abortEarly: false } 47 | ) 48 | 49 | interface Context { 50 | user: { 51 | uuid?: string 52 | isBuilder: boolean 53 | } 54 | } 55 | 56 | const _hasUserUuid = (ctx: Context): boolean => ctx.user.uuid != null 57 | -------------------------------------------------------------------------------- /src/auth/util.ts: -------------------------------------------------------------------------------- 1 | import jwksClient from 'jwks-rsa' 2 | import jwt from 'jsonwebtoken' 3 | 4 | import { checkVar } from '../db/index.js' 5 | 6 | const auth0Domain = checkVar('AUTH0_DOMAIN') 7 | const auth0Kid = checkVar('AUTH0_KID') 8 | 9 | const client = jwksClient({ 10 | jwksUri: `${auth0Domain}/.well-known/jwks.json` 11 | }) 12 | 13 | export const verifyJWT = async (token): Promise<any> => { 14 | const key = await client.getSigningKey(auth0Kid) 15 | return jwt.verify(token, key.getPublicKey()) 16 | } 17 | -------------------------------------------------------------------------------- /src/db/BulkImportTypes.ts: -------------------------------------------------------------------------------- 1 | import { AreaType } from './AreaTypes.js' 2 | import { ClimbType, DisciplineType, SafetyType } from './ClimbTypes.js' 3 | import { MUUID } from 'uuid-mongodb' 4 | import { ExperimentalAuthorType } from './UserTypes.js' 5 | 6 | export interface BulkImportResultType { 7 | addedAreas: AreaType[] 8 | updatedAreas: AreaType[] 9 | addedOrUpdatedClimbs: ClimbType[] 10 | } 11 | 12 | export interface BulkImportInputType { 13 | areas: BulkImportAreaInputType[] 14 | } 15 | 16 | export interface BulkImportAreaInputType { 17 | uuid?: MUUID 18 | areaName?: string 19 | description?: string 20 | countryCode?: string 21 | gradeContext?: string 22 | leftRightIndex?: number 23 | lng?: number 24 | lat?: number 25 | bbox?: [number, number, number, number] 26 | children?: BulkImportAreaInputType[] 27 | climbs?: BulkImportClimbInputType[] 28 | } 29 | 30 | export interface BulkImportClimbInputType { 31 | uuid?: MUUID 32 | name?: string 33 | grade: string 34 | disciplines: DisciplineType 35 | safety?: SafetyType 36 | lng?: number 37 | lat?: number 38 | leftRightIndex?: number 39 | description?: string 40 | location?: string 41 | protection?: string 42 | fa?: string 43 | length?: number 44 | boltsCount?: number 45 | experimentalAuthor?: ExperimentalAuthorType 46 | pitches?: BulkImportPitchesInputType[] 47 | } 48 | 49 | export interface BulkImportPitchesInputType { 50 | id?: MUUID 51 | pitchNumber: number 52 | grade: string 53 | disciplines?: DisciplineType 54 | description?: string 55 | length?: number 56 | boltsCount?: number 57 | } 58 | -------------------------------------------------------------------------------- /src/db/ChangeEventType.ts: -------------------------------------------------------------------------------- 1 | import { ResumeToken } from 'mongodb' 2 | 3 | import { AreaType } from './AreaTypes' 4 | import { ClimbType } from 
'./ClimbTypes'
5 | 
6 | export default interface ChangeEventType<FDocumentType> {
7 |   _id: ResumeToken
8 |   dbOp: string
9 |   fullDocument: FDocumentType
10 | }
11 | export type AreaChangeType = ChangeEventType<AreaType>
12 | export type ClimbChangeType = ChangeEventType<ClimbType>
13 | export type SupportedChangeTypes = AreaChangeType | ClimbChangeType
14 | 
15 | export type TrackableTypes = (AreaType & WithDiscriminator) | (ClimbType & WithDiscriminator)
16 | export interface WithDiscriminator {
17 |   kind: string
18 | }
--------------------------------------------------------------------------------
/src/db/ChangeLogSchema.ts:
--------------------------------------------------------------------------------
1 | import mongoose from 'mongoose'
2 | 
3 | import { ChangeLogType, SupportedCollectionTypes } from './ChangeLogType.js'
4 | import { OperationType } from './AreaTypes.js'
5 | 
6 | const { Schema, connection } = mongoose
7 | 
8 | const ChangeLogSchema = new Schema<ChangeLogType<SupportedCollectionTypes>>({
9 |   editedBy: {
10 |     type: 'object',
11 |     value: { type: 'Buffer' },
12 |     required: true,
13 |     unique: false,
14 |     index: true
15 |   },
16 |   operation: {
17 |     type: Schema.Types.Mixed,
18 |     enum: Object.values(OperationType),
19 |     required: true
20 |   },
21 |   changes: [{ type: Schema.Types.Mixed }]
22 | }, { timestamps: { createdAt: true, updatedAt: false } })
23 | 
24 | ChangeLogSchema.index({ createdAt: -1 })
25 | ChangeLogSchema.index({ 'changes.fullDocument.metadata.area_id': 1, 'changes.kind': 1 })
26 | ChangeLogSchema.index({ 'changes.kind': 1 })
27 | 
28 | export const getChangeLogModel = (): mongoose.Model<ChangeLogType<SupportedCollectionTypes>> => {
29 |   return connection.model('change_logs', ChangeLogSchema)
30 | }
--------------------------------------------------------------------------------
/src/db/ChangeLogType.ts:
--------------------------------------------------------------------------------
1 | import mongoose from 'mongoose'
2 | import { MUUID } from 'uuid-mongodb'
3 | 
4 | import { OperationType as AreaOpType, AreaType } from './AreaTypes.js'
5 | import { ClimbEditOperationType, ClimbType } from './ClimbTypes.js'
6 | import { OperationType as OrganizationOpType, OrganizationType } from './OrganizationTypes.js'
7 | 
8 | export type DBOperation = 'insert' | 'update' | 'delete'
9 | export enum DocumentKind {
10 |   areas = 'areas',
11 |   climbs = 'climbs',
12 |   organizations = 'organizations'
13 | }
14 | 
15 | export interface ChangeLogType<T = SupportedCollectionTypes> {
16 |   _id: mongoose.Types.ObjectId
17 |   editedBy: MUUID
18 |   operation: OpType
19 |   changes: Array<BaseChangeRecordType<T>>
20 | }
21 | 
22 | // DIY since ResumeToken is defined as unknown in mongo TS
23 | export interface ResumeToken {
24 |   _data: string
25 | }
26 | 
27 | export interface UpdateDescription {
28 |   updatedFields?: string[]
29 |   removedFields?: string[]
30 |   truncatedArrays?: any[]
31 | }
32 | export interface BaseChangeRecordType<FullDocumentType = SupportedCollectionTypes> {
33 |   _id: ResumeToken
34 |   dbOp: DBOperation
35 |   fullDocument: FullDocumentType
36 |   updateDescription: UpdateDescription
37 |   kind: DocumentKind
38 | }
39 | 
40 | export type OpType = AreaOpType | ClimbEditOperationType | OrganizationOpType
41 | 
42 | export interface ChangeRecordMetadataType {
43 |   /** The UUID of the user to whom this change of the document is attributed */
44 |   user: MUUID
45 |   operation: OpType
46 |   /**
47 |    * We identify history entries in the audit trail by assigning each entry an ObjectId.
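   * Illustrative sketch only (variable names assumed, not from this codebase):
   * successive edits chain together via prevHistoryId, so the audit trail can be
   * replayed in order.
   * @example
   * // const a = { user, operation, historyId: new mongoose.Types.ObjectId(), seq: 0 }
   * // const b = { user, operation, historyId: new mongoose.Types.ObjectId(), prevHistoryId: a.historyId, seq: 1 }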
48 |    **/
49 |   historyId: mongoose.Types.ObjectId
50 |   prevHistoryId?: mongoose.Types.ObjectId
51 |   seq: number
52 | }
53 | 
54 | export interface WithDiscriminator {
55 |   kind: DocumentKind
56 | }
57 | 
58 | export type AreaChangeLogType = ChangeLogType<AreaType>
59 | export type AreaChangeRecordType = BaseChangeRecordType<AreaType>
60 | 
61 | export type ClimbChangeLogType = ChangeLogType<ClimbType>
62 | export type OrganizationChangeLogType = ChangeLogType<OrganizationType>
63 | 
64 | export type SupportedCollectionTypes =
65 |   | AreaType & WithDiscriminator
66 |   | ClimbType & WithDiscriminator
67 |   | OrganizationType & WithDiscriminator
68 | 
69 | export interface GetHistoryInputFilterType {
70 |   uuidList: string[]
71 |   userUuid: string
72 |   fromDate: Date
73 |   toDate: Date
74 | }
75 | 
76 | export interface GetAreaHistoryInputFilterType {
77 |   areaId: string
78 | }
79 | 
80 | export interface GetOrganizationHistoryInputFilterType {
81 |   orgId: MUUID
82 | }
--------------------------------------------------------------------------------
/src/db/ClimbHistorySchema.ts:
--------------------------------------------------------------------------------
1 | import mongoose from 'mongoose'
2 | import muuid from 'uuid-mongodb'
3 | import { ClimbSchema } from './ClimbSchema.js'
4 | import { AreaSchema } from './AreaSchema.js'
5 | import ClimbHistoryType, { AreaHistoryType } from './ClimbHistoryType.js'
6 | import ChangeEventType from './ChangeEventType.js'
7 | import { ClimbType } from './ClimbTypes'
8 | import { AreaType } from './AreaTypes'
9 | 
10 | const { Schema } = mongoose
11 | 
12 | const schemaOptions = {
13 |   timestamps: { createdAt: true },
14 |   _id: false
15 | }
16 | 
17 | const ClimbChangeEventSchema = new mongoose.Schema<ChangeEventType<ClimbType>>({
18 |   _id: {
19 |     _data: Object
20 |   },
21 |   dbOp: String,
22 |   fullDocument: ClimbSchema
23 | }, schemaOptions)
24 | 
25 | ClimbChangeEventSchema.index({ _id: 1 }, { unique: true })
26 | 
27 | const AreaChangeEventSchema = new mongoose.Schema<ChangeEventType<AreaType>>({
28 |   _id: {
29 |     _data: Object
30 |   },
31 |   dbOp: String,
32 |   fullDocument: AreaSchema
33 | }, schemaOptions)
34 | 
35 | AreaChangeEventSchema.index({ _id: 1 }, { unique: true })
36 | 
37 | export const ClimbHistorySchema = new Schema<ClimbHistoryType>({
38 |   uid: {
39 |     type: 'object',
40 |     value: { type: 'Buffer' },
41 |     default: () => muuid.v4()
42 |   },
43 |   actionType: {
44 |     type: String
45 |   },
46 |   change: ClimbChangeEventSchema
47 | }, {
48 |   _id: true,
49 |   writeConcern: {
50 |     w: 'majority',
51 |     j: true,
52 |     wtimeout: 5000
53 |   }
54 | })
55 | 
56 | export const AreaHistorySchema = new Schema<AreaHistoryType>({
57 |   uid: {
58 |     type: 'object',
59 |     value: { type: 'Buffer' },
60 |     default: () => muuid.v4()
61 |   },
62 |   actionType: {
63 |     type: String
64 |   },
65 |   change: AreaChangeEventSchema
66 | }, {
67 |   _id: true,
68 |   writeConcern: {
69 |     w: 'majority',
70 |     j: true,
71 |     wtimeout: 5000
72 |   }
73 | })
74 | 
75 | export const getClimbHistoryModel = (): mongoose.Model<ClimbHistoryType> => {
76 |   return mongoose.model('climb_history', ClimbHistorySchema)
77 | }
78 | 
79 | export const getAreaHistoryModel = (): mongoose.Model<AreaHistoryType> => {
80 |   return mongoose.model('area_history', AreaHistorySchema)
81 | }
--------------------------------------------------------------------------------
/src/db/ClimbHistoryType.ts:
--------------------------------------------------------------------------------
1 | import { Document } from 'mongodb'
2 | import { MUUID } from 'uuid-mongodb'
3 | import ChangeEventType from './ChangeEventType'
4 | import { ClimbType } from './ClimbTypes'
5 | import { AreaType } from './AreaTypes'
6 | 
7 | export
default interface ClimbHistoryType extends Document {
8 |   uid: MUUID
9 |   actionType: ActionType
10 |   change: ChangeEventType<ClimbType>
11 | }
12 | 
13 | export interface AreaHistoryType extends Document {
14 |   uid: MUUID
15 |   actionType: ActionType
16 |   change: ChangeEventType<AreaType>
17 | }
18 | 
19 | export type ActionType = 'update' | 'add' | 'delete'
--------------------------------------------------------------------------------
/src/db/MediaObjectSchema.ts:
--------------------------------------------------------------------------------
1 | import mongoose from 'mongoose'
2 | import { MediaObject, EntityTag } from './MediaObjectTypes.js'
3 | import { PointSchema } from './ClimbSchema.js'
4 | import { MUUID } from 'uuid-mongodb'
5 | 
6 | const { Schema } = mongoose
7 | 
8 | const UUID_TYPE = {
9 |   type: 'object', value: { type: 'Buffer' }
10 | }
11 | 
12 | const muuidTransform = (v: MUUID): string => {
13 |   return v.toUUID().toString()
14 | }
15 | 
16 | const EntitySchema = new Schema<EntityTag>({
17 |   targetId: { ...UUID_TYPE, index: true, transform: muuidTransform },
18 |   climbName: { type: Schema.Types.String },
19 |   areaName: { type: Schema.Types.String, required: true },
20 |   type: { type: Schema.Types.Number, required: true },
21 |   ancestors: { type: Schema.Types.String, required: true, index: true },
22 |   lnglat: {
23 |     type: PointSchema,
24 |     index: '2dsphere',
25 |     required: false
26 |   },
27 |   topoData: { type: Schema.Types.Mixed }
28 | }, { _id: true, toObject: { versionKey: false } })
29 | 
30 | const schema = new Schema<MediaObject>({
31 |   userUuid: { ...UUID_TYPE, index: true, transform: (v: any) => v.toUUID().toString() },
32 |   mediaUrl: { type: Schema.Types.String, unique: true, index: true },
33 |   width: { type: Schema.Types.Number, required: true },
34 |   height: { type: Schema.Types.Number, required: true },
35 |   size: { type: Schema.Types.Number, required: true },
36 |   format: { type: Schema.Types.String, required: true },
37 |   entityTags: [EntitySchema]
38 | }, { _id: true, timestamps: true, toJSON: { versionKey: false }, toObject: { versionKey: false } })
39 | 
40 | /**
41 |  * Additional indices
42 |  */
43 | schema.index({
44 |   /**
45 |    * For filtering media objects with/without tags
46 |    */
47 |   entityTags: 1,
48 |   /**
49 |    * For sorting media objects by insertion order
50 |    */
51 |   createdAt: -1 // descending, most recent first
52 | })
53 | 
54 | /**
55 |  * For checking media ownership
56 |  */
57 | schema.index({
58 |   _id: 1,
59 |   userUuid: 1
60 | })
61 | 
62 | /**
63 |  * For removeEntityTag update query
64 |  */
65 | schema.index({
66 |   _id: 1,
67 |   'entityTags._id': 1
68 | })
69 | 
70 | /**
71 |  * For addEntityTag() update query to check whether an existing entity already exists.
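 *
 * Hypothetical query shape this compound index is meant to serve (a sketch,
 * not code from this repo):
 * @example
 * // await getMediaObjectModel().exists({ _id: mediaId, 'entityTags.targetId': entityUuid })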
72 |  */
73 | schema.index({
74 |   _id: 1,
75 |   'entityTags.targetId': 1
76 | })
77 | 
78 | /**
79 |  * Get media object model
80 |  * @returns MediaObjectType
81 |  */
82 | export const getMediaObjectModel = (): mongoose.Model<MediaObject> => {
83 |   return mongoose.model('media_objects', schema)
84 | }
--------------------------------------------------------------------------------
/src/db/MediaObjectTypes.ts:
--------------------------------------------------------------------------------
1 | import mongoose from 'mongoose'
2 | import { MUUID } from 'uuid-mongodb'
3 | import { Point } from '@turf/helpers'
4 | 
5 | export type ImageFormatType = 'jpeg' | 'png' | 'webp' | 'avif'
6 | 
7 | export interface MediaObject {
8 |   _id: mongoose.Types.ObjectId
9 |   userUuid: MUUID
10 |   mediaUrl: string
11 |   width: number
12 |   height: number
13 |   format: ImageFormatType
14 |   createdAt: Date
15 |   size: number
16 |   entityTags?: EntityTag[]
17 | }
18 | 
19 | export interface EntityTag {
20 |   _id: mongoose.Types.ObjectId
21 |   targetId: MUUID
22 |   type: number
23 |   ancestors: string
24 |   climbName?: string
25 |   areaName: string
26 |   lnglat?: Point
27 |   topoData?: object
28 | }
29 | 
30 | export interface MediaByUsers {
31 |   username: string
32 |   userUuid: MUUID
33 |   mediaWithTags: MediaObject[]
34 | }
35 | export interface MediaForFeedInput {
36 |   uuidStr?: string
37 |   maxUsers?: number
38 |   maxFiles?: number
39 |   includesNoEntityTags?: boolean
40 | }
41 | 
42 | export interface TagByUser {
43 |   username?: string
44 |   userUuid: MUUID
45 |   total: number
46 | }
47 | 
48 | export interface AllTimeTagStats {
49 |   totalMediaWithTags: number
50 |   byUsers: TagByUser[]
51 | }
52 | export interface TagsLeaderboardType {
53 |   allTime: AllTimeTagStats
54 | }
55 | 
56 | /**
57 |  * For creating a new Media object doc
58 |  */
59 | export type NewMediaObjectDoc = Omit<MediaObject, '_id' | 'createdAt'>
60 | 
61 | /**
62 |  * GQL input type for getting paginated media for an "Entity", which is either a user, an area, or a climb.
63 |  * The userUuid is omitted from the Area and Climb versions of this type, which are defined below
64 |  * as AreaMediaQueryInput and ClimbMediaQueryInput.
65 |  * @param maxFiles - the maximum number of media files to return
66 |  * @param first - the number of media files to return
67 |  * @param after - the cursor to start from
68 |  */
69 | export interface EntityMediaGQLQueryInput {
70 |   maxFiles?: number
71 |   first?: number
72 |   after?: string
73 | }
74 | 
75 | export type UserMediaQueryInput = EntityMediaGQLQueryInput & {
76 |   userUuid: MUUID
77 | }
78 | 
79 | export type AreaMediaQueryInput = EntityMediaGQLQueryInput & {
80 |   areaUuid: MUUID
81 | }
82 | 
83 | export type ClimbMediaQueryInput = EntityMediaGQLQueryInput & {
84 |   climbUuid: MUUID
85 | }
86 | 
87 | /**
88 |  * GQL user input type for remove tag mutation
89 |  */
90 | export interface EntityTagDeleteGQLInput {
91 |   mediaId: string
92 |   tagId: string
93 | }
94 | 
95 | /**
96 |  * Formal input type for remove tag function
97 |  */
98 | export interface EntityTagDeleteInput {
99 |   mediaId: mongoose.Types.ObjectId
100 |   tagId: mongoose.Types.ObjectId
101 | }
102 | 
103 | /**
104 |  * GQL user input type for add media mutation
105 |  */
106 | export type MediaObjectGQLInput = Pick<MediaObject, 'mediaUrl' | 'width' | 'height' | 'format' | 'size'> & {
107 |   userUuid: string
108 |   entityTag?: Omit<AddEntityTagGQLInput, 'mediaId'>
109 | }
110 | 
111 | /**
112 |  * GQL user input for addEntityTag mutation
113 |  */
114 | export interface AddEntityTagGQLInput {
115 |   mediaId: string
116 |   entityId: string
117 |   entityType: number
118 |   topoData?: object
119 | }
120 | 
121 | /**
122 |  * Formal input type for addEntityTag function
123 |  */
124 | export type AddTagEntityInput = Pick<AddEntityTagGQLInput, 'entityType' | 'topoData'> & {
125 |   mediaId: mongoose.Types.ObjectId
126 |   entityUuid: MUUID
127 | }
128 | 
129 | export interface UserMedia {
130 |   userUuid: string
131 |   mediaConnection: {
132 |     edges: MediaEdge[]
133 |     pageInfo: {
134 |       hasNextPage: boolean
135 |       totalItems: number
136 |       endCursor: string | null
137 |     }
138 |   }
139 | }
140 | 
141 | export type AreaMedia = Omit<UserMedia, 'userUuid'> & {
142 |   areaUuid: string
143 | }
144 | 
145 | export type ClimbMedia = Omit<UserMedia, 'userUuid'> & {
146 |   climbUuid: string
147 | }
148 | 
149 | interface MediaEdge {
150 |   node: MediaObject
151 |   cursor: string
152 | }
153 | 
154 | export interface DeleteMediaGQLInput {
155 |   mediaId: string
156 | }
--------------------------------------------------------------------------------
/src/db/OrganizationSchema.ts:
--------------------------------------------------------------------------------
1 | import mongoose from 'mongoose'
2 | import muuid from 'uuid-mongodb'
3 | 
4 | import { OrganizationType, OrgType, IOrganizationContent, OperationType } from './OrganizationTypes.js'
5 | import { ChangeRecordMetadataType } from './ChangeLogType.js'
6 | 
7 | const { Schema, connection } = mongoose
8 | 
9 | const ChangeRecordMetadata = new Schema<ChangeRecordMetadataType>({
10 |   user: {
11 |     type: 'object',
12 |     value: { type: 'Buffer' },
13 |     required: true
14 |   },
15 |   historyId: { type: Schema.Types.ObjectId, ref: 'change_logs' },
16 |   operation: {
17 |     type: Schema.Types.Mixed,
18 |     enum: Object.values(OperationType),
19 |     required: true
20 |   },
21 |   seq: { type: Number, required: true, default: 0 }
22 | }, { _id: false, timestamps: false })
23 | 
24 | const ContentSchema = new Schema<IOrganizationContent>({
25 |   website: { type: Schema.Types.String },
26 |   email: { type: Schema.Types.String },
27 |   donationLink: { type: Schema.Types.String },
28 |   instagramLink: { type: Schema.Types.String },
29 |   facebookLink: { type: Schema.Types.String },
30 |   hardwareReportLink: { type: Schema.Types.String },
31 |   description: { type: Schema.Types.String }
32 | }, { _id: false })
33 | 
34 | export const OrganizationSchema = new Schema<OrganizationType>({
35 |   orgId: {
36 |     type: 'object',
37 |     value: { type: 'Buffer' },
38 |     default: () => muuid.v4(),
39 |     required: true,
40 |     unique: true,
41 |     index: true
42 |   },
43 |   displayName: { type: String, required: true, index: true },
44 |   orgType: {
45 |     type: Schema.Types.Mixed,
46 |     enum: Object.values(OrgType),
47 |     required: true
48 |   },
49 |   associatedAreaIds: [{ type: 'Buffer' }],
50 |   excludedAreaIds: [{ type: 'Buffer' }],
51 |   content: ContentSchema,
52 |   _change: ChangeRecordMetadata,
53 |   _deleting: { type: Date },
54 |   updatedBy: {
55 |     type: 'object',
56 |     value: { type: 'Buffer' }
57 |   },
58 |   createdBy: {
59 |     type: 'object',
60 |     value: { type: 'Buffer' }
61 |   }
62 | }, { timestamps: true })
63 | 
64 | OrganizationSchema.index({ _deleting: 1 }, { expireAfterSeconds: 0 })
65 | 
66 | export const createOrganizationModel = (name: string = 'organizations'): mongoose.Model<OrganizationType> => {
67 |   return connection.model(name, OrganizationSchema)
68 | }
69 | 
70 | export const getOrganizationModel = (name: string = 'organizations'): mongoose.Model<OrganizationType> =>
71 |   connection.model(name, OrganizationSchema)
--------------------------------------------------------------------------------
/src/db/PostSchema.ts:
--------------------------------------------------------------------------------
1 | import mongoose from 'mongoose'
2 | import mongooseLeanVirtuals from 'mongoose-lean-virtuals'
3 | import muuid from 'uuid-mongodb'
4 | import { PostType } from './PostTypes.js'
5 | import { XMediaSchema } from './XMediaSchema.js'
6 | 
7 | const { Schema } = mongoose
8 | 
9 | const PostSchema = new Schema<PostType>({
10 |   userId: {
11 |     type: 'object',
12 |     value: { type: 'Buffer' },
13 |     default: () => muuid.v4(),
14 |     required: true,
15 |     unique: false,
16 |     index: true
17 |   },
18 |   xMedia: {
19 |     type: [XMediaSchema],
20 |     required: true
21 |   },
22 |   description: { type: String }
23 | }, {
24 |   _id: true,
25 |   strictQuery: 'throw',
26 |   toObject: {
27 |     virtuals: true
28 |   },
29 |   toJSON: { virtuals: true }
30 | })
31 | 
32 | PostSchema.plugin(mongooseLeanVirtuals)
33 | 
34 | export const getPostModel = (name: string = 'post'): mongoose.Model<PostType> => {
35 |   return mongoose.model(name, PostSchema)
36 | }
--------------------------------------------------------------------------------
/src/db/PostTypes.ts:
--------------------------------------------------------------------------------
1 | import mongoose from 'mongoose'
2 | import { MUUID } from 'uuid-mongodb'
3 | import { XMediaType } from './XMediaTypes'
4 | export interface PostType {
5 |   userId: MUUID
6 |   xMedia: XMediaType[]
7 |   description?: string
8 | }
9 | 
10 | export interface AddPostInputType {
11 |   photoUrls: string[]
12 |   userId: string
13 |   description?: string
14 |   mediaType: number
15 | }
16 | 
17 | export interface RemovePostInputType {
18 |   postId: mongoose.Types.ObjectId
19 | }
20 | 
21 | export interface GetPostsInputType {
22 |   postIds: mongoose.Types.ObjectId[]
23 | }
--------------------------------------------------------------------------------
/src/db/TagSchema.ts:
--------------------------------------------------------------------------------
1 | import mongoose from 'mongoose'
2 | import mongooseLeanVirtuals from 'mongoose-lean-virtuals'
3 | import muuid from 'uuid-mongodb'
4 | 
5 | import { TagType, RefModelType } from './TagTypes.js'
6 | 
7 | const { Schema } = mongoose
8 | 
9 | const TagSchema = new Schema<TagType>({
10 |   mediaUrl: {
11 |     type: Schema.Types.String,
12 |     required: true
13 |   },
14 |   mediaUuid: {
15 |     type: 'object',
16 |     value: { type: 'Buffer' },
17 |     default: () => muuid.v4(),
18 |     required: true,
19 |     unique: false,
20 |     index: true
21 |   },
22 |   destinationId: {
23 |     type: Schema.Types.Mixed,
24 |     value: { type: 'Buffer' },
25 |     required: true,
26 |     refPath: 'onModel'
27 |   },
28 |   destinationType: {
29 |     type: Number,
30 |     required: true
31 |   },
32 |   onModel: {
33 |     type: String,
34 |     required: true,
35 |     enum: Object.values(RefModelType)
36 |   }
37 | }, {
38 |   _id: true,
39 |   strictQuery: 'throw',
40 |   toObject: {
41 |     virtuals: true
42 |   },
43 |   toJSON: { virtuals: true }
44 | })
45 | 
46 | TagSchema.virtual('climb', {
47 |   ref: 'climbs',
48 |   localField: 'destinationId',
49 |   foreignField: '_id',
50 |   justOne: true
51 | })
52 | 
53 | TagSchema.virtual('area', {
54 |   ref: 'areas',
55 |   localField: 'destinationId',
56 |   foreignField: 'metadata.area_id',
57 |   justOne: true
58 | })
59 | 
60 | TagSchema.plugin(mongooseLeanVirtuals)
61 | TagSchema.index({ mediaUuid: 1, destinationId: 1 }, { unique: true })
62 | 
63 | export const getTagModel = (name: string = 'tags'): mongoose.Model<TagType> => {
64 |   return mongoose.model(name, TagSchema)
65 | }
--------------------------------------------------------------------------------
/src/db/TagTypes.ts:
--------------------------------------------------------------------------------
1 | import mongoose from 'mongoose'
2 | import { MUUID } from 'uuid-mongodb'
3 | 
4 | export interface TagType {
5 |   _id?: mongoose.Types.ObjectId
6 |   mediaUrl: string
7 |   mediaUuid: MUUID
8 |   destinationId: MUUID
9 |   destinationType: number
10 |   onModel: RefModelType
11 | }
12 | 
13 | export enum RefModelType {
14 |   climbs = 'climbs',
15 |   areas = 'areas'
16 | }
17 | 
18 | export interface RemoveTagInputType {
19 |   tagId: mongoose.Types.ObjectId
20 | }
21 | 
22 | export interface GetTagsInputType {
23 |   tagIds: mongoose.Types.ObjectId[]
24 | }
--------------------------------------------------------------------------------
/src/db/TickSchema.ts:
--------------------------------------------------------------------------------
1 | import mongoose from 'mongoose'
2 | import { TickSource, TickType } from './TickTypes'
3 | 
4 | const { Schema } = mongoose
5 | 
6 | /**
7 |  * Tick Schema
8 |  *
9 |  * The tick schema defines how ticks are stored and serialized in the mongo database.
10 |  * See the TickTypes.ts file for the typescript interface that defines types as they
11 |  * are used within the application. Getting documents from this schema should kick out
12 |  * TickType objects.
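 *
 * Minimal insertion sketch (sample values assumed; see TickTypes.ts for the full shape):
 * @example
 * // await getTickModel().create({
 * //   name: 'Midnight Lightning',
 * //   climbId: '<climb uuid>',
 * //   userId: '<user uuid>',
 * //   style: 'Boulder',
 * //   attemptType: 'Send',
 * //   source: 'OB'
 * // })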
13 |  */
14 | export const TickSchema = new Schema<TickType>({
15 |   name: { type: Schema.Types.String, required: true, index: true },
16 |   notes: { type: Schema.Types.String, required: false },
17 |   climbId: { type: Schema.Types.String, required: true, index: true },
18 |   userId: { type: Schema.Types.String, required: true, index: true },
19 |   style: { type: Schema.Types.String, enum: ['Lead', 'Solo', 'TR', 'Follow', 'Aid', 'Boulder'], required: false },
20 |   attemptType: { type: Schema.Types.String, enum: ['Onsight', 'Flash', 'Pinkpoint', 'Frenchfree', 'Redpoint', 'Send', 'Attempt', 'Repeat'], required: false, index: true },
21 |   dateClimbed: { type: Schema.Types.Date },
22 |   grade: { type: Schema.Types.String, required: false, index: true },
23 |   // Bear in mind that these enum types must be kept in sync with the TickSource enum
24 |   source: { type: Schema.Types.String, enum: ['MP', 'OB'] as TickSource[], required: true, index: true }
25 | })
26 | 
27 | TickSchema.index({ userId: 1 }) // for ticksByUser()
28 | TickSchema.index({ userId: 1, climbId: 1 }) // for ticksByUserIdAndClimb()
29 | 
30 | export const getTickModel = (name: string = 'ticks'): mongoose.Model<TickType> => {
31 |   return mongoose.model(name, TickSchema)
32 | }
--------------------------------------------------------------------------------
/src/db/UserSchema.ts:
--------------------------------------------------------------------------------
1 | import mongoose from 'mongoose'
2 | import muuid from 'uuid-mongodb'
3 | 
4 | import { ExperimentalUserType, User, UsernameInfo } from './UserTypes.js'
5 | 
6 | const { Schema } = mongoose
7 | 
8 | export const ExperimentalUserSchema = new Schema<ExperimentalUserType>({
9 |   _id: {
10 |     type: 'object',
11 |     value: { type: 'Buffer' },
12 |     default: () => muuid.v4()
13 |   },
14 |   displayName: { type: Schema.Types.String, required: true, index: true },
15 |   url: { type: Schema.Types.String, required: true, index: true }
16 | }, {
17 |   _id: false,
18 |   timestamps: true
19 | })
20 | 
21 | /**
22 |  * Temporary model used to capture user profile during bulk import of Canada data.
23 |  * Use the standard User model instead.
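 *
 * Hypothetical lookup sketch (query values assumed, for illustration only):
 * @example
 * // await getExperimentalUserModel().findOne({ displayName: 'Imported MP User' }).lean()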
24 |  */
25 | export const getExperimentalUserModel = (): mongoose.Model<ExperimentalUserType> => {
26 |   return mongoose.model('exp_users', ExperimentalUserSchema)
27 | }
28 | 
29 | const UsernameSchema = new Schema<UsernameInfo>({
30 |   username: { type: Schema.Types.String, required: true },
31 |   canonicalName: { type: Schema.Types.String, required: true }
32 | }, {
33 |   _id: false,
34 |   timestamps: {
35 |     updatedAt: true,
36 |     createdAt: false
37 |   }
38 | })
39 | 
40 | export const UserSchema = new Schema<User>({
41 |   _id: {
42 |     type: 'object',
43 |     value: { type: 'Buffer' }
44 |   },
45 |   email: { type: Schema.Types.String },
46 |   emailVerified: { type: Schema.Types.Boolean },
47 |   displayName: { type: Schema.Types.String },
48 |   bio: { type: Schema.Types.String },
49 |   website: { type: Schema.Types.String },
50 |   avatar: { type: Schema.Types.String },
51 |   usernameInfo: { type: UsernameSchema, required: false },
52 |   createdBy: {
53 |     type: 'object',
54 |     value: { type: 'Buffer' }
55 |   },
56 |   updatedBy: {
57 |     type: 'object',
58 |     value: { type: 'Buffer' }
59 |   }
60 | }, {
61 |   _id: false,
62 |   timestamps: true
63 | })
64 | 
65 | /**
66 |  * For sorting by most recent
67 |  */
68 | UserSchema.index({ createdAt: -1 })
69 | UserSchema.index({ 'usernameInfo.canonicalName': 1 }, { sparse: true, unique: true })
70 | UserSchema.index({ 'usernameInfo.username': 1 }, { sparse: true, unique: true })
71 | 
72 | export const getUserModel = (): mongoose.Model<User> => {
73 |   return mongoose.model('users', UserSchema)
74 | }
--------------------------------------------------------------------------------
/src/db/UserTypes.ts:
--------------------------------------------------------------------------------
1 | import { MUUID } from 'uuid-mongodb'
2 | import { UserMedia } from './MediaObjectTypes.js'
3 | export interface ExperimentalUserType {
4 |   _id: MUUID
5 |   displayName: string
6 |   nickname: string
7 |   url: string
8 |   createdAt: Date
9 |   updatedAt: Date
10 | }
11 | 
12 | export interface ExperimentalAuthorType {
13 |   displayName: string
14 |   url: string
15 | }
16 | 
17 | export interface UsernameInfo {
18 |   username: string
19 |   canonicalName: string
20 |   updatedAt: Date
21 | }
22 | export interface User {
23 |   _id: MUUID
24 |   email?: string
25 |   emailVerified?: boolean
26 |   displayName?: string
27 |   usernameInfo?: UsernameInfo
28 |   website?: string
29 |   bio?: string
30 |   avatar?: string
31 |   createdAt: Date
32 |   updatedAt: Date
33 |   createdBy: MUUID
34 |   updatedBy?: MUUID
35 | }
36 | 
37 | export interface UpdateProfileGQLInput {
38 |   username?: string
39 |   userUuid: string
40 |   displayName?: string
41 |   bio?: string
42 |   website?: string
43 |   email?: string
44 |   avatar?: string
45 | }
46 | 
47 | export interface UsernameGQLInput {
48 |   username: string
49 | }
50 | 
51 | export interface UserIdGQLInput {
52 |   userUuid: string
53 | }
54 | 
55 | export interface GetUsernameReturn {
56 |   _id: MUUID
57 |   username: string
58 |   updatedAt: Date
59 | }
60 | 
61 | export type UserPublicProfile = Pick<User, '_id' | 'displayName' | 'bio' | 'website' | 'avatar'> & {
62 |   username: string
63 | }
64 | 
65 | export interface UserPublicPage {
66 |   profile: UserPublicProfile
67 |   media: UserMedia
68 | }
--------------------------------------------------------------------------------
/src/db/XMediaSchema.ts:
--------------------------------------------------------------------------------
1 | import mongoose from 'mongoose'
2 | import mongooseLeanVirtuals from 'mongoose-lean-virtuals'
3 | import muuid from 'uuid-mongodb'
4 | 
5 | import { XMediaType } from './XMediaTypes.js'
6 | 
7 | const { Schema } = mongoose
8 | 
9 | export
const XMediaSchema = new Schema<XMediaType>({
10 |   userId: {
11 |     type: 'object',
12 |     value: { type: 'Buffer' },
13 |     default: () => muuid.v4(),
14 |     required: true,
15 |     unique: false,
16 |     index: true
17 |   },
18 |   mediaType: {
19 |     type: Schema.Types.Number,
20 |     required: true
21 |   },
22 |   mediaUrl: {
23 |     type: Schema.Types.String,
24 |     required: true
25 |   },
26 |   tagIds: {
27 |     type: [Schema.Types.ObjectId],
28 |     required: true
29 |   }
30 | }, {
31 |   _id: true,
32 |   strictQuery: 'throw',
33 |   toObject: {
34 |     virtuals: true
35 |   },
36 |   toJSON: { virtuals: true }
37 | })
38 | 
39 | XMediaSchema.plugin(mongooseLeanVirtuals)
40 | 
41 | /**
42 |  * @deprecated Superseded by MediaObjects
43 |  * @param name
44 |  * @returns
45 |  */
46 | export const getXMediaModel = (name: string = 'xmedia'): mongoose.Model<XMediaType> => {
47 |   return mongoose.model(name, XMediaSchema)
48 | }
--------------------------------------------------------------------------------
/src/db/XMediaTypes.ts:
--------------------------------------------------------------------------------
1 | import mongoose from 'mongoose'
2 | import { MUUID } from 'uuid-mongodb'
3 | 
4 | export interface XMediaType {
5 |   _id?: mongoose.Types.ObjectId
6 |   userId: MUUID
7 |   mediaType: number // 0: photo
8 |   mediaUrl: string
9 |   tagIds?: mongoose.Types.ObjectId[]
10 | }
11 | 
12 | export interface RemoveXMediaInputType {
13 |   xMediaId: mongoose.Types.ObjectId
14 | }
15 | 
16 | export interface GetXMediaInputType {
17 |   xMediaIds: mongoose.Types.ObjectId[]
18 | }
--------------------------------------------------------------------------------
/src/db/export/Typesense/Client.ts:
--------------------------------------------------------------------------------
1 | import Typesense, { Client } from 'typesense'
2 | 
3 | import { areaSchema, climbSchema } from './TypesenseSchemas.js'
4 | import { mongoAreaToTypeSense, mongoClimbToTypeSense } from './transformers.js'
5 | import { logger } from '../../../logger.js'
6 | import { AreaType } from '../../AreaTypes.js'
7 | import { DBOperation } from '../../ChangeLogType.js'
8 | import Config from '../../../Config.js'
9 | import { ClimbExtType, ClimbType } from '../../ClimbTypes.js'
10 | import MutableAreaDataSource from '../../../model/MutableAreaDataSource.js'
11 | 
12 | /**
13 |  * Return a Typesense client.
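 * Node host and read-write API key are taken from Config. A minimal calling
 * sketch (collection name borrowed from TypesenseSchemas.ts; illustration only):
 * @example
 * // const client = typesense()
 * // await client?.collections('climbs').retrieve()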
14 |  * See https://typesense.org/docs/0.23.1/api/
15 |  * @returns Typesense Client object
16 |  */
17 | export default function typesense (): Client | undefined {
18 |   const client = new Typesense.Client({
19 |     nodes: [
20 |       {
21 |         host: Config.TYPESENSE_NODE,
22 |         port: 443,
23 |         protocol: 'https'
24 |       }
25 |     ],
26 |     apiKey: Config.TYPESENSE_API_KEY_RW,
27 |     numRetries: 3, // A total of 4 tries (1 original try + 3 retries)
28 |     connectionTimeoutSeconds: 120, // Set a longer timeout for large imports
29 |     logLevel: 'info'
30 |   })
31 |   return client
32 | }
33 | 
34 | /**
35 |  * Update/remove a record in Area index
36 |  * @param area
37 |  * @param op
38 |  */
39 | export const updateAreaIndex = async (area: AreaType | null, op: DBOperation): Promise<void> => {
40 |   if (area == null) return
41 |   try {
42 |     if (Config.DEPLOYMENT_ENV !== 'production') {
43 |       return
44 |     }
45 |     switch (op) {
46 |       case 'insert':
47 |       case 'update':
48 |         await typesense()?.collections(areaSchema.name).documents().upsert(mongoAreaToTypeSense(area))
49 |         break
50 |       case 'delete':
51 |         await typesense()?.collections(areaSchema.name).documents(area.metadata.area_id.toUUID().toString()).delete()
52 |         break
53 |     }
54 |   } catch (e) {
55 |     logger.error({ exception: e.toString() }, 'Can\'t update Typesense Area index: ' + area.area_name)
56 |   }
57 | }
58 | 
59 | /**
60 |  * Update/remove a record in Climb index
61 |  * @param climb
62 |  * @param op
63 |  */
64 | export const updateClimbIndex = async (climb: ClimbType | null, op: DBOperation): Promise<void> => {
65 |   if (climb == null) return
66 |   try {
67 |     if (Config.DEPLOYMENT_ENV !== 'production') {
68 |       return
69 |     }
70 | 
71 |     // Look up additional attrs required by Climb index in Typesense.
72 |     const { pathTokens, ancestors } = await MutableAreaDataSource.getInstance().findOneAreaByUUID(climb.metadata.areaRef)
73 | 
74 |     const climbExt: ClimbExtType = {
75 |       ...climb,
76 |       pathTokens,
77 |       ancestors
78 |     }
79 | 
80 |     switch (op) {
81 |       case 'insert':
82 |       case 'update':
83 |         await typesense()?.collections(climbSchema.name).documents().upsert(mongoClimbToTypeSense(climbExt))
84 |         break
85 |       case 'delete':
86 |         await typesense()?.collections(climbSchema.name).documents(climb._id.toUUID().toString()).delete()
87 |         break
88 |     }
89 |   } catch (e) {
90 |     logger.error({ exception: e.toString() }, 'Can\'t update Typesense Climb index: ' + climb.name)
91 |   }
92 | }
--------------------------------------------------------------------------------
/src/db/export/Typesense/Typesense.ts:
--------------------------------------------------------------------------------
1 | import { Client } from 'typesense'
2 | 
3 | import typesenseClient from './Client.js'
4 | import { connectDB, gracefulExit } from '../../index.js'
5 | import { ClimbExtType } from '../../ClimbTypes.js'
6 | import { logger } from '../../../logger.js'
7 | import { areaSchema, AreaTypeSenseItem, climbSchema, ClimbTypeSenseItem } from './TypesenseSchemas.js'
8 | import { CollectionCreateSchema } from 'typesense/lib/Typesense/Collections.js'
9 | import { AreaType } from '../../AreaTypes.js'
10 | import { mongoAreaToTypeSense, mongoClimbToTypeSense } from './transformers.js'
11 | import { processMongoCollection } from '../common/index.js'
12 | import { getAllAreas, getAllClimbs } from '../queries/index.js'
13 | 
14 | /**
15 |  * For a given collection that might exist in typesense, drop it (if it exists)
16 |  * and then create it again with the set schema.
17 |  * This keeps the schema up to date, and pre-empts duplicates.
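 *
 * Roughly equivalent to this drop-and-recreate sequence (a sketch of the calls
 * made in the function body below, using the area schema as an example):
 * @example
 * // await client.collections('areas').delete()   // the 404 on a first run is swallowed
 * // await client.collections().create(areaSchema)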
18 |  */
19 | async function checkCollection (
20 |   client: Client,
21 |   schema: CollectionCreateSchema
22 | ): Promise<void> {
23 |   try {
24 |     // Delete if the collection already exists from a previous run
25 |     await client.collections(schema.name).delete()
26 |     logger.info(`dropped ${schema.name} collection from typesense`)
27 |   } catch (error) {
28 |     logger.error(error)
29 |   }
30 | 
31 |   // Create a collection matching the specified schema
32 |   try {
33 |     await client.collections().create(schema)
34 |     logger.info(`created ${schema.name} typesense collection`)
35 |   } catch (error) {
36 |     logger.error(error)
37 |     await gracefulExit()
38 |   }
39 | }
40 | 
41 | async function uploadChunk (client: Client, schema: CollectionCreateSchema, chunk: Object[]): Promise<void> {
42 |   // Chunk entries may not exceed chunkSize
43 |   if (chunk.length === 0) return
44 | 
45 |   try {
46 |     logger.info(`pushing ${chunk.length} documents to typesense`)
47 |     // This is safe enough. If anyone's gonna pass a non-object type then
48 |     // they haven't been paying attention
49 |     await client.collections(schema.name).documents().import(chunk, { action: 'upsert' })
50 |   } catch (e) {
51 |     logger.error(e)
52 |   }
53 | }
54 | 
55 | async function updateClimbTypesense (client: Client): Promise<void> {
56 |   await processMongoCollection({
57 |     preProcess: async () => await checkCollection(client, climbSchema),
58 |     converter: mongoClimbToTypeSense,
59 |     dataGenerator: getAllClimbs,
60 |     processChunk: async (chunk) => await uploadChunk(client, climbSchema, chunk)
61 |   })
62 | }
63 | 
64 | async function updateAreaTypesense (client: Client): Promise<void> {
65 |   await processMongoCollection({
66 |     preProcess: async () => await checkCollection(client, areaSchema),
67 |     converter: mongoAreaToTypeSense,
68 |     dataGenerator: getAllAreas,
69 |     processChunk: async (chunk) => await uploadChunk(client, areaSchema, chunk)
70 |   })
71 | }
72 | 
73 | async function onDBConnected (): Promise<void> {
74 |   const node = process.env.TYPESENSE_NODE ?? ''
75 |   const apiKey = process.env.TYPESENSE_API_KEY_RW ?? ''
76 | 
77 |   if (node === '' || apiKey === '') {
78 |     logger.error('Missing env keys')
79 |     await gracefulExit(1)
80 |   }
81 | 
82 |   const typesense = typesenseClient()
83 |   if (typesense == null) {
84 |     process.exit(1)
85 |   }
86 | 
87 |   logger.info('Start pushing data to TypeSense')
88 | 
89 |   if (process.argv.includes('--climbs')) {
90 |     // Update climb data in typesense
91 |     await updateClimbTypesense(typesense)
92 |     logger.info('Climbs pushed to typesense')
93 |   }
94 | 
95 |   if (process.argv.includes('--areas')) {
96 |     // Update area data in typesense
97 |     await updateAreaTypesense(typesense)
98 |     logger.info('areas pushed to typesense')
99 |   }
100 | 
101 |   await gracefulExit()
102 | }
103 | 
104 | void connectDB(onDBConnected)
--------------------------------------------------------------------------------
/src/db/export/Typesense/TypesenseSchemas.ts:
--------------------------------------------------------------------------------
1 | import { CollectionCreateSchema } from 'typesense/lib/Typesense/Collections'
2 | 
3 | export interface ClimbTypeSenseItem {
4 |   climbUUID: string
5 |   climbName: string
6 |   climbDesc: string
7 |   fa: string
8 |   areaNames: string[]
9 |   disciplines: string[]
10 |   grade?: string // Todo: switch to grade context
11 |   safety: string
12 |   cragLatLng?: [number, number]
13 | }
14 | 
15 | /**
16 |  * Typesense schema for climbs, includes most data that someone might search with,
17 |  * as well as some metadata to help trim the set based on context
18 |  */
19 | export const climbSchema: CollectionCreateSchema = {
20 |   name: 'climbs',
21 |   fields: [
22 |     {
23 |       name: 'climbName',
24 |       type: 'string' as const,
25 |       facet: false
26 |     },
27 |     {
28 |       name: 'climbDesc',
29 |       type: 'string' as const,
30 |       facet: false
31 |     },
32 |     {
33 |       name: 'fa',
34 |       type: 'string' as const,
35 |       facet: false
36 |     },
37 |     {
38 |       name: 'disciplines',
39 |       type: 'string[]' as const,
40 |       facet: true
41 |     },
42 |     {
43 |       name: 'areaNames',
44 |       type: 'string[]' as const,
45 |       facet: false
46 |     },
47 |     {
48 |       name: 'climbUUID',
49 |       type: 'string' as const,
50 |       index: false,
51 |       optional: true
52 |     },
53 |     {
54 |       name: 'grade',
55 |       type: 'string' as const,
56 |       index: false,
57 |       optional: true
58 |     },
59 |     {
60 |       name: 'safety',
61 |       type: 'string' as const,
62 |       index: false,
63 |       optional: true
64 |     },
65 |     {
66 |       name: 'cragLatLng',
67 |       type: 'geopoint' as const,
68 |       index: true
69 |     }
70 |   ],
71 |   token_separators: ['(', ')', '-', '.']
72 |   // TBD: need to have better tie-breakers (star/popularity ratings)
73 |   // default_sorting_field: 'climb_name'
74 | }
75 | 
76 | export interface AreaTypeSenseItem {
77 |   id: string
78 |   name: string
79 |   pathTokens: string[]
80 |   areaUUID: string
81 |   areaLatLng?: [number, number]
82 |   leaf: boolean
83 |   isDestination: boolean
84 |   totalClimbs: number
85 |   density: number
86 | }
87 | 
88 | /**
89 |  * Typesense schema for areas. Areas are slightly easier to
90 |  */
91 | export const areaSchema: CollectionCreateSchema = {
92 |   name: 'areas',
93 |   fields: [
94 |     {
95 |       name: 'name',
96 |       type: 'string' as const,
97 |       facet: false
98 |     },
99 |     {
100 |       // Ancestor area names of this area
101 |       name: 'pathTokens',
102 |       type: 'string[]' as const,
103 |       facet: false
104 |     },
105 |     {
106 |       name: 'areaUUID',
107 |       type: 'string' as const,
108 |       index: false,
109 |       optional: true
110 |     },
111 |     {
112 |       name: 'totalClimbs',
113 |       type: 'int32' as const,
114 |       facet: false
115 |     },
116 |     {
117 |       name: 'density',
118 |       type: 'float' as const,
119 |       facet: false
120 |     },
121 |     {
122 |       name: 'isDestination',
123 |       type: 'bool' as const,
124 |       index: true
125 |     },
126 |     {
127 |       name: 'leaf',
128 |       type: 'bool' as const,
129 |       index: true
130 |     },
131 |     {
132 |       name: 'areaLatLng',
133 |       type: 'geopoint' as const,
134 |       index: true
135 |     }
136 |   ],
137 |   token_separators: ['(', ')', '-', '.']
138 | }
--------------------------------------------------------------------------------
/src/db/export/Typesense/Utils.ts:
--------------------------------------------------------------------------------
1 | import { Point } from '@turf/helpers'
2 | 
3 | import { DisciplineType } from '../../ClimbTypes'
4 | 
5 | export interface IFlatClimbTypes {
6 |   typeSport: boolean
7 |   typeTrad: boolean
8 |   typeTR: boolean
9 |   typeBouldering: boolean
10 |   typeDeepWaterSolo: boolean
11 |   typeMixed: boolean
12 |   typeAlpine: boolean
13 |   typeSnow: boolean
14 |   typeIce: boolean
15 |   typeAid: boolean
16 | }
17 | 
18 | export const flattenDisciplines = (type: DisciplineType): IFlatClimbTypes => {
19 |   return {
20 |     typeSport: type?.sport ?? false,
21 |     typeTrad: type?.trad ?? false,
22 |     typeTR: type?.tr ?? false,
23 |     typeBouldering: type?.bouldering ?? false,
24 |     typeDeepWaterSolo: type?.deepwatersolo ?? false,
25 |     typeMixed: type?.mixed ?? false,
26 |     typeAlpine: type?.alpine ?? false,
27 |     typeSnow: type?.snow ?? false,
28 |     typeIce: type?.ice ?? false,
29 |     typeAid: type?.aid ?? false
30 |   }
31 | }
32 | 
33 | export const disciplinesToArray = (type: DisciplineType): any => {
34 |   const z: string[] = []
35 |   for (const property in type) {
36 |     if (type[property] as boolean) {
37 |       z.push(property)
38 |     }
39 |   }
40 |   return z
41 | }
42 | 
43 | /**
44 |  * Convert mongo db geo point type to [lat,lng] for typesense geo search
45 |  * @param geoPoint
46 |  * @returns
47 |  */
48 | export const geoToLatLng = (geoPoint?: Point): [number, number] | undefined => {
49 |   if (geoPoint == null) {
50 |     return undefined
51 |   }
52 |   const { coordinates } = geoPoint
53 |   return [coordinates[1], coordinates[0]]
54 | }
--------------------------------------------------------------------------------
/src/db/export/Typesense/transformers.ts:
--------------------------------------------------------------------------------
1 | import { AreaTypeSenseItem, ClimbTypeSenseItem } from './TypesenseSchemas.js'
2 | import { AreaType } from '../../AreaTypes.js'
3 | import { disciplinesToArray, geoToLatLng } from './Utils.js'
4 | import { ClimbExtType, SafetyType } from '../../ClimbTypes.js'
5 | 
6 | /**
7 |  * Convert an Area object to a Typesense object
8 |  * @param doc AreaType
9 |  */
10 | 
11 | export function mongoAreaToTypeSense (doc: AreaType): AreaTypeSenseItem {
12 |   return {
13 |     id: doc.metadata.area_id.toUUID().toString(),
14 |     areaUUID: doc.metadata.area_id.toUUID().toString(),
15 |     name: doc.area_name ?? '',
16 |     pathTokens: doc.pathTokens,
17 |     areaLatLng: geoToLatLng(doc.metadata.lnglat),
18 |     leaf: doc.metadata.leaf,
19 |     isDestination: doc.metadata.isDestination,
20 |     totalClimbs: doc.totalClimbs,
21 |     density: doc.density
22 |   }
23 | }
24 | 
25 | /**
26 |  * Convert a Climb object to a Typesense object
27 |  * @param doc Climb type
28 |  */
29 | export function mongoClimbToTypeSense (doc: ClimbExtType): ClimbTypeSenseItem {
30 |   return {
31 |     climbUUID: doc._id.toUUID().toString(),
32 |     climbName: doc.name,
33 |     climbDesc: doc.content?.description ?? '',
34 |     fa: doc.fa ?? '',
35 |     areaNames: doc.pathTokens,
36 |     disciplines: disciplinesToArray(doc.type),
37 |     grade: doc?.yds ?? '',
38 |     safety: doc?.safety ?? SafetyType.UNSPECIFIED.toString(),
39 |     cragLatLng: geoToLatLng(doc.metadata.lnglat)
40 |   }
41 | }
--------------------------------------------------------------------------------
/src/db/export/common/index.ts:
--------------------------------------------------------------------------------
1 | export {
2 |   MongoCollectionProcessorOptions,
3 |   processMongoCollection
4 | } from './processor.js'
--------------------------------------------------------------------------------
/src/db/export/common/processor.ts:
--------------------------------------------------------------------------------
1 | export type Processor<T> = (data: T[], chunkCount: number) => Promise<void>
2 | 
3 | export interface MongoCollectionProcessorOptions<ChunkType, SourceDataType> {
4 |   /**
5 |    * A callback that is called before the data is processed.
6 |    * Use it to do any setup that needs to be done before the data is processed, e.g.
7 |    * creating a new Typesense collection, or deleting an existing one.
8 |    */
9 |   preProcess?: () => Promise<void>
10 |   /**
11 |    * A converter function that converts the data from the source format to the
12 |    * target format.
13 |    * It is called per chunk, so it should be fast.
14 |    * @param data The data to convert
15 |    * @returns The converted data
16 |    */
17 |   converter: (data: SourceDataType) => ChunkType
18 |   /**
19 |    * A generator function that yields chunks of data.
20 |    * Common queries can be found in src/db/export/queries/
21 |    * @returns A generator that yields chunks of data
22 |    */
23 |   dataGenerator: () => AsyncGenerator<SourceDataType[]>
24 |   /**
25 |    * A function that is called for every batch of data
26 |    * after it has been converted.
27 |    * Use it to upload the data to some external service or database.
28 |    * @param chunk the chunk of data to process
29 |    */
30 |   processChunk: Processor<ChunkType>
31 | }
32 | 
33 | /**
34 |  * Uses the provided data generator, converters and processors to process
35 |  * data from the database and upload it to an external service provided by the processor.
36 |  *
37 |  * ChunkType just needs to be any Object type that conforms to whatever
38 |  * schema this method is supposed to be satisfying.
39 |  */
40 | export async function processMongoCollection<ChunkType, SourceDataType> (
41 |   options: MongoCollectionProcessorOptions<ChunkType, SourceDataType>
42 | ): Promise<void> {
43 |   // start by completely refreshing this collection. (Delete and stand back up)
44 |   await options.preProcess?.()
45 | 
46 |   let chunkCount = 0
47 |   for await (const chunk of options.dataGenerator()) {
48 |     // upload the chunk as an array of translated objects
49 |     await options.processChunk(chunk.map(options.converter), chunkCount++)
50 |   }
51 | }
--------------------------------------------------------------------------------
/src/db/export/json/area.resolver.test.ts:
--------------------------------------------------------------------------------
1 | import { resolveAreaFileName, resolveAreaSubPath } from './area.resolver'
2 | import path from 'path'
3 | 
4 | describe('area resolvers', () => {
5 |   describe('area name resolver', () => {
6 |     const testCases = [
7 |       { name: 'should trim whitespace', input: ' test ', expected: 'test' },
8 |       { name: 'should lowercase', input: 'TEST', expected: 'test' },
9 |       { name: 'should replace spaces with underscores', input: 'test test', expected: 'test_test' },
10 |       { name: 'should replace special characters', input: 'test!@#$%^&*()_+{}:"<>?[]\';,./', expected: 'test' },
11 |       {
12 |         name: 'should replace multiple spaces with single underscore',
13 |         input: ' test  test ',
14 |         expected: 'test_test'
15 |       },
16 |       { name: 'should return unknown for undefined', input: undefined, expected: 'unknown' },
17 |       { name: 'should return unknown for empty string', input: '', expected: 'unknown' },
18 |       { name: 'should return unknown for whitespace', input: ' ', expected: 'unknown' },
19 |       {
20 |         name: 'acceptance test',
21 |         input: '(Home Crag) Boulders a.k.a. Sherriff Boulders 12',
22 |         expected: 'home_crag_boulders_aka_sherriff_boulders_12'
23 |       }
24 |     ]
25 | 
26 |     function assertNameResolver (areaName: string | undefined, expected: string) {
27 |       expect(resolveAreaFileName({ area_name: areaName })).toBe(expected)
28 |     }
29 | 
30 |     testCases.forEach(testCase => {
31 |       it(testCase.name, () => {
32 |         assertNameResolver(testCase.input, testCase.expected)
33 |       })
34 |     })
35 |   })
36 | 
37 |   describe('area sub path resolver', () => {
38 |     const testCases = [
39 |       { name: 'should return current path for empty array', input: [], expected: '.' },
40 |       { name: 'should return path for single element', input: ['test'], expected: 'test' },
41 |       { name: 'should return path for multiple elements', input: ['test', 'test2'], expected: path.join('test', 'test2') },
42 |       { name: 'should ignore slashes in names', input: ['test/', 'test2\\'], expected: path.join('test', 'test2') }
43 |     ]
44 | 
45 |     function assertSubPathResolver (path: string[], expected: string) {
46 |       expect(resolveAreaSubPath({ pathTokens: path })).toBe(expected)
47 |     }
48 | 
49 |     testCases.forEach(testCase => {
50 |       it(testCase.name, () => {
51 |         assertSubPathResolver(testCase.input, testCase.expected)
52 |       })
53 |     })
54 |   })
55 | })
--------------------------------------------------------------------------------
/src/db/export/json/area.resolver.ts:
--------------------------------------------------------------------------------
1 | import { AreaType } from '../../AreaTypes.js'
2 | import path from 'path'
3 | 
4 | export function resolveAreaFileName (area: Partial<AreaType>): string {
5 |   const name = normalizeName(area.area_name)
6 |   if (name === undefined || name === '') { return 'unknown' } else { return name }
7 | }
8 | 
9 | export function resolveAreaSubPath (area: Partial<AreaType>): string {
10 |   const paths: string[] = area.pathTokens?.map(normalizeName)
11 |     .map(token => token ?? '')
12 |     .filter(token => token !== '') ?? []
13 |   return path.join(...paths)
14 | }
15 | 
16 | function normalizeName (name?: string): string | undefined {
17 |   return name?.trim()
18 |     .toLowerCase()
19 |     .replace(/[^a-zA-Z0-9 -]/g, '')
20 |     .replace(/\s\s+/g, ' ')
21 |     .replace(/ /g, '_')
22 | }
--------------------------------------------------------------------------------
/src/db/export/json/async-file.processor.test.ts:
--------------------------------------------------------------------------------
1 | import { asyncFileProcessor, Writer } from './async-file.processor'
2 | import path from 'path'
3 | 
4 | interface TestType { name: string, path?: string[] }
5 | 
6 | describe('file processor', () => {
7 |   const writer = jest.fn(async (_data, _path) => await Promise.resolve())
8 |   const testData: TestType[] = [{ name: 'test', path: ['one', 'two'] }, { name: 'test2' }]
9 |   const testPath = 'testPath'
10 | 
11 |   function assertWriterCalledFor (data: TestType) {
12 |     expect(writer).toHaveBeenCalledWith(JSON.stringify(data), path.resolve(testPath, ...data.path ?? '', `${data.name}.json`))
13 |   }
14 | 
15 |   function createProcessor (w: Writer = writer) {
16 |     return asyncFileProcessor({
17 |       basePath: testPath,
18 |       fileNameResolver: (data: TestType) => data.name,
19 |       subPathResolver: (data: TestType) => data.path?.join(path.sep) ?? '',
20 |       writer: w
21 |     })
22 |   }
23 | 
24 |   function withFailedWriteOn (failingData: { name: string }) {
25 |     return async (data, path) => {
26 |       if (data === JSON.stringify(failingData)) {
27 |         return await Promise.reject('error')
28 |       }
29 |       return await writer(data, path)
30 |     }
31 |   }
32 | 
33 |   it('should write the correct data to a file', async () => {
34 |     const processor = createProcessor()
35 | 
36 |     await processor(testData, 2)
37 | 
38 |     assertWriterCalledFor(testData[0])
39 |     assertWriterCalledFor(testData[1])
40 |   })
41 | 
42 |   it('should continue batch processing on error', async () => {
43 |     const processor = createProcessor(withFailedWriteOn(testData[0]))
44 | 
45 |     await expect(processor(testData, 0)).rejects.toThrow('Failed to write 1/2 files')
46 | 
47 |     assertWriterCalledFor(testData[1])
48 |   })
49 | })
--------------------------------------------------------------------------------
/src/db/export/json/async-file.processor.ts:
--------------------------------------------------------------------------------
1 | import { promises } from 'fs'
2 | import { Processor } from '../common/processor.js'
3 | import path, { dirname } from 'path'
4 | import { logger } from '../../../logger.js'
5 | 
6 | export type Writer = (data: string, path: string) => Promise<void>
7 | export type PathResolver<T> = (data: T) => string
8 | 
9 | export interface FileProcessorOptions<T> {
10 |   basePath: string
11 |   subPathResolver?: PathResolver<T>
12 |   fileNameResolver: PathResolver<T>
13 |   writer?: Writer
14 | }
15 | 
16 | export function asyncFileProcessor<T> ({
17 |   writer = async (data, path) => await promises.writeFile(path, data, 'utf-8'),
18 |   ...options
19 | }: FileProcessorOptions<T>): Processor<T> {
20 |   return async (data: T[]): Promise<void> => {
21 |     return await Promise.allSettled(data.map(async (item) => {
22 |       const filePath = resolveFilePath(item, options)
23 |       logger.info(`saving to file ${filePath}`)
24 |       await promises.mkdir(dirname(filePath), { recursive: true })
25 |       return await writer(JSON.stringify(item), filePath)
26 |     })).then(async results => {
27 |       const errorCount = results.filter(result => result.status === 'rejected').length
28 |       const errors = joinErrors(results, data, options)
29 | 
30 |       if (errorCount > 0) { throw new Error(`Failed to write ${errorCount}/${results.length} files: ${errors}`) } else { return await Promise.resolve() }
31 |     })
32 |   }
33 | }
34 | 
35 | function resolveFilePath<T> (item: T, {
36 |   basePath,
37 |   fileNameResolver,
38 |   subPathResolver
39 | }: { basePath: string, fileNameResolver: PathResolver<T>, subPathResolver?: PathResolver<T> }): string {
40 |   if (subPathResolver != null) {
41 |     basePath = path.join(basePath, subPathResolver(item))
42 |   }
43 |   return path.resolve(basePath, `${fileNameResolver(item)}.json`)
44 | }
45 | 
46 | function joinErrors<T> (results: Array<PromiseSettledResult<Awaited<Promise<void>>>>, data: T[], options: Omit<FileProcessorOptions<T>, 'writer'>): string {
47 |   return results.map(extractError(data, options))
48 |     .filter(error => error !== undefined)
49 |     .join(', ')
50 | }
51 | 
52 | function extractError<T> (data: T[], options: Omit<FileProcessorOptions<T>, 'writer'>) {
53 |   return (result: PromiseSettledResult<void>, index: number) => {
54 |     if (result.status === 'rejected') { return `${resolveFilePath(data[index], options)} (${result.reason as string})` } else { return undefined }
55 |   }
56 | }
--------------------------------------------------------------------------------
/src/db/export/json/index.ts:
--------------------------------------------------------------------------------
1 | import { connectDB, gracefulExit } from '../../index.js'
2 | import { logger } from '../../../logger.js'
3 | import { processMongoCollection, Processor } from '../common/processor.js'
4 | import { getAllAreas } from '../queries/get-all-areas.js'
5 | import { AreaType } from '../../AreaTypes.js'
6 | 
7 | import { asyncFileProcessor } from './async-file.processor.js'
8 | import { fileURLToPath } from 'url'
9 | import path, { dirname } from 'path'
10 | import fs from 'fs'
11 | import { resolveAreaFileName, resolveAreaSubPath } from './area.resolver.js'
12 | 
13 | const filename = fileURLToPath(import.meta.url)
14 | const workingDirectory = dirname(filename)
15 | 
16 | export interface JsonExportOptions {
17 |   /**
18 |    * A function that processes an outputted chunk of data and writes it somewhere.
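 * A typical sink is the asyncFileProcessor defined in async-file.processor.ts,
 * but any Processor<AreaType> works, e.g. the assumed, illustration-only sink
 * `async (areas) => { logger.info(areas.length) }`.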
19 |    * @param data the data emitted from the database
20 |    */
21 |   processor: Processor<AreaType>
22 | }
23 | 
24 | async function exportAreaData (options: JsonExportOptions): Promise<void> {
25 |   return await processMongoCollection({
26 |     dataGenerator: getAllAreas,
27 |     converter: (area) => area,
28 |     processChunk: options.processor
29 |   })
30 | }
31 | 
32 | async function exportAreasToDisk (output: string): Promise<void> {
33 |   return await exportAreaData({
34 |     processor: asyncFileProcessor({
35 |       basePath: output,
36 |       subPathResolver: resolveAreaSubPath,
37 |       fileNameResolver: resolveAreaFileName
38 |     })
39 |   })
40 | }
41 | 
42 | async function onDBConnected (): Promise<void> {
43 |   logger.info('Start exporting data as JSON')
44 | 
45 |   if (
46 |     !process.argv.includes('--output') ||
47 |     process.argv.length <= process.argv.indexOf('--output') + 1
48 |   ) {
49 |     logger.error('Missing --output destination...')
50 |     await gracefulExit(1)
51 |   }
52 | 
53 |   // the path is relative to the current dir inside the build/ directory
54 |   const output = path.resolve(workingDirectory, '../../../../', process.argv[process.argv.indexOf('--output') + 1])
55 |   fs.mkdirSync(output, { recursive: true })
56 | 
57 |   await exportAreasToDisk(output)
58 | 
59 |   await gracefulExit()
60 | }
61 | 
62 | void connectDB(onDBConnected)
--------------------------------------------------------------------------------
/src/db/export/queries/defaults.ts:
--------------------------------------------------------------------------------
1 | /**
2 |  * The maximum default number of documents to query from the database at once.
3 |  */
4 | export const DEFAULT_CHUNK_SIZE = 1000
--------------------------------------------------------------------------------
/src/db/export/queries/get-all-areas.ts:
--------------------------------------------------------------------------------
1 | import { AreaType } from '../../AreaTypes.js'
2 | import { getAreaModel } from '../../AreaSchema.js'
3 | import { DEFAULT_CHUNK_SIZE } from './defaults.js'
4 | import { getClimbModel } from '../../ClimbSchema.js'
5 | 
6 | export async function * getAllAreas (chunkSize: number = DEFAULT_CHUNK_SIZE): AsyncGenerator<AreaType[]> {
7 |   let pageNum = 0
8 | 
9 |   while (true) {
10 |     const page = await getAreaModel().find({})
11 |       .populate({ path: 'climbs', model: getClimbModel() })
12 |       .limit(chunkSize)
13 |       .skip(pageNum * chunkSize)
14 | 
15 |     if (page.length === 0) {
16 |       return
17 |     }
18 | 
19 |     yield page
20 |     pageNum += 1
21 |   }
22 | }
--------------------------------------------------------------------------------
/src/db/export/queries/get-all-climbs.ts:
--------------------------------------------------------------------------------
1 | import { ClimbExtType } from '../../ClimbTypes.js'
2 | import { getClimbModel } from '../../ClimbSchema.js'
3 | import { DEFAULT_CHUNK_SIZE } from './defaults.js'
4 | 
5 | /**
6 |  * SQL equivalent:
7 |  *
8 |  * SELECT climbs.*, areas.ancestors, areas.pathTokens
9 |  * FROM climbs left join areas on areas.metadata.area_id = climbs.metadata.areaRef;
10 |  */
11 | export async function * getAllClimbs (chunkSize: number = DEFAULT_CHUNK_SIZE): AsyncGenerator<ClimbExtType[]> {
12 |   let pageNum = 0
13 | 
14 |   while (true) {
15 |     const page = await getClimbModel()
16 |       .aggregate([
17 |         {
18 |           $lookup: {
19 |             from: 'areas', // other collection name
20 |             localField: 'metadata.areaRef',
21 |             foreignField: 'metadata.area_id',
22 |             as: 'area', // clobber array of climb IDs with climb objects
23 |             pipeline: [
24 |               {
25 |                 $project: {
26 |                   // only include specific fields
27 |                   _id: 0,
-------------------------------------------------------------------------------- /src/db/export/queries/index.ts: --------------------------------------------------------------------------------
1 | export { getAllAreas } from './get-all-areas.js'
2 | export { getAllClimbs } from './get-all-climbs.js'
-------------------------------------------------------------------------------- /src/db/import/ClimbTransformer.ts: --------------------------------------------------------------------------------
1 | import { geometry, Point } from '@turf/helpers'
2 | import muuid from 'uuid-mongodb'
3 | import { v5 as uuidv5, NIL } from 'uuid'
4 |
5 | import { ClimbType } from '../ClimbTypes.js'
6 | import { defaultDisciplines, sanitizeDisciplines } from '../../GradeUtils.js'
7 |
8 | const transformClimbRecord = (row: any): ClimbType => {
9 | /* eslint-disable-next-line */
10 | const { route_name, grade, gradeContext, safety, type, fa, metadata, description, protection, location } = row
11 | /* eslint-disable-next-line */
12 | const { parent_lnglat, left_right_seq, mp_route_id, mp_sector_id } = metadata
13 |
14 | // in case mp_route_id is empty
15 | const pkeyStr = mp_route_id === '' ? `${mp_sector_id as string}.${left_right_seq as string}` : mp_route_id
16 | const uuid = muuid.from(uuidv5(pkeyStr, NIL))
17 | const disciplines = sanitizeDisciplines(type) ?? defaultDisciplines()
18 |
19 | const boulderingDiscipline = disciplines.bouldering === true ? { vscale: grade.YDS } : {}
20 |
21 | return {
22 | _id: uuid,
23 | name: route_name,
24 | yds: grade.YDS,
25 | grades: {
26 | ...boulderingDiscipline,
27 | yds: grade.YDS,
28 | ewbank: grade.Ewbank,
29 | font: grade.Font,
30 | french: grade.French,
31 | uiaa: grade.UIAA,
32 | brazilian_crux: grade.BrazilianCrux
33 | },
34 | gradeContext,
35 | safety,
36 | type: disciplines,
37 | fa,
38 | metadata: {
39 | lnglat: geometry('Point', parent_lnglat) as Point,
40 | left_right_index: left_right_seq,
41 | mp_id: mp_route_id,
42 | mp_crag_id: mp_sector_id,
43 | areaRef: muuid.from(uuidv5(mp_sector_id, NIL))
44 | },
45 | content: {
46 | description: Array.isArray(description) ? description.join('\n\n') : '',
47 | location: Array.isArray(location) ? location.join('\n\n') : '',
48 | protection: Array.isArray(protection) ? protection.join('\n\n') : ''
49 | }
50 | }
51 | }
52 |
53 | export default transformClimbRecord
54 |
-------------------------------------------------------------------------------- /src/db/import/__tests__/climb-data.json: --------------------------------------------------------------------------------
1 | [
2 | {
3 | "route_name": "Orange Crush",
4 | "grade": {
5 | "YDS": "5.11b/c",
6 | "French": "6c+",
7 | "Ewbanks": "23",
8 | "UIAA": "VIII-",
9 | "ZA": "24",
10 | "British": "E4 6a"
11 | },
12 | "safety": "",
13 | "type": {
14 | "sport": true
15 | },
16 | "fa": "Wade Griffith, Sterling Killion, Scott Williams",
17 | "description": [
18 | "Pretty cool orange arete that sports some interesting climbing. Crimpy edges start you off climbing either side of the arete till you can climb out left to a small shelf that affords a great rest. Climb back out right to the arete and up some powerful moves through the crux and up to some easier 5.10 terrain."
19 | ],
20 | "location": [
21 | "The route is located on the far southern shoulder of Yeti dome which is actually located on the Southeastern side of Musick Mountain. Follow the dirt road from the Big Creek turnoff to the lookout tower/repeater station then follow the rounded ridge down to the cliffs. The base of the route requires scrambling down a slab a little bit to approach. It is easy to scramble up and around to the top of the route to set a TR off bolts as well."
22 | ],
23 | "protection": [
24 | "7 QD's"
25 | ],
26 | "metadata": {
27 | "left_right_seq": "0",
28 | "parent_lnglat": [
29 | -119.3094,
30 | 37.1667
31 | ],
32 | "parent_sector": "Yeti Dome (aka Musick Mountain)",
33 | "mp_route_id": "105817201",
34 | "mp_sector_id": "105817198",
35 | "mp_path": "Western Sierra|Yeti Dome (aka Musick Mountain)"
36 | }
37 | },
38 | {
39 | "route_name": "Random Impulse",
40 | "grade": {
41 | "YDS": "5.7",
42 | "French": "5a",
43 | "Ewbanks": "15",
44 | "UIAA": "V+",
45 | "ZA": "13",
46 | "British": "MVS 4b"
47 | },
48 | "safety": "",
49 | "type": {
50 | "trad": true
51 | },
52 | "fa": "\"Unknown\" or",
53 | "description": [
54 | "Some fun moves broken up by a few scree filled ledges and a big bush. Crux comes half way up where there is a nice corner finger crack for 10-15 feet. Either climb the finger crack or do some stem moves with the corner on the right and a boulder feature on the left. Continue up wandering terrain and build an anchor, or walk off."
55 | ],
56 | "location": [
57 | "25 feet to the right of Deep Springs Education."
58 | ],
59 | "protection": [
60 | "A small assortment of cams and maybe a nut or two"
61 | ],
62 | "metadata": {
63 | "left_right_seq": "1",
64 | "parent_lnglat": [
65 | -118.13831,
66 | 37.3129
67 | ],
68 | "parent_sector": "Westgard Pass East Side (Hwy 168)",
69 | "mp_route_id": "119101118",
70 | "mp_sector_id": "119100232",
71 | "mp_path": "Sierra Eastside|Westgard Pass East Side (Hwy 168)"
72 | }
73 | }
74 | ]
-------------------------------------------------------------------------------- /src/db/import/usa/LinkClimbsWithCrags.ts: --------------------------------------------------------------------------------
1 | import mongoose from 'mongoose'
2 | import { AreaType } from '../../AreaTypes.js'
3 | import { ClimbType } from '../../ClimbTypes.js'
4 |
5 | /**
6 | * Add climb IDs to Area.climbs[] aka link climbs to their corresponding crags.
7 | * We need this function because climbs and areas are stored in 2 separate json files.
8 | * 1. Group climbs in climb model by crag_id
9 | * 2. For each group, find the corresponding crag and update the 'climbs' field
10 | * @param climbModel
11 | * @param areaModel
12 | * @returns void
13 | */
14 | export const linkClimbsWithAreas = async (
15 | climbModel: mongoose.Model<ClimbType>,
16 | areaModel: mongoose.Model<AreaType>): Promise<void> => {
17 | // Group all climbs by crag
18 | const climbsGroupByCrag: Array<{ _id: mongoose.Types.ObjectId, climbs: ClimbType[] }> = await climbModel.aggregate([
19 | { $group: { _id: '$metadata.areaRef', climbs: { $push: '$$ROOT._id' } } }
20 | ]).allowDiskUse(true)
21 |
22 | // Populate area.climbs array with climb IDs
23 | for await (const climbGroup of climbsGroupByCrag) {
24 | const cragId = climbGroup._id
25 | const { climbs } = climbGroup
26 | await areaModel.findOneAndUpdate({ 'metadata.area_id': cragId }, { climbs, totalClimbs: climbs.length }).clone()
27 | }
28 | return await Promise.resolve()
29 | }
30 |
-------------------------------------------------------------------------------- /src/db/import/usa/SeedState.ts: --------------------------------------------------------------------------------
1 | import mongoose from 'mongoose'
2 | import readline from 'node:readline'
3 | import fs from 'node:fs'
4 |
5 | import { getAreaModel } from '../../index.js'
6 | import { AreaType } from '../../AreaTypes.js'
7 | import { linkClimbsWithAreas } from './LinkClimbsWithCrags.js'
8 | import { getClimbModel } from '../../ClimbSchema.js'
9 | import { ClimbType } from '../../ClimbTypes.js'
10 | import transformClimbRecord from '../ClimbTransformer.js'
11 | import { createAreas } from './AreaTransformer.js'
12 | import { AreaNode } from './AreaTree.js'
13 | import { logger } from '../../../logger.js'
14 |
15 | export interface JobStats {
16 | state: string
17 | areaCount: number
18 | climbCount: number
19 | }
20 |
21 | export const seedState = async (root: AreaNode, stateCode: string, fileClimbs: string, fileAreas: string): Promise<JobStats> => {
22 | console.time('Loaded ' + stateCode)
23 |
24 | const areaModel: mongoose.Model<AreaType> = getAreaModel('areas')
25 | const climbModel: mongoose.Model<ClimbType> = getClimbModel('climbs')
26 | logger.info('start', stateCode)
27 | const stats = await Promise.all([
28 | loadClimbs(fileClimbs, climbModel),
29 | loadAreas(root, fileAreas, areaModel)
30 | ])
31 | logger.info('link', stateCode)
32 | await linkClimbsWithAreas(climbModel, areaModel)
33 |
34 | console.timeEnd('Loaded ' + stateCode)
35 |
36 | return await Promise.resolve({ state: stateCode, climbCount: stats[0], areaCount: stats[1] })
37 | }
38 |
39 | export const dropCollection = async (name: string): Promise<void> => {
40 | try {
41 | await mongoose.connection.db.dropCollection(name)
42 | } catch (e) {
43 | }
44 | }
45 |
46 | const loadClimbs = async (fileName: string, model: mongoose.Model<ClimbType>): Promise<number> => {
47 | let count = 0
48 | const chunkSize = 100
49 | let chunk: ClimbType[] = []
50 |
51 | const rl = readline.createInterface({
52 | input: fs.createReadStream(fileName),
53 | terminal: false
54 | })
55 |
56 | for await (const line of rl) {
57 | const jsonLine = JSON.parse(line)
58 | const record = transformClimbRecord(jsonLine)
59 | chunk.push(record)
60 | if (chunk.length % chunkSize === 0) {
61 | count = count + chunk.length
62 | await model.insertMany(chunk, { ordered: false })
63 | chunk = []
64 | }
65 | }
66 |
67 | if (chunk.length > 0) {
68 | count = count + chunk.length
69 | await model.insertMany(chunk, { ordered: false })
70 | }
71 | return count
72 | }
73 |
74 | const loadAreas = async (root: AreaNode, fileName: string, model: mongoose.Model<AreaType>): Promise<number> => {
75 | const buffer: any[] = []
76 |
77 | const rl = readline.createInterface({
78 | input: fs.createReadStream(fileName),
79 | terminal: false
80 | })
81 |
82 | for await (const line of rl) {
83 | buffer.push(JSON.parse(line))
84 | }
85 |
86 | return await createAreas(root, buffer, model)
87 | }
88 |
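loadClimbs() above streams a .jsonlines file through readline and flushes inserts in fixed-size batches, so the whole file never has to sit in memory. The same pattern, generalized (a sketch under stated assumptions — forEachJsonLineChunk is a hypothetical helper, not part of this repository):

import readline from 'node:readline'
import fs from 'node:fs'

// Hypothetical helper: invoke `flush` for every `chunkSize` parsed lines,
// plus once more for the final partial chunk.
async function forEachJsonLineChunk (fileName: string, chunkSize: number, flush: (chunk: any[]) => Promise<void>): Promise<number> {
  let count = 0
  let chunk: any[] = []
  const rl = readline.createInterface({ input: fs.createReadStream(fileName), terminal: false })
  for await (const line of rl) {
    chunk.push(JSON.parse(line))
    if (chunk.length === chunkSize) {
      count += chunk.length
      await flush(chunk)
      chunk = []
    }
  }
  if (chunk.length > 0) {
    count += chunk.length
    await flush(chunk)
  }
  return count
}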
-------------------------------------------------------------------------------- /src/db/import/usa/USADay0Seed.ts: --------------------------------------------------------------------------------
1 | import fs from 'node:fs'
2 | import pLimit from 'p-limit'
3 |
4 | import { connectDB, gracefulExit, createIndexes } from '../../index.js'
5 | import { createRoot } from './AreaTransformer.js'
6 | import US_STATES from './us-states.js'
7 | import { seedState, dropCollection, JobStats } from './SeedState.js'
8 | import { logger } from '../../../logger.js'
9 |
10 | const contentDir: string = process.env.CONTENT_BASEDIR ?? ''
11 |
12 | const DEFAULT_CONCURRENT_JOBS = 4
13 | const concurrentJobs: number =
14 | process.env.OB_SEED_JOBS !== undefined
15 | ? parseInt(process.env.OB_SEED_JOBS)
16 | : DEFAULT_CONCURRENT_JOBS
17 |
18 | logger.info('Data dir', contentDir)
19 | logger.info('Max concurrent jobs: ', concurrentJobs)
20 |
21 | if (contentDir === '') {
22 | logger.error('Missing CONTENT_BASEDIR env')
23 | process.exit(1)
24 | }
25 |
26 | const main = async (): Promise<void> => {
27 | const limiter = pLimit(
28 | concurrentJobs > 0 ? concurrentJobs : DEFAULT_CONCURRENT_JOBS
29 | )
30 |
31 | // TODO: Allow update. Right now we drop the entire collection on each run.
32 | await dropCollection('areas')
33 | await dropCollection('climbs')
34 |
35 | console.time('Creating indexes')
36 | await createIndexes()
37 | console.timeEnd('Creating indexes')
38 |
39 | const rootNode = await createRoot('US', 'USA')
40 |
41 | const stats: Array<JobStats | undefined> = await Promise.all<JobStats | undefined>(
42 | US_STATES.map(async state => {
43 | const code = state.code.toLowerCase()
44 | const fRoutes = `${contentDir}/${code}-routes.jsonlines`
45 | const fAreas = `${contentDir}/${code}-areas.jsonlines`
46 |
47 | if (fs.existsSync(fRoutes) && fs.existsSync(fAreas)) {
48 | /* eslint-disable-next-line */
49 | return limiter(seedState, rootNode, code, fRoutes, fAreas)
50 | }
51 | return await Promise.resolve()
52 | })
53 | )
54 |
55 | printStats(stats)
56 |
57 | await gracefulExit()
58 | return await Promise.resolve()
59 | }
60 |
61 | const printStats = (stats: Array<JobStats | undefined>): void => {
62 | logger.info('------------------ Summary -------------------')
63 | const sums = { states: 0, climbs: 0, areas: 0 }
64 | for (const entry of stats) {
65 | if (entry !== undefined) {
66 | logger.info(entry)
67 | const e = entry as JobStats
68 | sums.climbs += e.climbCount
69 | sums.areas += e.areaCount
70 | sums.states += 1
71 | }
72 | }
73 | logger.info('---------------------------------------------')
74 | logger.info('Total: ', sums)
75 | }
76 |
77 | void connectDB(main)
78 |
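USADay0Seed above bounds its fan-out with p-limit: every state job is created up front, but at most `concurrentJobs` of them run at once. A self-contained sketch of the same pattern (illustrative values; doWork merely stands in for seedState):

import pLimit from 'p-limit'

// Stand-in for a real job such as seedState().
const doWork = async (code: string): Promise<string> => code.toUpperCase()

// All three tasks are scheduled immediately, but p-limit allows only two
// to run concurrently; Promise.all still resolves in input order.
const limit = pLimit(2)
const results = await Promise.all(['al', 'ak', 'az'].map(async code => await limit(doWork, code)))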
-------------------------------------------------------------------------------- /src/db/import/usa/__tests__/Tree.test.ts: --------------------------------------------------------------------------------
1 | import { Tree, createRootNode } from '../AreaTree'
2 |
3 | const path1 = 'Oregon|Central Oregon|Paulina Peak|Vigilantes de Obsidiana|Roca Rhodales'
4 | const path2 = 'Oregon|Central Oregon|Smith Rock|Spiderman Buttress'
5 |
6 | const jsonLine1 = {
7 | url: '/area/117795688/foo-bar'
8 | }
9 |
10 | const jsonLine2 = {
11 | url: '/area/1234567/foo-bar'
12 | }
13 |
14 | describe('Area Tree data structure', () => {
15 | it('should create a tree from path string', () => {
16 | const root = createRootNode('US')
17 | const tree = new Tree(root)
18 | tree.insertMany(path1, jsonLine1)
19 | expect(tree.map.size).toEqual(path1.split('|').length)
20 | expect(tree.atPath('Oregon|Central Oregon')?.children.size).toEqual(1)
21 | })
22 |
23 | it('should add a branch', () => {
24 | const tree = new Tree(createRootNode('US'))
25 | tree.insertMany(path1, jsonLine1)
26 | tree.insertMany(path2, jsonLine2) // Central Oregon should now have 2 children
27 |
28 | expect(tree.atPath('Oregon')?.children.size).toEqual(1)
29 | const node = tree.atPath('Oregon|Central Oregon')
30 | expect(node?.children.size).toEqual(2)
31 |
32 | // verify Central Oregon children
33 | if (node?.children !== undefined) {
34 | const ids = Array.from(node.children.values())
35 | const child1 = tree.atPath('Oregon|Central Oregon|Paulina Peak')
36 | const child2 = tree.atPath('Oregon|Central Oregon|Smith Rock')
37 | expect([child1?._id, child2?._id]).toEqual(expect.arrayContaining(ids))
38 | }
39 | })
40 |
41 | it('builds complete path to root', () => {
42 | const countryRoot = createRootNode('US')
43 | const tree = new Tree(countryRoot)
44 | tree.insertMany(path1, jsonLine1)
45 | const leaf = tree.atPath(path1)
46 | if (leaf !== undefined) {
47 | const ancestors = leaf.getAncestors()
48 | console.log(ancestors)
49 | expect(ancestors.length).toEqual(path1.split('|').length + 1) // all elements of path1 + 1 for US root
50 | expect(ancestors[0]).toEqual(countryRoot?.uuid)
51 | const stateRoot = tree.atPath('Oregon')
52 | expect(ancestors[1]).toEqual(stateRoot?.uuid)
53 | }
54 | })
55 | })
56 |
-------------------------------------------------------------------------------- /src/db/import/usa/__tests__/Utils.test.ts: --------------------------------------------------------------------------------
1 | import { extractMpId } from '../AreaTransformer.js'
2 |
3 | test('Verify area url parser', () => {
4 | expect(extractMpId('/area/117795688/foo-bar')).toEqual('117795688')
5 | // test again since Regex matcher can be stateful
6 | expect(extractMpId('/area/123/abc')).toEqual('123')
7 | expect(extractMpId('/area//apple')).toEqual(undefined)
8 | })
9 |
-------------------------------------------------------------------------------- /src/db/import/usa/us-states.ts: --------------------------------------------------------------------------------
1 | const US_STATES = [
2 | {
3 | name: 'Alabama',
4 | code: 'AL'
5 | },
6 | {
7 | name: 'Alaska',
8 | code: 'AK'
9 | },
10 | {
11 | name: 'Arizona',
12 | code: 'AZ'
13 | },
14 | {
15 | name: 'Arkansas',
16 | code: 'AR'
17 | },
18 | {
19 | name: 'California',
20 | code: 'CA'
21 | },
22 | {
23 | name: 'Colorado',
24 | code: 'CO'
25 | },
26 | {
27 | name: 'Connecticut',
28 | code: 'CT'
29 | },
30 | {
31 | name: 'Delaware',
32 | code: 'DE'
33 | },
34 | {
35 | name: 'Florida',
36 | code: 'FL'
37 | },
38 | {
39 | name: 'Georgia',
40 | code: 'GA'
41 | },
42 | {
43 | name: 'Hawaii',
44 | code: 'HI'
45 | },
46 | {
47 | name: 'Idaho',
48 | code: 'ID'
49 | },
50 | {
51 | name: 'Illinois',
52 | code: 'IL'
53 | },
54 | {
55 | name: 'Indiana',
56 | code: 'IN'
57 | },
58 | {
59 | name: 'Iowa',
60 | code: 'IA'
61 | },
62 | {
63 | name: 'Kansas',
64 | code: 'KS'
65 | },
66 | {
67 | name: 'Kentucky',
68 | code: 'KY'
69 | },
70 | {
71 | name: 'Louisiana',
72 | code: 'LA'
73 | },
74 | {
75 | name: 'Maine',
76 | code: 'ME'
77 | },
78 | {
79 | name: 'Maryland',
80 | code: 'MD'
81 | },
82 | {
83 | name: 'Massachusetts',
84
| code: 'MA'
85 | },
86 | {
87 | name: 'Michigan',
88 | code: 'MI'
89 | },
90 | {
91 | name: 'Minnesota',
92 | code: 'MN'
93 | },
94 | {
95 | name: 'Mississippi',
96 | code: 'MS'
97 | },
98 | {
99 | name: 'Missouri',
100 | code: 'MO'
101 | },
102 | {
103 | name: 'Montana',
104 | code: 'MT'
105 | },
106 | {
107 | name: 'Nebraska',
108 | code: 'NE'
109 | },
110 | {
111 | name: 'Nevada',
112 | code: 'NV'
113 | },
114 | {
115 | name: 'New Hampshire',
116 | code: 'NH'
117 | },
118 | {
119 | name: 'New Jersey',
120 | code: 'NJ'
121 | },
122 | {
123 | name: 'New Mexico',
124 | code: 'NM'
125 | },
126 | {
127 | name: 'New York',
128 | code: 'NY'
129 | },
130 | {
131 | name: 'North Carolina',
132 | code: 'NC'
133 | },
134 | {
135 | name: 'North Dakota',
136 | code: 'ND'
137 | },
138 | {
139 | name: 'Ohio',
140 | code: 'OH'
141 | },
142 | {
143 | name: 'Oklahoma',
144 | code: 'OK'
145 | },
146 | {
147 | name: 'Oregon',
148 | code: 'OR'
149 | },
150 | {
151 | name: 'Pennsylvania',
152 | code: 'PA'
153 | },
154 | {
155 | name: 'Rhode Island',
156 | code: 'RI'
157 | },
158 | {
159 | name: 'South Carolina',
160 | code: 'SC'
161 | },
162 | {
163 | name: 'South Dakota',
164 | code: 'SD'
165 | },
166 | {
167 | name: 'Tennessee',
168 | code: 'TN'
169 | },
170 | {
171 | name: 'Texas',
172 | code: 'TX'
173 | },
174 | {
175 | name: 'Utah',
176 | code: 'UT'
177 | },
178 | {
179 | name: 'Vermont',
180 | code: 'VT'
181 | },
182 | {
183 | name: 'Virginia',
184 | code: 'VA'
185 | },
186 | {
187 | name: 'Washington',
188 | code: 'WA'
189 | },
190 | {
191 | name: 'West Virginia',
192 | code: 'WV'
193 | },
194 | {
195 | name: 'Wisconsin',
196 | code: 'WI'
197 | },
198 | {
199 | name: 'Wyoming',
200 | code: 'WY'
201 | }
202 | ]
203 |
204 | export default US_STATES
205 |
-------------------------------------------------------------------------------- /src/db/index.ts: --------------------------------------------------------------------------------
1 | import mongoose from 'mongoose'
2 | import { ChangeStream } from 'mongodb'
3 | import { config } from 'dotenv'
4 | import { enableAllPlugins } from 'immer'
5 |
6 | import { getAreaModel } from './AreaSchema.js'
7 | import { getClimbModel } from './ClimbSchema.js'
8 | import { getMediaObjectModel } from './MediaObjectSchema.js'
9 | import { getOrganizationModel } from './OrganizationSchema.js'
10 | import { getTickModel } from './TickSchema.js'
11 | import { getXMediaModel } from './XMediaSchema.js'
12 | import { getPostModel } from './PostSchema.js'
13 | import { getChangeLogModel } from './ChangeLogSchema.js'
14 | import { getExperimentalUserModel, getUserModel } from './UserSchema.js'
15 | import { logger } from '../logger.js'
16 | import streamListener from './edit/streamListener.js'
17 |
18 | config()
19 | enableAllPlugins()
20 |
21 | export const checkVar = (name: string): string => {
22 | const value = process.env[name] ?? ''
23 | if (value === '') {
24 | logger.error('Missing env ', name)
25 | process.exit(1)
26 | }
27 | return value
28 | }
29 |
30 | const defaultFn = logger.info.bind(logger, 'DB connected successfully')
31 |
32 | export const connectDB = async (onConnected: () => any = defaultFn): Promise<void> => {
33 | const user = checkVar('MONGO_INITDB_ROOT_USERNAME')
34 | const pass = checkVar('MONGO_INITDB_ROOT_PASSWORD')
35 | const server = checkVar('MONGO_SERVICE')
36 | const rsName = checkVar('MONGO_REPLICA_SET_NAME')
37 | const scheme = checkVar('MONGO_SCHEME')
38 | const authDb = checkVar('MONGO_AUTHDB')
39 | const dbName = checkVar('MONGO_DBNAME')
40 | const tlsFlag = checkVar('MONGO_TLS')
41 |
42 | logger.info(
43 | `Connecting to database 'mongodb://${user}:****@${server}'...`
44 | )
45 | try {
46 | // /* eslint-disable @typescript-eslint/no-floating-promises */
47 | mongoose.connection.on('open', onConnected)
48 |
49 | mongoose.connection.on(
50 | 'error', (e) => {
51 | logger.error('MongoDB connection error', e)
52 | process.exit(1)
53 | }
54 | )
55 |
56 | await mongoose.connect(
57 | `${scheme}://${user}:${pass}@${server}/${dbName}?authSource=${authDb}&tls=${tlsFlag}&replicaSet=${rsName}`,
58 | { autoIndex: true }
59 | )
60 | } catch (e) {
61 | logger.error("Can't connect to db")
62 | process.exit(1)
63 | }
64 | }
65 |
66 | export const createIndexes = async (): Promise<void> => {
67 | await getClimbModel().createIndexes()
68 | await getAreaModel().createIndexes()
69 | await getOrganizationModel().createIndexes()
70 | await getTickModel().createIndexes()
71 | await getXMediaModel().createIndexes()
72 | await getPostModel().createIndexes()
73 | await getMediaObjectModel().createIndexes()
74 | await getChangeLogModel().createIndexes()
75 | await getUserModel().createIndexes()
76 | }
77 |
78 | export const gracefulExit = async (exitCode: number = 0): Promise<void> => {
79 | await mongoose.connection.close(true)
80 | logger.info('Gracefully exiting.')
81 | process.exit(exitCode) // honor the exit code callers pass in, eg gracefulExit(1) on error paths
82 | }
83 |
84 | export const defaultPostConnect = async (changeStreamListener = streamListener): Promise<ChangeStream> => {
85 | console.log('Kudos!')
86 | await createIndexes()
87 | return await changeStreamListener()
88 | }
89 |
90 | // eslint-disable-next-line
91 | process.on('SIGINT', gracefulExit).on('SIGTERM', gracefulExit)
92 |
93 | export {
94 | getOrganizationModel,
95 | getAreaModel,
96 | getTickModel,
97 | getClimbModel,
98 | getChangeLogModel,
99 | getXMediaModel,
100 | getPostModel,
101 | getExperimentalUserModel,
102 | getMediaObjectModel,
103 | getUserModel
104 | }
-------------------------------------------------------------------------------- /src/db/utils/index.ts: --------------------------------------------------------------------------------
1 | import { AuthorMetadata } from '../../types'
2 |
3 | export const getAuthorMetadataFromBaseNode = ({ updatedAt, updatedBy, createdAt, createdBy }: AuthorMetadata): AuthorMetadata => ({
4 | updatedAt,
5 | updatedBy,
6 | createdAt,
7 | createdBy
8 | })
9 |
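The entry point above establishes the convention every job script below follows: register a callback with connectDB(), do the work, then call gracefulExit(). A minimal job skeleton (a sketch only — the import paths depend on where the job file lives under src/db/utils/jobs):

import { connectDB, gracefulExit } from '../../index.js'
import { logger } from '../../../logger.js'

const onConnected = async (): Promise<void> => {
  logger.info('Doing some one-off maintenance work')
  // ... job body goes here ...
  await gracefulExit()
}

void connectDB(onConnected)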
-------------------------------------------------------------------------------- /src/db/utils/jobs/AddCountriesJob.ts: --------------------------------------------------------------------------------
1 | import enJson from 'i18n-iso-countries/langs/en.json' assert { type: 'json' }
2 |
3 | import { connectDB, gracefulExit } from '../../index.js'
4 | import MutableAreaDataSource from '../../../model/MutableAreaDataSource.js'
5 | import { logger } from '../../../logger.js'
6 |
7 | const onConnected = async (): Promise<void> => {
8 | logger.info('Adding all countries (except USA)')
9 | logger.info('For USA run: `yarn seed-usa`')
10 | await insertAllCountries()
11 | await gracefulExit()
12 | }
13 |
14 | const insertAllCountries = async (): Promise<void> => {
15 | const areaDS = MutableAreaDataSource.getInstance()
16 | await Promise.all(
17 | Object.keys(enJson.countries).map(async code => {
18 | if (code === 'US') return null
19 | return await areaDS.addCountry(code)
20 | })
21 | )
22 | }
23 |
24 | void connectDB(onConnected)
25 |
-------------------------------------------------------------------------------- /src/db/utils/jobs/CragUpdater.ts: --------------------------------------------------------------------------------
1 | import mongoose from 'mongoose'
2 | import bbox2Polygon from '@turf/bbox-polygon'
3 |
4 | import { getAreaModel } from '../../AreaSchema.js'
5 | import { getClimbModel } from '../../ClimbSchema.js'
6 | import { AreaType } from '../../AreaTypes.js'
7 | import { ClimbType } from '../../ClimbTypes.js'
8 | import { aggregateCragStats } from '../Aggregate.js'
9 | import { bboxFrom } from '../../../geo-utils.js'
10 |
11 | type AreaMongoType = mongoose.Document & AreaType
12 |
13 | /**
14 | * Run an update operation on all crags (leaf nodes)
15 | * Todo: finer-grained data, i.e. per country?
16 | */
17 | export const visitAllCrags = async (): Promise<void> => {
18 | const areaModel = getAreaModel('areas')
19 |
20 | // Get all crags
21 | const iterator = areaModel
22 | .find({
23 | $or: [
24 | { 'metadata.leaf': true },
25 | { children: { $exists: true, $size: 0 } }
26 | ]
27 | }).batchSize(10)
28 | .populate<{ climbs: ClimbType[] }>({ path: 'climbs', model: getClimbModel() })
29 | .allowDiskUse(true)
30 |
31 | // Calculate stats and bbox
32 | // Todo: bbox should be calculated when we insert a crag or change its coordinates
33 | for await (const crag of iterator) {
34 | const node: AreaMongoType = crag
35 | node.aggregate = aggregateCragStats(crag.toObject())
36 | const bbox = bboxFrom(node.metadata.lnglat)
37 | node.metadata.bbox = bbox
38 | node.metadata.polygon = bbox == null ? undefined : bbox2Polygon(bbox).geometry
39 | await node.save()
40 | }
41 | }
42 |
-------------------------------------------------------------------------------- /src/db/utils/jobs/MapTiles/init.ts: --------------------------------------------------------------------------------
1 | import { config } from 'dotenv'
2 |
3 | config({ path: ['.env.local', '.env'] })
4 |
5 | const workingDir = process.env.MAPTILES_WORKING_DIR ?? ''
6 |
7 | if (workingDir.trim() === '') {
8 | throw new Error('MAPTILES_WORKING_DIR not set')
9 | }
10 |
11 | export { workingDir }
12 |
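A note on the config() call in init.ts above: when dotenv is given several paths, it does not overwrite variables that are already set, so keys defined in .env.local take precedence over .env (this describes dotenv's documented multi-path behavior; the snippet is illustrative only):

import { config } from 'dotenv'

// '.env.local' is listed first, so its values win; keys it does not
// define fall through to '.env'.
config({ path: ['.env.local', '.env'] })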
-------------------------------------------------------------------------------- /src/db/utils/jobs/UpdateStatsJob.ts: --------------------------------------------------------------------------------
1 | import { connectDB, gracefulExit } from '../../index.js'
2 | import { updateAllAreas } from './TreeUpdaters/updateAllAreas.js'
3 | import { visitAllCrags } from './CragUpdater.js'
4 | import { logger } from '../../../logger.js'
5 |
6 | const onConnected = async (): Promise<void> => {
7 | logger.info('Initializing database')
8 | console.time('Calculating global stats')
9 | await visitAllCrags()
10 | await updateAllAreas()
11 | console.timeEnd('Calculating global stats')
12 | await gracefulExit()
13 | return await Promise.resolve()
14 | }
15 |
16 | void connectDB(onConnected)
17 |
-------------------------------------------------------------------------------- /src/db/utils/jobs/migration/CreateMediaMetaCollection.ts: --------------------------------------------------------------------------------
1 | import sharp from 'sharp'
2 | import { glob } from 'glob'
3 | import { validate as uuidValidate } from 'uuid'
4 | import muuid from 'uuid-mongodb'
5 |
6 | import { connectDB, gracefulExit } from '../../../index.js'
7 | import { logger } from '../../../../logger.js'
8 | import { MediaObject } from '../../../MediaObjectTypes.js'
9 | import { getMediaObjectModel } from '../../../MediaObjectSchema.js'
10 | import { getFileInfo } from './SirvClient.js'
11 |
12 | const LOCAL_MEDIA_DIR = process.env.LOCAL_MEDIA_DIR
13 |
14 | if (LOCAL_MEDIA_DIR == null) {
15 | throw new Error('LOCAL_MEDIA_DIR env not defined')
16 | }
17 |
18 | /**
19 | * Build the media object collection from media files on disk
20 | */
21 | const onConnected = async (): Promise<void> => {
22 | logger.info('Creating photo collection')
23 | const model = getMediaObjectModel()
24 | await model.ensureIndexes()
25 | const images = await glob(LOCAL_MEDIA_DIR, {
26 | nodir: true,
27 | stat: true,
28 | withFileTypes: true
29 | })
30 |
31 | let list: any[] = []
32 | let count = 0
33 | for (const image of images) {
34 | const { width, height, format } = await sharp(image.fullpath()).metadata()
35 | if (width == null || height == null || image.size == null) continue
36 | if ((format !== 'avif' && format !== 'jpeg' && format !== 'png' && format !== 'webp')) {
37 | logger.warn({ format, file: image.name }, 'Unexpected media format')
38 | continue
39 | }
40 |
41 | const folderUuidStr = image.parent?.name ?? ''
42 | if (!uuidValidate(folderUuidStr)) {
43 | logger.error({ file: image.name, parent: folderUuidStr }, 'Error: expected folder name to be in uuid format')
44 | continue
45 | }
46 | const userUuid = muuid.from(folderUuidStr)
47 | const mediaUrl = `/u/${folderUuidStr}/${image.name}`
48 | const { btime } = await getFileInfo(mediaUrl)
49 | const meta: Omit<MediaObject, '_id'> = {
50 | userUuid,
51 | mediaUrl,
52 | size: image.size,
53 | width,
54 | height,
55 | format,
56 | entityTags: [],
57 | createdAt: btime
58 | }
59 | list.push(meta)
60 | count = count + 1
61 |
62 | if (list.length === 20) {
63 | await model.insertMany(list)
64 | list = []
65 | }
66 | }
67 |
68 | if (list.length > 0) {
69 | await model.insertMany(list)
70 | }
71 |
72 | logger.info({ count }, 'Finish')
73 |
74 | await gracefulExit()
75 | }
76 |
77 | void connectDB(onConnected)
78 |
-------------------------------------------------------------------------------- /src/db/utils/jobs/migration/SirvClient.ts: --------------------------------------------------------------------------------
1 | import axios from 'axios'
2 |
3 | const SIRV_CONFIG = {
4 | clientId: process.env.SIRV_CLIENT_ID_RO ?? null,
5 | clientSecret: process.env.SIRV_CLIENT_SECRET_RO ?? null
6 | }
7 |
8 | const client = axios.create({
9 | baseURL: 'https://api.sirv.com/v2',
10 | headers: {
11 | 'content-type': 'application/json'
12 | }
13 | })
14 |
15 | const headers = {
16 | 'content-type': 'application/json'
17 | }
18 |
19 | interface TokenParamsType {
20 | clientId: string | null
21 | clientSecret: string | null
22 | }
23 |
24 | const getToken = async (): Promise<string | null> => {
25 | const params: TokenParamsType = {
26 | clientId: SIRV_CONFIG.clientId,
27 | clientSecret: SIRV_CONFIG.clientSecret
28 | }
29 |
30 | try {
31 | const res = await client.post(
32 | '/token',
33 | params)
34 |
35 | if (res.status === 200) {
36 | return res.data.token
37 | }
38 | } catch (e) {
39 | console.error(e)
40 | process.exit(1)
41 | }
42 | return null
43 | }
44 |
45 | const token = await getToken() ?? ''
46 |
47 | interface FileMetadata {
48 | mtime: Date
49 | btime: Date
50 | }
51 |
52 | /**
53 | * When downloading photos from Sirv using rclone or on the UI,
54 | * the image file's upload time is lost. This function gets
55 | * the original upload timestamp.
56 | * @param filename
57 | * @returns
58 | */
59 | export const getFileInfo = async (filename: string): Promise<FileMetadata> => {
60 | const res = await client.get(
61 | '/files/stat?filename=' + encodeURIComponent(filename),
62 | {
63 | headers: {
64 | ...headers,
65 | Authorization: `bearer ${token}`
66 | }
67 | }
68 | )
69 |
70 | if (res.status === 200) {
71 | const { ctime, mtime } = res.data
72 | return ({
73 | btime: new Date(ctime),
74 | mtime: new Date(mtime)
75 | })
76 | }
77 | throw new Error('Sirv API.getFileInfo() error: ' + res.statusText)
78 | }
79 |
-------------------------------------------------------------------------------- /src/geo-utils.ts: --------------------------------------------------------------------------------
1 | import { featureCollection, Point } from '@turf/helpers'
2 | import bbox from '@turf/bbox'
3 | import bboxPolygon from '@turf/bbox-polygon'
4 | import area from '@turf/area'
5 | import circle from '@turf/circle'
6 | import { BBoxType } from './types'
7 |
8 | /**
9 | * Turn a single point (crag's GPS) into a circle then create a bbox.
10 | * @param point
11 | * @returns
12 | */
13 | export const bboxFrom = (point: Point | undefined): BBoxType | undefined => {
14 | if (point == null) return undefined
15 | const options = { steps: 8 }
16 | const r = 0.05 // unit=km. Hopefully this is a large enough area (but not too large) for a crag
17 | const cir = circle(point, r, options)
18 | return bbox(cir)
19 | }
20 |
21 | /**
22 | * Create a new bounding box from a list of smaller ones
23 | * @param bboxList array of BBox
24 | * @returns BBox
25 | */
26 | export const bboxFromList = (bboxList: BBoxType[]): any => {
27 | const z = bboxList.map(item => bboxPolygon(item))
28 | return bbox(featureCollection(z))
29 | }
30 |
31 | /**
32 | * Calculate climb density
33 | * @param bbox
34 | * @param totalClimbs
35 | * @returns total climbs per km sq
36 | */
37 | export const areaDensity = (bbox: BBoxType | undefined, totalClimbs: number): number => {
38 | if (!Array.isArray(bbox) || bbox?.length !== 4) return 0
39 | const areaInKm = area(bboxPolygon(bbox)) / 1000000
40 | const minArea = areaInKm < 5 ? 5 : areaInKm
41 | return totalClimbs / minArea
42 | }
43 |
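A worked example of the two helpers above (a sketch, not a repository file; the import path is illustrative): bboxFrom() turns a crag's point into a circle of radius 0.05 km, so the resulting bbox is roughly 100 m across and encloses about 0.01 km² — far below the 5 km² floor in areaDensity() — which means a small crag with 50 climbs reports 50 / 5 = 10 climbs per km².

import { geometry, Point } from '@turf/helpers'
import { bboxFrom, areaDensity } from './geo-utils.js'

// Coordinates are lnglat order, matching the importers elsewhere in src/db.
const crag = geometry('Point', [-118.13831, 37.3129]) as Point
const box = bboxFrom(crag)
console.log(areaDensity(box, 50)) // ≈ 10 climbs per square km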
-------------------------------------------------------------------------------- /src/graphql/area/AreaMutations.ts: --------------------------------------------------------------------------------
1 | import muuid from 'uuid-mongodb'
2 |
3 | import { AreaType } from '../../db/AreaTypes.js'
4 | import { ContextWithAuth } from '../../types.js'
5 | import type MutableAreaDataSource from '../../model/MutableAreaDataSource.js'
6 | import { BulkImportInputType, BulkImportResultType } from '../../db/BulkImportTypes.js'
7 |
8 | const AreaMutations = {
9 |
10 | setDestinationFlag: async (_, { input }, context: ContextWithAuth): Promise<AreaType | null> => {
11 | const { dataSources, user } = context
12 | const { areas }: { areas: MutableAreaDataSource } = dataSources
13 | const { id, flag } = input
14 |
15 | // permission middleware shouldn't send undefined uuid
16 | if (user?.uuid == null) throw new Error('Missing user uuid')
17 |
18 | return await areas.setDestinationFlag(user.uuid, muuid.from(id), flag)
19 | },
20 |
21 | removeArea: async (_, { input }, { dataSources, user }: ContextWithAuth): Promise<AreaType | null> => {
22 | const { areas } = dataSources
23 | const { uuid } = input
24 |
25 | // permission middleware shouldn't send undefined uuid
26 | if (user?.uuid == null) throw new Error('Missing user uuid')
27 |
28 | return await areas.deleteArea(user.uuid, muuid.from(uuid))
29 | },
30 |
31 | addArea: async (_, { input }, { dataSources, user }: ContextWithAuth): Promise<AreaType> => {
32 | const { areas } = dataSources
33 | const { name, parentUuid, countryCode, experimentalAuthor, isLeaf, isBoulder } = input
34 |
35 | // permission middleware shouldn't send undefined uuid
36 | if (user?.uuid == null) throw new Error('Missing user uuid')
37 |
38 | return await areas.addArea(
39 | user.uuid, name,
40 | parentUuid == null ? null : muuid.from(parentUuid),
41 | countryCode,
42 | experimentalAuthor,
43 | isLeaf,
44 | isBoulder
45 | )
46 | },
47 |
48 | updateArea: async (_, { input }, { dataSources, user }: ContextWithAuth): Promise<AreaType | null> => {
49 | const { areas } = dataSources
50 |
51 | if (user?.uuid == null) throw new Error('Missing user uuid')
52 | if (input?.uuid == null) throw new Error('Missing area uuid')
53 |
54 | const { lat, lng } = input
55 | if (lat != null && !isLatitude(lat)) throw Error('Invalid latitude')
56 | if (lng != null && !isLongitude(lng)) throw Error('Invalid longitude')
57 | if ((lat == null && lng != null) || (lat != null && lng == null)) throw Error('Must provide both latitude and longitude')
58 |
59 | const areaUuid = muuid.from(input.uuid)
60 |
61 | // Except for 'uuid' other fields are optional, check to see if there are any fields
62 | // besides 'uuid'
63 | const fields = Object.keys(input).filter(key => key !== 'uuid')
64 | if (fields.length === 0) return null
65 |
66 | return await areas.updateArea(
67 | user.uuid,
68 | areaUuid,
69 | input
70 | )
71 | },
72 |
73 | updateAreasSortingOrder: async (_, { input }, { dataSources, user }: ContextWithAuth): Promise<string[]> => {
74 | const { areas } = dataSources
75 |
76 | if (user?.uuid == null) throw new Error('Missing user uuid')
77 | return await areas.updateSortingOrder(
78 | user.uuid,
79 | input
80 | )
81 | },
82 |
83 | bulkImportAreas: async (_, { input }: { input: BulkImportInputType }, {
84 | dataSources,
85 | user
86 | }: ContextWithAuth): Promise<BulkImportResultType> => {
87 | const { bulkImport, climbs } = dataSources
88 | if (user?.uuid == null) throw new Error('Missing user uuid')
89 | return await bulkImport.bulkImport({
90 | user: user.uuid,
91 | input,
92 | climbs
93 | })
94 | }
95 | }
96 |
97 | export default AreaMutations
98 |
99 | const isLatitude = (num: number): boolean => isFinite(num) && Math.abs(num) <= 90
100 | const isLongitude = (num: number): boolean => isFinite(num) && Math.abs(num) <= 180
-------------------------------------------------------------------------------- /src/graphql/area/AreaQueries.ts: --------------------------------------------------------------------------------
1 | import { AreaType, BulkAreasGQLQueryInput } from '../../db/AreaTypes'
2 | import { GQLContext } from '../../types'
3 |
4 | const AreaQueries = {
5 | cragsWithin: async (_, { filter }, { dataSources }: GQLContext): Promise<AreaType[]> => {
6 | const { areas } = dataSources
7 | const { bbox, zoom } = filter
8 | return await areas.findCragsWithin(bbox, zoom)
9 | },
10 |
11 | countries: async (_, params, { dataSources }: GQLContext): Promise<AreaType[]> => {
12 | const { areas } = dataSources
13 | return await areas.listAllCountries()
14 | },
15 |
16 | bulkAreas: async (_: any, params, { dataSources }: GQLContext): Promise<AreaType[]> => {
17 | const { areas } = dataSources
18 | const { ancestors } = params as BulkAreasGQLQueryInput
19 | return await areas.bulkDownloadAreas(ancestors)
20 | }
21 | }
22 |
23 | export default AreaQueries
24 |
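The queries above are thin wrappers over the area datasource. Calling the datasource directly looks like this (a sketch; the bbox is [minLng, minLat, maxLng, maxLat] following the turf convention used elsewhere in the codebase, and the coordinates and zoom are illustrative):

import MutableAreaDataSource from '../../model/MutableAreaDataSource.js'

// Roughly the eastern Sierra at a city-level zoom.
const areas = MutableAreaDataSource.getInstance()
const crags = await areas.findCragsWithin([-119.5, 37.0, -118.0, 38.0], 11)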
-------------------------------------------------------------------------------- /src/graphql/area/index.ts: --------------------------------------------------------------------------------
1 | import AreaQueries from './AreaQueries.js'
2 | import AreaMutations from './AreaMutations.js'
3 |
4 | export { AreaQueries, AreaMutations }
-------------------------------------------------------------------------------- /src/graphql/climb/ClimbMutations.ts: --------------------------------------------------------------------------------
1 | import muid, { MUUID } from 'uuid-mongodb'
2 | import { ContextWithAuth } from '../../types.js'
3 |
4 | const ClimbMutations = {
5 | updateClimbs: async (_, { input }, { dataSources, user }: ContextWithAuth): Promise<string[]> => {
6 | const { climbs: ds } = dataSources
7 | const { changes, parentId } = input
8 |
9 | if (user?.uuid == null) throw new Error('Missing user uuid')
10 |
11 | return await ds.addOrUpdateClimbs(user.uuid, muid.from(parentId), changes)
12 | },
13 |
14 | deleteClimbs: async (_, { input }, { dataSources, user }: ContextWithAuth): Promise<number> => {
15 | const { climbs: ds } = dataSources
16 |
17 | if (user?.uuid == null) throw new Error('Missing user uuid')
18 |
19 | const { idList, parentId } = input
20 |
21 | const toBeDeletedList: MUUID[] = idList.map(entry => muid.from(entry))
22 |
23 | return await ds.deleteClimbs(user.uuid, muid.from(parentId), toBeDeletedList)
24 | }
25 | }
26 |
27 | export default ClimbMutations
28 |
-------------------------------------------------------------------------------- /src/graphql/climb/index.ts: --------------------------------------------------------------------------------
1 | import ClimbMutations from './ClimbMutations.js'
2 |
3 | export { ClimbMutations }
-------------------------------------------------------------------------------- /src/graphql/common/DateScalar.ts: --------------------------------------------------------------------------------
1 | import { GraphQLScalarType, Kind } from 'graphql'
2 |
3 | // See https://www.apollographql.com/docs/apollo-server/schema/custom-scalars/
4 |
5 | const dateScalar = new GraphQLScalarType({
6 | name: 'Date',
7 | description: 'Date custom scalar type',
8 | serialize (value: Date): number {
9 | return value.getTime() // Convert outgoing Date to integer for JSON
10 | },
11 | parseValue (value: number): Date {
12 | return new Date(value) // Convert incoming integer to Date
13 | },
14 | parseLiteral (ast) {
15 | if (ast.kind === Kind.INT) {
16 | return new Date(parseInt(ast.value, 10)) // Convert hard-coded AST string to integer and then to Date
17 | }
18 | return null // Invalid hard-coded value (not an integer)
19 | }
20 | })
21 |
22 | export default dateScalar
23 |
-------------------------------------------------------------------------------- /src/graphql/common/MuuidScalar.ts: --------------------------------------------------------------------------------
1 | import { GraphQLScalarType, Kind } from 'graphql'
2 | import muid, { MUUID } from 'uuid-mongodb'
3 | import { muuidToString } from '../../utils/helpers.js'
4 |
5 | // Not yet possible to use scalars on the client. See https://github.com/apollographql/apollo-client/issues/8857
6 | const fromString = (s: string): MUUID => muid.from(s)
7 |
8 | // See https://www.apollographql.com/docs/apollo-server/schema/custom-scalars/
9 |
10 | const MuuidScalar = new GraphQLScalarType({
11 | name: 'MUUID',
12 | description: 'Mongo uuid custom scalar type',
13 |
14 | // Convert outgoing Muid to string for JSON
15 | serialize (value: MUUID): string {
16 | return muuidToString(value)
17 | },
18 |
19 | // Convert incoming uuid (Ex. df00a273-5215-4bf9-a5d5-9793428b8650) to MUUID
20 | parseValue (value: any): MUUID {
21 | if (typeof value === 'string') {
22 | return fromString(value)
23 | }
24 | throw Error('GraphQL MuuidScalar parser expected a `string`.')
25 | },
26 |
27 | parseLiteral (ast) {
28 | if (ast.kind === Kind.STRING) {
29 | return fromString(ast.value)
30 | }
31 | return null // Invalid hard-coded value (not a string)
32 | }
33 | })
34 |
35 | export default MuuidScalar
36 |
-------------------------------------------------------------------------------- /src/graphql/common/index.ts: --------------------------------------------------------------------------------
1 | import resolvers from './resolvers.js'
2 | import typeDef from './typeDef.js'
3 |
4 | export { resolvers as CommonResolvers, typeDef as CommonTypeDef }
-------------------------------------------------------------------------------- /src/graphql/common/resolvers.ts: --------------------------------------------------------------------------------
1 | import dateScalar from './DateScalar.js'
2 | import MuuidScalar from './MuuidScalar.js'
3 |
4 | const resolvers = {
5 | Date: dateScalar,
6 | MUUID: MuuidScalar
7 | }
8 |
9 | export default resolvers
10 |
-------------------------------------------------------------------------------- /src/graphql/common/typeDef.ts: --------------------------------------------------------------------------------
1 | import { gql } from 'graphql-tag'
2 | const typeDefs = gql`
3 | scalar Date
4 | scalar MUUID
5 | `
6 |
7 | export default typeDefs
8 |
-------------------------------------------------------------------------------- /src/graphql/history/HistoryFieldResolvers.ts: --------------------------------------------------------------------------------
1 | import { ChangeLogType, BaseChangeRecordType, SupportedCollectionTypes, DocumentKind } from '../../db/ChangeLogType.js'
2 | import { AuthorMetadata, DataSourcesType } from '../../types.js'
3 | import { exhaustiveCheck } from '../../utils/helpers.js'
4 |
5 | /**
6 | * History schema field resolvers
7 | */
8 | const resolvers = {
9 | History: {
10 | id: (node: ChangeLogType) => node._id.toString(),
11 |
12 | editedBy: (node: ChangeLogType) => node.editedBy.toUUID().toString(),
13 |
14 | editedByUser: async (node: ChangeLogType, _: any, { dataSources }) => {
15 | const { users } = dataSources as DataSourcesType
16 | const u = await users.getUsername(node.editedBy)
17 | return u?.username ?? null
18 | }
19 | },
20 |
21 | Change: {
22 | changeId: (node: BaseChangeRecordType) => node._id._data,
23 |
24 | updateDescription: ({ updateDescription }: BaseChangeRecordType) =>
25 | updateDescription == null
26 | ? ({
27 | updatedFields: [],
28 | removedFields: [],
29 | truncatedArrays: []
30 | })
31 | : updateDescription
32 | },
33 |
34 | Document: {
35 | __resolveType (node: SupportedCollectionTypes) {
36 | switch (node.kind) {
37 | case DocumentKind.areas:
38 | return 'Area'
39 | case DocumentKind.climbs:
40 | return 'Climb'
41 | case DocumentKind.organizations:
42 | return 'Organization'
43 | default:
44 | return exhaustiveCheck(node.kind)
45 | }
46 | }
47 | },
48 |
49 | AuthorMetadata: {
50 | createdBy: (node: AuthorMetadata) => node?.createdBy?.toUUID().toString(),
51 | updatedBy: (node: AuthorMetadata) => node?.updatedBy?.toUUID().toString(),
52 |
53 | createdByUser: async (node: AuthorMetadata, _: any, { dataSources }) => {
54 | const { users } = dataSources as DataSourcesType
55 | if (node?.createdBy == null) return null
56 | const u = await users.getUsername(node.createdBy)
57 | return u?.username ?? null
58 | },
59 |
60 | updatedByUser: async (node: AuthorMetadata, _: any, { dataSources }) => {
61 | const { users } = dataSources as DataSourcesType
62 | if (node?.updatedBy == null) return null
63 | const u = await users.getUsername(node.updatedBy)
64 | return u?.username ?? null
65 | }
66 | }
67 | }
68 |
69 | export default resolvers
70 |
-------------------------------------------------------------------------------- /src/graphql/history/HistoryQueries.ts: --------------------------------------------------------------------------------
1 | import muid from 'uuid-mongodb'
2 |
3 | import {
4 | ChangeLogType, GetHistoryInputFilterType,
5 | GetAreaHistoryInputFilterType,
6 | GetOrganizationHistoryInputFilterType
7 | } from '../../db/ChangeLogType.js'
8 | import { GQLContext } from '../../types.js'
9 |
10 | const HistoryQueries = {
11 | getChangeHistory: async (_, { filter }, { dataSources }: GQLContext): Promise<ChangeLogType[]> => {
12 | const { history } = dataSources
13 | const { uuidList }: GetHistoryInputFilterType = filter ?? {}
14 | // Note: userUuid, fromDate, toDate filters don't currently work.
15 | // Note: though we pull uuidList, we don't use it either.
16 |
17 | // Convert array of uuid in string to UUID[]
18 | const muidList = uuidList?.map(entry => muid.from(entry)) ?? []
19 | return await history.getChangeSets(muidList)
20 | },
21 |
22 | getAreaHistory: async (_, { filter }, { dataSources }: GQLContext): Promise<ChangeLogType[]> => {
23 | const { history } = dataSources
24 | const { areaId }: GetAreaHistoryInputFilterType = filter ?? {}
25 | const id = muid.from(areaId)
26 | return await history.getAreaChangeSets(id)
27 | },
28 |
29 | getOrganizationHistory: async (_, { filter }, { dataSources }: GQLContext): Promise<ChangeLogType[]> => {
30 | const { history } = dataSources
31 | const { orgId }: GetOrganizationHistoryInputFilterType = filter ?? {}
32 | return await history.getOrganizationChangeSets(orgId)
33 | }
34 | }
35 |
36 | export default HistoryQueries
37 |
-------------------------------------------------------------------------------- /src/graphql/history/index.ts: --------------------------------------------------------------------------------
1 | import HistoryQueries from './HistoryQueries.js'
2 | import HistoryFieldResolvers from './HistoryFieldResolvers.js'
3 | export { HistoryQueries, HistoryFieldResolvers }
-------------------------------------------------------------------------------- /src/graphql/media/MediaResolvers.ts: --------------------------------------------------------------------------------
1 | import { EntityTag, MediaByUsers, MediaObject, TagByUser } from '../../db/MediaObjectTypes.js'
2 | import { geojsonPointToLatitude, geojsonPointToLongitude } from '../../utils/helpers.js'
3 | import { DataSourcesType } from '../../types.js'
4 |
5 | const MediaResolvers = {
6 | MediaByUsers: {
7 | userUuid: (node: MediaByUsers) => node.userUuid.toUUID().toString(),
8 | username:
9 | async (node: MediaByUsers, _: any, { dataSources }) => {
10 | const { users } = dataSources as DataSourcesType
11 | const u = await users.getUsername(node.userUuid)
12 | return u?.username ?? null
13 | }
14 | },
15 |
16 | MediaWithTags: {
17 | id: (node: MediaObject) => node._id,
18 | username: async (node: MediaObject, _: any, { dataSources }) => {
19 | const { users } = dataSources as DataSourcesType
20 | const u = await users.getUsername(node.userUuid)
21 | return u?.username ?? null
22 | },
23 | user: async (node: MediaObject, _: any, { dataSources }) => {
24 | const { users } = dataSources as DataSourcesType
25 | const u = await users.getUserPublicProfileByUuid(node.userUuid)
26 | return u ?? null
27 | },
28 | uploadTime: (node: MediaObject) => node.createdAt
29 | },
30 |
31 | EntityTag: {
32 | id: (node: EntityTag) => node._id,
33 | targetId: (node: EntityTag) => node.targetId.toUUID().toString(),
34 | lat: (node: EntityTag) => geojsonPointToLatitude(node.lnglat),
35 | lng: (node: EntityTag) => geojsonPointToLongitude(node.lnglat),
36 | topoData: (node: EntityTag) => node?.topoData
37 | },
38 |
39 | DeleteTagResult: {
40 | // nothing to override
41 | },
42 |
43 | TagsByUser: {
44 | userUuid: (node: TagByUser) => node.userUuid.toUUID().toString(),
45 | username: async (node: TagByUser, _: any, { dataSources }) => {
46 | const { users } = dataSources as DataSourcesType
47 | const u = await users.getUsername(node.userUuid)
48 | return u?.username ?? null
49 | }
50 | }
51 | }
52 |
53 | export default MediaResolvers
54 |
-------------------------------------------------------------------------------- /src/graphql/media/index.ts: --------------------------------------------------------------------------------
1 | import MediaQueries from './queries.js'
2 | import MediaMutations from './mutations.js'
3 | import MediaResolvers from './MediaResolvers.js'
4 |
5 | export { MediaQueries, MediaMutations, MediaResolvers }
-------------------------------------------------------------------------------- /src/graphql/media/mutations.ts: --------------------------------------------------------------------------------
1 | import muid from 'uuid-mongodb'
2 | import mongoose from 'mongoose'
3 | import { GQLContext } from '../../types.js'
4 | import { EntityTag, EntityTagDeleteGQLInput, AddEntityTagGQLInput, MediaObject, MediaObjectGQLInput, DeleteMediaGQLInput } from '../../db/MediaObjectTypes.js'
5 |
6 | const MediaMutations = {
7 | addMediaObjects: async (_: any, args, { dataSources }: GQLContext): Promise<MediaObject[]> => {
8 | const { media } = dataSources
9 | const { input }: { input: MediaObjectGQLInput[] } = args
10 | return await media.addMediaObjects(input)
11 | },
12 |
13 | deleteMediaObject: async (_: any, args, { dataSources }: GQLContext): Promise<boolean> => {
14 | const { media } = dataSources
15 | const { input }: { input: DeleteMediaGQLInput } = args
16 | return await media.deleteMediaObject(new mongoose.Types.ObjectId(input.mediaId))
17 | },
18 |
19 | addEntityTag: async (_: any, args, { dataSources }: GQLContext): Promise<EntityTag> => {
20 | const { media } = dataSources
21 | const { input }: { input: AddEntityTagGQLInput } = args
22 | const { mediaId, entityId, entityType, topoData } = input
23 | return await media.upsertEntityTag({
24 | mediaId: new mongoose.Types.ObjectId(mediaId),
25 | entityUuid: muid.from(entityId),
26 | entityType,
27 | topoData
28 | })
29 | },
30 |
31 | removeEntityTag: async (_: any, args, { dataSources }: GQLContext): Promise<boolean> => {
32 | const { media } = dataSources
33 | const { input }: { input: EntityTagDeleteGQLInput } = args
34 | const { mediaId, tagId } = input
35 | return await media.removeEntityTag({
36 | mediaId: new mongoose.Types.ObjectId(mediaId),
37 | tagId: new mongoose.Types.ObjectId(tagId)
38 | })
39 | }
40 |
41 | // updateTopoData: async (_: any, args, { dataSources }: Context): Promise => {
42 | // const { media } = dataSources
43 | // const { input }: { input: AddEntityTagGQLInput } = args
44 | // const { mediaId, entityId, entityType
45 | }
46 |
47 | export default MediaMutations
48 |
-------------------------------------------------------------------------------- /src/graphql/media/queries.ts: --------------------------------------------------------------------------------
1 | import mongoose from 'mongoose'
2 | import muuid from 'uuid-mongodb'
3 | import { TagsLeaderboardType, MediaObject, MediaByUsers, UserMediaQueryInput, AreaMediaQueryInput, ClimbMediaQueryInput, MediaForFeedInput } from '../../db/MediaObjectTypes.js'
4 | import { GQLContext } from '../../types.js'
5 | import { IResolvers } from '@graphql-tools/utils'
6 |
7 | const MediaQueries: IResolvers = {
8 |
9 | media: async (_: any, { input }, { dataSources }: GQLContext): Promise<MediaObject | null> => {
10 | const { media } = dataSources
11 | const id = new mongoose.Types.ObjectId(input.id)
12 | return await media.getOneMediaObjectById(id)
13 | },
14 |
15 | getMediaForFeed: async (_, { input }, { dataSources }: GQLContext): Promise<MediaByUsers[]> => {
16 | const { media } = dataSources
17 | const { maxUsers = 10, maxFiles = 20 } = input as MediaForFeedInput
18 | return await media.getMediaByUsers({ maxUsers, maxFiles })
19 | },
20 |
21 | getUserMedia: async (_: any, { input }, { dataSources }: GQLContext): Promise<MediaObject[]> => {
22 | const { media } = dataSources
23 | const { userUuid, maxFiles = 1000 } = input as UserMediaQueryInput
24 | return await media.getOneUserMedia(userUuid.toString(), maxFiles)
25 | },
26 |
27 | getUserMediaPagination: async (_: any, { input }, { dataSources }: GQLContext): Promise<any> => {
28 | const { media } = dataSources
29 | const { userUuid } = input as UserMediaQueryInput
30 | return await media.getOneUserMediaPagination({ ...input, userUuid: muuid.from(userUuid) })
31 | },
32 |
33 | areaMediaPagination: async (_: any, { input }, { dataSources }: GQLContext): Promise<any> => {
34 | const { media } = dataSources
35 | const { areaUuid } = input as AreaMediaQueryInput
36 | return await media.getOneAreaMediaPagination({ ...input, areaUuid: muuid.from(areaUuid) })
37 | },
38 |
39 | climbMediaPagination: async (_: any, { input }, { dataSources }: GQLContext): Promise<any> => {
40 | const { media } = dataSources
41 | const { climbUuid } = input as ClimbMediaQueryInput
42 | return await media.getOneClimbMediaPagination({ ...input, climbUuid: muuid.from(climbUuid) })
43 | },
44 |
45 | getTagsLeaderboard: async (_, { limit = 30 }: { limit: number }, { dataSources }: GQLContext): Promise<TagsLeaderboardType> => {
46 | const { media } = dataSources
47 | return await media.getTagsLeaderboard(limit)
48 | }
49 | }
50 |
51 | export default MediaQueries
52 |
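All of the pagination resolvers above do the same dance: spread the raw GraphQL input and replace its string UUID with a Mongo binary UUID before calling the datasource. A sketch of that conversion (the UUID and maxFiles values are illustrative):

import muuid from 'uuid-mongodb'

const input = { userUuid: 'df00a273-5215-4bf9-a5d5-9793428b8650', maxFiles: 100 }
// Same shape the resolvers pass down to the media datasource.
const dsArgs = { ...input, userUuid: muuid.from(input.userUuid) }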
-------------------------------------------------------------------------------- /src/graphql/organization/OrganizationMutations.ts: --------------------------------------------------------------------------------
1 | import { OrganizationType } from '../../db/OrganizationTypes.js'
2 | import { ContextWithAuth } from '../../types.js'
3 |
4 | const OrganizationMutations = {
5 |
6 | addOrganization: async (_, { input }, { dataSources, user }: ContextWithAuth): Promise<OrganizationType> => {
7 | const { organizations } = dataSources
8 |
9 | // permission middleware shouldn't send undefined uuid
10 | if (user?.uuid == null) throw new Error('Missing user uuid')
11 | if (input?.orgType == null) throw new Error('Missing orgType')
12 | if (input?.displayName == null) throw new Error('Missing displayName')
13 |
14 | return await organizations.addOrganization(user.uuid, input.orgType, input)
15 | },
16 |
17 | updateOrganization: async (_, { input }, { dataSources, user }: ContextWithAuth): Promise<OrganizationType | null> => {
18 | const { organizations } = dataSources
19 |
20 | if (user?.uuid == null) throw new Error('Missing user uuid')
21 | if (input?.orgId == null) throw new Error('Missing organization orgId')
22 |
23 | // Except for 'orgId' other fields are optional, check to see if there are any fields
24 | // besides 'orgId'
25 | const fields = Object.keys(input).filter(key => key !== 'orgId')
26 | if (fields.length === 0) return null
27 |
28 | return await organizations.updateOrganization(
29 | user.uuid,
30 | input.orgId,
31 | input
32 | )
33 | }
34 | }
35 |
36 | export default OrganizationMutations
37 |
-------------------------------------------------------------------------------- /src/graphql/organization/OrganizationQueries.ts: --------------------------------------------------------------------------------
1 | import type OrganizationDataSource from '../../model/OrganizationDataSource'
2 | import { GQLContext, OrganizationGQLFilter, QueryByIdType, Sort } from '../../types'
3 |
4 |
const OrganizationQueries = {
5 | organization: async (_: any,
6 | { muuid }: QueryByIdType,
7 | context: GQLContext, info) => {
8 | const { dataSources } = context
9 | const { organizations }: { organizations: OrganizationDataSource } = dataSources
10 | if (muuid != null) {
11 | return await organizations.findOneOrganizationByOrgId(muuid)
12 | }
13 | return null
14 | },
15 |
16 | organizations: async (
17 | _,
18 | { filter, sort, limit = 40 }: { filter?: OrganizationGQLFilter, sort?: Sort, limit?: number },
19 | { dataSources }: GQLContext
20 | ) => {
21 | const { organizations }: { organizations: OrganizationDataSource } = dataSources
22 | const filtered = await organizations.findOrganizationsByFilter(filter)
23 | if (sort != null) {
24 | return await filtered.collation({ locale: 'en' }).sort(sort).limit(limit).toArray()
25 | } else {
26 | return await filtered.limit(limit).toArray()
27 | }
28 | }
29 | }
30 |
31 | export default OrganizationQueries
32 |
-------------------------------------------------------------------------------- /src/graphql/organization/index.ts: --------------------------------------------------------------------------------
1 | import OrganizationMutations from './OrganizationMutations.js'
2 | import OrganizationQueries from './OrganizationQueries.js'
3 |
4 | export { OrganizationMutations, OrganizationQueries }
-------------------------------------------------------------------------------- /src/graphql/schema/ClimbEdit.gql: --------------------------------------------------------------------------------
1 | type Mutation {
2 | """
3 | Create or update one or more climbs.
4 | """
5 | updateClimbs(input: UpdateClimbsInput): [ID]
6 |
7 | """
8 | Delete one or more climbs
9 | """
10 | deleteClimbs(input: DeleteManyClimbsInput): Int
11 | }
12 |
13 | input DeleteManyClimbsInput {
14 | parentId: ID
15 | idList: [ID]
16 | }
17 |
18 | """
19 | Create/update climbs input parameter.
20 | """
21 | input UpdateClimbsInput {
22 | "Parent area ID"
23 | parentId: ID!
24 | "Array of change records"
25 | changes: [SingleClimbChangeInput]
26 | }
27 |
28 | """
29 | Climb change record. If the climb ID is omitted or does not exist in the database, a new climb will be created.
30 | """
31 | input SingleClimbChangeInput {
32 | "Climb UUID"
33 | id: ID
34 | name: String
35 | disciplines: DisciplineType
36 | grade: String
37 | leftRightIndex: Int
38 | description: String
39 | location: String
40 | protection: String
41 | "Legacy FA data"
42 | fa: String
43 | "Length in meters"
44 | length: Int
45 | "Number of fixed anchors"
46 | boltsCount: Int
47 | "List of Pitch objects representing individual pitches of a multi-pitch climb"
48 | pitches: [PitchInput]
49 | experimentalAuthor: ExperimentalAuthorType
50 | }
51 |
52 | input GradeTypeInput {
53 | vscale: String
54 | yds: String
55 | ewbank: String
56 | french: String
57 | brazilianCrux: String
58 | font: String
59 | uiaa: String
60 | }
61 |
62 | input PitchInput {
63 | id: ID!
64 | parentId: ID!
65 | pitchNumber: Int!
66 | grades: GradeTypeInput
67 | disciplines: DisciplineType
68 | }
69 |
70 | input DisciplineType {
71 | trad: Boolean
72 | sport: Boolean
73 | bouldering: Boolean
74 | deepwatersolo: Boolean
75 | alpine: Boolean
76 | snow: Boolean
77 | ice: Boolean
78 | mixed: Boolean
79 | aid: Boolean
80 | tr: Boolean
81 | }
82 |
83 | input ExperimentalAuthorType {
84 | displayName: String!
85 | url: String!
86 | } -------------------------------------------------------------------------------- /src/graphql/schema/History.gql: -------------------------------------------------------------------------------- 1 | input AllHistoryFilter { 2 | uuidList: [ID] 3 | userUuid: ID 4 | fromDate: Date 5 | toDate: Date 6 | } 7 | 8 | input AreaHistoryFilter { 9 | areaId: ID 10 | } 11 | 12 | input OrganizationHistoryFilter { 13 | orgId: MUUID 14 | } 15 | 16 | type UpdateDescription { 17 | updatedFields: [String] 18 | } 19 | 20 | type Change { 21 | changeId: ID! 22 | dbOp: String! 23 | fullDocument: Document 24 | updateDescription: UpdateDescription 25 | } 26 | 27 | union Document = Area | Climb | Organization 28 | 29 | type History { 30 | id: ID! 31 | editedBy: ID! 32 | editedByUser: String 33 | operation: String! 34 | createdAt: Date! 35 | changes: [Change] 36 | } 37 | 38 | type Query { 39 | getChangeHistory(filter: AllHistoryFilter): [History] 40 | getAreaHistory(filter: AreaHistoryFilter): [History] 41 | getOrganizationHistory(filter: OrganizationHistoryFilter): [History] 42 | } 43 | 44 | """Author metadata""" 45 | type AuthorMetadata { 46 | createdAt: Date 47 | createdBy: ID 48 | createdByUser: String 49 | updatedAt: Date 50 | updatedBy: ID 51 | updatedByUser: String 52 | } 53 | -------------------------------------------------------------------------------- /src/graphql/schema/Organization.gql: -------------------------------------------------------------------------------- 1 | type Query { 2 | organization(muuid: MUUID): Organization 3 | organizations(filter: OrgFilter, sort: OrgSort, limit: Int): [Organization] 4 | } 5 | 6 | "A climbing organization, such as a local climbing coalition or advocacy group" 7 | type Organization { 8 | id: ID! 9 | """ 10 | Type of organization. Currently we only support local climbing organizations, which 11 | are associated with certain climbing areas. In the future there may be advocacy groups 12 | like the Access Fund or interest groups like the American Alpine Club that are not 13 | associated with any specific climbing areas. 14 | """ 15 | orgType: String! 16 | "We use orgId for identification of organizations. The id field is used in internal database relations." 17 | orgId: MUUID! 18 | 19 | associatedAreaIds: [MUUID] 20 | excludedAreaIds: [MUUID] 21 | 22 | "Name of organization to be displayed on the site." 23 | displayName: String! 24 | content: OrganizationContent 25 | createdAt: Date 26 | createdBy: MUUID 27 | updatedAt: Date 28 | updatedBy: MUUID 29 | } 30 | 31 | type OrganizationContent { 32 | website: String 33 | email: String 34 | donationLink: String 35 | instagramLink: String 36 | facebookLink: String 37 | hardwareReportLink: String 38 | description: String 39 | } 40 | 41 | input OrgSort { 42 | displayName: Int 43 | updatedAt: Int 44 | } 45 | 46 | input OrgFilter { 47 | displayName: DisplayNameFilter 48 | associatedAreaIds: AssociatedAreaIdsFilter 49 | excludedAreaIds: ExcludedAreaIdsFilter 50 | } 51 | 52 | input DisplayNameFilter { 53 | match: String! 54 | exactMatch: Boolean 55 | } 56 | 57 | "Filter for organizations that are associated with an area." 58 | input AssociatedAreaIdsFilter { 59 | includes: [MUUID] 60 | } 61 | 62 | "Filter for organizations that have not excluded themselves from an area."
63 | input ExcludedAreaIdsFilter { 64 | excludes: [MUUID] 65 | } 66 | -------------------------------------------------------------------------------- /src/graphql/schema/OrganizationEdit.gql: -------------------------------------------------------------------------------- 1 | type Mutation { 2 | """ 3 | Add an organization 4 | """ 5 | addOrganization(input: AddOrganizationInput): Organization 6 | 7 | """ 8 | Update organization attributes 9 | """ 10 | updateOrganization(input: OrganizationEditableFieldsInput): Organization 11 | } 12 | 13 | input AddOrganizationInput { 14 | displayName: String! 15 | orgType: String! 16 | associatedAreaIds: [MUUID] 17 | excludedAreaIds: [MUUID] 18 | website: String 19 | email: String 20 | donationLink: String 21 | instagramLink: String 22 | facebookLink: String 23 | hardwareReportLink: String 24 | description: String 25 | } 26 | 27 | input OrganizationEditableFieldsInput { 28 | orgId: MUUID! 29 | associatedAreaIds: [MUUID] 30 | excludedAreaIds: [MUUID] 31 | displayName: String 32 | website: String 33 | email: String 34 | donationLink: String 35 | instagramLink: String 36 | facebookLink: String 37 | hardwareReportLink: String 38 | description: String 39 | } 40 | -------------------------------------------------------------------------------- /src/graphql/schema/README.md: -------------------------------------------------------------------------------- 1 | # Schema Files 2 | 3 | If you enjoy syntax highlighting and GQL formatting, this separation will be good for your health. 4 | Take a look at the https://marketplace.visualstudio.com/items?itemName=GraphQL.vscode-graphql extension, which provides auto-formatting and syntax highlighting for this project. 5 | 6 | This also gives us better clarity on services like GitHub, with syntax highlighting and improved readability. It also gives our project a better layout, with proper IDE icons that indicate file intent. 7 | 8 | ## Documentation Guidelines 9 | 10 | The schema defines available data, but should always **describe** the data and its purpose. No attribute should be assumed to be obvious to other developers. New developers in particular will have no sense of existing terminology, and data should be explained to API consumers on the assumption that they do not share a common vocabulary with you. 11 | 12 | You can use basic markdown inside field / query documentation, and we strongly encourage you to make use of it where it makes the content more readable. 13 | 14 | Feel free to copy/paste type documentation when there is no difference between exposed data and internal data, but note that some data is passed and understood differently within the API, or has no literal backing field and is resolved as a computed value. 15 | 16 | ## Exploring Schema 17 | 18 | Building this project locally and then running `yarn serve` will allow you to browse the schema on your local machine, with markdown rendering and a searchable structure. See the [Root README](../../../README.md) for more information on building. 19 | -------------------------------------------------------------------------------- /src/graphql/schema/Tag.gql: -------------------------------------------------------------------------------- 1 | type Query { 2 | getTags(input: GetTagInput): GetTagResponse 3 | } 4 | 5 | "Input for adding a new tag." 6 | input AddTagInput { 7 | mediaUrl: String! 8 | mediaUuid: ID! 9 | destinationId: ID! 10 | destinationType: Int! 11 | } 12 | 13 | type AddTagResponse { 14 | tagId: ID 15 | } 16 | 17 | input RemoveTagInput { 18 | tagId: ID!
19 | } 20 | 21 | type RemoveTagResponse { 22 | numDeleted: String 23 | } 24 | 25 | input GetTagInput { 26 | tagIds: [ID]! 27 | } 28 | 29 | type GetTagResponse { 30 | tag: [Tag] 31 | } 32 | 33 | "Tags are what link a post & photo to climb(s) and area(s). XMedia contains an array of TagIds." 34 | type Tag { 35 | _id: ID 36 | mediaUrl: String! 37 | mediaUuid: ID! 38 | destinationId: ID! 39 | destinationType: Int! 40 | } 41 | -------------------------------------------------------------------------------- /src/graphql/schema/User.gql: -------------------------------------------------------------------------------- 1 | type Mutation { 2 | """ 3 | Update a user profile or create a new profile if it doesn't exist. 4 | Note: The email field is required when creating a new profile and 5 | will be ignored in subsequent update calls to prevent users from 6 | changing their email. The frontend calls this API whenever a new user 7 | logs in; their email therefore should have been verified at this point. 8 | When we support email address change in the future, we will need to 9 | create a separate update-email mutation to make sure users take the 10 | necessary steps. 11 | """ 12 | updateUserProfile(input: UserProfileInput): Boolean 13 | } 14 | 15 | type Query { 16 | user(input: LocateUserBy!): UserPublicProfile! 17 | userPage(input: LocateUserBy!): UserPublicPage! 18 | 19 | "Check to see if a username already exists in the database." 20 | usernameExists(input: UsernameInput!): Boolean 21 | "Get username object by user uuid" 22 | getUsername(input: UserIDInput!): UsernameDetail 23 | "Get user public profile" 24 | getUserPublicProfileByUuid(input: UserIDInput!): UserPublicProfile 25 | "A user's page is their profile + their media (paginated)" 26 | getUserPublicPage(input: UsernameInput!): UserPublicPage 27 | } 28 | 29 | """ 30 | Users can be principally identified either by their username or by their user id. 31 | There is reason to prefer the latter over the former - usernames can be changed, and 32 | in any scenario where you might be caching or trying to produce a permanent resource 33 | reference you will want to prefer the uuid over the username. 34 | """ 35 | input LocateUserBy { 36 | username: String 37 | userUuid: ID 38 | } 39 | 40 | input UsernameInput { 41 | username: String! 42 | } 43 | 44 | input UserIDInput { 45 | userUuid: ID! 46 | } 47 | 48 | input UserProfileInput { 49 | userUuid: ID! 50 | username: String 51 | displayName: String 52 | bio: String 53 | website: String 54 | email: String 55 | avatar: String 56 | } 57 | 58 | "Username detail object" 59 | type UsernameDetail { 60 | userUuid: ID! 61 | username: String 62 | lastUpdated: Date 63 | } 64 | 65 | type UserPublicPage { 66 | profile: UserPublicProfile 67 | media: UserMedia 68 | } 69 | 70 | type UserPublicProfile { 71 | userUuid: ID! 72 | username: String!
73 | displayName: String 74 | bio: String 75 | website: String 76 | avatar: String 77 | } -------------------------------------------------------------------------------- /src/graphql/tag/TagResolvers.ts: -------------------------------------------------------------------------------- 1 | const TagResolvers = {} 2 | 3 | export default TagResolvers 4 | -------------------------------------------------------------------------------- /src/graphql/tag/index.ts: -------------------------------------------------------------------------------- 1 | import TagQueries from './queries.js' 2 | import TagResolvers from './TagResolvers.js' 3 | 4 | export { TagQueries, TagResolvers } 5 | -------------------------------------------------------------------------------- /src/graphql/tag/queries.ts: -------------------------------------------------------------------------------- 1 | import { getTagModel } from '../../db/TagSchema.js' 2 | import { GetTagsInputType } from '../../db/TagTypes.js' 3 | 4 | const TagQueries = { 5 | // Given a list of TagIds, return a list of Tag documents. 6 | getTags: async (_, { input }: { input: GetTagsInputType }) => { 7 | const TagModel = getTagModel() 8 | const tag = await TagModel.find({ _id: { $in: input.tagIds } }).lean() 9 | return { tag } 10 | } 11 | } 12 | 13 | export default TagQueries 14 | -------------------------------------------------------------------------------- /src/graphql/tick/TickImportTypeDef.ts: -------------------------------------------------------------------------------- 1 | import { gql } from 'graphql-tag' 2 | 3 | const TickImportTypeDefs = gql` 4 | type TickImport{ 5 | uuid: [TickType] 6 | } 7 | ` 8 | 9 | export default TickImportTypeDefs 10 | -------------------------------------------------------------------------------- /src/graphql/tick/TickMutations.ts: -------------------------------------------------------------------------------- 1 | import mongoose from 'mongoose' 2 | import { TickType } from '../../db/TickTypes' 3 | import type TickDataSource from '../../model/TickDataSource' 4 | 5 | const TickMutations = { 6 | addTick: async ( 7 | _, 8 | { input }, 9 | { dataSources }) => { 10 | const { ticks }: { ticks: TickDataSource } = dataSources 11 | const tick: TickType = input 12 | return await ticks.addTick(tick) 13 | }, 14 | deleteTick: async ( 15 | _, 16 | { _id }, 17 | { dataSources }) => { 18 | const { ticks }: { ticks: TickDataSource } = dataSources 19 | const res = await ticks.deleteTick(_id) 20 | if (res?.deletedCount === 1) return { _id, removed: true } 21 | return { _id, removed: false } 22 | }, 23 | deleteAllTicks: async ( 24 | _, 25 | { userId }, 26 | { dataSources }) => { 27 | const { ticks }: { ticks: TickDataSource } = dataSources 28 | const res = await ticks.deleteAllTicks(userId) 29 | if (res?.deletedCount > 0) return { deletedCount: res?.deletedCount, removed: true } 30 | return { deletedCount: 0, removed: false } 31 | }, 32 | importTicks: async ( 33 | _, 34 | { input }, 35 | { dataSources }) => { 36 | const { ticks }: { ticks: TickDataSource } = dataSources 37 | const tickImport: TickType[] = input 38 | const userId = tickImport[0].userId 39 | await ticks.deleteImportedTicks(userId) 40 | return await ticks.importTicks(tickImport) 41 | }, 42 | editTick: async ( 43 | _, 44 | { input }, 45 | { dataSources }) => { 46 | const { ticks }: { ticks: TickDataSource } = dataSources 47 | const { _id, updatedTick } = input 48 | if (updatedTick.dateClimbed != null) { 49 | const date = new Date(updatedTick.dateClimbed) 50 | if (!(date 
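// new Date() on an unparseable string yields an Invalid Date whose getTime() is NaN,
// which is exactly what this guard rejects before the noon-UTC normalization below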
instanceof Date && !isNaN(date.getTime()))) { 51 | throw new Error('Invalid date format') 52 | } 53 | updatedTick.dateClimbed = new Date(`${date.toISOString().split('T')[0]}T12:00:00Z`) 54 | } 55 | return await ticks.editTick(new mongoose.Types.ObjectId(_id), updatedTick) 56 | } 57 | } 58 | 59 | export default TickMutations 60 | -------------------------------------------------------------------------------- /src/graphql/tick/TickQueries.ts: -------------------------------------------------------------------------------- 1 | import { TickType, TickUserSelectors } from '../../db/TickTypes' 2 | import type TickDataSource from '../../model/TickDataSource' 3 | 4 | const TickQueries = { 5 | userTicks: async (_, input: TickUserSelectors, { dataSources }): Promise => { 6 | const { ticks }: { ticks: TickDataSource } = dataSources 7 | return await ticks.ticksByUser(input) 8 | }, 9 | userTicksByClimbId: async (_, input, { dataSources }): Promise => { 10 | const { ticks }: { ticks: TickDataSource } = dataSources 11 | const { climbId, userId } = input 12 | return await ticks.ticksByUserIdAndClimb(climbId, userId) 13 | } 14 | } 15 | 16 | export default TickQueries 17 | -------------------------------------------------------------------------------- /src/graphql/tick/TickResolvers.ts: -------------------------------------------------------------------------------- 1 | import muuid from 'uuid-mongodb' 2 | import { TickType } from '../../db/TickTypes.js' 3 | import { GQLContext } from '../../types.js' 4 | 5 | export const TickResolvers = { 6 | TickType: { 7 | user: async (node: TickType, args: any, { dataSources }: GQLContext) => { 8 | const { users } = dataSources 9 | return await users.getUserPublicProfileByUuid(muuid.from(node.userId)) 10 | }, 11 | 12 | climb: async (node: TickType, args: any, { dataSources }: GQLContext) => { 13 | const { areas } = dataSources 14 | return await areas.findOneClimbByUUID(muuid.from(node.climbId)) 15 | } 16 | } 17 | } 18 | -------------------------------------------------------------------------------- /src/graphql/tick/index.ts: -------------------------------------------------------------------------------- 1 | import TickMutations from './TickMutations.js' 2 | import TickQueries from './TickQueries.js' 3 | import { TickResolvers } from './TickResolvers.js' 4 | 5 | export { TickMutations, TickQueries, TickResolvers } 6 | -------------------------------------------------------------------------------- /src/graphql/user/UserMutations.ts: -------------------------------------------------------------------------------- 1 | import { DataSourcesType, ContextWithAuth } from '../../types.js' 2 | import { UpdateProfileGQLInput } from '../../db/UserTypes.js' 3 | 4 | const UserMutations = { 5 | updateUserProfile: async (_: any, { input }, { dataSources, user: authenticatedUser }: ContextWithAuth) => { 6 | const { users }: DataSourcesType = dataSources 7 | 8 | if (authenticatedUser?.uuid == null) throw new Error('Missing user uuid') 9 | 10 | return await users.createOrUpdateUserProfile(authenticatedUser.uuid, input as UpdateProfileGQLInput) 11 | } 12 | 13 | } 14 | 15 | export default UserMutations 16 | -------------------------------------------------------------------------------- /src/graphql/user/UserQueries.ts: -------------------------------------------------------------------------------- 1 | import muuid from 'uuid-mongodb' 2 | import { GraphQLError } from 'graphql' 3 | 4 | import { DataSourcesType, ContextWithAuth, GQLContext } from '../../types.js' 5 | import { 
GetUsernameReturn, UserPublicProfile, UserPublicPage } from '../../db/UserTypes.js' 6 | 7 | const UserQueries = { 8 | user: async (_: any, { input }, { dataSources }: ContextWithAuth): Promise<UserPublicProfile> => { 9 | const { users }: DataSourcesType = dataSources 10 | 11 | let uuid = (input.userUuid !== undefined) && muuid.from(input.userUuid) 12 | if (uuid === false) { 13 | if (input.username === undefined) { 14 | throw new Error('Supply either UUID (preferred) or username') 15 | } 16 | uuid = await users.uuidFromUsername(input.username) 17 | } 18 | 19 | const profile = await users.getUserPublicProfileByUuid(uuid) 20 | 21 | if (profile === null) { 22 | throw new Error('The requested user has no associated public profile') 23 | } 24 | 25 | return profile 26 | }, 27 | 28 | userPage: async (_: any, { input }, { dataSources }: ContextWithAuth): Promise<UserPublicPage> => { 29 | const { users, media: mediaDS }: DataSourcesType = dataSources 30 | 31 | let uuid = (input.userUuid !== undefined) && muuid.from(input.userUuid) 32 | if (uuid === false) { 33 | uuid = await users.uuidFromUsername(input.username) 34 | } 35 | 36 | const profile = await users.getUserPublicProfileByUuid(uuid) 37 | if (profile == null) { 38 | throw new GraphQLError('User profile not found.', { 39 | extensions: { 40 | code: 'NOT_FOUND' 41 | } 42 | }) 43 | } 44 | 45 | const media = await mediaDS.getOneUserMediaPagination({ userUuid: profile._id }) 46 | return { 47 | profile, 48 | media 49 | } 50 | }, 51 | 52 | usernameExists: async (_: any, { input }, { dataSources }): Promise<boolean> => { 53 | const { users }: DataSourcesType = dataSources 54 | return await users.usernameExists(input.username) 55 | }, 56 | 57 | getUsername: async (_: any, { input }, { dataSources }): Promise<GetUsernameReturn | null> => { 58 | const { users }: DataSourcesType = dataSources 59 | const uuid = muuid.from(input.userUuid) 60 | return await users.getUsername(uuid) 61 | }, 62 | 63 | getUserPublicProfileByUuid: async (_: any, { input }, { dataSources }: ContextWithAuth): Promise<UserPublicProfile | null> => { 64 | const { users }: DataSourcesType = dataSources 65 | const uuid = muuid.from(input.userUuid) 66 | return await users.getUserPublicProfileByUuid(uuid) 67 | }, 68 | 69 | getUserPublicPage: async (_: any, { input }, { dataSources }: GQLContext): Promise<UserPublicPage> => { 70 | const { users, media: mediaDS }: DataSourcesType = dataSources 71 | const profile = await users.getUserPublicProfile(input.username) 72 | if (profile == null) { 73 | throw new GraphQLError('User profile not found.', { 74 | extensions: { 75 | code: 'NOT_FOUND' 76 | } 77 | }) 78 | } 79 | const media = await mediaDS.getOneUserMediaPagination({ userUuid: profile._id }) 80 | return { 81 | profile, 82 | media 83 | } 84 | } 85 | } 86 | 87 | export default UserQueries 88 | -------------------------------------------------------------------------------- /src/graphql/user/UserResolvers.ts: -------------------------------------------------------------------------------- 1 | import { GetUsernameReturn, UserPublicProfile } from '../../db/UserTypes.js' 2 | 3 | const UserResolvers: object = { 4 | 5 | UserPublicProfile: { 6 | userUuid: (node: UserPublicProfile) => node._id.toUUID().toString() 7 | }, 8 | 9 | UsernameDetail: { 10 | userUuid: (node: GetUsernameReturn) => node._id.toUUID().toString(), 11 | lastUpdated: (node: GetUsernameReturn) => node.updatedAt 12 | } 13 | } 14 | 15 | export default UserResolvers 16 | -------------------------------------------------------------------------------- /src/graphql/user/index.ts:
-------------------------------------------------------------------------------- 1 | import UserQueries from './UserQueries.js' 2 | import UserMutations from './UserMutations.js' 3 | import UserResolvers from './UserResolvers.js' 4 | 5 | export { UserQueries, UserMutations, UserResolvers } 6 | -------------------------------------------------------------------------------- /src/logger.ts: -------------------------------------------------------------------------------- 1 | import pino from 'pino' 2 | import { createWriteStream } from 'pino-logflare' 3 | 4 | const setupLogFlare = (apiKey?: string, sourceToken?: string): any | undefined => { 5 | if (typeof apiKey !== 'undefined' && typeof sourceToken !== 'undefined') { 6 | return createWriteStream({ 7 | apiKey, 8 | sourceToken 9 | }) 10 | } 11 | return undefined 12 | } 13 | 14 | export const logger = pino({ 15 | name: 'openbeta-graphql', 16 | level: 'info' 17 | }, setupLogFlare(process.env.LOGFLARE_API_KEY, process.env.LOGFLARE_SOURCE_TOKEN)) 18 | -------------------------------------------------------------------------------- /src/main.ts: -------------------------------------------------------------------------------- 1 | import { connectDB, defaultPostConnect } from './db/index.js' 2 | import { createServer } from './server.js' 3 | 4 | await connectDB(defaultPostConnect) 5 | await createServer() 6 | -------------------------------------------------------------------------------- /src/model/AreaHistoryDatasource.ts: -------------------------------------------------------------------------------- 1 | import { MongoDataSource } from 'apollo-datasource-mongodb' 2 | import { MUUID } from 'uuid-mongodb' 3 | import { AreaChangeLogType, ChangeLogType } from '../db/ChangeLogType.js' 4 | import { getChangeLogModel } from '../db/index.js' 5 | 6 | export class AreaHistoryDataSource extends MongoDataSource { 7 | changelogModel = getChangeLogModel() 8 | 9 | async getChangeSetsByUuid (areaUuid?: MUUID): Promise { 10 | let rs 11 | if (areaUuid == null) { 12 | // No area id specified: return all changes 13 | const filter: any = { 14 | $match: { 15 | 'changes.kind': 'areas' 16 | } 17 | } 18 | 19 | rs = await this.changelogModel.aggregate([ 20 | filter, 21 | { 22 | $sort: { 23 | createdAt: -1 24 | } 25 | } 26 | ]) 27 | return rs as AreaChangeLogType[] 28 | } else { 29 | const filter = { 30 | $match: { 31 | changes: { 32 | $elemMatch: 33 | { 'fullDocument.metadata.area_id': areaUuid, kind: 'areas' } 34 | } 35 | } 36 | } 37 | 38 | const rs2 = await this.changelogModel 39 | .aggregate([ 40 | filter, 41 | // https://github.com/Automattic/mongoose/issues/12415 42 | // { 43 | // $set: { 44 | // changes: { 45 | // $sortArray: { 46 | // input: '$changes', 47 | // sortBy: { 'fullDocument._change.seq': -1 } 48 | // } 49 | // } 50 | // } 51 | // }, 52 | { 53 | $sort: { 54 | createdAt: -1 55 | } 56 | } 57 | ]) 58 | return rs2 59 | } 60 | } 61 | 62 | static instance: AreaHistoryDataSource 63 | 64 | static getInstance (): AreaHistoryDataSource { 65 | if (AreaHistoryDataSource.instance == null) { 66 | // @ts-expect-error 67 | AreaHistoryDataSource.instance = new AreaHistoryDataSource({ modelOrCollection: getChangeLogModel() }) 68 | } 69 | return AreaHistoryDataSource.instance 70 | } 71 | } 72 | -------------------------------------------------------------------------------- /src/model/ChangeLogDataSource.ts: -------------------------------------------------------------------------------- 1 | import mongoose, { ClientSession } from 'mongoose' 2 | import { MongoDataSource } 
from 'apollo-datasource-mongodb' 3 | import { MUUID } from 'uuid-mongodb' 4 | 5 | import { getChangeLogModel } from '../db/index.js' 6 | import { 7 | AreaChangeLogType, 8 | BaseChangeRecordType, 9 | ChangeLogType, 10 | ClimbChangeLogType, 11 | OpType, 12 | OrganizationChangeLogType 13 | } from '../db/ChangeLogType' 14 | import { logger } from '../logger.js' 15 | import { AreaHistoryDataSource } from './AreaHistoryDatasource.js' 16 | import { OrganizationHistoryDataSource } from './OrganizationHistoryDatasource.js' 17 | 18 | export default class ChangeLogDataSource extends MongoDataSource { 19 | changeLogModel = getChangeLogModel() 20 | 21 | /** 22 | * Create a new change set 23 | * @param uuid 24 | * @param operation 25 | * @returns 26 | */ 27 | async create (session: ClientSession, uuid: MUUID, operation: OpType): Promise { 28 | const newChangeDoc: ChangeLogType = { 29 | _id: new mongoose.Types.ObjectId(), 30 | editedBy: uuid, 31 | operation, 32 | changes: [] 33 | } 34 | const rs = await this.changeLogModel.insertMany(newChangeDoc, { session }) 35 | if (rs?.length !== 1) throw new Error('Error inserting new change') 36 | return rs[0] 37 | } 38 | 39 | /** 40 | * Record a new change in the changeset 41 | * @param changeRecord 42 | */ 43 | async record (changeRecord: BaseChangeRecordType): Promise { 44 | const filter = { 45 | _id: changeRecord.fullDocument._change?.historyId 46 | } 47 | 48 | const rs = await this.changeLogModel.updateOne(filter, 49 | { 50 | $push: { 51 | changes: { 52 | $each: [changeRecord], 53 | $sort: { 'fullDocument._change.seq': -1 } 54 | } 55 | } 56 | }, { 57 | upsert: false 58 | }) 59 | 60 | if (rs.matchedCount < 1) { 61 | logger.error(changeRecord.fullDocument, 'History Id not found. Ignore change.') 62 | } 63 | return this 64 | } 65 | 66 | async getAreaChangeSets (areaUuid?: MUUID): Promise { 67 | return await AreaHistoryDataSource.getInstance().getChangeSetsByUuid(areaUuid) 68 | } 69 | 70 | async getOrganizationChangeSets (orgId?: MUUID): Promise { 71 | return await OrganizationHistoryDataSource.getInstance().getChangeSetsByOrgId(orgId) 72 | } 73 | 74 | /** 75 | * Return all changes. For now just handle Area type. 76 | * @param uuidList optional filter 77 | * @returns change sets 78 | */ 79 | async getChangeSets (uuidList: MUUID[]): Promise> { 80 | return await this.changeLogModel.aggregate([ 81 | { 82 | $sort: { 83 | createdAt: -1 84 | } 85 | } 86 | ]).limit(500) 87 | } 88 | 89 | async _testRemoveAll (): Promise { 90 | await this.changeLogModel.deleteMany() 91 | } 92 | 93 | static instance: ChangeLogDataSource 94 | 95 | static getInstance (): ChangeLogDataSource { 96 | if (ChangeLogDataSource.instance == null) { 97 | /** 98 | * Why suppress TS error? 
See: https://github.com/GraphQLGuide/apollo-datasource-mongodb/issues/88 99 | */ 100 | // @ts-expect-error 101 | ChangeLogDataSource.instance = new ChangeLogDataSource({ modelOrCollection: getChangeLogModel() }) 102 | } 103 | return ChangeLogDataSource.instance 104 | } 105 | } 106 | -------------------------------------------------------------------------------- /src/model/ClimbDataSource.ts: -------------------------------------------------------------------------------- 1 | import { MongoDataSource } from 'apollo-datasource-mongodb' 2 | import { MUUID } from 'uuid-mongodb' 3 | import { getAreaModel } from '../db/AreaSchema.js' 4 | import { getClimbModel } from '../db/ClimbSchema.js' 5 | import { ClimbType } from '../db/ClimbTypes.js' 6 | 7 | // TODO move climb helper functions from AreaDataSource here 8 | export default class ClimbDataSource extends MongoDataSource { 9 | areaModel = getAreaModel() 10 | climbModel = getClimbModel() 11 | 12 | /** 13 | * Helper look up method. This is mainly used for testing. See `AreaDataSource.findOneClimbByUUID()` for public API method. 14 | * @param id climb uuid 15 | * @returns ClimbType object or null if not found 16 | */ 17 | async findOneClimbByMUUID (id: MUUID): Promise { 18 | const rs = await this.climbModel.findOne({ _id: id, _deleting: { $eq: null } }).lean() 19 | return rs 20 | } 21 | } 22 | -------------------------------------------------------------------------------- /src/model/ExperimentalUserDataSource.ts: -------------------------------------------------------------------------------- 1 | import { MongoDataSource } from 'apollo-datasource-mongodb' 2 | import { ClientSession } from 'mongoose' 3 | import muuid, { MUUID } from 'uuid-mongodb' 4 | import { v5 as uuidv5, NIL } from 'uuid' 5 | 6 | import { getExperimentalUserModel } from '../db/index.js' 7 | import { ExperimentalUserType } from '../db/UserTypes.js' 8 | 9 | /** 10 | * @deprecated 11 | */ 12 | export default class ExperimentalUserDataSource extends MongoDataSource { 13 | experimentUserModel = getExperimentalUserModel() 14 | 15 | /** 16 | * Create or update a user. 17 | * @param session transaction 18 | * @param inputDisplayName 19 | * @param inputUrl 20 | * @returns User UUID if successful. null otherwise. 21 | */ 22 | async updateUser (session: ClientSession, inputDisplayName: string, inputUrl: string): Promise { 23 | const url: string = inputUrl 24 | let displayName = inputDisplayName != null ? inputDisplayName.trim().substring(0, 50) : '' 25 | let uuid: MUUID 26 | if (url == null || url.trim() === '') { 27 | if (displayName === '') { 28 | // displayName and url are both null/empty 29 | return null 30 | } 31 | uuid = muuid.v4() 32 | } else { 33 | // generate uuid from inputUrl 34 | uuid = muuid.from(uuidv5(inputUrl, NIL)) 35 | if (displayName === '') { 36 | displayName = `u_${uuid.toUUID().toString()}` 37 | } 38 | } 39 | 40 | const filter = { 41 | _id: uuid 42 | } 43 | const doc = { 44 | displayName, 45 | url 46 | } 47 | const rs = await this.experimentUserModel.findOneAndUpdate(filter, doc, { new: true, upsert: true, session }).lean() 48 | 49 | if (rs._id != null) { 50 | return rs._id 51 | } 52 | return null 53 | } 54 | 55 | static instance: ExperimentalUserDataSource 56 | 57 | static getInstance (): ExperimentalUserDataSource { 58 | if (ExperimentalUserDataSource.instance == null) { 59 | // Why suppress TS error? 
See: https://github.com/GraphQLGuide/apollo-datasource-mongodb/issues/88 60 | // @ts-expect-error 61 | ExperimentalUserDataSource.instance = new ExperimentalUserDataSource({ modelOrCollection: getExperimentalUserModel() }) 62 | } 63 | return ExperimentalUserDataSource.instance 64 | } 65 | } 66 | -------------------------------------------------------------------------------- /src/model/OrganizationDataSource.ts: -------------------------------------------------------------------------------- 1 | import { MongoDataSource } from 'apollo-datasource-mongodb' 2 | import { Filter } from 'mongodb' 3 | import type { FindCursor, WithId } from 'mongodb' 4 | import muuid from 'uuid-mongodb' 5 | 6 | import { getOrganizationModel } from '../db/index.js' 7 | import { AssociatedAreaIdsFilterParams, DisplayNameFilterParams, ExcludedAreaIdsFilterParams, OrganizationGQLFilter } from '../types' 8 | import { OrganizationType } from '../db/OrganizationTypes.js' 9 | import { muuidToString } from '../utils/helpers.js' 10 | 11 | export default class OrganizationDataSource extends MongoDataSource { 12 | organizationModel = getOrganizationModel() 13 | 14 | async findOrganizationsByFilter (filters?: OrganizationGQLFilter): Promise>> { 15 | let mongoFilter: any = {} 16 | if (filters != null) { 17 | mongoFilter = Object.entries(filters).reduce>((acc, [key, filter]): Filter => { 18 | switch (key) { 19 | case 'displayName': { 20 | const displayNameFilter = (filter as DisplayNameFilterParams) 21 | const param = displayNameFilter.exactMatch !== true ? new RegExp(displayNameFilter.match, 'ig') : displayNameFilter.match 22 | acc.displayName = param 23 | break 24 | } 25 | case 'associatedAreaIds': { 26 | const associatedAreaIdFilter = (filter as AssociatedAreaIdsFilterParams) 27 | acc.associatedAreaIds = { $in: associatedAreaIdFilter.includes } 28 | break 29 | } 30 | case 'excludedAreaIds': { 31 | const excludedAreaIdFilter = (filter as ExcludedAreaIdsFilterParams) 32 | acc.excludedAreaIds = { $not: { $in: excludedAreaIdFilter.excludes } } 33 | break 34 | } 35 | default: 36 | break 37 | } 38 | return acc 39 | }, {}) 40 | } 41 | 42 | mongoFilter._deleting = { $eq: null } // not marked for deletion 43 | return this.collection.find(mongoFilter) 44 | } 45 | 46 | async findOneOrganizationByOrgId (orgId: muuid.MUUID): Promise { 47 | const rs = await this.organizationModel 48 | .aggregate([ 49 | { $match: { orgId, _deleting: { $eq: null } } } 50 | ]) 51 | 52 | if (rs != null && rs.length === 1) { 53 | return rs[0] 54 | } 55 | throw new Error(`Organization ${muuidToString(orgId)} not found.`) 56 | } 57 | } 58 | -------------------------------------------------------------------------------- /src/model/OrganizationHistoryDatasource.ts: -------------------------------------------------------------------------------- 1 | import { MongoDataSource } from 'apollo-datasource-mongodb' 2 | import { MUUID } from 'uuid-mongodb' 3 | import { OrganizationChangeLogType } from '../db/ChangeLogType.js' 4 | import { getChangeLogModel } from '../db/index.js' 5 | 6 | export class OrganizationHistoryDataSource extends MongoDataSource { 7 | changelogModel = getChangeLogModel() 8 | 9 | async getChangeSetsByOrgId (orgId?: MUUID): Promise { 10 | let rs 11 | if (orgId == null) { 12 | // No orgId specified: return all changes 13 | const filter: any = { 14 | $match: { 15 | 'changes.kind': 'organizations' 16 | } 17 | } 18 | 19 | rs = await this.changelogModel.aggregate([ 20 | filter, 21 | { 22 | $sort: { 23 | createdAt: -1 24 | } 25 | } 26 | ]) 27 | return rs 
as OrganizationChangeLogType[] 28 | } else { 29 | const filter = { 30 | $match: { 31 | changes: { 32 | $elemMatch: 33 | { 'fullDocument.orgId': orgId, kind: 'organizations' } 34 | } 35 | } 36 | } 37 | 38 | const rs2 = await this.changelogModel 39 | .aggregate([ 40 | filter, 41 | // https://github.com/Automattic/mongoose/issues/12415 42 | // { 43 | // $set: { 44 | // changes: { 45 | // $sortArray: { 46 | // input: '$changes', 47 | // sortBy: { 'fullDocument._change.seq': -1 } 48 | // } 49 | // } 50 | // } 51 | // }, 52 | { 53 | $sort: { 54 | createdAt: -1 55 | } 56 | } 57 | ]) 58 | return rs2 59 | } 60 | } 61 | 62 | static instance: OrganizationHistoryDataSource 63 | 64 | static getInstance (): OrganizationHistoryDataSource { 65 | if (OrganizationHistoryDataSource.instance == null) { 66 | // @ts-expect-error 67 | OrganizationHistoryDataSource.instance = new OrganizationHistoryDataSource({ modelOrCollection: getChangeLogModel() }) 68 | } 69 | return OrganizationHistoryDataSource.instance 70 | } 71 | } 72 | -------------------------------------------------------------------------------- /src/model/PostDataSource.ts: -------------------------------------------------------------------------------- 1 | import { MongoDataSource } from 'apollo-datasource-mongodb' 2 | import { PostType } from '../db/PostTypes' 3 | import { getPostModel } from '../db/index.js' 4 | 5 | /** 6 | * Not being used at the moment 7 | */ 8 | export default class PostDataSource extends MongoDataSource { 9 | postModel = getPostModel() 10 | 11 | /** 12 | * @param post 13 | * takes in a new post 14 | * @returns 15 | * returns that new post 16 | */ 17 | async addPost ({ 18 | userId, 19 | xMedia, 20 | description 21 | }: PostType): Promise { 22 | try { 23 | const doc: PostType = { 24 | userId, 25 | xMedia, 26 | description 27 | } 28 | 29 | const res: PostType = await this.postModel.create({ ...doc }) 30 | return res 31 | } catch (ex) { 32 | console.error('Failed to add post:', ex) 33 | return null 34 | } 35 | } 36 | 37 | // Delete Post 38 | 39 | // Edit Post 40 | } 41 | -------------------------------------------------------------------------------- /src/model/XMediaDataSource.ts: -------------------------------------------------------------------------------- 1 | import { MongoDataSource } from 'apollo-datasource-mongodb' 2 | import { XMediaType } from '../db/XMediaTypes' 3 | import { getXMediaModel } from '../db/index.js' 4 | 5 | /** 6 | * Not being used at the moment 7 | */ 8 | export default class XMediaDataSource extends MongoDataSource { 9 | xMediaModel = getXMediaModel() 10 | 11 | /** 12 | * @param xMedia 13 | * takes in a new xMedia 14 | * @returns 15 | * returns that new xMedia 16 | */ 17 | async addXMedia ({ 18 | userId, 19 | mediaType, 20 | mediaUrl 21 | }: XMediaType): Promise { 22 | try { 23 | const doc: XMediaType = { 24 | userId, 25 | mediaType, 26 | mediaUrl 27 | } 28 | 29 | const res: XMediaType = await this.xMediaModel.create({ ...doc }) 30 | return res 31 | } catch (ex) { 32 | console.error('Failed to add XMedia:', ex) 33 | return null 34 | } 35 | } 36 | 37 | // Delete XMedia 38 | 39 | // Edit XMedia 40 | } 41 | -------------------------------------------------------------------------------- /src/model/__tests__/AreaUtils.ts: -------------------------------------------------------------------------------- 1 | describe('Test area utilities', () => { 2 | test.todo('The name comparison code unit') 3 | test.todo('The name-uniqueness system with other side-effects stripped out') 4 | }) 5 | 
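Nearly every data source in this section exposes the same lazy-singleton accessor: a static `instance` field plus a `getInstance()` that constructs the object on first use, working around the constructor typing issue linked above. A minimal sketch of the pattern, using a hypothetical ExampleDataSource and collection name (both illustrative, not part of the codebase):

import mongoose from 'mongoose'
import { MongoDataSource } from 'apollo-datasource-mongodb'

interface ExampleDoc { _id: string }

// Hypothetical data source showing the getInstance() pattern the real models share.
export default class ExampleDataSource extends MongoDataSource<ExampleDoc> {
  static instance: ExampleDataSource

  static getInstance (): ExampleDataSource {
    if (ExampleDataSource.instance == null) {
      // Same constructor-typing workaround as the real data sources:
      // https://github.com/GraphQLGuide/apollo-datasource-mongodb/issues/88
      // @ts-expect-error
      ExampleDataSource.instance = new ExampleDataSource({
        modelOrCollection: mongoose.connection.collection('examples') // illustrative collection name
      })
    }
    return ExampleDataSource.instance
  }
}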
-------------------------------------------------------------------------------- /src/model/__tests__/ChangeLogDS.ts: -------------------------------------------------------------------------------- 1 | import muuid from 'uuid-mongodb' 2 | import { getAreaModel, getChangeLogModel } from '../../db/index.js' 3 | import ChangeLogDataSource from '../ChangeLogDataSource.js' 4 | import { OpType } from '../../db/ChangeLogType.js' 5 | import { OperationType } from '../../db/AreaTypes.js' 6 | 7 | import { logger } from '../../logger.js' 8 | import inMemoryDB from '../../utils/inMemoryDB.js' 9 | 10 | describe('Area history', () => { 11 | let changeLog: ChangeLogDataSource 12 | 13 | beforeAll(async () => { 14 | await inMemoryDB.connect() 15 | 16 | try { 17 | await getAreaModel().collection.drop() 18 | await getChangeLogModel().collection.drop() 19 | } catch (e) { 20 | logger.info('Expected exception') 21 | } 22 | 23 | changeLog = ChangeLogDataSource.getInstance() 24 | }) 25 | 26 | afterAll(async () => { 27 | await inMemoryDB.close() 28 | }) 29 | 30 | it('should create a change record', async () => { 31 | const userId = muuid.v4() 32 | const op: OpType = OperationType.addCountry 33 | 34 | const session = await getChangeLogModel().startSession() 35 | const ret = await changeLog.create(session, userId, op) 36 | 37 | expect(ret._id).toBeDefined() 38 | expect(ret.editedBy).toEqual(userId) 39 | expect(ret.operation).toEqual(op) 40 | expect(ret.changes).toHaveLength(0) 41 | }) 42 | }) 43 | -------------------------------------------------------------------------------- /src/server.ts: -------------------------------------------------------------------------------- 1 | import { ApolloServer } from '@apollo/server' 2 | import { expressMiddleware } from '@apollo/server/express4' 3 | import { ApolloServerPluginDrainHttpServer } from '@apollo/server/plugin/drainHttpServer' 4 | import express from 'express' 5 | import cors from 'cors' 6 | import * as http from 'http' 7 | import bodyParser from 'body-parser' 8 | import { InMemoryLRUCache } from '@apollo/utils.keyvaluecache' 9 | 10 | import { applyMiddleware } from 'graphql-middleware' 11 | import { graphqlSchema } from './graphql/resolvers.js' 12 | import MutableAreaDataSource from './model/MutableAreaDataSource.js' 13 | import ChangeLogDataSource from './model/ChangeLogDataSource.js' 14 | import MutableMediaDataSource from './model/MutableMediaDataSource.js' 15 | import MutableClimbDataSource from './model/MutableClimbDataSource.js' 16 | import TickDataSource from './model/TickDataSource.js' 17 | import permissions from './auth/permissions.js' 18 | import { createContext } from './auth/middleware.js' 19 | import { localDevBypassAuthContext } from './auth/local-dev/middleware.js' 20 | import localDevBypassAuthPermissions from './auth/local-dev/permissions.js' 21 | import MutableOrgDS from './model/MutableOrganizationDataSource.js' 22 | import UserDataSource from './model/UserDataSource.js' 23 | import BulkImportDataSource from './model/BulkImportDataSource.js' 24 | 25 | /** 26 | * Create a GraphQL server 27 | */ 28 | export async function createServer (): Promise<{ app: express.Application, server: ApolloServer }> { 29 | const schema = applyMiddleware( 30 | graphqlSchema, 31 | (process.env.LOCAL_DEV_BYPASS_AUTH === 'true' ? 
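// When LOCAL_DEV_BYPASS_AUTH is set, permissive local-dev rules replace the real
// permission middleware; the same flag selects the auth-bypass context further below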
localDevBypassAuthPermissions : permissions).generate(graphqlSchema) 32 | ) 33 | const dataSources = ({ 34 | climbs: MutableClimbDataSource.getInstance(), 35 | areas: MutableAreaDataSource.getInstance(), 36 | bulkImport: BulkImportDataSource.getInstance(), 37 | organizations: MutableOrgDS.getInstance(), 38 | ticks: TickDataSource.getInstance(), 39 | history: ChangeLogDataSource.getInstance(), 40 | media: MutableMediaDataSource.getInstance(), 41 | users: UserDataSource.getInstance() 42 | }) 43 | 44 | const app = express() 45 | const httpServer = http.createServer(app) 46 | 47 | const server = new ApolloServer({ 48 | introspection: true, 49 | schema, 50 | plugins: [ApolloServerPluginDrainHttpServer({ httpServer })], 51 | cache: new InMemoryLRUCache({ 52 | max: 100 53 | }) 54 | }) 55 | // server must be started before applying middleware 56 | await server.start() 57 | 58 | const context = process.env.LOCAL_DEV_BYPASS_AUTH === 'true' ? localDevBypassAuthContext : createContext 59 | 60 | app.use('/', 61 | bodyParser.json({ limit: '10mb' }), 62 | cors(), 63 | express.json(), 64 | expressMiddleware(server, { 65 | context: async ({ req }) => ({ dataSources, ...await context({ req }) }) 66 | }) 67 | ) 68 | 69 | await new Promise(resolve => httpServer.listen({ port: 4000 }, resolve)) 70 | return { app, server } 71 | } 72 | -------------------------------------------------------------------------------- /src/types.ts: -------------------------------------------------------------------------------- 1 | import { BBox } from '@turf/helpers' 2 | import { MUUID } from 'uuid-mongodb' 3 | 4 | import { AreaType } from './db/AreaTypes.js' 5 | import type MutableAreaDataSource from './model/MutableAreaDataSource.js' 6 | import type TickDataSource from './model/TickDataSource.js' 7 | import type HistoryDataSource from './model/ChangeLogDataSource.js' 8 | import type MutableMediaDataSource from './model/MutableMediaDataSource.js' 9 | import MutableClimbDataSource from './model/MutableClimbDataSource.js' 10 | import MutableOrganizationDataSource from './model/MutableOrganizationDataSource.js' 11 | import type UserDataSource from './model/UserDataSource.js' 12 | import BulkImportDataSource from './model/BulkImportDataSource' 13 | 14 | export enum SortDirection { 15 | ASC = 1, 16 | DESC = -1 17 | } 18 | 19 | export type Sortable = 'area_name' 20 | 21 | export type Sort = Record<string, number> 22 | 23 | type Filterable = 'area_name' | 'leaf_status' | 'path_tokens' | 'field_compare' 24 | 25 | export interface ComparisonFilterParams { 26 | field: 'totalClimbs' | 'density' 27 | num: number 28 | comparison: 'lt' | 'gt' | 'eq' 29 | } 30 | 31 | export interface AreaFilterParams { 32 | match: string 33 | exactMatch: boolean | undefined 34 | } 35 | 36 | export interface LeafStatusParams { 37 | isLeaf: boolean 38 | } 39 | 40 | export interface PathTokenParams { 41 | tokens: string[] 42 | exactMatch: boolean | undefined 43 | size: number 44 | } 45 | 46 | type FilterParams = AreaFilterParams | LeafStatusParams | PathTokenParams | ComparisonFilterParams[] 47 | export type GQLFilter = Record<Filterable, FilterParams> 48 | 49 | export interface DisplayNameFilterParams { 50 | match: string 51 | exactMatch: boolean | undefined 52 | } 53 | 54 | export interface AssociatedAreaIdsFilterParams { 55 | includes: MUUID[] 56 | } 57 | 58 | export interface ExcludedAreaIdsFilterParams { 59 | excludes: MUUID[] 60 | } 61 | 62 | type OrganizationFilterable = 'displayName' | 'associatedAreaIds' | 'excludedAreaIds' 63 | 64 | type OrganizationFilterParams = DisplayNameFilterParams |
AssociatedAreaIdsFilterParams | ExcludedAreaIdsFilterParams 65 | export type OrganizationGQLFilter = Partial<Record<OrganizationFilterable, OrganizationFilterParams>> 66 | 67 | export type LNGLAT = [number, number] 68 | export type BBoxType = BBox 69 | 70 | export interface StatisticsType { 71 | totalClimbs: number 72 | totalCrags: number 73 | } 74 | 75 | export interface CragsNear { 76 | _id: string 77 | count: number 78 | crags: AreaType 79 | } 80 | 81 | export interface QueryByIdType { 82 | id?: string 83 | uuid?: string 84 | muuid?: MUUID 85 | } 86 | 87 | export interface AuthUserType { 88 | roles: string[] 89 | uuid: MUUID | undefined 90 | isBuilder: boolean 91 | } 92 | 93 | export interface DataSourcesType { 94 | areas: MutableAreaDataSource 95 | bulkImport: BulkImportDataSource 96 | organizations: MutableOrganizationDataSource 97 | ticks: TickDataSource 98 | history: HistoryDataSource 99 | media: MutableMediaDataSource 100 | climbs: MutableClimbDataSource 101 | users: UserDataSource 102 | } 103 | 104 | export interface GQLContext { 105 | dataSources: DataSourcesType 106 | } 107 | 108 | export interface ContextWithAuth extends GQLContext { 109 | user: AuthUserType 110 | } 111 | 112 | export interface AuthorMetadata { 113 | updatedAt?: Date 114 | updatedBy?: MUUID 115 | createdAt?: Date 116 | createdBy?: MUUID 117 | } 118 | -------------------------------------------------------------------------------- /src/utils/helpers.ts: -------------------------------------------------------------------------------- 1 | import { MUUID } from 'uuid-mongodb' 2 | import { Point } from '@turf/helpers' 3 | import { ClientSession } from 'mongoose' 4 | 5 | export const muuidToString = (m: MUUID): string => m.toUUID().toString() 6 | 7 | /** 8 | * Ensures that type-checking errors out if enums are not 9 | * handled exhaustively in switch statements. 10 | * Eg. 11 | * switch(val) { 12 | * case enumOne: 13 | * ... 14 | * default: 15 | * exhaustiveCheck(val) 16 | * } 17 | * @param _value 18 | */ 19 | export function exhaustiveCheck (_value: never): never { 20 | throw new Error(`ERROR! Enum not handled for ${JSON.stringify(_value)}`) 21 | } 22 | 23 | export const geojsonPointToLongitude = (point?: Point | undefined): number | undefined => point?.coordinates[0] 24 | export const geojsonPointToLatitude = (point?: Point): number | undefined => point?.coordinates[1] 25 | 26 | export const NON_ALPHANUMERIC_REGEX = /[\W_\s]+/g 27 | export const canonicalizeUsername = (username: string): string => username.replaceAll(NON_ALPHANUMERIC_REGEX, '') 28 | 29 | // withTransaction() doesn't return the callback result 30 | // see https://jira.mongodb.org/browse/NODE-2014 31 | export const withTransaction = async <T>(session: ClientSession, closure: () => Promise<T>): Promise<T | undefined> => { 32 | let result: T | undefined 33 | await session.withTransaction(async () => { 34 | result = await closure() 35 | return result 36 | }) 37 | return result 38 | } 39 | -------------------------------------------------------------------------------- /src/utils/inMemoryDB.ts: -------------------------------------------------------------------------------- 1 | import mongoose, { ConnectOptions } from 'mongoose' 2 | import { ChangeStream, ChangeStreamDocument, MongoClient } from 'mongodb' 3 | import { MongoMemoryReplSet } from 'mongodb-memory-server' 4 | import { checkVar, defaultPostConnect } from '../db/index.js' 5 | import { logger } from '../logger.js' 6 | import { testStreamListener } from '../db/edit/streamListener' 7 | 8 | /** 9 | * In-memory Mongo replset used for testing.
10 | * More portable than requiring the user to set up Mongo in a background Docker process. 11 | * Need a replset to facilitate transactions. 12 | */ 13 | let mongod: MongoMemoryReplSet 14 | let stream: ChangeStream | undefined 15 | 16 | /** 17 | * Connect to the in-memory database. 18 | */ 19 | export const connect = async (onChange?: (change: ChangeStreamDocument) => void): Promise<void> => { 20 | mongod = await MongoMemoryReplSet.create({ 21 | // Stream listener listens on DB denoted by 'MONGO_DBNAME' env var. 22 | replSet: { count: 1, storageEngine: 'wiredTiger', dbName: checkVar('MONGO_DBNAME') } 23 | }) 24 | const uri = await mongod.getUri(checkVar('MONGO_DBNAME')) 25 | logger.info(`Connecting to in-memory database ${uri}`) 26 | const mongooseOpts: ConnectOptions = { 27 | autoIndex: false // Create indices using defaultPostConnect instead. 28 | } 29 | 30 | await mongoose.connect(uri, mongooseOpts) 31 | mongoose.set('debug', false) // Set to 'true' to enable verbose mode 32 | stream = await defaultPostConnect(async () => await testStreamListener(onChange)) 33 | } 34 | 35 | /** 36 | * Drop database, close the connection and stop mongod. 37 | */ 38 | export const close = async (): Promise<void> => { 39 | await stream?.close() 40 | await mongoose.connection.dropDatabase() 41 | await mongoose.connection.close() 42 | await mongod.stop() 43 | } 44 | 45 | /** 46 | * Remove all the data for all db collections. 47 | */ 48 | export const clear = async (): Promise<void> => { 49 | const collections = mongoose.connection.collections 50 | 51 | for (const key in collections) { 52 | const collection = collections[key] 53 | await collection.deleteMany({}) 54 | } 55 | } 56 | 57 | /** 58 | * Bypass Mongoose to insert data directly into Mongo. 59 | * Useful for inserting data that is incompatible with Mongoose schemas for migration testing. 60 | * @param collection Name of collection for documents to be inserted into. 61 | * @param documents Documents to be inserted into collection.
62 | */ 63 | const insertDirectly = async (collection: string, documents: any[]): Promise<void> => { 64 | const uri = await mongod.getUri(checkVar('MONGO_DBNAME')) 65 | const client = new MongoClient(uri) 66 | try { 67 | const database = client.db(checkVar('MONGO_DBNAME')) 68 | const mCollection = database.collection(collection) 69 | const result = await mCollection.insertMany(documents) 70 | 71 | console.log(`${result.insertedCount} documents were inserted directly into MongoDB`) 72 | } finally { 73 | void client.close() 74 | } 75 | } 76 | 77 | export interface InMemoryDB { 78 | connect: () => Promise<void> 79 | close: () => Promise<void> 80 | clear: () => Promise<void> 81 | insertDirectly: (collection: string, documents: any[]) => Promise<void> 82 | } 83 | 84 | export default { connect, close, clear, insertDirectly, stream } 85 | -------------------------------------------------------------------------------- /src/utils/sanitize.ts: -------------------------------------------------------------------------------- 1 | import sanitizeHtml from 'sanitize-html' 2 | 3 | /** 4 | * Sanitize paragraphs for description/block of text 5 | * @param text user input 6 | * @returns sanitized text 7 | */ 8 | export const sanitize = (text: string): string => sanitizeHtml(text, { 9 | allowedTags: ['b', 'i', 'em', 'strong', 'a', 'p', 'br', 'ul', 'ol', 'li'], 10 | allowedAttributes: { 11 | a: ['href'] 12 | } 13 | }) 14 | 15 | /** 16 | * Strict sanitize text for heading & title 17 | * @param text user input 18 | * @returns sanitized text 19 | */ 20 | export const sanitizeStrict = (text: string): string => sanitizeHtml(text, { 21 | allowedTags: [], 22 | allowedAttributes: { 23 | } 24 | }).trim() 25 | 26 | export const trimToNull = (text?: string): string | null => { 27 | return text?.trim() ?? null 28 | } 29 | -------------------------------------------------------------------------------- /src/utils/testUtils.ts: -------------------------------------------------------------------------------- 1 | import jwt from 'jsonwebtoken' 2 | import { jest } from '@jest/globals' 3 | import request from 'supertest' 4 | import { ApolloServer } from '@apollo/server' 5 | import express from 'express' 6 | 7 | import type { InMemoryDB } from './inMemoryDB.js' 8 | import inMemoryDB from './inMemoryDB.js' 9 | import { createServer } from '../server.js' 10 | 11 | const PORT = 4000 12 | 13 | export interface QueryAPIProps { 14 | query?: string 15 | operationName?: string 16 | variables?: any 17 | userUuid?: string 18 | roles?: string[] 19 | port?: number 20 | endpoint?: string 21 | app?: express.Application 22 | body?: any 23 | } 24 | 25 | /* 26 | * Helper function for querying the locally-served API. It mocks JWT verification 27 | * so we can pretend to have any role we want when calling the API. 28 | */ 29 | export const queryAPI = async ({ 30 | query, 31 | operationName, 32 | variables, 33 | userUuid = '', 34 | roles = [], 35 | app, 36 | endpoint = '/', 37 | port = PORT 38 | }: QueryAPIProps): Promise<any> => { 39 | // Avoid needing to pass in actual signed tokens. 40 | const jwtSpy = jest.spyOn(jwt, 'verify') 41 | jwtSpy.mockImplementation(() => { 42 | return { 43 | // Roles defined at https://manage.auth0.com/dashboard/us/dev-fmjy7n5n/roles 44 | 'https://tacos.openbeta.io/roles': roles, 45 | 'https://tacos.openbeta.io/uuid': userUuid 46 | } 47 | }) 48 | 49 | const queryObj = { query, operationName, variables } 50 | let req = request(app ??
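// supertest's request() accepts either an in-process Express app or a base URL;
// prefer the app when the caller spun up its own server (see setUpServer below)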
`http://localhost:${port}`) 51 | .post(endpoint) 52 | .send(queryObj) 53 | 54 | if (userUuid != null) { 55 | req = req.set('Authorization', 'Bearer placeholder-jwt-see-SpyOn') 56 | } 57 | 58 | return await req 59 | } 60 | 61 | export interface SetUpServerReturnType { 62 | server: ApolloServer 63 | app: express.Application 64 | inMemoryDB: InMemoryDB 65 | } 66 | 67 | /* 68 | * Starts the in-memory Mongo replset, then boots the Apollo server against it. 69 | */ 70 | export const setUpServer = async (): Promise<SetUpServerReturnType> => { 71 | await inMemoryDB.connect() 72 | 73 | const { app, server } = await createServer() 74 | return { app, server, inMemoryDB } 75 | } 76 | 77 | export const isFulfilled = <T>( 78 | p: PromiseSettledResult<T> 79 | ): p is PromiseFulfilledResult<T> => p.status === 'fulfilled' 80 | export const isRejected = <T>( 81 | p: PromiseSettledResult<T> 82 | ): p is PromiseRejectedResult => p.status === 'rejected' 83 | -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "types": [ 4 | "node", 5 | "jest", 6 | ], 7 | "allowJs": true, 8 | "skipLibCheck": true, 9 | "target": "ESNext", 10 | "module": "ESNext", 11 | "esModuleInterop": true, 12 | "moduleResolution": "Node", 13 | "outDir": "build", 14 | "sourceMap": true, 15 | "strictNullChecks": true, 16 | "allowSyntheticDefaultImports": true, 17 | "forceConsistentCasingInFileNames": true, 18 | "resolveJsonModule": true 19 | }, 20 | "include": [ 21 | "src/**/*.ts", 22 | ] 23 | } 24 | -------------------------------------------------------------------------------- /tsconfig.release.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "./tsconfig.json", 3 | "exclude": [ 4 | "**/*.test.*", 5 | "**/__mocks__/*", 6 | "**/__tests__/*" 7 | ] 8 | } --------------------------------------------------------------------------------
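To close, a hedged sketch of how the test utilities in src/utils/testUtils.ts are typically wired together in an integration test. The import path, query document, and test names are illustrative assumptions; only setUpServer, queryAPI, and SetUpServerReturnType come from the code above:

import { queryAPI, setUpServer, type SetUpServerReturnType } from '../src/utils/testUtils.js'

describe('organizations query', () => {
  let env: SetUpServerReturnType

  beforeAll(async () => {
    // Boots the in-memory Mongo replset and the Apollo/Express server on port 4000.
    env = await setUpServer()
  })

  afterAll(async () => {
    await env.server.stop()
    await env.inMemoryDB.close()
  })

  it('answers a basic query', async () => {
    const res = await queryAPI({
      app: env.app,
      query: 'query { organizations(limit: 5) { orgId displayName } }'
    })
    expect(res.status).toBe(200)
  })
})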