├── .dockerignore ├── .github ├── cdk │ ├── .gitignore │ ├── cdkactions.yaml │ ├── main.ts │ ├── package.json │ ├── tsconfig.json │ └── yarn.lock └── workflows │ ├── cdkactions_build-and-deploy.yaml │ └── cdkactions_validate.yaml ├── .gitignore ├── Dockerfile ├── LICENSE ├── Pipfile ├── Pipfile.lock ├── README.md ├── buildingRedis.py ├── buildings.csv ├── cron ├── save_laundry_data.py └── send_gsr_push_notifications.py ├── docker-compose.yml ├── k8s └── values.yaml ├── runserver.py ├── server ├── __init__.py ├── account │ ├── __init__.py │ ├── account.py │ ├── courses.py │ ├── degrees.py │ └── settings.py ├── analytics.py ├── auth.py ├── base.py ├── buildings.py ├── calendar3year.py ├── dining │ ├── __init__.py │ ├── balance.py │ ├── diningImages.csv │ ├── diningRedis.py │ ├── hours_menus.py │ ├── preferences.py │ └── transactions.py ├── directory.py ├── event.py ├── fitness.py ├── homepage.py ├── housing.py ├── laundry.py ├── models.py ├── news.py ├── notifications.py ├── nso.py ├── pcr.py ├── penndata.py ├── polls │ ├── __init__.py │ ├── archive.py │ ├── creation.py │ └── vote.py ├── portal │ ├── __init__.py │ ├── account.py │ ├── creation.py │ └── posts.py ├── privacy.py ├── registrar.py ├── studyspaces │ ├── __init__.py │ ├── availability.py │ ├── book.py │ ├── cancel.py │ ├── deprecated.py │ ├── groups.py │ ├── models.py │ ├── notifications.py │ ├── reservations.py │ └── search.py ├── transit.py ├── utils.py └── weather.py ├── setup.cfg ├── stops.json ├── tests ├── __init__.py ├── laundry_snapshot.html ├── test_dining.py ├── test_general.py ├── test_laundry.py └── test_studyspaces.py ├── weather.json └── wsgi.py /.dockerignore: -------------------------------------------------------------------------------- 1 | # Docker 2 | Dockerfile 3 | .dockerignore 4 | 5 | # git 6 | .circleci 7 | .git 8 | .gitignore 9 | .gitmodules 10 | **/*.md 11 | LICENSE 12 | 13 | # Misc 14 | .coverage 15 | **/__pycache__/ 16 | tests/ 17 | 
-------------------------------------------------------------------------------- /.github/cdk/.gitignore: -------------------------------------------------------------------------------- 1 | node_modules/ 2 | main.js 3 | main.d.ts 4 | -------------------------------------------------------------------------------- /.github/cdk/cdkactions.yaml: -------------------------------------------------------------------------------- 1 | language: typescript 2 | app: node main.js 3 | -------------------------------------------------------------------------------- /.github/cdk/main.ts: -------------------------------------------------------------------------------- 1 | import * as dedent from "dedent-js"; 2 | import { Construct } from "constructs"; 3 | import { App, CheckoutJob, Stack, Workflow } from "cdkactions"; 4 | import { DeployJob, DockerPublishJob } from "@pennlabs/kraken"; 5 | 6 | export class LASStack extends Stack { 7 | constructor(scope: Construct, name: string) { 8 | super(scope, name); 9 | const workflow = new Workflow(this, 'build-and-deploy', { 10 | name: 'Build and Deploy', 11 | on: 'push', 12 | }); 13 | 14 | const checkJob = new CheckoutJob(workflow, 'check', { 15 | runsOn: 'ubuntu-latest', 16 | steps: [ 17 | { 18 | name: 'Cache', 19 | uses: 'actions/cache@v2', 20 | with: { 21 | path: '~/.local/share/virtualenvs', 22 | key: "v0-${{ hashFiles('./Pipfile.lock') }}", 23 | }, 24 | }, 25 | { 26 | name: 'Install Dependencies', 27 | run: dedent`pip install pipenv 28 | pipenv install --deploy --dev` 29 | }, 30 | { 31 | name: 'Lint (flake8)', 32 | run: 'pipenv run flake8 .', 33 | }, 34 | ], 35 | container: { 36 | image: `python:3.8`, 37 | }, 38 | env: { 39 | REDIS_URL: 'redis://redis:6379', 40 | }, 41 | }); 42 | 43 | const publishJob = new DockerPublishJob(workflow, 'publish', { 44 | imageName: 'labs-api-server', 45 | }, 46 | { 47 | needs: checkJob.id 48 | }); 49 | 50 | new DeployJob(workflow, {}, { 51 | needs: publishJob.id 52 | }); 53 | } 54 | } 55 | 56 | const app 
= new App(); 57 | new LASStack(app, 'labs-api-server'); 58 | app.synth(); 59 | -------------------------------------------------------------------------------- /.github/cdk/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "cdk", 3 | "version": "0.1.0", 4 | "main": "main.js", 5 | "types": "main.ts", 6 | "license": "Apache-2.0", 7 | "private": true, 8 | "scripts": { 9 | "synth": "cdkactions synth", 10 | "compile": "tsc", 11 | "watch": "tsc -w", 12 | "build": "yarn compile && yarn synth", 13 | "upgrade-cdk": "yarn upgrade cdkactions@latest cdkactions-cli@latest" 14 | }, 15 | "dependencies": { 16 | "@pennlabs/kraken": "^0.5.0", 17 | "cdkactions": "^0.2.3", 18 | "constructs": "^3.2.109" 19 | }, 20 | "devDependencies": { 21 | "@types/node": "^14.14.22", 22 | "cdkactions-cli": "^0.2.3", 23 | "typescript": "^4.1.3" 24 | } 25 | } 26 | -------------------------------------------------------------------------------- /.github/cdk/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "alwaysStrict": true, 4 | "charset": "utf8", 5 | "declaration": true, 6 | "experimentalDecorators": true, 7 | "inlineSourceMap": true, 8 | "inlineSources": true, 9 | "lib": [ 10 | "es2018" 11 | ], 12 | "module": "CommonJS", 13 | "noEmitOnError": true, 14 | "noFallthroughCasesInSwitch": true, 15 | "noImplicitAny": true, 16 | "noImplicitReturns": true, 17 | "noImplicitThis": true, 18 | "noUnusedLocals": true, 19 | "noUnusedParameters": true, 20 | "resolveJsonModule": true, 21 | "strict": true, 22 | "strictNullChecks": true, 23 | "strictPropertyInitialization": true, 24 | "stripInternal": true, 25 | "target": "ES2018" 26 | }, 27 | "include": [ 28 | "**/*.ts" 29 | ], 30 | "exclude": [ 31 | "node_modules" 32 | ] 33 | } 34 | -------------------------------------------------------------------------------- /.github/cdk/yarn.lock: 
-------------------------------------------------------------------------------- 1 | # THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. 2 | # yarn lockfile v1 3 | 4 | 5 | "@pennlabs/kraken@^0.5.0": 6 | version "0.5.0" 7 | resolved "https://registry.yarnpkg.com/@pennlabs/kraken/-/kraken-0.5.0.tgz#b79d24f3d34a1ef4aff7edc0e90e5bd9388c1897" 8 | integrity sha512-4QHJ7hPoxswsNpELdlDXX73GKecelCdPXKf7MEJYZ3Xe3UCZGEStVLYFoJUEev+KaxN0IxxTqYZ3kfPqabqEIQ== 9 | dependencies: 10 | cdkactions "^0.2.0" 11 | constructs "^3.2.80" 12 | dedent-js "^1.0.1" 13 | 14 | "@types/node@^14.14.22": 15 | version "14.14.22" 16 | resolved "https://registry.yarnpkg.com/@types/node/-/node-14.14.22.tgz#0d29f382472c4ccf3bd96ff0ce47daf5b7b84b18" 17 | integrity sha512-g+f/qj/cNcqKkc3tFqlXOYjrmZA+jNBiDzbP3kH+B+otKFqAdPgVTGP1IeKRdMml/aE69as5S4FqtxAbl+LaMw== 18 | 19 | ansi-regex@^5.0.0: 20 | version "5.0.0" 21 | resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-5.0.0.tgz#388539f55179bf39339c81af30a654d69f87cb75" 22 | integrity sha512-bY6fj56OUQ0hU1KjFNDQuJFezqKdrAyFdIevADiqrWHwSlbmBNMHp5ak2f40Pm8JTFyM2mqxkG6ngkHO11f/lg== 23 | 24 | ansi-styles@^4.0.0: 25 | version "4.3.0" 26 | resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-4.3.0.tgz#edd803628ae71c04c85ae7a0906edad34b648937" 27 | integrity sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg== 28 | dependencies: 29 | color-convert "^2.0.1" 30 | 31 | argparse@^2.0.1: 32 | version "2.0.1" 33 | resolved "https://registry.yarnpkg.com/argparse/-/argparse-2.0.1.tgz#246f50f3ca78a3240f6c997e8a9bd1eac49e4b38" 34 | integrity sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q== 35 | 36 | cdkactions-cli@^0.2.3: 37 | version "0.2.3" 38 | resolved "https://registry.yarnpkg.com/cdkactions-cli/-/cdkactions-cli-0.2.3.tgz#2393682b37ab0b04c6964160b393e8d71b08118f" 39 | integrity 
sha512-qYPbzuQ1M5gQGa8NRnaWwm3iXmdqMoiHR7YTh6oYROpfBGER7kwBBb6ydFlSwKK62hE0B++by43hbEBXlHvr8A== 40 | dependencies: 41 | cdkactions "^0.2.3" 42 | constructs "^3.2.109" 43 | fs-extra "^8.1.0" 44 | sscaff "^1.2.0" 45 | yaml "^1.10.0" 46 | yargs "^16.2.0" 47 | 48 | cdkactions@^0.2.0, cdkactions@^0.2.3: 49 | version "0.2.3" 50 | resolved "https://registry.yarnpkg.com/cdkactions/-/cdkactions-0.2.3.tgz#aa27bf720962376d54f8ef95cdfb0ab46458b966" 51 | integrity sha512-/DYQ2qsT6fzgZB+cmQjtPqR4aAWCqAytWbFpJK+iJLQ4jQrl6l4uMf01TLiWY3mAILS0YGlwPcoBbGvq9Jnz5g== 52 | dependencies: 53 | js-yaml "^4.0.0" 54 | ts-dedent "^2.0.0" 55 | 56 | cliui@^7.0.2: 57 | version "7.0.4" 58 | resolved "https://registry.yarnpkg.com/cliui/-/cliui-7.0.4.tgz#a0265ee655476fc807aea9df3df8df7783808b4f" 59 | integrity sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ== 60 | dependencies: 61 | string-width "^4.2.0" 62 | strip-ansi "^6.0.0" 63 | wrap-ansi "^7.0.0" 64 | 65 | color-convert@^2.0.1: 66 | version "2.0.1" 67 | resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-2.0.1.tgz#72d3a68d598c9bdb3af2ad1e84f21d896abd4de3" 68 | integrity sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ== 69 | dependencies: 70 | color-name "~1.1.4" 71 | 72 | color-name@~1.1.4: 73 | version "1.1.4" 74 | resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.4.tgz#c2a09a87acbde69543de6f63fa3995c826c536a2" 75 | integrity sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA== 76 | 77 | constructs@^3.2.109, constructs@^3.2.80: 78 | version "3.3.3" 79 | resolved "https://registry.yarnpkg.com/constructs/-/constructs-3.3.3.tgz#532d3b736d2a9f090fc5ded1e43d4a1b17819290" 80 | integrity sha512-tus97CfZU14VcAdM5Qhg5OGMj89IAyxOyomt7h2Gup5EpBiMz6yIkOsxV9GAh30xmUKWSAwX56AD8QjzZCjIEQ== 81 | 82 | dedent-js@^1.0.1: 83 | version "1.0.1" 84 | resolved 
"https://registry.yarnpkg.com/dedent-js/-/dedent-js-1.0.1.tgz#bee5fb7c9e727d85dffa24590d10ec1ab1255305" 85 | integrity sha1-vuX7fJ5yfYXf+iRZDRDsGrElUwU= 86 | 87 | emoji-regex@^8.0.0: 88 | version "8.0.0" 89 | resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-8.0.0.tgz#e818fd69ce5ccfcb404594f842963bf53164cc37" 90 | integrity sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A== 91 | 92 | escalade@^3.1.1: 93 | version "3.1.1" 94 | resolved "https://registry.yarnpkg.com/escalade/-/escalade-3.1.1.tgz#d8cfdc7000965c5a0174b4a82eaa5c0552742e40" 95 | integrity sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw== 96 | 97 | fs-extra@^8.1.0: 98 | version "8.1.0" 99 | resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-8.1.0.tgz#49d43c45a88cd9677668cb7be1b46efdb8d2e1c0" 100 | integrity sha512-yhlQgA6mnOJUKOsRUFsgJdQCvkKhcz8tlZG5HBQfReYZy46OwLcY+Zia0mtdHsOo9y/hP+CxMN0TU9QxoOtG4g== 101 | dependencies: 102 | graceful-fs "^4.2.0" 103 | jsonfile "^4.0.0" 104 | universalify "^0.1.0" 105 | 106 | get-caller-file@^2.0.5: 107 | version "2.0.5" 108 | resolved "https://registry.yarnpkg.com/get-caller-file/-/get-caller-file-2.0.5.tgz#4f94412a82db32f36e3b0b9741f8a97feb031f7e" 109 | integrity sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg== 110 | 111 | graceful-fs@^4.1.6, graceful-fs@^4.2.0: 112 | version "4.2.4" 113 | resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.4.tgz#2256bde14d3632958c465ebc96dc467ca07a29fb" 114 | integrity sha512-WjKPNJF79dtJAVniUlGGWHYGz2jWxT6VhN/4m1NdkbZ2nOsEF+cI1Edgql5zCRhs/VsQYRvrXctxktVXZUkixw== 115 | 116 | is-fullwidth-code-point@^3.0.0: 117 | version "3.0.0" 118 | resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz#f116f8064fe90b3f7844a38997c0b75051269f1d" 119 | integrity 
sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg== 120 | 121 | js-yaml@^4.0.0: 122 | version "4.0.0" 123 | resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-4.0.0.tgz#f426bc0ff4b4051926cd588c71113183409a121f" 124 | integrity sha512-pqon0s+4ScYUvX30wxQi3PogGFAlUyH0awepWvwkj4jD4v+ova3RiYw8bmA6x2rDrEaj8i/oWKoRxpVNW+Re8Q== 125 | dependencies: 126 | argparse "^2.0.1" 127 | 128 | jsonfile@^4.0.0: 129 | version "4.0.0" 130 | resolved "https://registry.yarnpkg.com/jsonfile/-/jsonfile-4.0.0.tgz#8771aae0799b64076b76640fca058f9c10e33ecb" 131 | integrity sha1-h3Gq4HmbZAdrdmQPygWPnBDjPss= 132 | optionalDependencies: 133 | graceful-fs "^4.1.6" 134 | 135 | require-directory@^2.1.1: 136 | version "2.1.1" 137 | resolved "https://registry.yarnpkg.com/require-directory/-/require-directory-2.1.1.tgz#8c64ad5fd30dab1c976e2344ffe7f792a6a6df42" 138 | integrity sha1-jGStX9MNqxyXbiNE/+f3kqam30I= 139 | 140 | sscaff@^1.2.0: 141 | version "1.2.0" 142 | resolved "https://registry.yarnpkg.com/sscaff/-/sscaff-1.2.0.tgz#d015f199ac53c2df66c4b6135b29bd01f7885445" 143 | integrity sha512-Xyf2tWLnO0Z297FKag0e8IXFIpnYRWZ3FBn4dN2qlMRsOcpf0P54FPhvdcb1Es0Fm4hbhYYXa23jR+VPGPQhSg== 144 | 145 | string-width@^4.1.0, string-width@^4.2.0: 146 | version "4.2.0" 147 | resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.0.tgz#952182c46cc7b2c313d1596e623992bd163b72b5" 148 | integrity sha512-zUz5JD+tgqtuDjMhwIg5uFVV3dtqZ9yQJlZVfq4I01/K5Paj5UHj7VyrQOJvzawSVlKpObApbfD0Ed6yJc+1eg== 149 | dependencies: 150 | emoji-regex "^8.0.0" 151 | is-fullwidth-code-point "^3.0.0" 152 | strip-ansi "^6.0.0" 153 | 154 | strip-ansi@^6.0.0: 155 | version "6.0.0" 156 | resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.0.tgz#0b1571dd7669ccd4f3e06e14ef1eed26225ae532" 157 | integrity sha512-AuvKTrTfQNYNIctbR1K/YGTR1756GycPsg7b9bdV9Duqur4gv6aKqHXah67Z8ImS7WEz5QVcOtlfW2rZEugt6w== 158 | dependencies: 159 | ansi-regex "^5.0.0" 160 | 161 | ts-dedent@^2.0.0: 162 
| version "2.0.0" 163 | resolved "https://registry.yarnpkg.com/ts-dedent/-/ts-dedent-2.0.0.tgz#47c5eb23d9096f3237cc413bc82d387d36dbe690" 164 | integrity sha512-DfxKjSFQfw9+uf7N9Cy8Ebx9fv5fquK4hZ6SD3Rzr+1jKP6AVA6H8+B5457ZpUs0JKsGpGqIevbpZ9DMQJDp1A== 165 | 166 | typescript@^4.1.3: 167 | version "4.1.3" 168 | resolved "https://registry.yarnpkg.com/typescript/-/typescript-4.1.3.tgz#519d582bd94cba0cf8934c7d8e8467e473f53bb7" 169 | integrity sha512-B3ZIOf1IKeH2ixgHhj6la6xdwR9QrLC5d1VKeCSY4tvkqhF2eqd9O7txNlS0PO3GrBAFIdr3L1ndNwteUbZLYg== 170 | 171 | universalify@^0.1.0: 172 | version "0.1.2" 173 | resolved "https://registry.yarnpkg.com/universalify/-/universalify-0.1.2.tgz#b646f69be3942dabcecc9d6639c80dc105efaa66" 174 | integrity sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg== 175 | 176 | wrap-ansi@^7.0.0: 177 | version "7.0.0" 178 | resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-7.0.0.tgz#67e145cff510a6a6984bdf1152911d69d2eb9e43" 179 | integrity sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q== 180 | dependencies: 181 | ansi-styles "^4.0.0" 182 | string-width "^4.1.0" 183 | strip-ansi "^6.0.0" 184 | 185 | y18n@^5.0.5: 186 | version "5.0.5" 187 | resolved "https://registry.yarnpkg.com/y18n/-/y18n-5.0.5.tgz#8769ec08d03b1ea2df2500acef561743bbb9ab18" 188 | integrity sha512-hsRUr4FFrvhhRH12wOdfs38Gy7k2FFzB9qgN9v3aLykRq0dRcdcpz5C9FxdS2NuhOrI/628b/KSTJ3rwHysYSg== 189 | 190 | yaml@^1.10.0: 191 | version "1.10.0" 192 | resolved "https://registry.yarnpkg.com/yaml/-/yaml-1.10.0.tgz#3b593add944876077d4d683fee01081bd9fff31e" 193 | integrity sha512-yr2icI4glYaNG+KWONODapy2/jDdMSDnrONSjblABjD9B4Z5LgiircSt8m8sRZFNi08kG9Sm0uSHtEmP3zaEGg== 194 | 195 | yargs-parser@^20.2.2: 196 | version "20.2.4" 197 | resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-20.2.4.tgz#b42890f14566796f85ae8e3a25290d205f154a54" 198 | integrity 
sha512-WOkpgNhPTlE73h4VFAFsOnomJVaovO8VqLDzy5saChRBFQFBoMYirowyW+Q9HB4HFF4Z7VZTiG3iSzJJA29yRA== 199 | 200 | yargs@^16.2.0: 201 | version "16.2.0" 202 | resolved "https://registry.yarnpkg.com/yargs/-/yargs-16.2.0.tgz#1c82bf0f6b6a66eafce7ef30e376f49a12477f66" 203 | integrity sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw== 204 | dependencies: 205 | cliui "^7.0.2" 206 | escalade "^3.1.1" 207 | get-caller-file "^2.0.5" 208 | require-directory "^2.1.1" 209 | string-width "^4.2.0" 210 | y18n "^5.0.5" 211 | yargs-parser "^20.2.2" 212 | -------------------------------------------------------------------------------- /.github/workflows/cdkactions_build-and-deploy.yaml: -------------------------------------------------------------------------------- 1 | # Generated by cdkactions. Do not modify 2 | # Generated as part of the 'labs-api-server' stack. 3 | name: Build and Deploy 4 | on: push 5 | jobs: 6 | check: 7 | runs-on: ubuntu-latest 8 | steps: 9 | - uses: actions/checkout@v2 10 | - name: Cache 11 | uses: actions/cache@v2 12 | with: 13 | path: ~/.local/share/virtualenvs 14 | key: v0-${{ hashFiles('./Pipfile.lock') }} 15 | - name: Install Dependencies 16 | run: |- 17 | pip install pipenv 18 | pipenv install --deploy --dev 19 | - name: Lint (flake8) 20 | run: pipenv run flake8 . 21 | container: 22 | image: python:3.8 23 | env: 24 | REDIS_URL: redis://redis:6379 25 | publish-publish: 26 | name: Publish publish 27 | runs-on: ubuntu-latest 28 | steps: 29 | - uses: actions/checkout@v2 30 | - uses: docker/setup-qemu-action@v1 31 | - uses: docker/setup-buildx-action@v1 32 | - name: Cache Docker layers 33 | uses: actions/cache@v2 34 | with: 35 | path: /tmp/.buildx-cache 36 | key: buildx-publish-publish 37 | - uses: docker/login-action@v1 38 | with: 39 | username: ${{ secrets.DOCKER_USERNAME }} 40 | password: ${{ secrets.DOCKER_PASSWORD }} 41 | - name: Build/Publish 42 | uses: docker/build-push-action@v2 43 | with: 44 | context: . 
45 | file: ./Dockerfile 46 | push: ${{ github.ref == 'refs/heads/master' }} 47 | cache-from: type=local,src=/tmp/.buildx-cache,type=registry,ref=pennlabs/labs-api-server:latest 48 | cache-to: type=local,dest=/tmp/.buildx-cache 49 | tags: pennlabs/labs-api-server:latest,pennlabs/labs-api-server:${{ github.sha }} 50 | needs: check 51 | deploy: 52 | runs-on: ubuntu-latest 53 | container: 54 | image: pennlabs/helm-tools:39b60af248944898fcbc58d1fe5b0f1995420aef 55 | if: github.ref == 'refs/heads/master' 56 | steps: 57 | - uses: actions/checkout@v2 58 | - name: Deploy 59 | run: |- 60 | aws eks --region us-east-1 update-kubeconfig --name production --role-arn arn:aws:iam::${AWS_ACCOUNT_ID}:role/kubectl 61 | 62 | # get repo name (by removing owner/organization) 63 | RELEASE_NAME=${REPOSITORY#*/} 64 | 65 | # this specifies what tag of icarus to pull down 66 | DEPLOY_TAG=$(yq r k8s/values.yaml deploy_version) 67 | if [ "$DEPLOY_TAG" = "null" ]; then 68 | echo "Could not find deploy tag" 69 | exit 1 70 | fi 71 | 72 | helm repo add pennlabs https://helm.pennlabs.org/ 73 | for i in {1..10}; do 74 | # This is bash soup, but it'll do. 75 | # 1. Attempt to install with helm 76 | # 2. If this succeeds, exit with a success status code 77 | # 3. If it fails, mark the command as succeeded so that '-e' doesn't kick us out 78 | # 4. 
Wait 10s and try again 79 | helm upgrade --install --atomic --set=image_tag=$IMAGE_TAG -f k8s/values.yaml --version "${DEPLOY_TAG}" $RELEASE_NAME pennlabs/icarus && exit 0 || true 80 | sleep 10s 81 | echo "Retrying deploy for $i times" 82 | done 83 | 84 | # If we get here, all helm installs failed so our command should fail 85 | exit 1 86 | env: 87 | IMAGE_TAG: ${{ github.sha }} 88 | AWS_ACCOUNT_ID: ${{ secrets.AWS_ACCOUNT_ID }} 89 | AWS_ACCESS_KEY_ID: ${{ secrets.GH_AWS_ACCESS_KEY_ID }} 90 | AWS_SECRET_ACCESS_KEY: ${{ secrets.GH_AWS_SECRET_ACCESS_KEY }} 91 | DO_AUTH_TOKEN: ${{ secrets.DO_AUTH_TOKEN }} 92 | K8S_CLUSTER_ID: ${{ secrets.K8S_CLUSTER_ID }} 93 | REPOSITORY: ${{ github.repository }} 94 | needs: publish-publish 95 | -------------------------------------------------------------------------------- /.github/workflows/cdkactions_validate.yaml: -------------------------------------------------------------------------------- 1 | # Generated by cdkactions. Do not modify 2 | # Generated as part of the 'validate' stack. 3 | name: Validate cdkactions manifests 4 | on: push 5 | jobs: 6 | validate: 7 | name: Validate cdkactions manifests 8 | runs-on: ubuntu-latest 9 | steps: 10 | - uses: actions/checkout@v2 11 | with: 12 | token: ${{ github.token }} 13 | - name: Validate manifests 14 | run: |- 15 | cd .github/cdk 16 | yarn install 17 | yarn build 18 | git --no-pager diff ../workflows 19 | git diff-index --quiet HEAD -- ../workflows 20 | - name: Push updated manifests 21 | if: "false" 22 | run: |- 23 | cd .github/workflows 24 | git config user.name github-actions 25 | git config user.email github-actions[bot]@users.noreply.github.com 26 | git add . 
27 | git commit -m "Update cdkactions manifests" || exit 0 28 | git push 29 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.pyc 2 | venv 3 | .ropeproject 4 | penn/ 5 | penn 6 | .env 7 | 8 | ### Python ### 9 | # Byte-compiled / optimized / DLL files 10 | __pycache__/ 11 | *.py[cod] 12 | 13 | # C extensions 14 | *.so 15 | 16 | # Distribution / packaging 17 | .Python 18 | env/ 19 | build/ 20 | develop-eggs/ 21 | dist/ 22 | downloads/ 23 | eggs/ 24 | lib/ 25 | lib64/ 26 | parts/ 27 | sdist/ 28 | var/ 29 | *.egg-info/ 30 | .installed.cfg 31 | *.egg 32 | env.sh 33 | 34 | # PyInstaller 35 | # Usually these files are written by a python script from a template 36 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 37 | *.manifest 38 | *.spec 39 | 40 | # Installer logs 41 | pip-log.txt 42 | pip-delete-this-directory.txt 43 | 44 | # Unit test / coverage reports 45 | htmlcov/ 46 | .tox/ 47 | .coverage 48 | .cache 49 | nosetests.xml 50 | coverage.xml 51 | test-results/ 52 | 53 | # Translations 54 | *.mo 55 | *.pot 56 | 57 | # Django stuff: 58 | *.log 59 | 60 | # Sphinx documentation 61 | docs/_build/ 62 | 63 | # PyBuilder 64 | target/ 65 | 66 | # iOS Push Notification Key 67 | ios_key.p8 68 | 69 | # Misc 70 | .idea/ 71 | .DS_Store 72 | .vscode/ 73 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM python:3.8-slim 2 | 3 | LABEL maintainer="Penn Labs" 4 | 5 | # Install build dependencies 6 | RUN apt-get update && apt-get install --no-install-recommends -y default-libmysqlclient-dev gcc \ 7 | && rm -rf /var/lib/apt/lists/* 8 | 9 | # Install pipenv 10 | RUN pip install pipenv 11 | 12 | WORKDIR /app/ 13 | 14 | # Copy project dependencies 15 | COPY Pipfile* /app/ 16 | 17 | # Install project 
dependencies 18 | RUN pipenv install --system 19 | 20 | # Copy project files 21 | COPY . /app/ 22 | 23 | 24 | # Run uWSGI 25 | CMD ["/usr/local/bin/uwsgi", "--ini", "/app/setup.cfg"] 26 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | The MIT License 2 | 3 | Copyright (c) 2020 Penn Labs 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in 13 | all copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN 21 | THE SOFTWARE. 
22 | -------------------------------------------------------------------------------- /Pipfile: -------------------------------------------------------------------------------- 1 | [[source]] 2 | name = "pypi" 3 | url = "https://pypi.org/simple" 4 | verify_ssl = true 5 | 6 | [dev-packages] 7 | mock = "*" 8 | nose2 = {extras = ["coverage_plugin"],version = "*"} 9 | flake8 = "*" 10 | flake8-isort = "*" 11 | flake8-quotes = "*" 12 | nose = "*" 13 | codecov = "*" 14 | 15 | [packages] 16 | boto3 = "*" 17 | beautifulsoup4 = "*" 18 | blinker = "*" 19 | click = "*" 20 | funcsigs = "*" 21 | html5lib = "*" 22 | itsdangerous = "*" 23 | mysqlclient = "*" 24 | nameparser = "*" 25 | pandas = "*" 26 | pbr = "*" 27 | penncoursereview = "*" 28 | python-dateutil = "*" 29 | pytz = "*" 30 | raven = "*" 31 | redis = "*" 32 | requests = "*" 33 | six = "*" 34 | tinify = "*" 35 | tzlocal = "*" 36 | Flask-Bcrypt = "*" 37 | Flask = "*" 38 | Flask-Cors = "*" 39 | Flask-SQLAlchemy = "*" 40 | Jinja2 = "*" 41 | MarkupSafe = "*" 42 | PennSDK = "*" 43 | SQLAlchemy = "*" 44 | Werkzeug = "*" 45 | apns2 = "*" 46 | uwsgi = "*" 47 | flake8 = "*" 48 | isort = "*" 49 | 50 | [requires] 51 | python_version = "3" 52 | -------------------------------------------------------------------------------- /buildingRedis.py: -------------------------------------------------------------------------------- 1 | import csv 2 | import json 3 | 4 | import redis 5 | 6 | from server import app 7 | 8 | 9 | db = redis.StrictRedis().from_url(app.config["REDIS_URL"]) 10 | with open("buildings.csv") as f: 11 | reader = csv.DictReader(f) 12 | for row in reader: 13 | db.set("buildings:%s" % (row["code_courses"]), json.dumps(row)) 14 | -------------------------------------------------------------------------------- /buildings.csv: -------------------------------------------------------------------------------- 1 | name,latitude,longitude,source,code_courses,notes 2 | 3401 Market Street,39.95603,-75.19160,Courses,34MK, 3 | 3401 
Walnut Street,39.95306,-75.19300,Courses,WLNT, 4 | 3440 Market Street,39.95576,-75.19220,Courses,MRKT, 5 | 3550 Market Street,39.95632,-75.19400,Courses,35MK, 6 | 3600 Market Street,39.95585,-75.19470,Courses,36MK, 7 | 3700 Market Street,39.95603,-75.19660,Courses,37MK, 8 | Anatomy/Chemistry Building,39.94887,-75.19760,Courses,ACHM, 9 | Annenberg Center,39.95336,-75.19660,Courses,ANNC, 10 | Annenberg School,39.95326,-75.19580,Courses,ANNS, 11 | Blockley Hall,39.94938,-75.19460,Courses,BLOC, 12 | Caster Building,39.95256,-75.19750,Courses,CAST, 13 | Center for Programs in Contemporary Writing,39.95365,-75.19890,Courses,CPCW,3808 Walnut Street 14 | Charles Addams Fine Arts Hall,39.95295,-75.19500,Courses,ADDM, 15 | Chemistry Building,39.95064,-75.19230,Courses,CHEM, 16 | Chemistry Labs,39.95193,-75.20230,Courses,CLAB, 17 | Civic House,39.95247,-75.20150,Courses,CVHS, 18 | Claire M. Fagin Hall,39.94786,-75.19810,Courses,NEGB, 19 | Claudia Cohen Hall,39.95141,-75.19490,Courses,COHN, 20 | Clinical Research Building,39.94938,-75.19460,Courses,CLRB, 21 | College Hall,39.95139,-75.19380,Courses,COLL, 22 | Colonial Penn Center,39.95236,-75.19660,Courses,CPCR, 23 | David Rittenhouse Labs,39.95212,-75.18980,Courses,DRLB, 24 | Dental School,39.95276,-75.20290,Courses,DENT, 25 | "Duhring Wing, Furness Building",39.95205,-75.19280,Courses,DUHR, 26 | Fels Center,39.95355,-75.19910,Courses,FELS, 27 | Fisher-Bennett Hall,39.95257,-75.19160,Courses,BENN, 28 | Furness Building,39.95205,-75.19280,Courses,FURN, 29 | "Gittis Hall, Law School",39.95391,-75.19310,Courses,GITT, 30 | Goddard Labs,39.94938,-75.19460,Courses,GLAB, 31 | "Gregory College House, Class of 25",39.95193,-75.20230,Courses,CL25, 32 | Hayden Hall,39.95131,-75.19130,Courses,HAYD, 33 | Houston Hall,39.95082,-75.19390,Courses,HH, 34 | Jaffe Building,39.95274,-75.19290,Courses,JAFF, 35 | John Morgan Building,39.94955,-75.19670,Courses,MRGN, 36 | Johnson Pavilion,39.94948,-75.19560,Courses,JOHN, 37 | Jon M. 
Huntsman Hall,39.95307,-75.19820,Courses,JMHH, 38 | Kelly Writers House,39.95259,-75.19930,Courses,KWH, 39 | Lauder-Fischer Hall,39.95184,-75.19750,Courses,L-FH, 40 | Law School,39.95394,-75.19210,Courses,LAWS, 41 | Leidy Labs,39.94973,-75.19900,Courses,LLAB, 42 | Levine Hall,39.95222,-75.19110,Courses,LEVH, 43 | Levy Center,39.95274,-75.20290,Courses,LEVY, 44 | Lippincott Library,39.95281,-75.19350,Courses,LIPP, 45 | McNeil Building,39.95201,-75.19790,Courses,MCNB, 46 | McNeil Center for Early American Studies,39.95443,-75.19620,Courses,MCES, 47 | Meyerson Hall,39.95230,-75.19260,Courses,MEYH, 48 | Moore Building,39.95238,-75.19050,Courses,MOOR, 49 | Morgan Building,39.95186,-75.19200,Courses,MORG, 50 | Music Annex,39.95207,-75.19200,Courses,MUSX, 51 | Music Building,39.95207,-75.19200,Courses,MUSB, 52 | "Pepper Hall, Law School",39.95387,-75.19290,Courses,PEPP, 53 | School of Veterinary Medicine,39.86950,-75.75370,Courses,VETS, 54 | Skirkanich Hall,39.95204,-75.19050,Courses,SKIR, 55 | Steinberg-Dietrich Hall,39.95184,-75.19640,Courses,SHDH, 56 | Stellar-Chance Laboratories,39.94937,-75.19460,Courses,STCH, 57 | Stiteler Hall,39.95274,-75.19720,Courses,STIT, 58 | "Tanenbaum Hall, Law School",39.95387,-75.19330,Courses,TANE, 59 | "The Arts, Research and Culture House",39.95205,-75.19510,Courses,ARCH,3601 Locust Walk 60 | Towne Building,39.95233,-75.19060,Courses,TOWN, 61 | University Museum,39.94870,-75.19030,Courses,MUSE, 62 | Van Pelt Library,39.95254,-75.19360,Courses,VANP, 63 | Vance Hall,39.95147,-75.19810,Courses,VANC, 64 | W.E.B. 
Du Bois College House,39.95358,-75.20050,Courses,DUBH, 65 | Williams Hall,39.95102,-75.19480,Courses,WILL, -------------------------------------------------------------------------------- /cron/save_laundry_data.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | import os 3 | import sys 4 | 5 | 6 | sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) 7 | 8 | if True: 9 | import server 10 | 11 | 12 | server.laundry.save_data() 13 | -------------------------------------------------------------------------------- /cron/send_gsr_push_notifications.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | import os 3 | import sys 4 | 5 | 6 | sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) 7 | 8 | if True: 9 | import server 10 | 11 | 12 | server.studyspaces.notifications.send_reminders() 13 | -------------------------------------------------------------------------------- /docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: '3' 2 | services: 3 | web: 4 | build: . 
5 | environment: 6 | REDIS_URL: redis://redis:6379 7 | command: ["python3", "runserver.py"] 8 | volumes: 9 | - "" 10 | redis: 11 | image: redis 12 | -------------------------------------------------------------------------------- /k8s/values.yaml: -------------------------------------------------------------------------------- 1 | deploy_version: 0.1.20 2 | image_tag: latest 3 | 4 | applications: 5 | - name: flask 6 | image: pennlabs/labs-api-server 7 | secret: labs-api-server 8 | ingress: 9 | hosts: 10 | - host: api.pennlabs.org 11 | paths: ["/"] 12 | secretMounts: 13 | - name: labs-api-server 14 | item: ios-key 15 | path: "/app/ios_key.p8" 16 | - name: redis 17 | image: redis 18 | tag: "5" 19 | port: 6379 20 | cronjobs: 21 | - name: laundry 22 | schedule: "*/15 * * * *" 23 | secret: labs-api-server 24 | image: pennlabs/labs-api-server 25 | cmd: ["python3", "cron/save_laundry_data.py"] 26 | - name: gsr-notifications 27 | schedule: "20,50 * * * *" 28 | secret: labs-api-server 29 | image: pennlabs/labs-api-server 30 | cmd: ["python3", "cron/send_gsr_push_notifications.py"] 31 | secretMounts: 32 | - name: labs-api-server 33 | item: ios-key 34 | path: "/app/ios_key.p8" 35 | -------------------------------------------------------------------------------- /runserver.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | from server import app 4 | 5 | 6 | app.run(debug=True) 7 | -------------------------------------------------------------------------------- /server/__init__.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | import boto3 4 | import redis 5 | import tinify 6 | from flask import Flask 7 | from flask_bcrypt import Bcrypt 8 | from flask_cors import CORS 9 | from raven.contrib.flask import Sentry 10 | 11 | from server.models import sqldb 12 | 13 | 14 | app = Flask(__name__) 15 | bcrypt = Bcrypt(app) 16 | 17 | # Tinify Image Compression API 
18 | tinify.key = os.environ.get("TINIFY_KEY") 19 | 20 | # sentry 21 | sentry = Sentry(app) 22 | 23 | # AWS S3 24 | s3 = boto3.client( 25 | "s3", 26 | aws_access_key_id=os.environ.get("AWS_KEY"), 27 | aws_secret_access_key=os.environ.get("AWS_SECRET"), 28 | ) 29 | 30 | # allow cors 31 | CORS(app) 32 | 33 | # redis 34 | app.config["REDIS_URL"] = os.environ.get("REDIS_URL", "redis://localhost:6379") 35 | db = redis.StrictRedis().from_url(app.config["REDIS_URL"]) 36 | app.secret_key = os.urandom(24) 37 | 38 | import server.account.account # noqa 39 | import server.account.settings # noqa 40 | import server.analytics # noqa 41 | import server.auth # noqa 42 | import server.buildings # noqa 43 | import server.calendar3year # noqa 44 | import server.dining.balance # noqa 45 | import server.dining.diningRedis # noqa 46 | import server.dining.hours_menus # noqa 47 | import server.dining.preferences # noqa 48 | import server.dining.transactions # noqa 49 | import server.directory # noqa 50 | import server.event # noqa 51 | import server.fitness # noqa 52 | import server.homepage # noqa 53 | import server.laundry # noqa 54 | import server.news # noqa 55 | import server.nso # noqa 56 | import server.pcr # noqa 57 | import server.polls.archive # noqa 58 | import server.polls.creation # noqa 59 | import server.polls.vote # noqa 60 | import server.portal.account # noqa 61 | import server.portal.creation # noqa 62 | import server.portal.posts # noqa 63 | import server.registrar # noqa 64 | import server.studyspaces.availability # noqa 65 | import server.studyspaces.book # noqa 66 | import server.studyspaces.cancel # noqa 67 | import server.studyspaces.deprecated # noqa 68 | import server.studyspaces.notifications # noqa 69 | import server.studyspaces.reservations # noqa 70 | import server.studyspaces.search # noqa 71 | import server.transit # noqa 72 | import server.weather # noqa 73 | import server.housing # noqa 74 | import server.notifications # noqa 75 | import server.privacy 
# noqa 76 | 77 | # sql 78 | app.config["SQLALCHEMY_DATABASE_URI"] = os.environ.get("DATABASE_URL", "sqlite:///:memory:") 79 | app.config["SQLALCHEMY_TRACK_MODIFICATIONS"] = False 80 | sqldb.init_app(app) 81 | with app.app_context(): 82 | sqldb.create_all() 83 | 84 | if __name__ == "__main__": 85 | app.run(debug=True) 86 | -------------------------------------------------------------------------------- /server/account/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pennlabs/labs-api-server/4f4c3299594fb93e0c7894965db2a297a44ab35c/server/account/__init__.py -------------------------------------------------------------------------------- /server/account/account.py: -------------------------------------------------------------------------------- 1 | from flask import jsonify, request 2 | from sqlalchemy.exc import IntegrityError 3 | 4 | from server import app, sqldb 5 | # from server.account.courses import add_courses 6 | from server.account.degrees import add_schools_and_majors 7 | from server.models import Account 8 | 9 | 10 | """ 11 | Example: JSON Encoding 12 | { 13 | 'first': 'Josh', 14 | 'last': 'Doman', 15 | 'image_url': null, 16 | 'pennkey': 'joshdo', 17 | 'pennid': '144363238', 18 | 'degrees': [ 19 | { 20 | 'school_name': 'Engineering & Applied Science', 21 | 'school_code': 'EAS', 22 | 'degree_name':'Bachelor of Science in Economics', 23 | 'degree_code':'BS', 24 | 'expected_grad_term': '2020A', 25 | 'majors': [ 26 | { 27 | 'major_name': 'Applied Science - Computer Science', 28 | 'major_code': 'ASCS' 29 | } 30 | ] 31 | }, { 32 | 'school_name': 'Wharton Undergraduate', 33 | 'school_code': 'WH', 34 | 'degree_name':'Bachelor of Applied Science', 35 | 'degree_code':'BAS', 36 | 'expected_grad_term': '2020A', 37 | 'majors': [ 38 | { 39 | 'major_name': 'Wharton Ung Program - Undeclared', 40 | 'major_code': 'WUNG' 41 | } 42 | ] 43 | } 44 | ], 45 | 'courses': [ 46 | { 47 | 'term': '2019A', 
48 | 'name': 'Advanced Corp Finance', 49 | 'dept': 'FNCE', 50 | 'code': '203', 51 | 'section': '001', 52 | 'building': 'JMHH', 53 | 'room': '370', 54 | 'weekdays': 'MW', 55 | 'start_date': '2019-01-16', 56 | 'end_date': '2019-05-01', 57 | 'start_time': '10:30 AM', 58 | 'end_time': '12:00 PM', 59 | 'instructors': [ 60 | 'Christian Opp', 61 | 'Kevin Kaiser' 62 | ], 63 | 'meeting_times': [ 64 | { 65 | 'weekday': 'M', 66 | 'start_time': '10:00 AM', 67 | 'end_time': '11:00 AM', 68 | 'building': 'JMHH', 69 | 'room': '255' 70 | }, 71 | { 72 | 'weekday': 'W', 73 | 'start_time': '10:00 AM', 74 | 'end_time': '11:00 AM', 75 | 'building': 'TOWN', 76 | 'room': '100' 77 | }, 78 | { 79 | 'weekday': 'R', 80 | 'start_time': '2:00 PM', 81 | 'end_time': '3:00 PM' 82 | } 83 | ] 84 | } 85 | ] 86 | } 87 | """ 88 | 89 | 90 | @app.route("/account/register", methods=["POST"]) 91 | def register_account_endpoint(): 92 | """ Add/update a Penn account in the database with degrees (optional) and current courses (optional) """ 93 | json = request.get_json() 94 | if json: 95 | try: 96 | account = get_account(json) 97 | 98 | try: 99 | sqldb.session.add(account) 100 | sqldb.session.commit() 101 | except IntegrityError: 102 | sqldb.session.rollback() 103 | account = update_account(account) 104 | sqldb.session.commit() 105 | 106 | degrees = json.get("degrees") 107 | if degrees: 108 | add_schools_and_majors(account, degrees) 109 | 110 | # courses = json.get('courses') 111 | # if courses: 112 | # add_courses(account, courses) 113 | 114 | return jsonify({"account_id": account.id}) 115 | except KeyError as e: 116 | return jsonify({"error": str(e)}), 400 117 | else: 118 | return jsonify({"error": "JSON not passed"}), 400 119 | 120 | 121 | def get_account(json): 122 | first = json.get("first") 123 | last = json.get("last") 124 | pennkey = json.get("pennkey") 125 | 126 | if pennkey is None: 127 | raise KeyError("pennkey is missing") 128 | 129 | pennid = json.get("pennid") 130 | email = json.get("email") 131 
| affiliations_list = json.get("affiliations") 132 | affiliation = None 133 | image_url = json.get("image_url") 134 | if not email: 135 | email = get_potential_email(json) 136 | if affiliations_list: 137 | filtered_affliations = filter(lambda x: x != "member", affiliations_list) 138 | if filtered_affliations: 139 | affiliation = ",".join(filtered_affliations) 140 | 141 | return Account( 142 | first=first, 143 | last=last, 144 | pennkey=pennkey, 145 | pennid=pennid, 146 | email=email, 147 | affiliation=affiliation, 148 | image_url=image_url, 149 | ) 150 | 151 | 152 | def update_account(updated_account): 153 | # Update an account (guaranteed to exist because pennkey already in database and pennkey unique) 154 | account = Account.query.filter_by(pennkey=updated_account.pennkey).first() 155 | if account: 156 | account.first = updated_account.first 157 | account.last = updated_account.last 158 | if updated_account.email: 159 | account.email = updated_account.email 160 | if updated_account.image_url: 161 | account.image_url = updated_account.image_url 162 | if updated_account.pennid: 163 | account.pennid = updated_account.pennid 164 | if updated_account.affiliation: 165 | account.affiliation = updated_account.affiliation 166 | return account 167 | 168 | 169 | def get_potential_email(json): 170 | pennkey = json.get("pennkey") 171 | degrees = json.get("degrees", None) 172 | if degrees is None: 173 | return None 174 | 175 | email = None 176 | if degrees: 177 | for degree in degrees: 178 | code = degree.get("school_code") 179 | if code: 180 | if "WH" in code: 181 | return "{}@wharton.upenn.edu".format(pennkey) 182 | elif "COL" in code: 183 | email = "{}@sas.upenn.edu".format(pennkey) 184 | elif "SAS" in code: 185 | email = "{}@sas.upenn.edu".format(pennkey) 186 | elif "EAS" in code: 187 | email = "{}@seas.upenn.edu".format(pennkey) 188 | elif "NUR" in code: 189 | email = "{}@nursing.upenn.edu".format(pennkey) 190 | elif "SOD" in code: 191 | email = 
"{}@design.upenn.edu".format(pennkey) 192 | elif "EDG" in code: 193 | email = "{}@gse.upenn.edu".format(pennkey) 194 | elif "GEP" in code: 195 | email = "{}@seas.upenn.edu".format(pennkey) 196 | elif "GAS" in code: 197 | email = "{}@sas.upenn.edu".format(pennkey) 198 | elif "GEN" in code: 199 | email = "{}@seas.upenn.edu".format(pennkey) 200 | elif "EDP" in code: 201 | email = "{}@gse.upenn.edu".format(pennkey) 202 | elif "LPS" in code: 203 | email = "{}@sas.upenn.edu".format(pennkey) 204 | elif "SP2" in code: 205 | email = "{}@upenn.edu".format(pennkey) 206 | elif "NUG" in code: 207 | email = "{}@nursing.upenn.edu".format(pennkey) 208 | return email 209 | -------------------------------------------------------------------------------- /server/account/degrees.py: -------------------------------------------------------------------------------- 1 | from flask import g, jsonify, request 2 | 3 | from server import app, sqldb 4 | from server.auth import auth 5 | from server.models import Degree, Major, School, SchoolMajorAccount 6 | 7 | 8 | @app.route("/account/degrees", methods=["POST"]) 9 | @auth() 10 | def add_degrees(): 11 | json = request.get_json() 12 | add_schools_and_majors(g.account, json) 13 | return jsonify({"success": True}) 14 | 15 | 16 | @app.route("/account/degrees/delete", methods=["POST"]) 17 | @auth() 18 | def delete_degrees(): 19 | SchoolMajorAccount.query.filter_by(account_id=g.account.id).delete() 20 | sqldb.session.commit() 21 | return jsonify({"success": True}) 22 | 23 | 24 | def add_schools_and_majors(account, json_array): 25 | # Remove degrees in DB and replace with new ones (if any) 26 | SchoolMajorAccount.query.filter_by(account_id=account.id).delete() 27 | 28 | account_schools = [] 29 | for json in json_array: 30 | school_name = json.get("school_name") 31 | school_code = json.get("school_code") 32 | degree_name = json.get("degree_name") 33 | degree_code = json.get("degree_code") 34 | majors = json.get("majors") 35 | expected_grad = 
json.get("expected_grad_term") 36 | 37 | if school_name is None: 38 | raise KeyError("school_name is missing") 39 | if school_code is None: 40 | raise KeyError("school_code is missing") 41 | if degree_name is None: 42 | raise KeyError("degree_name is missing") 43 | if degree_code is None: 44 | raise KeyError("degree_code is missing") 45 | if majors is None: 46 | raise KeyError("majors is missing") 47 | if expected_grad is None: 48 | raise KeyError("expected_grad_term is missing") 49 | 50 | school = School.query.filter_by(name=school_name, code=school_code).first() 51 | if school is None: 52 | school = School(name=school_name, code=school_code) 53 | sqldb.session.add(school) 54 | sqldb.session.commit() 55 | 56 | degree = Degree.query.filter_by(code=degree_code).first() 57 | if degree is None: 58 | degree = Degree(name=degree_name, code=degree_code, school_id=school.id) 59 | sqldb.session.add(degree) 60 | sqldb.session.commit() 61 | 62 | if majors: 63 | for mJSON in majors: 64 | major_name = mJSON.get("major_name") 65 | major_code = mJSON.get("major_code") 66 | 67 | if major_name is None: 68 | raise KeyError("major_name is missing") 69 | if major_code is None: 70 | raise KeyError("major_code is missing") 71 | 72 | major = Major.query.filter_by(code=major_code).first() 73 | if major is None: 74 | major = Major(name=major_name, code=major_code, degree_code=degree_code) 75 | sqldb.session.add(major) 76 | sqldb.session.commit() 77 | 78 | asm = SchoolMajorAccount( 79 | account_id=account.id, 80 | school_id=school.id, 81 | major=major.code, 82 | expected_grad=expected_grad, 83 | ) 84 | account_schools.append(asm) 85 | else: 86 | asm = SchoolMajorAccount( 87 | account_id=account.id, school_id=school.id, major=None, expected_grad=expected_grad 88 | ) 89 | account_schools.append(asm) 90 | 91 | if account_schools: 92 | for asm in account_schools: 93 | sqldb.session.add(asm) 94 | sqldb.session.commit() 95 | 
-------------------------------------------------------------------------------- /server/account/settings.py: -------------------------------------------------------------------------------- 1 | from flask import g, jsonify 2 | 3 | from server import app 4 | from server.auth import auth 5 | from server.notifications import get_notification_settings 6 | from server.privacy import get_privacy_settings 7 | 8 | 9 | @app.route("/account/settings", methods=["GET"]) 10 | @auth() 11 | def get_account_settings(): 12 | notifSettings = get_notification_settings(g.account) 13 | privacySettings = get_privacy_settings(g.account) 14 | return jsonify({"notifications": notifSettings, "privacy": privacySettings, }) 15 | -------------------------------------------------------------------------------- /server/analytics.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | 3 | from flask import jsonify, request 4 | 5 | from server import app, sqldb 6 | from server.models import Account, AnalyticsEvent, User 7 | 8 | 9 | @app.route("/feed/analytics", methods=["POST"]) 10 | def send_analytics(): 11 | try: 12 | user = User.get_user() 13 | except ValueError as e: 14 | return jsonify({"success": False, "error": str(e)}), 400 15 | 16 | try: 17 | account = Account.get_account() 18 | account_id = account.id 19 | except ValueError: 20 | account_id = None 21 | 22 | data = request.get_json() 23 | events = list(data) 24 | 25 | for event_json in events: 26 | timestamp_str = event_json.get("timestamp") 27 | 28 | # Some timestamps malformed as '2019-09-08T4:18:24.709 PM' 29 | if "AM" in timestamp_str or "PM" in timestamp_str: 30 | timestamp_str = timestamp_str.split(" ")[0] 31 | 32 | timestamp = datetime.datetime.strptime(timestamp_str, "%Y-%m-%dT%H:%M:%S.%f") 33 | type = event_json.get("cell_type") 34 | index = int(event_json.get("index")) 35 | post_id = event_json.get("id") 36 | flag = bool(event_json.get("is_interaction")) 37 | # if any(x == 
type for x in ['news', 'post']): 38 | # Only log news and post events 39 | event = AnalyticsEvent( 40 | user=user.id, 41 | account_id=account_id, 42 | timestamp=timestamp, 43 | type=type, 44 | index=index, 45 | post_id=post_id, 46 | is_interaction=flag, 47 | ) 48 | sqldb.session.add(event) 49 | sqldb.session.commit() 50 | 51 | return jsonify({"success": True, "error": None}) 52 | -------------------------------------------------------------------------------- /server/auth.py: -------------------------------------------------------------------------------- 1 | import os 2 | from datetime import datetime 3 | from functools import wraps 4 | 5 | import requests 6 | from flask import g, jsonify, request 7 | from sqlalchemy import or_ 8 | 9 | from server import sqldb 10 | from server.models import Account, generate_uuid 11 | 12 | 13 | def auth(nullable=False): 14 | def _auth(f): 15 | @wraps(f) 16 | def __auth(*args, **kwargs): 17 | # Authorization headers are restricted on iOS and not allowed to be set. 18 | # Thus, iOS sets an X-Authorization header to carry the bearer token. 19 | # We check both the X-Authorization header and the regular Authorization header for the access token. 
20 | # For more info: see https://developer.apple.com/documentation/foundation/nsurlrequest#1776617 21 | g.account = None 22 | x_authorization = request.headers.get("X-Authorization") 23 | authorization = request.headers.get("Authorization") 24 | if (authorization and " " in authorization) or ( 25 | x_authorization and " " in x_authorization 26 | ): 27 | auth_type, token = ( 28 | authorization.split() if authorization else x_authorization.split() 29 | ) 30 | if auth_type == "Bearer": # Only validate if Authorization header type is Bearer 31 | try: 32 | body = {"token": token} 33 | headers = {"Authorization": "Bearer {}".format(token)} 34 | data = requests.post( 35 | url="https://platform.pennlabs.org/accounts/introspect/", 36 | headers=headers, 37 | data=body, 38 | ) 39 | if data.status_code == 200: # Access token is valid 40 | data = data.json() 41 | account = Account.query.filter_by(pennid=data["user"]["pennid"]).first() 42 | if account: 43 | g.account = account 44 | return f() 45 | else: 46 | return ( 47 | f() 48 | if nullable 49 | else (jsonify({"error": "Account not found."}), 400) 50 | ) 51 | else: 52 | return f() if nullable else (jsonify({"error": "Invalid token"}), 401) 53 | except requests.exceptions.RequestException: # Can't connect to platform 54 | # Throw a 403 because we can't verify the incoming access token so we 55 | # treat it as invalid. Ideally platform will never go down, so this 56 | # should never happen. 
57 | return ( 58 | f() 59 | if nullable 60 | else (jsonify({"error": "Unable to connect to Platform"}), 401) 61 | ) 62 | else: 63 | return ( 64 | f() 65 | if nullable 66 | else (jsonify({"error": "Authorization token type is not Bearer."}), 401) 67 | ) 68 | else: 69 | return ( 70 | f() 71 | if nullable 72 | else (jsonify({"error": "An access token was not provided."}), 401) 73 | ) 74 | 75 | return __auth 76 | 77 | return _auth 78 | 79 | 80 | def internal_auth(f): 81 | @wraps(f) 82 | def _internal_auth(*args, **kwargs): 83 | authorization = request.headers.get("Authorization") 84 | if authorization and " " in authorization: 85 | auth_type, token = authorization.split() 86 | if auth_type == "Bearer" and token == os.environ.get("AUTH_SECRET"): 87 | return f() 88 | else: 89 | return jsonify({"error": "Auth secret is not correct."}), 401 90 | else: 91 | return jsonify({"error": "Auth secret not provided."}), 401 92 | 93 | return _internal_auth 94 | 95 | 96 | class AnonymousID(sqldb.Model): 97 | __tablename__ = "anonymous_id" 98 | 99 | id = sqldb.Column(sqldb.VARCHAR(255), primary_key=True, default=generate_uuid) 100 | device_key = sqldb.Column(sqldb.VARCHAR(255)) 101 | password_hash = sqldb.Column(sqldb.VARCHAR(255)) 102 | type = sqldb.Column(sqldb.VARCHAR(255)) 103 | created_at = sqldb.Column(sqldb.DateTime, server_default=sqldb.func.now()) 104 | updated_at = sqldb.Column(sqldb.DateTime, server_default=sqldb.func.now()) 105 | 106 | 107 | def anonymous_auth(f): 108 | @wraps(f) 109 | def _anonymous_auth(*args, **kwargs): 110 | device_key = request.headers.get("X-Device-Key") 111 | password_hash = request.headers.get("X-Password-Hash") 112 | data_type = request.headers.get("X-Data-Type") 113 | 114 | if not device_key or not password_hash or not data_type: 115 | return ( 116 | jsonify( 117 | {"error": "Missing header X-Device-Key or X-Password-Hash or X-Data-Type."} 118 | ), 119 | 400, 120 | ) 121 | 122 | anonymous_id = ( 123 | AnonymousID.query.filter(AnonymousID.type == 
data_type) 124 | .filter( 125 | or_( 126 | AnonymousID.password_hash == password_hash, AnonymousID.device_key == device_key 127 | ) 128 | ) 129 | .first() 130 | ) 131 | if anonymous_id: 132 | # If device key or password hash has changed, update them 133 | if anonymous_id.device_key != device_key: 134 | anonymous_id.device_key = device_key 135 | anonymous_id.updated_at = datetime.now() 136 | sqldb.session.commit() 137 | elif anonymous_id.password_hash != password_hash: 138 | anonymous_id.password_hash = password_hash 139 | anonymous_id.updated_at = datetime.now() 140 | sqldb.session.commit() 141 | else: 142 | anonymous_id = AnonymousID( 143 | device_key=device_key, password_hash=password_hash, type=data_type 144 | ) 145 | sqldb.session.add(anonymous_id) 146 | sqldb.session.commit() 147 | 148 | g.anonymous_id = anonymous_id.id 149 | return f() 150 | 151 | return _anonymous_auth 152 | -------------------------------------------------------------------------------- /server/base.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | import json 3 | 4 | from flask import jsonify, make_response 5 | 6 | from server import db 7 | 8 | 9 | def cached_route(redis_key, td, func): 10 | data = cache_get(redis_key, td, func) 11 | secs = int(db.ttl(redis_key)) 12 | return make_response(jsonify(data), 200, {"Cache-Control": "max-age=%d" % secs}) 13 | 14 | 15 | def cache_get(redis_key, td, func): 16 | if db.exists(redis_key): 17 | return json.loads(db.get(redis_key).decode("utf8")) 18 | else: 19 | data = func() 20 | db.set(redis_key, json.dumps(data)) 21 | db.pexpireat(redis_key, datetime.datetime.now() + td) 22 | return data 23 | -------------------------------------------------------------------------------- /server/buildings.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | 3 | from flask import json, jsonify, request 4 | 5 | from server import app, db 6 | from 
server.base import cached_route 7 | from server.penndata import map_search 8 | 9 | 10 | @app.route("/buildings/", methods=["GET"]) 11 | def building(building_code): 12 | if db.exists("buildings:%s" % (building_code)): 13 | building_info = db.get("buildings:%s" % (building_code)).decode("utf8") 14 | return jsonify(json.loads(building_info)) 15 | else: 16 | return None 17 | 18 | 19 | @app.route("/buildings/search", methods=["GET"]) 20 | def building_search(): 21 | search_query = request.args["q"] 22 | td = datetime.timedelta(days=30) 23 | 24 | def get_data(): 25 | data = map_search.search(search_query) 26 | if data is None: 27 | return {"Error": "The search query could not be processed"} 28 | else: 29 | return data 30 | 31 | return cached_route("building_search:%s" % search_query, td, get_data) 32 | -------------------------------------------------------------------------------- /server/calendar3year.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | import re 3 | 4 | from flask import jsonify 5 | from pytz import timezone 6 | 7 | from server import app 8 | from server.base import cache_get 9 | from server.penndata import calendar 10 | 11 | 12 | def pull_calendar(d): 13 | """Pulls the calendar from the Penn website and 14 | filters out which events are 2 weeks away from date d. 
15 | 16 | :param d: date object that specifies the date 17 | """ 18 | pulled_calendar = cache_get("calendar:3year", datetime.timedelta(weeks=1), calendar.pull_3year) 19 | within_range = [] 20 | for event in pulled_calendar: 21 | start = event["end"] 22 | event_date = datetime.datetime.strptime(start, "%Y-%m-%d").date() 23 | time_diff = event_date - d 24 | if time_diff.total_seconds() > 0 and time_diff.total_seconds() <= 1209600: 25 | event["name"] = re.split( 26 | "Monday|Tuesday|Wednesday|Thursday|Friday|Saturday|Sunday", event["name"] 27 | )[0].strip() 28 | event["name"] = re.split(r"\($", event["name"])[0].strip() 29 | event["name"] = event["name"].replace("\\", "") 30 | if "Advance Registration" in event["name"]: 31 | event["name"] = "Advance Registration" 32 | within_range.append(event) 33 | return within_range 34 | 35 | 36 | def pull_calendar_response(d): 37 | calendar = pull_calendar(d) 38 | return jsonify({"calendar": calendar}) 39 | 40 | 41 | def pull_todays_calendar(): 42 | """Returns array of events which are 2 weeks away 43 | from today 44 | """ 45 | est = timezone("EST") 46 | now = datetime.datetime.now(est) 47 | today = now.date() 48 | return pull_calendar(today) 49 | 50 | 51 | @app.route("/calendar/", methods=["GET"]) 52 | def pull_today(): 53 | """Returns JSON object with all events 2 weeks from the 54 | current date. 55 | """ 56 | est = timezone("EST") 57 | now = datetime.datetime.now(est) 58 | today = now.date() 59 | # return pull_calendar_response(today) 60 | return jsonify({"calendar": []}) 61 | 62 | 63 | @app.route("/calendar/", methods=["GET"]) 64 | def pull_date(date): 65 | """Return JSON object with all events 2 weeks from the 66 | date passed in as an argument. 
67 | """ 68 | d = datetime.datetime.strptime(date, "%Y-%m-%d").date() 69 | return pull_calendar_response(d) 70 | -------------------------------------------------------------------------------- /server/dining/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pennlabs/labs-api-server/4f4c3299594fb93e0c7894965db2a297a44ab35c/server/dining/__init__.py -------------------------------------------------------------------------------- /server/dining/balance.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | 3 | import pandas as pd 4 | from bs4 import BeautifulSoup 5 | from flask import g, jsonify, request 6 | 7 | from server import app 8 | from server.auth import auth 9 | from server.models import Account, DiningBalance, sqldb 10 | from server.penndata import wharton 11 | 12 | 13 | @app.route("/dining/balance", methods=["POST"]) 14 | @auth() 15 | def save_dining_balance(): 16 | html = request.form.get("html") 17 | if "You are not currently signed up" in html: 18 | return jsonify({"hasPlan": False, "balance": None, "error": None}) 19 | 20 | soup = BeautifulSoup(html, "html.parser") 21 | divs = soup.findAll("div", {"class": "info-column"}) 22 | dollars = None 23 | swipes = None 24 | guest_swipes = None 25 | added_swipes = None 26 | if len(divs) >= 4: 27 | for div in divs[:4]: 28 | if "Dining Dollars" in div.text: 29 | dollars = float(div.span.text[1:]) 30 | elif "Regular Visits" in div.text: 31 | swipes = int(div.span.text) 32 | elif "Guest Visits" in div.text: 33 | guest_swipes = int(div.span.text) 34 | elif "Add-on Visits" in div.text: 35 | added_swipes = int(div.span.text) 36 | else: 37 | return jsonify({"success": False, "error": "Something went wrong parsing HTML."}), 400 38 | 39 | total_swipes = swipes + added_swipes 40 | dining_balance = DiningBalance( 41 | account_id=g.account.id, 42 | dining_dollars=dollars, 43 | 
swipes=total_swipes, 44 | guest_swipes=guest_swipes, 45 | ) 46 | sqldb.session.add(dining_balance) 47 | sqldb.session.commit() 48 | 49 | balance = {"dollars": dollars, "swipes": total_swipes, "guest_swipes": guest_swipes} 50 | return jsonify({"hasPlan": True, "balance": balance, "error": None}) 51 | 52 | 53 | @app.route("/dining/balance", methods=["GET"]) 54 | @auth(nullable=False) 55 | def get_dining_balance(): 56 | account = g.account 57 | if not account: 58 | # DEPRECATED 59 | try: 60 | account = Account.get_account() 61 | except ValueError as e: 62 | return jsonify({"success": False, "error": str(e)}), 400 63 | 64 | dining_balance = ( 65 | DiningBalance.query.filter_by(account_id=account.id) 66 | .order_by(DiningBalance.created_at.desc()) 67 | .first() 68 | ) 69 | 70 | if dining_balance: 71 | dining_dollars = dining_balance.dining_dollars 72 | swipes = dining_balance.swipes 73 | guest_swipes = dining_balance.guest_swipes 74 | created_at = dining_balance.created_at 75 | timestamp = created_at.strftime("%Y-%m-%dT%H:%M:%S") + "-{}".format( 76 | wharton.get_dst_gmt_timezone() 77 | ) 78 | 79 | return jsonify( 80 | { 81 | "balance": { 82 | "dining_dollars": dining_dollars, 83 | "swipes": swipes, 84 | "guest_swipes": guest_swipes, 85 | "timestamp": timestamp, 86 | } 87 | } 88 | ) 89 | else: 90 | return jsonify({"balance": None}) 91 | 92 | 93 | @app.route("/dining/balances", methods=["GET"]) 94 | @auth() 95 | def get_average_balances_by_day(): 96 | start_date_str = request.args.get("start_date") 97 | end_date_str = request.args.get("end_date") 98 | 99 | if start_date_str and end_date_str: 100 | start_date = datetime.datetime.strptime(start_date_str, "%Y-%m-%d") 101 | end_date = datetime.datetime.strptime(end_date_str, "%Y-%m-%d") 102 | dining_balance = DiningBalance.query.filter_by(account_id=g.account.id).filter( 103 | DiningBalance.created_at >= start_date, DiningBalance.created_at <= end_date 104 | ) 105 | else: 106 | dining_balance = 
DiningBalance.query.filter_by(account_id=g.account.id) 107 | 108 | balance_array = [] 109 | if dining_balance: 110 | for balance in dining_balance: 111 | balance_array.append( 112 | { 113 | "dining_dollars": balance.dining_dollars, 114 | "swipes": balance.swipes, 115 | "guest_swipes": balance.guest_swipes, 116 | "timestamp": balance.created_at.strftime("%Y-%m-%d"), 117 | } 118 | ) 119 | 120 | df = pd.DataFrame(balance_array).groupby("timestamp").agg(lambda x: x.mean()).reset_index() 121 | return jsonify({"balance": df.to_dict("records")}) 122 | 123 | return jsonify({"balance": None}) 124 | 125 | 126 | @app.route("/dining/projection", methods=["GET"]) 127 | @auth() 128 | def get_dining_projection(): 129 | dining_balance = DiningBalance.query.filter_by(account_id=g.account.id) 130 | date = request.args.get("date") 131 | 132 | if date: 133 | date = datetime.datetime.strptime(date, "%Y-%m-%d").date() 134 | else: 135 | today = datetime.date.today() 136 | month = int(today.strftime("%m")) 137 | if month <= 5: 138 | date = today.replace(month=5, day=5) 139 | elif month <= 8: 140 | date = today.replace(month=8, day=20) 141 | else: 142 | date = today.replace(month=12, day=15) 143 | 144 | balance_array = [] 145 | if dining_balance: 146 | for balance in dining_balance: 147 | balance_array.append( 148 | { 149 | "dining_dollars": balance.dining_dollars, 150 | "swipes": balance.swipes, 151 | "timestamp": balance.created_at.strftime("%Y-%m-%d"), 152 | } 153 | ) 154 | 155 | df = pd.DataFrame(balance_array).groupby("timestamp").agg(lambda x: x.mean()).reset_index() 156 | 157 | if df.iloc[-1, 0] == 0.0 and df.iloc[-1, 1] == 0.0: 158 | return jsonify( 159 | { 160 | "projection": { 161 | "swipes_day_left": 0.0, 162 | "dining_dollars_day_left": 0.0, 163 | "swipes_left_on_date": 0.0, 164 | "dollars_left_on_date": 0.0, 165 | } 166 | } 167 | ) 168 | 169 | df["timestamp"] = pd.to_datetime(df["timestamp"], format="%Y-%m-%d") 170 | 171 | last_day_before_sem = df[(df["dining_dollars"] == 0) & 
(df["swipes"] == 0)] 172 | if last_day_before_sem.any().any(): 173 | last_zero_timestamp = last_day_before_sem.tail(1).iloc[0]["timestamp"] 174 | df = df[df["timestamp"] > last_zero_timestamp] 175 | 176 | if len(df.index) <= 5: 177 | return jsonify({"success": False, "error": "Insufficient previous transactions"}), 501 178 | 179 | num_days = abs((df.tail(1).iloc[0]["timestamp"] - df.head(1).iloc[0]["timestamp"]).days) + 1 180 | num_swipes = df.head(1).iloc[0]["swipes"] - df.tail(1).iloc[0]["swipes"] 181 | num_dollars = df.head(1).iloc[0]["dining_dollars"] - df.tail(1).iloc[0]["dining_dollars"] 182 | swipes_per_day = num_swipes / num_days 183 | dollars_per_day = num_dollars / num_days 184 | 185 | swipe_days_left = df.tail(1).iloc[0]["swipes"] / swipes_per_day if num_swipes else 0.0 186 | dollars_days_left = ( 187 | df.tail(1).iloc[0]["dining_dollars"] / dollars_per_day if num_dollars else 0.0 188 | ) 189 | 190 | day_difference = abs((date - datetime.date.today()).days) + 1 191 | swipes_left = ( 192 | df.tail(1).iloc[0]["swipes"] - (swipes_per_day * day_difference) if num_swipes else 0.0 193 | ) 194 | dollars_left = ( 195 | df.tail(1).iloc[0]["dining_dollars"] - (dollars_per_day * day_difference) 196 | if num_dollars 197 | else 0.0 198 | ) 199 | 200 | if swipes_left <= 0: 201 | swipes_left = 0.0 202 | if dollars_left <= 0: 203 | dollars_left = 0.0 204 | 205 | return jsonify( 206 | { 207 | "projection": { 208 | "swipes_day_left": swipe_days_left, 209 | "dining_dollars_day_left": dollars_days_left, 210 | "swipes_left_on_date": swipes_left, 211 | "dollars_left_on_date": dollars_left, 212 | } 213 | } 214 | ) 215 | 216 | return jsonify({"projection": None}) 217 | 218 | 219 | # DEPRECATED 220 | @app.route("/dining/balance/v2", methods=["POST"]) 221 | def parse_and_save_dining_balance(): 222 | try: 223 | account = Account.get_account() 224 | except ValueError as e: 225 | return jsonify({"success": False, "error": str(e)}), 400 226 | 227 | html = request.form.get("html") 228 
| if "You are not currently signed up" in html: 229 | return jsonify({"hasPlan": False, "balance": None, "error": None}) 230 | 231 | soup = BeautifulSoup(html, "html.parser") 232 | divs = soup.findAll("div", {"class": "info-column"}) 233 | dollars = None 234 | swipes = None 235 | guest_swipes = None 236 | added_swipes = None 237 | if len(divs) >= 4: 238 | for div in divs[:4]: 239 | if "Dining Dollars" in div.text: 240 | dollars = float(div.span.text[1:]) 241 | elif "Regular Visits" in div.text: 242 | swipes = int(div.span.text) 243 | elif "Guest Visits" in div.text: 244 | guest_swipes = int(div.span.text) 245 | elif "Add-on Visits" in div.text: 246 | added_swipes = int(div.span.text) 247 | else: 248 | return jsonify({"success": False, "error": "Something went wrong parsing HTML."}), 400 249 | 250 | total_swipes = swipes + added_swipes 251 | dining_balance = DiningBalance( 252 | account_id=account.id, 253 | dining_dollars=dollars, 254 | swipes=total_swipes, 255 | guest_swipes=guest_swipes, 256 | ) 257 | sqldb.session.add(dining_balance) 258 | sqldb.session.commit() 259 | 260 | balance = {"dollars": dollars, "swipes": total_swipes, "guest_swipes": guest_swipes} 261 | return jsonify({"hasPlan": True, "balance": balance, "error": None}) 262 | -------------------------------------------------------------------------------- /server/dining/diningImages.csv: -------------------------------------------------------------------------------- 1 | id,imageURL 2 | 593,https://s3.us-east-2.amazonaws.com/labs.api/dining/1920-commons.jpg 3 | 636,https://s3.us-east-2.amazonaws.com/labs.api/dining/Hill+House.jpg 4 | 637,https://s3.us-east-2.amazonaws.com/labs.api/dining/kceh.jpg 5 | 638,https://s3.us-east-2.amazonaws.com/labs.api/dining/hillel.jpg 6 | 639,https://s3.us-east-2.amazonaws.com/labs.api/dining/houston.jpg 7 | 642,https://s3.us-east-2.amazonaws.com/labs.api/dining/Penn.JoesCafe-Int5.72W.jpg 8 | 1442,https://s3.us-east-2.amazonaws.com/labs.api/dining/nch.jpg 9 | 
747,https://s3.us-east-2.amazonaws.com/labs.api/dining/mcclelland.jpg 10 | 1057,https://s3.us-east-2.amazonaws.com/labs.api/dining/gourmetgrocer.jpg 11 | 1163,https://s3.us-east-2.amazonaws.com/labs.api/dining/starbucks.jpg 12 | 1731,https://s3.us-east-2.amazonaws.com/labs.api/dining/nch.jpg 13 | 1732,https://s3.us-east-2.amazonaws.com/labs.api/dining/MBA+Cafe.jpg 14 | 1733,https://s3.us-east-2.amazonaws.com/labs.api/dining/Pret+A+Manger.jpg 15 | 641,https://s3.us-east-2.amazonaws.com/labs.api/dining/accenture.png 16 | -------------------------------------------------------------------------------- /server/dining/diningRedis.py: -------------------------------------------------------------------------------- 1 | import csv 2 | import json 3 | import os 4 | 5 | import redis 6 | 7 | from server import app 8 | 9 | 10 | db = redis.StrictRedis().from_url(app.config["REDIS_URL"]) 11 | with open(os.path.join(os.path.dirname(os.path.abspath(__file__)), "diningImages.csv")) as f: 12 | reader = csv.DictReader(f) 13 | for row in reader: 14 | db.set("venue:%s" % (row["id"]), json.dumps(row["imageURL"])) 15 | -------------------------------------------------------------------------------- /server/dining/hours_menus.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | 3 | from server import app, db 4 | from server.base import cached_route 5 | from server.penndata import din, dinV2 6 | 7 | 8 | @app.route("/dining/v2/venues", methods=["GET"]) 9 | def retrieve_venues_v2(): 10 | def get_data(): 11 | return dinV2.venues()["result_data"] 12 | 13 | # Cache the result for 24 hours 14 | td = datetime.timedelta(days=1) 15 | return cached_route("dining:v2:venues", td, get_data) 16 | 17 | 18 | @app.route("/dining/v2/menu//", methods=["GET"]) 19 | def retrieve_menu_v2(venue_id, date): 20 | def get_data(): 21 | return dinV2.menu(venue_id, date)["result_data"] 22 | 23 | # Cache the result for 24 hours 24 | td = datetime.timedelta(days=1) 
25 | return cached_route("dining:v2:menu:%s:%s" % (venue_id, date), td, get_data) 26 | 27 | 28 | @app.route("/dining/v2/item/", methods=["GET"]) 29 | def retrieve_item_v2(item_id): 30 | def get_data(): 31 | return dinV2.item(item_id)["result_data"] 32 | 33 | # Cache the result for 24 hours 34 | td = datetime.timedelta(days=1) 35 | return cached_route("dining:v2:item:%s" % item_id, td, get_data) 36 | 37 | 38 | @app.route("/dining/venues", methods=["GET"]) 39 | def retrieve_venues(): 40 | def get_data(): 41 | def sortByStart(elem): 42 | return elem["open"] 43 | 44 | json = din.venues()["result_data"] 45 | venues = json["document"]["venue"] 46 | for venue in venues: 47 | days = venue.get("dateHours") 48 | if days: 49 | for day in days: 50 | meals = day["meal"] 51 | new_meals = [] 52 | for meal in meals: 53 | if ( 54 | venue["name"] == "English House" 55 | and day["date"] <= "2020-03-13" 56 | and meal["type"] == "Lunch" 57 | ): 58 | # Hack to fix English House hours during Spring Break 2020 because Bon Appetit won't do it 59 | # THIS SHOULD BE REMOVED AFTER SPRING BREAK 60 | continue 61 | new_meals.append(meal) 62 | new_meals.sort(key=sortByStart) 63 | day["meal"] = new_meals 64 | 65 | imageUrlJSON = db.get("venue:%s" % (str(venue["id"]))) 66 | if imageUrlJSON: 67 | venue["imageURL"] = imageUrlJSON.decode("utf8").replace('"', "") 68 | else: 69 | venue["imageURL"] = None 70 | return json 71 | 72 | # Cache the result for 24 hours 73 | # TEMPORARILY CHANGED CACHE TO 15 MINUTES WHILE BON APPETIT WORKS TO FIX API 74 | td = datetime.timedelta(minutes=15) 75 | return cached_route("dining:venues", td, get_data) 76 | 77 | 78 | @app.route("/dining/hours/", methods=["GET"]) 79 | def retrieve_hours(venue_id): 80 | def get_data(): 81 | return dinV2.hours(venue_id)["result_data"] 82 | 83 | # Cache the result for 24 hours 84 | td = datetime.timedelta(days=1) 85 | return cached_route("dining:v2:hours:%s" % venue_id, td, get_data) 86 | 87 | 88 | @app.route("/dining/weekly_menu/", 
methods=["GET"]) 89 | def retrieve_weekly_menu(venue_id): 90 | # Cache the result for 24 hours 91 | td = datetime.timedelta(days=1) 92 | 93 | def get_data(): 94 | menu = din.menu_weekly(venue_id) 95 | return menu["result_data"] 96 | 97 | return cached_route("dining:venues:weekly:%s" % venue_id, td, get_data) 98 | 99 | 100 | @app.route("/dining/daily_menu/", methods=["GET"]) 101 | def retrieve_daily_menu(venue_id): 102 | now = datetime.datetime.today() 103 | end_time = datetime.datetime(now.year, now.month, now.day) + datetime.timedelta(hours=4) 104 | 105 | def get_data(): 106 | return din.menu_daily(venue_id)["result_data"] 107 | 108 | return cached_route("dining:venues:daily:%s" % venue_id, end_time - now, get_data) 109 | -------------------------------------------------------------------------------- /server/dining/preferences.py: -------------------------------------------------------------------------------- 1 | from flask import g, jsonify, request 2 | from sqlalchemy import func 3 | 4 | from server import app, sqldb 5 | from server.auth import auth 6 | from server.models import DiningPreference, User 7 | 8 | 9 | @app.route("/dining/preferences", methods=["POST"]) 10 | @auth(nullable=True) 11 | def save_dining_preferences(): 12 | try: 13 | user = User.get_or_create() 14 | except ValueError as e: 15 | return jsonify({"success": False, "error": str(e)}) 16 | 17 | venues = request.form.get("venues") 18 | 19 | # delete old preferences for user 20 | DiningPreference.query.filter_by(user_id=user.id).delete() 21 | 22 | if venues: 23 | venue_ids = [int(x) for x in venues.split(",")] 24 | 25 | account_id = g.account.id if g.account else None 26 | for venue_id in venue_ids: 27 | dining_preference = DiningPreference( 28 | user_id=user.id, account=account_id, venue_id=venue_id 29 | ) 30 | sqldb.session.add(dining_preference) 31 | sqldb.session.commit() 32 | 33 | return jsonify({"success": True, "error": None}) 34 | 35 | 36 | @app.route("/dining/preferences", 
methods=["GET"]) 37 | def get_dining_preferences(): 38 | try: 39 | user = User.get_or_create() 40 | except ValueError: 41 | return jsonify({"preferences": []}) 42 | 43 | preferences = ( 44 | sqldb.session.query(DiningPreference.venue_id, func.count(DiningPreference.venue_id)) 45 | .filter_by(user_id=user.id) 46 | .group_by(DiningPreference.venue_id) 47 | .all() 48 | ) 49 | preference_arr = [{"venue_id": x[0], "count": x[1]} for x in preferences] 50 | return jsonify({"preferences": preference_arr}) 51 | -------------------------------------------------------------------------------- /server/dining/transactions.py: -------------------------------------------------------------------------------- 1 | import csv 2 | import datetime 3 | 4 | from flask import g, jsonify, request 5 | 6 | from server import app, sqldb 7 | from server.auth import auth 8 | from server.models import Account, DiningTransaction 9 | 10 | 11 | @app.route("/dining/transactions", methods=["POST"]) 12 | @auth() 13 | def save_dining_dollar_transactions(): 14 | account = g.account 15 | if not account: 16 | # DEPRECATED 17 | try: 18 | account = Account.get_account() 19 | except ValueError as e: 20 | return jsonify({"success": False, "error": str(e)}), 400 21 | 22 | last_transaction = ( 23 | sqldb.session.query(DiningTransaction.date) 24 | .filter_by(account_id=account.id) 25 | .order_by(DiningTransaction.date.desc()) 26 | .first() 27 | ) 28 | 29 | decoded_content = request.form.get("transactions") 30 | cr = csv.reader(decoded_content.splitlines(), delimiter=",") 31 | 32 | # Create list of rows, remove headers, and reverse so in order of date 33 | row_list = list(cr) 34 | row_list.pop(0) 35 | row_list.reverse() 36 | 37 | for row in row_list: 38 | if len(row) == 4: 39 | if row[0] == "No transaction history found for this date range.": 40 | continue 41 | else: 42 | date = datetime.datetime.strptime(row[0], "%m/%d/%Y %I:%M%p") 43 | if last_transaction is None or date > last_transaction.date: 44 | 
transaction = DiningTransaction( 45 | account_id=account.id, 46 | date=date, 47 | description=row[1], 48 | amount=float(row[2]), 49 | balance=float(row[3]), 50 | ) 51 | sqldb.session.add(transaction) 52 | sqldb.session.commit() 53 | 54 | return jsonify({"success": True, "error": None}) 55 | 56 | 57 | @app.route("/dining/transactions", methods=["GET"]) 58 | @auth(nullable=True) 59 | def get_dining_dollar_transactions(): 60 | account = g.account 61 | if not account: 62 | # DEPRECATED 63 | try: 64 | account = Account.get_account() 65 | except ValueError as e: 66 | return jsonify({"success": False, "error": str(e)}), 400 67 | 68 | transactions = ( 69 | sqldb.session.query(DiningTransaction) 70 | .filter_by(account_id=account.id) 71 | .order_by(DiningTransaction.date.desc()) 72 | ) 73 | 74 | results = [] 75 | 76 | for transaction in transactions: 77 | date = datetime.datetime.strftime(transaction.date, "%Y-%m-%dT%H:%M:%S") 78 | results.append( 79 | { 80 | "date": date, 81 | "description": transaction.description, 82 | "amount": transaction.amount, 83 | "balance": transaction.balance, 84 | } 85 | ) 86 | 87 | return jsonify({"results": results}) 88 | -------------------------------------------------------------------------------- /server/directory.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | 3 | from flask import jsonify, request 4 | 5 | from server import app 6 | from server.base import cached_route 7 | from server.penndata import penn_dir 8 | 9 | 10 | @app.route("/directory/search", methods=["GET"]) 11 | def detail_search(): 12 | if "name" not in request.args: 13 | return jsonify({"error": "Please specify search parameters in the query string"}) 14 | 15 | name = request.args["name"] 16 | 17 | def get_data(): 18 | 19 | arr = name.split() 20 | params = [] 21 | 22 | if len(arr) > 1: 23 | 24 | if arr[0][-1] == ",": 25 | params = [{"last_name": arr[0][:-1], "first_name": arr[1]}] 26 | else: 27 | params = [ 28 | 
{"last_name": arr[-1], "first_name": arr[0]}, 29 | {"last_name": arr[0], "first_name": arr[-1]}, 30 | ] 31 | else: 32 | params = [{"last_name": name}, {"first_name": name}] 33 | 34 | ids = set() 35 | final = [] 36 | for param in params: 37 | param["affiliation"] = "FAC" 38 | for param in params: 39 | data = penn_dir.search(param) 40 | for result in data["result_data"]: 41 | person_id = result["person_id"] 42 | if person_id not in ids: 43 | final.append(result) 44 | ids.add(person_id) 45 | 46 | return {"result_data": final} 47 | 48 | td = datetime.timedelta(days=30) 49 | return cached_route("directory:search:%s" % name, td, get_data) 50 | 51 | 52 | @app.route("/directory/person/", methods=["GET"]) 53 | def person_details(person_id): 54 | td = datetime.timedelta(days=30) 55 | 56 | def get_data(): 57 | return penn_dir.person_details(person_id)["result_data"][0] 58 | 59 | return cached_route("directory:person:%s" % person_id, td, get_data) 60 | -------------------------------------------------------------------------------- /server/event.py: -------------------------------------------------------------------------------- 1 | import pytz 2 | from flask import jsonify 3 | 4 | from server import app 5 | from server.models import Event 6 | 7 | 8 | utc = pytz.timezone("UTC") 9 | eastern = pytz.timezone("US/Eastern") 10 | 11 | 12 | @app.route("/events/", methods=["GET"]) 13 | def get_events(type): 14 | events = Event.query.filter_by(type=type) 15 | 16 | events_dict = [ 17 | { 18 | "name": x.name, 19 | "description": x.description, 20 | "image_url": x.image_url, 21 | "start_time": utc.localize(x.start_time).astimezone(eastern).isoformat(), 22 | "end_time": utc.localize(x.end_time).astimezone(eastern).isoformat(), 23 | "email": x.email, 24 | "website": x.website, 25 | "facebook": x.facebook, 26 | } 27 | for x in events 28 | ] 29 | 30 | return jsonify({"events": events_dict}) 31 | -------------------------------------------------------------------------------- 
/server/fitness.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | 3 | from server import app 4 | from server.base import cached_route 5 | from server.penndata import fitness 6 | 7 | 8 | @app.route("/fitness/usage", methods=["GET"]) 9 | def fitness_usage(): 10 | def get_data(): 11 | return {"results": fitness.get_usage()} 12 | 13 | td = datetime.timedelta(minutes=30) 14 | return cached_route("fitness:usage", td, get_data) 15 | 16 | 17 | @app.route("/fitness/schedule", methods=["GET"]) 18 | def fitness_schedule(): 19 | def get_data(): 20 | return {"schedule": fitness.get_schedule()} 21 | 22 | td = datetime.timedelta(hours=1) 23 | return cached_route("fitness:schedule", td, get_data) 24 | -------------------------------------------------------------------------------- /server/homepage.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | 3 | import requests 4 | from flask import g, jsonify, request 5 | from penn.base import APIError 6 | from sqlalchemy import and_, func 7 | 8 | from server import app, sqldb 9 | from server.account.courses import get_courses_in_N_days, get_todays_courses 10 | from server.auth import auth 11 | from server.base import cache_get 12 | from server.calendar3year import pull_todays_calendar 13 | from server.models import Account, DiningPreference, HomeCell, LaundryPreference, User 14 | from server.news import fetch_frontpage_article 15 | from server.portal.posts import get_posts_for_account 16 | from server.studyspaces.groups import get_invites_for_account 17 | from server.studyspaces.models import StudySpacesBooking 18 | from server.studyspaces.reservations import get_reservations 19 | 20 | 21 | @app.route("/homepage", methods=["GET"]) 22 | @auth(nullable=True) 23 | def get_homepage(): 24 | # Find user in database 25 | try: 26 | user = User.get_or_create() 27 | except ValueError as e: 28 | response = jsonify({"err": [str(e)]}) 29 
| response.status_code = 400 30 | return response 31 | 32 | account = g.account 33 | if not account: 34 | try: 35 | account = Account.get_account() 36 | except ValueError: 37 | account = None 38 | 39 | if account and account.email and user.email is None: 40 | user.email = account.email 41 | sqldb.session.commit() 42 | 43 | cells = [] 44 | 45 | sessionid = request.args.get("sessionid") 46 | reservations_cell = get_reservations_cell(user, sessionid) 47 | if reservations_cell: 48 | cells.append(reservations_cell) 49 | 50 | # if account and account.is_student(): 51 | # courses = get_courses_cell(account) 52 | # if courses: 53 | # cells.append(courses) 54 | 55 | courses = request.args.get("hasCourses") 56 | if courses == "today": 57 | cells.append(HomeCell("courses", None, 200)) 58 | elif courses == "tomorrow": 59 | cells.append(HomeCell("courses", None, 30)) 60 | 61 | laundry = get_top_laundry_cell(user) 62 | dining = get_dining_cell(user) 63 | cells.extend([dining, laundry]) 64 | 65 | gsr_locations = get_gsr_locations_cell(user, account) 66 | cells.append(gsr_locations) 67 | 68 | version = request.args.get("version") 69 | if version and version <= get_current_version(): 70 | update_cell = HomeCell("new-version-released", None, 10000) 71 | cells.append(update_cell) 72 | 73 | calendar = get_university_event_cell() 74 | if calendar: 75 | cells.append(calendar) 76 | 77 | news = get_news_cell() 78 | if news: 79 | cells.append(news) 80 | 81 | feature = get_feature_announcement_cell() 82 | if feature: 83 | cells.append(feature) 84 | 85 | posts = get_post_cells(account) 86 | if posts: 87 | cells.extend(posts) 88 | 89 | groups_enabled = request.args.get("groupsEnabled") 90 | if groups_enabled: 91 | group_invites = get_group_invite_cell(account) 92 | if group_invites: 93 | cells.append(group_invites) 94 | 95 | cells.sort(key=lambda x: x.weight, reverse=True) 96 | 97 | response = jsonify({"cells": [x.getCell() for x in cells]}) 98 | response.status_code = 200 99 | return 
response 100 | 101 | 102 | def get_dining_cell(user): 103 | # returns a dining cell 104 | preferences = sqldb.session.query(DiningPreference.venue_id).filter_by(user_id=user.id) 105 | venue_ids = [x.venue_id for x in preferences] 106 | defaults_ids = [593, 1442, 636] 107 | if len(venue_ids) == 0: 108 | venue_ids = defaults_ids 109 | elif len(venue_ids) == 1: 110 | venue_ids.extend(defaults_ids) 111 | venue_ids = list(set(venue_ids))[:3] 112 | 113 | info = {"venues": venue_ids} 114 | return HomeCell("dining", info, 100) 115 | 116 | 117 | def get_laundry_cells(user): 118 | # returns a list of laundry cells 119 | preferences = LaundryPreference.query.filter_by(user_id=user.id) 120 | room_ids = [x.room_id for x in preferences] 121 | 122 | # If the user has no preferences, select Bishop White 123 | if not room_ids: 124 | room_ids.append(0) 125 | 126 | return [HomeCell("laundry", {"room_id": x}) for x in room_ids] 127 | 128 | 129 | def get_top_laundry_cell(user): 130 | # returns user's top laundry cell 131 | top_preference = LaundryPreference.query.filter_by(user_id=user.id).first() 132 | # If no top choice, select bishop white 133 | if top_preference: 134 | return HomeCell("laundry", {"room_id": top_preference.room_id}, 5) 135 | return HomeCell("laundry", {"room_id": 0}, 5) 136 | 137 | 138 | def get_gsr_locations_cell(user, account): 139 | # returns a gsr cell with list of locations 140 | # if student is a Wharton student, show at the top 141 | top_gsrs_query = ( 142 | sqldb.session.query(StudySpacesBooking.lid) 143 | .filter(and_(StudySpacesBooking.user == user.id, StudySpacesBooking.lid.isnot(None))) 144 | .group_by(StudySpacesBooking.lid) 145 | .order_by(func.count(StudySpacesBooking.lid).desc()) 146 | .limit(2) 147 | .all() 148 | ) 149 | preferences = [x for (x,) in top_gsrs_query] 150 | 151 | showHuntsman = account is None or account.email is None or "wharton" in account.email 152 | if showHuntsman: 153 | default_gids = [1, 1086] 154 | weighting = 300 155 | else: 
156 | default_gids = [1086, 2587] 157 | weighting = 10 158 | 159 | # Remove duplicates while retaining relative ordering 160 | def f7(seq): 161 | seen = set() 162 | seen_add = seen.add 163 | return [x for x in seq if not (x in seen or seen_add(x))] 164 | 165 | gids = f7(preferences + default_gids)[:2] 166 | return HomeCell("gsr-locations", gids, weighting) 167 | 168 | 169 | def get_university_event_cell(): 170 | # returns a university notification cell 171 | calendar = pull_todays_calendar() 172 | if calendar: 173 | return HomeCell("calendar", calendar, 40) 174 | else: 175 | return None 176 | 177 | 178 | def get_news_cell(): 179 | # returns a news cell 180 | article = fetch_frontpage_article() 181 | if article: 182 | return HomeCell("news", article, 50) 183 | else: 184 | return None 185 | 186 | 187 | def get_feature_announcement_cell(): 188 | # returns an announcement for a new Penn Mobile feature 189 | now = datetime.datetime.now().date() 190 | start = datetime.date(2019, 4, 12) 191 | end = datetime.date(2019, 4, 13) 192 | if now < start or now > end: 193 | return None 194 | 195 | info = { 196 | "source": "Spring Fling", 197 | "title": "Tap to view the Fling schedule, performers, and more!", 198 | "description": None, 199 | "timestamp": "Saturday 4/13", 200 | "image_url": "ADD IMAGE HERE", 201 | "feature": "Spring Fling", 202 | } 203 | return HomeCell("feature", info, 2000) 204 | 205 | 206 | def get_courses_cell(account): 207 | # return a cell containing today's courses 208 | courses = get_todays_courses(account) 209 | 210 | # Return today's courses if last course has not yet ended 211 | now = datetime.datetime.now() 212 | weekday = int(now.strftime("%w")) 213 | if courses: 214 | for course in courses: 215 | end_time = datetime.datetime.strptime(course["end_time"], "%I:%M %p") 216 | if now.hour < end_time.hour or ( 217 | now.hour == end_time.hour and now.minute < end_time.minute 218 | ): 219 | return HomeCell("courses", {"weekday": "Today", "courses": courses}, 
200) 220 | elif weekday == 6: 221 | # Return Monday's courses if today is Saturday 222 | courses = get_courses_in_N_days(account, 2) 223 | return HomeCell("courses", {"weekday": "Monday", "courses": courses}, 30) 224 | elif weekday != 0: 225 | # Return empty cell if there are no courses today and today isn't Saturday or Sunday 226 | return HomeCell("courses", {"weekday": "Today", "courses": []}, 30) 227 | 228 | # Return tomorrow's courses if today's last course has ended 229 | courses = get_courses_in_N_days(account, 1) 230 | return HomeCell("courses", {"weekday": "Tomorrow", "courses": courses}, 30) 231 | 232 | 233 | def get_reservations_cell(user, sessionid): 234 | # returns a cell with the user's reservations, weighted extremely high to appear at the top 235 | # returns None if user has no reservations 236 | try: 237 | reservations = get_reservations(user.email, sessionid, 1, 2) 238 | if reservations: 239 | return HomeCell("reservations", reservations, 1000) 240 | else: 241 | return None 242 | except APIError: 243 | return None 244 | except requests.exceptions.Timeout: 245 | return None 246 | 247 | 248 | def get_post_cells(account): 249 | posts = get_posts_for_account(account) 250 | cells = [] 251 | for post in posts: 252 | cell = HomeCell("post", post, 15000) 253 | cells.append(cell) 254 | return cells 255 | 256 | 257 | def get_current_version(): 258 | def get_data(): 259 | r = requests.get(url="http://itunes.apple.com/lookup?bundleId=org.pennlabs.PennMobile") 260 | json = r.json() 261 | version = json["results"][0]["version"] 262 | return version 263 | 264 | td = datetime.timedelta(days=1) 265 | return cache_get("ios_version", td, get_data) 266 | 267 | 268 | def get_group_invite_cell(account): 269 | try: 270 | invites = get_invites_for_account(account) 271 | 272 | if invites: 273 | return HomeCell("invites", invites, 1001) 274 | else: 275 | return None 276 | 277 | except APIError: 278 | return None 279 | 
-------------------------------------------------------------------------------- /server/housing.py: -------------------------------------------------------------------------------- 1 | import math 2 | from datetime import datetime 3 | 4 | from bs4 import BeautifulSoup 5 | from flask import g, jsonify, request 6 | from sqlalchemy.exc import IntegrityError 7 | 8 | from server import app, sqldb 9 | from server.auth import auth 10 | 11 | 12 | class Housing(sqldb.Model): 13 | account = sqldb.Column(sqldb.VARCHAR(255), sqldb.ForeignKey("account.id"), primary_key=True) 14 | house = sqldb.Column(sqldb.Text, nullable=True) 15 | location = sqldb.Column(sqldb.Text, nullable=True) 16 | address = sqldb.Column(sqldb.Text, nullable=True) 17 | off_campus = sqldb.Column(sqldb.Boolean, nullable=True) 18 | start = sqldb.Column(sqldb.Integer, primary_key=True, default=-1) 19 | end = sqldb.Column(sqldb.Integer, default=-1) 20 | created_at = sqldb.Column(sqldb.DateTime, server_default=sqldb.func.now()) 21 | 22 | 23 | @app.route("/housing", methods=["POST"]) 24 | @auth() 25 | def save_housing_info(): 26 | html = request.form.get("html") 27 | 28 | soup = BeautifulSoup(html, "html.parser") 29 | html = soup.prettify().strip().strip("\t\r\n") 30 | 31 | house, location, address = None, None, None 32 | main = soup.findAll("div", {"class": "interior-main-content col-md-6 col-md-push-3 md:mb-150"})[ 33 | 0 34 | ] 35 | 36 | off_campus = "You don't have any assignments at this time" in html 37 | if off_campus: 38 | # Off campus for 2020 - 2021 school year if today is after January and user has no assignments 39 | today = datetime.today() 40 | start = today.year if today.month > 1 else today.year - 1 41 | end = start + 1 42 | else: 43 | year_text, house_text = None, None 44 | headers = main.findAll("h3") 45 | for h3 in headers: 46 | if "Academic Year" in h3.text: 47 | year_text = h3.text 48 | elif "House Information" in h3.text: 49 | house_text = h3.text 50 | 51 | info = main.findAll("div", 
{"class": "col-md-8"})[0] 52 | paragraphs = info.findAll("p") 53 | room = paragraphs[0] 54 | address = paragraphs[1] 55 | 56 | split = year_text.strip().split(" ") 57 | start, end = split[len(split) - 3], split[len(split) - 1] 58 | 59 | split = house_text.split("-") 60 | house = split[1].strip() 61 | 62 | split = room.text.split(" ") 63 | location = split[0].strip() 64 | 65 | split = address.text.split(" ") 66 | address = split[0].strip() 67 | 68 | housing = Housing( 69 | account=g.account.id, 70 | house=house, 71 | location=location, 72 | address=address, 73 | off_campus=off_campus, 74 | start=start, 75 | end=end, 76 | ) 77 | 78 | try: 79 | sqldb.session.add(housing) 80 | sqldb.session.commit() 81 | except IntegrityError: 82 | sqldb.session.rollback() 83 | current_result = Housing.query.filter_by(account=g.account.id, start=housing.start).first() 84 | if current_result: 85 | if housing.off_campus or (housing.house and housing.location and housing.address): 86 | current_result.house = house 87 | current_result.location = location 88 | current_result.address = address 89 | current_result.off_campus = off_campus 90 | sqldb.session.commit() 91 | 92 | if housing.start: 93 | return jsonify( 94 | { 95 | "house": housing.house, 96 | "room": housing.location, 97 | "address": housing.address, 98 | "start": int(housing.start), 99 | "end": int(housing.end), 100 | "off_campus": housing.off_campus, 101 | } 102 | ) 103 | else: 104 | return jsonify({"error": "Unable to parse HTML."}), 400 105 | 106 | 107 | @app.route("/housing", methods=["GET"]) 108 | @auth() 109 | def get_housing_info(): 110 | today = datetime.today() 111 | year = today.year if today.month > 5 else today.year - 1 112 | housing = Housing.query.filter_by(account=g.account.id, start=year).first() 113 | if housing: 114 | return jsonify( 115 | { 116 | "result": { 117 | "house": housing.house, 118 | "room": housing.location, 119 | "address": housing.address, 120 | "start": housing.start, 121 | "end": housing.end, 122 
| "off_campus": housing.off_campus, 123 | } 124 | } 125 | ) 126 | else: 127 | return jsonify({"result": None}) 128 | 129 | 130 | @app.route("/housing/delete", methods=["POST"]) 131 | @auth() 132 | def delete_housing_info(): 133 | Housing.query.filter_by(account=g.account.id).delete() 134 | sqldb.session.commit() 135 | return jsonify({"success": True}) 136 | 137 | 138 | @app.route("/housing/all", methods=["POST"]) 139 | @auth() 140 | def add_all_housing_info(): 141 | json_arr = request.get_json() 142 | for json in json_arr: 143 | house = json.get("house") 144 | room = json.get("room") 145 | address = json.get("address") 146 | start = json.get("start") 147 | end = json.get("end") 148 | off_campus = json.get("off_campus") 149 | try: 150 | housing = Housing( 151 | account=g.account.id, 152 | house=house, 153 | location=room, 154 | address=address, 155 | off_campus=off_campus, 156 | start=start, 157 | end=end, 158 | ) 159 | sqldb.session.add(housing) 160 | sqldb.session.commit() 161 | except IntegrityError: 162 | sqldb.session.rollback() 163 | current_result = Housing.query.filter_by(account=g.account.id, start=start).first() 164 | if current_result: 165 | current_result.house = house 166 | current_result.location = room 167 | current_result.address = address 168 | current_result.off_campus = off_campus 169 | sqldb.session.commit() 170 | 171 | return jsonify({"success": True}) 172 | 173 | 174 | def get_details_for_location(location): 175 | """ 176 | Ex: 403 Butcher (Bed space: a) 177 | Returns 403, 4, Butcher 178 | """ 179 | split = location.split(" ") 180 | room = int(split[0].strip()) 181 | floor = math.floor(room / 100) 182 | section = split[1].strip() 183 | 184 | return room, floor, section 185 | -------------------------------------------------------------------------------- /server/laundry.py: -------------------------------------------------------------------------------- 1 | import calendar 2 | import datetime 3 | 4 | from flask import g, jsonify, request 5 | 
@app.route("/laundry/halls", methods=["GET"])
def all_halls():
    """Return the current machine status of every laundry hall.

    Responds with {"halls": ...} on success, or an error payload when the
    upstream laundry API cannot be reached.
    """
    try:
        statuses = laundry.all_status()
    except HTTPError:
        return jsonify({"error": "The laundry api is currently unavailable."})
    return jsonify({"halls": statuses})
def safe_division(a, b):
    """Divide a by b rounded to 3 decimal places, returning 0 when b <= 0.

    The explicit float() cast in the original was redundant: the ``/``
    operator is already true division in Python 3.
    """
    return round(a / b, 3) if b > 0 else 0
# NOTE(review): the route pattern in the checked-in text had lost its URL
# converters ("/laundry/usage//--"); reconstructed from the function
# signature (hall_no, year, month, day) and the usage_shortcut redirect.
@app.route("/laundry/usage/<int:hall_no>/<int:year>-<int:month>-<int:day>", methods=["GET"])
def usage(hall_no, year, month, day):
    """Laundry usage history for a hall on a given date, cached for 15 minutes.

    :param hall_no: laundry hall id.
    :param year, month, day: date to compute the 30-day lookback from.
    """
    def get_data():
        return usage_data(hall_no, year, month, day)

    td = datetime.timedelta(minutes=15)
    return cached_route("laundry:usage:%s:%s-%s-%s" % (hall_no, year, month, day), td, get_data)
def save_data():
    """Retrieves current laundry info and saves it into the database."""

    # Minutes elapsed since local (EST) midnight identify the snapshot slot.
    eastern = timezone("EST")
    current = datetime.datetime.now(eastern)
    day_start = current.replace(hour=0, minute=0, second=0, microsecond=0)
    snapshot_date = current.date()
    snapshot_time = round((current - day_start).seconds / 60)

    with app.app_context():
        # Skip the write entirely if this minute was already recorded.
        already_saved = sqldb.session.query(
            exists().where(
                (LaundrySnapshot.date == snapshot_date)
                & (LaundrySnapshot.time == snapshot_time)
            )
        ).scalar()
        if already_saved:
            return

        # Translate hall names (keys of the live status feed) to numeric ids.
        hall_ids = {entry["hall_name"]: entry["id"] for entry in laundry.hall_id_list}
        machine_states = ["open", "running", "offline", "out_of_order"]
        for hall_name, room in laundry.all_status().items():
            snapshot = LaundrySnapshot(
                date=snapshot_date,
                time=snapshot_time,
                room=hall_ids[hall_name],
                washers=room["washers"]["open"],
                dryers=room["dryers"]["open"],
                total_washers=sum(room["washers"][state] for state in machine_states),
                total_dryers=sum(room["dryers"][state] for state in machine_states),
            )
            sqldb.session.add(snapshot)
        sqldb.session.commit()
@app.route("/laundry/preferences", methods=["GET"])
def get_laundry_preferences():
    """Return the laundry room ids saved by the current user.

    Responds with {"rooms": []} when no user can be resolved.
    """
    try:
        user = User.get_or_create()
    except ValueError:
        return jsonify({"rooms": []})

    saved = LaundryPreference.query.filter_by(user_id=user.id)
    return jsonify({"rooms": [pref.room_id for pref in saved]})
def fetch_frontpage_article():
    """Scrape thedp.com and return the lead front-page article as a dict.

    Returns None when the site is unreachable, the carousel div is absent,
    or any expected element (title, subtitle, timestamp, image, link) is
    missing from the page.
    """
    try:
        resp = requests.get(BASE_URL)
    except ConnectionError:
        return None

    html = resp.content.decode("utf8")
    soup = BeautifulSoup(html, "html5lib")

    frontpage = soup.find("div", {"class": "col-lg-6 col-md-5 col-sm-12 frontpage-carousel"})
    if not frontpage:
        return None

    # BUG FIX: the original referenced these names without initializing
    # them, so a page missing any sub-element raised NameError at the
    # all(...) check instead of returning None.
    title = subtitle = timestamp = imageurl = link = None

    title_html = frontpage.find("a", {"class": "frontpage-link large-link"})
    if title_html:
        link = title_html["href"]
        title = title_html.get_text()

    subtitle_html = frontpage.find("p")
    if subtitle_html:
        subtitle = subtitle_html.get_text()

    timestamp_html = frontpage.find("div", {"class": "timestamp"})
    if timestamp_html:
        timestamp = timestamp_html.get_text()

    image_html = frontpage.find("img")
    if image_html:
        imageurl = image_html["src"]

    if all(v is not None for v in [title, subtitle, timestamp, imageurl, link]):
        return {
            "source": "The Daily Pennsylvanian",
            "title": title,
            "subtitle": subtitle,
            "timestamp": timestamp,
            "image_url": imageurl,
            "article_url": link,
        }
    return None
@app.route("/notifications/send", methods=["POST"])
@auth()
def send_push_notification_to_account():
    """Send a push notification to the authenticated account's iOS device.

    Form fields: title, body. Responds 400 when no iOS token is registered.
    """
    title = request.form.get("title")
    body = request.form.get("body")
    token = NotificationToken.query.filter_by(account=g.account.id).first()

    if token is None or not token.ios_token:
        return jsonify({"error": "A device token has not been registered on the server."}), 400

    send_push_notification(token.ios_token, title, body, token.dev)
    return jsonify({"success": True})
@app.route("/notifications/send/token/internal", methods=["POST"])
@internal_auth
def send_test_push_notification_with_token():
    """Send a test push notification directly to a raw device token.

    Internal-only endpoint. Form fields: token, title, body. Always uses
    the development (sandbox) APNs environment.
    """
    token = request.form.get("token")
    title = request.form.get("title")
    body = request.form.get("body")
    # The original had a second, identical `if not token` branch below this
    # one; it was unreachable dead code and has been removed.
    if not token:
        return jsonify({"error": "Missing token."}), 400

    # Only development tokens can be tested (not production)
    send_push_notification(token, title, body, True)
    return jsonify({"success": True})
def get_notification_settings(account):
    """Map each of the account's notification setting names to its enabled flag."""
    rows = NotificationSetting.query.filter_by(account=account.id).all()
    return {row.setting: row.enabled for row in rows}
# API client singletons shared across the server, configured from the
# environment. Missing env vars pass None credentials to the clients.
din = Dining(getenv("DIN_USERNAME"), getenv("DIN_PASSWORD"))
dinV2 = DiningV2(getenv("DIN_USERNAME"), getenv("DIN_PASSWORD"))
reg = Registrar(getenv("REG_USERNAME"), getenv("REG_PASSWORD"))
penn_dir = Directory(getenv("DIR_USERNAME"), getenv("DIR_PASSWORD"))
map_search = Map(getenv("NEM_USERNAME"), getenv("NEM_PASSWORD"))
transit = Transit(getenv("TRANSIT_USERNAME"), getenv("TRANSIT_PASSWORD"))
laundry = Laundry()
studyspaces = StudySpaces(getenv("LIBCAL_ID"), getenv("LIBCAL_SECRET"))
# BUG FIX: `wharton = Wharton()` appeared twice (once here, once after
# `calendar`); the redundant second construction has been removed.
wharton = Wharton()
fitness = Fitness(getenv("FITNESS_TOKEN"))
# NOTE(review): this name shadows the stdlib `calendar` module within this
# module — intentional here, but confusing; consider renaming.
calendar = Calendar()
World", 20 | "ACCT": "Accounting", 21 | "AFRC": "Africana Studies", 22 | "AFST": "African Studies Program", 23 | "ALAN": "Asian Languages", 24 | "AMCS": "Applied Math & Computatnl Sci.", 25 | "ANAT": "Anatomy", 26 | "ANCH": "Ancient History", 27 | "ANEL": "Ancient Near East Languages", 28 | "ANTH": "Anthropology", 29 | "ARAB": "Arabic", 30 | "ARCH": "Architecture", 31 | "ARTH": "Art History", 32 | "ASAM": "Asian American Studies", 33 | "ASTR": "Astronomy", 34 | "BCHE": "Biochemistry (Undergrads)", 35 | "BE": "Bioengineering", 36 | "BENG": "Bengali", 37 | "BEPP": "Business Econ & Public Policy", 38 | "BFMD": "Benjamin Franklin Seminars-Med", 39 | "BIBB": "Biological Basis of Behavior", 40 | "BIOE": "Bioethics", 41 | "BIOL": "Biology", 42 | "BIOM": "Biomedical Studies", 43 | "BMB": "Biochemistry & Molecular Biophy", 44 | "BSTA": "Biostatistics", 45 | "CAMB": "Cell and Molecular Biology", 46 | "CBE": "Chemical & Biomolecular Engr", 47 | "CHEM": "Chemistry", 48 | "CHIN": "Chinese", 49 | "CINE": "Cinema Studies", 50 | "CIS": "Computer and Information Sci", 51 | "CIT": "Computer and Information Tech", 52 | "CLST": "Classical Studies", 53 | "COGS": "Cognitive Science", 54 | "COLL": "College", 55 | "COML": "Comparative Literature", 56 | "COMM": "Communications", 57 | "CPLN": "City Planning", 58 | "CRIM": "Criminology", 59 | "DEMG": "Demography", 60 | "DORT": "Orthodontics", 61 | "DOSP": "Oral Surgery and Pharmacology", 62 | "DPED": "Pediatric Dentistry", 63 | "DRST": "Restorative Dentistry", 64 | "DTCH": "Dutch", 65 | "DYNM": "Organizational Dynamics", 66 | "EALC": "East Asian Languages & Civilztn", 67 | "EAS": "Engineering & Applied Science", 68 | "ECON": "Economics", 69 | "EDUC": "Education", 70 | "EEUR": "East European", 71 | "ENGL": "English", 72 | "ENGR": "Engineering", 73 | "ENM": "Engineering Mathematics", 74 | "ENVS": "Environmental Studies", 75 | "EPID": "Epidemiology", 76 | "ESE": "Electric & Systems Engineering", 77 | "FNAR": "Fine Arts", 78 | "FNCE": "Finance", 
79 | "FOLK": "Folklore", 80 | "FREN": "French", 81 | "FRSM": "Non-Sas Freshman Seminar", 82 | "GAFL": "Government Administration", 83 | "GAS": "Graduate Arts & Sciences", 84 | "GCB": "Genomics & Comp. Biology", 85 | "GEOL": "Geology", 86 | "GREK": "Greek", 87 | "GRMN": "Germanic Languages", 88 | "GSWS": "Gender,Sexuality & Women's Stud", 89 | "GUJR": "Gujarati", 90 | "HCMG": "Health Care Management", 91 | "HEBR": "Hebrew", 92 | "HIND": "Hindi", 93 | "HIST": "History", 94 | "HPR": "Health Policy Research", 95 | "HSOC": "Health & Societies", 96 | "HSPV": "Historic Preservation", 97 | "HSSC": "History & Sociology of Science", 98 | "IMUN": "Immunology", 99 | "INTG": "Integrated Studies", 100 | "INTL": "International Programs", 101 | "INTR": "International Relations", 102 | "IPD": "Integrated Product Design", 103 | "ITAL": "Italian", 104 | "JPAN": "Japanese", 105 | "JWST": "Jewish Studies Program", 106 | "KORN": "Korean", 107 | "LALS": "Latin American & Latino Studies", 108 | "LARP": "Landscape Arch & Regional Plan", 109 | "LATN": "Latin", 110 | "LAW": "Law", 111 | "LGIC": "Logic, Information and Comp.", 112 | "LGST": "Legal Studies & Business Ethics", 113 | "LING": "Linguistics", 114 | "LSMP": "Life Sciences Management Prog", 115 | "MAPP": "Master of Applied Positive Psyc", 116 | "MATH": "Mathematics", 117 | "MEAM": "Mech Engr and Applied Mech", 118 | "MED": "Medical", 119 | "MGEC": "Management of Economics", 120 | "MGMT": "Management", 121 | "MKTG": "Marketing", 122 | "MLA": "Master of Liberal Arts Program", 123 | "MLYM": "Malayalam", 124 | "MMP": "Master of Medical Physics", 125 | "MSCI": "Military Science", 126 | "MSE": "Materials Science and Engineer", 127 | "MSSP": "Social Policy", 128 | "MTR": "Mstr Sci Transltl Research", 129 | "MUSA": "Master of Urban Spatial Analyt", 130 | "MUSC": "Music", 131 | "NANO": "Nanotechnology", 132 | "NELC": "Near Eastern Languages & Civlzt", 133 | "NETS": "Networked and Social Systems", 134 | "NGG": "Neuroscience", 135 | "NPLD": 
"Nonprofit Leadership", 136 | "NSCI": "Naval Science", 137 | "NURS": "Nursing", 138 | "OPIM": "Operations and Information Mgmt", 139 | "PERS": "Persian", 140 | "PHIL": "Philosophy", 141 | "PHRM": "Pharmacology", 142 | "PHYS": "Physics", 143 | "PPE": "Philosophy, Politics, Economics", 144 | "PRTG": "Portuguese", 145 | "PSCI": "Political Science", 146 | "PSYC": "Psychology", 147 | "PUBH": "Public Health Studies", 148 | "PUNJ": "Punjabi", 149 | "REAL": "Real Estate", 150 | "RELS": "Religious Studies", 151 | "ROML": "Romance Languages", 152 | "RUSS": "Russian", 153 | "SAST": "South Asia Studies", 154 | "SCND": "Scandinavian", 155 | "SKRT": "Sanskrit", 156 | "SLAV": "Slavic", 157 | "SOCI": "Sociology", 158 | "SPAN": "Spanish", 159 | "STAT": "Statistics", 160 | "STSC": "Science, Technology & Society", 161 | "SWRK": "Social Work", 162 | "TAML": "Tamil", 163 | "TCOM": "Telecommunications & Networking", 164 | "TELU": "Telugu", 165 | "THAR": "Theatre Arts", 166 | "TURK": "Turkish", 167 | "URBS": "Urban Studies", 168 | "URDU": "Urdu", 169 | "VCSN": "Clinical Studies - Nbc Elect", 170 | "VCSP": "Clinical Studies - Phila Elect", 171 | "VIPR": "Viper", 172 | "VISR": "Vet School Ind Study & Research", 173 | "VLST": "Visual Studies", 174 | "VMED": "Csp/Csn Medicine Courses", 175 | "WH": "Wharton Undergraduate", 176 | "WHCP": "Wharton Communication Pgm", 177 | "WHG": "Wharton Graduate", 178 | "WRIT": "Writing Program", 179 | "YDSH": "Yiddish", 180 | } 181 | -------------------------------------------------------------------------------- /server/polls/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pennlabs/labs-api-server/4f4c3299594fb93e0c7894965db2a297a44ab35c/server/polls/__init__.py -------------------------------------------------------------------------------- /server/polls/archive.py: -------------------------------------------------------------------------------- 1 | from datetime import 
@app.route("/api/polls", methods=["GET"])
def get_all_polls():
    """List polls as JSON.

    Query params:
        archives: when truthy, expired polls are also included (the
            original's branch structure admits every poll in this mode).
        email: when given and the user has voted, only polls the user
            voted on are returned, each annotated with "optionChosen".
    """
    archive = request.args.get("archives")
    email = request.args.get("email")

    votes = PollVote.query.filter_by(email=email).all()

    est = timezone("EST")
    now = datetime.now(est).replace(tzinfo=None)

    def should_include(poll):
        # Mirrors the original branch logic: archive mode admits expired
        # polls outright; otherwise only unexpired polls are shown.
        if archive and now > poll.expiration:
            return True
        return poll.expiration >= now

    json_arr = []
    if not email or not votes:
        for poll in Poll.query.all():
            if should_include(poll):
                json_arr.append(get_poll_json(poll))
    else:
        for vote in votes:
            poll = Poll.query.filter_by(id=vote.poll).first()
            if poll is None:
                # BUG FIX: a vote referencing a deleted poll previously
                # crashed with AttributeError on poll.expiration.
                continue
            if should_include(poll):
                poll_json = get_poll_json(poll)
                poll_json["optionChosen"] = vote.choice
                json_arr.append(poll_json)

    return jsonify({"polls": json_arr})
PollVote.query.filter_by(choice=obj.id, year="2021").count() 77 | }, 78 | { 79 | "demographic": "year_2", 80 | "votes": PollVote.query.filter_by(choice=obj.id, year="2022").count() 81 | }, 82 | { 83 | "demographic": "year_1", 84 | "votes": PollVote.query.filter_by(choice=obj.id, year="2023").count() 85 | }, 86 | { 87 | "demographic": "year_0", 88 | "votes": PollVote.query.filter_by(choice=obj.id, year="2024").count() 89 | } 90 | ], 91 | "votesBySchool": [ 92 | { 93 | "demographic": "WH", 94 | "votes": PollVote.query.filter_by(choice=obj.id, school="WH").count() 95 | }, 96 | { 97 | "demographic": "COL", 98 | "votes": PollVote.query.filter_by(choice=obj.id, school="COL").count() 99 | }, 100 | { 101 | "demographic": "EAS", 102 | "votes": PollVote.query.filter_by(choice=obj.id, school="EAS").count() 103 | }, 104 | { 105 | "demographic": "NURS", 106 | "votes": PollVote.query.filter_by(choice=obj.id, school="NURS").count() 107 | } 108 | ] 109 | }) 110 | 111 | return poll_json 112 | -------------------------------------------------------------------------------- /server/polls/creation.py: -------------------------------------------------------------------------------- 1 | from datetime import datetime 2 | 3 | from flask import jsonify, request 4 | 5 | from server import app, sqldb 6 | from server.models import Poll, PollOption 7 | 8 | 9 | """ 10 | Endpoint: /api/polls 11 | HTTP Methods: POST 12 | Response Formats: JSON 13 | Content-Type: application/json 14 | Parameters: question, orgAuthor, expiration, options 15 | 16 | Creates new poll 17 | If successful, returns poll ID 18 | """ 19 | 20 | 21 | @app.route("/api/polls", methods=["POST"]) 22 | def create_poll(): 23 | data = request.get_json() 24 | 25 | question = data.get("question") 26 | organization = data.get("orgAuthor") 27 | expiration_str = data.get("expiration") 28 | options = list(data.get("options")) 29 | 30 | if any(x is None for x in [question, organization, expiration_str, options]): 31 | return 
jsonify({"error": "Parameter is missing"}), 400 32 | 33 | expiration = datetime.strptime(expiration_str, "%Y-%m-%dT%H:%M:%S") 34 | 35 | poll = Poll( 36 | question=question, 37 | source=organization, 38 | expiration=expiration 39 | ) 40 | sqldb.session.add(poll) 41 | sqldb.session.commit() 42 | 43 | for option_str in options: 44 | poll_option = PollOption(poll=poll.id, choice=option_str) 45 | sqldb.session.add(poll_option) 46 | 47 | sqldb.session.commit() 48 | 49 | return jsonify({"poll_id": poll.id}) 50 | 51 | 52 | """ 53 | Endpoint: /api/polls/ 54 | HTTP Methods: PUT 55 | Response Formats: JSON 56 | Content-Type: application/json 57 | Parameters: question, orgAuthor, expiration, options 58 | 59 | Updates existing poll 60 | If successful, returns bool 61 | """ 62 | 63 | 64 | """@app.route("/api/polls/", methods=["PUT"]) 65 | def update_poll(): 66 | data = request.get_json() 67 | poll = Poll.query.filter_by(id=poll_id).first() 68 | if not poll: 69 | return jsonify({"error": "Poll not found."}), 400 70 | 71 | question = data.get("question") 72 | organization = data.get("orgAuthor") 73 | expiration_str = data.get("expiration") 74 | options = list(data.get("options")) 75 | 76 | if all(x is None for x in [question, organization, expiration_str, options]): 77 | return jsonify({"error": "Must provide parameter to update"}) 78 | 79 | if question: 80 | poll.question = question 81 | 82 | if organization: 83 | poll.source = organization 84 | 85 | if expiration_str: 86 | expiration = datetime.strptime(expiration_str, "%Y-%m-%dT%H:%M:%S") 87 | poll.expiration = expiration 88 | 89 | if options: 90 | PollOption.query.filter_by(poll=poll.id).delete() 91 | PollVote.query.filter_by(poll=poll.id).delete() 92 | for option_str in options: 93 | poll_option = PollOption(poll=poll.id, choice=option_str) 94 | sqldb.session.add(poll_option) 95 | 96 | sqldb.session.commit() 97 | 98 | return jsonify({"success": True})""" 99 | 100 | 101 | """ 102 | Endpoint: /api/polls/ 103 | HTTP Methods: 
"""@app.route("/api/polls/", methods=["DELETE"])
def update_poll():
    poll = Poll.query.filter_by(id=poll_id).first()
    if not poll:
        return jsonify({"error": "Poll not found."}), 400

    Poll.query.filter_by(id=poll.id).delete()
    PollOption.query.filter_by(poll=poll.id).delete()
    PollVote.query.filter_by(poll=poll.id).delete()

    return jsonify({"success": True})"""


# ---- server/polls/vote.py ----
from flask import jsonify, request

from server import app, sqldb
from server.models import Poll, PollOption, PollVote


"""
Endpoint: /api/choosePollOption
HTTP Methods: POST
Response Formats: JSON
Content-Type: application/json
Parameters: poll_id, option_id, school, year

Adds new vote
If successful, returns bool
"""


@app.route("/api/choosePollOption", methods=["POST"])
def add_vote():
    """Record a vote for a poll option, replacing any earlier vote by
    the same email on the same poll.
    """
    data = request.get_json()

    poll_id = data.get("poll_id")
    choice = data.get("option_id")
    school = data.get("school")
    year = data.get("year")
    email = data.get("email")

    if any(x is None for x in [poll_id, choice, school, year, email]):
        return jsonify({"error": "Parameter is missing"}), 400

    poll = Poll.query.filter_by(id=poll_id).first()
    opt = PollOption.query.filter_by(id=choice).first()

    if not poll:
        return jsonify({"error": "Poll not found."}), 400
    if not opt:
        return jsonify({"error": "Poll option not found."}), 400
    # Robustness fix: reject an option id that belongs to a different
    # poll; the original happily recorded the mismatched pair.
    if opt.poll != poll.id:
        return jsonify({"error": "Poll option does not belong to this poll."}), 400

    # One vote per email per poll: drop any previous vote before inserting.
    exists = PollVote.query.filter_by(poll=poll_id, email=email).first()
    if exists:
        PollVote.query.filter_by(poll=poll_id, email=email).delete()

    poll_vote = PollVote(
        poll=poll_id,
        choice=choice,
        school=school,
        year=year,
        email=email
    )
    sqldb.session.add(poll_vote)
    sqldb.session.commit()

    return jsonify({"success": True})


# ---- server/portal/account.py ----
import uuid
from datetime import datetime, timedelta

from flask import jsonify, redirect, request
from pytz import timezone
from sqlalchemy import exists

from server import app, bcrypt, sqldb
from server.models import Post, PostAccount, PostAccountEmail, PostTester


"""
Endpoint: /portal/account/new
HTTP Methods: POST
Response Formats: JSON
Content-Type: application/x-www-form-urlencoded
Parameters: name, email, password

Creates new account
If successful, returns account ID
"""


@app.route("/portal/account/new", methods=["POST"])
def create_account():
    """Create a portal account with a bcrypt-hashed password.

    Fix: the original hashed the password before any None check, so a
    missing password crashed inside bcrypt (500); the old check also
    tested the hash, which is never None, making it dead code.
    """
    name = request.form.get("name")
    email = request.form.get("email")
    password = request.form.get("password")

    if any(x is None for x in [name, email, password]):
        return jsonify({"error": "Parameter is missing"}), 400

    encrypted_password = bcrypt.generate_password_hash(password)

    account_exists = sqldb.session.query(exists().where(PostAccount.email == email)).scalar()
    if account_exists:
        return jsonify({"msg": "An account already exists for this email."}), 400

    account = PostAccount(name=name, email=email, encrypted_password=encrypted_password)
    sqldb.session.add(account)
    sqldb.session.commit()
    return jsonify({"account_id": account.id})
"""
Endpoint: /portal/account/login
HTTP Methods: POST
Response Formats: JSON
Content-Type: application/x-www-form-urlencoded
Parameters: email, password

Logins to existing account
If successful, returns account ID
"""


@app.route("/portal/account/login", methods=["POST"])
def login():
    """Authenticate by email/password; bumps sign-in counters on success.

    Fix: the original called bcrypt.check_password_hash on
    account.encrypted_password BEFORE checking that the account exists,
    so an unknown email raised AttributeError (500) instead of the 400
    "Unable to authenticate" response.
    """
    email = request.form.get("email")
    password = request.form.get("password")

    if any(x is None for x in [email, password]):
        return jsonify({"error": "Parameter is missing"}), 400

    account = PostAccount.query.filter(PostAccount.email == email).first()

    if account and bcrypt.check_password_hash(account.encrypted_password, password):
        account.sign_in_count = account.sign_in_count + 1
        account.last_sign_in_at = datetime.now()
        sqldb.session.commit()
        return jsonify({"account_id": account.id})
    else:
        return jsonify({"error": "Unable to authenticate"}), 400


"""
Endpoint: /portal/account
HTTP Methods: GET
Response Formats: JSON
Parameters: account_id

Get all relevant information for an account
"""


@app.route("/portal/account", methods=["GET"])
def get_account_info():
    """Return id, name, email, verified tester emails and admin flag."""
    try:
        account_id = request.args.get("account_id")
        account = PostAccount.get_account(account_id)
    except ValueError as e:
        return jsonify({"error": str(e)}), 400

    # Hard-coded admin account; NOTE(review): consider a role column instead.
    isAdmin = account.email == "pennappslabs@gmail.com"

    # NOTE(review): this yields a list of 1-tuples (SQLAlchemy Row per
    # email); clients appear to depend on that shape, so it is preserved.
    verified_emails = (
        sqldb.session.query(PostAccountEmail.email)
        .filter_by(account=account.id, verified=True)
        .all()
    )
    account_json = {
        "id": account.id,
        "name": account.name,
        "email": account.email,
        "verified_emails": verified_emails,
        "is_admin": isAdmin,
    }
    return jsonify({"account": account_json})


"""
Endpoint: /portal/account/reset/request
HTTP Methods: POST
Response Formats: JSON
Content-Type: application/x-www-form-urlencoded
Parameters: email

Add password reset token to account
Sends email with link with reset token to the account's email
"""


@app.route("/portal/account/reset/request", methods=["POST"])
def request_account_password_reset_token():
    """Attach a fresh reset token to the account (email send is a TODO)."""
    email = request.form.get("email")
    account = PostAccount.query.filter_by(email=email).first()
    if not account:
        return jsonify({"error": "Account not found."}), 400

    # TODO: send verification email
    token = str(uuid.uuid4())
    print(token)
    account.reset_password_token = token
    account.reset_password_token_sent_at = datetime.now()
    sqldb.session.commit()
    return jsonify({"msg": "An email has been sent to reset your password."})


"""
Endpoint: /portal/account/reset
HTTP Methods: GET
Response Formats: JSON, HTML
Parameters: token

Verify a reset password token
"""


@app.route("/portal/account/reset", methods=["GET"])
def verify_account_password_reset():
    """Validate a reset token (30-minute lifetime), then redirect."""
    token = request.args.get("token")
    now = datetime.now()
    account = PostAccount.query.filter_by(reset_password_token=token).first()
    if not account:
        return jsonify({"error": "Invalid auth token. Please try again."})
    elif (
        account.reset_password_token_sent_at
        and account.reset_password_token_sent_at + timedelta(minutes=30) < now
    ):
        return jsonify({"error": "This token has expired."})
    else:
        return redirect("https://pennlabs.org?token={}".format(token), code=302)
"""
Endpoint: /portal/account/reset
HTTP Methods: POST
Response Formats: JSON
Content-Type: application/x-www-form-urlencoded
Parameters: token, password

Reset password and remove password reset token from account
"""


@app.route("/portal/account/reset", methods=["POST"])
def reset_account_password():
    """Set a new password for the account holding this reset token.

    Fix: the original hashed the password before validating it, so a
    missing password crashed inside bcrypt; it then tested
    `not encrypted_password`, which never triggers because
    generate_password_hash never returns a falsy value.
    """
    token = request.form.get("token")
    password = request.form.get("password")
    now = datetime.now()
    account = PostAccount.query.filter_by(reset_password_token=token).first()
    if not account:
        return jsonify({"error": "Invalid auth token. Please try again."})
    elif (
        account.reset_password_token_sent_at
        and account.reset_password_token_sent_at + timedelta(minutes=30) < now
    ):
        return jsonify({"error": "This token has expired."})
    elif not password:
        return jsonify({"error": "Invalid password. Please try again."})

    account.encrypted_password = bcrypt.generate_password_hash(password)
    account.updated_at = datetime.now()
    # Single-use token: clear it once consumed.
    account.reset_password_token = None
    account.reset_password_token_sent_at = None
    sqldb.session.commit()
    return jsonify({"msg": "Your password has been reset."})


"""
Endpoint: /portal/email/verify
HTTP Methods: GET
Response Formats: JSON, HTML
Parameters: token, account_email

Verifies a test email for an account and adds that test email to all upcoming posts
"""


@app.route("/portal/email/verify", methods=["GET"])
def verify_account_email_token():
    """Mark a tester email verified and register it on upcoming posts."""
    token = request.args.get("token")
    account_email = PostAccountEmail.query.filter_by(auth_token=token).first()
    if not account_email:
        return jsonify({"error": "Invalid auth token. Please try again."})
    elif account_email.verified:
        return jsonify({"error": "This email has already been verified for this account."})
    else:
        account_email.verified = True
        # Post dates are stored naive in EST; compare with a naive EST "now".
        est = timezone("EST")
        now = datetime.now(est).replace(tzinfo=None)
        upcoming_posts = (
            Post.query.filter(Post.account == account_email.account)
            .filter(Post.end_date >= now)
            .all()
        )
        for post in upcoming_posts:
            tester = PostTester(post=post.id, email=account_email.email)
            sqldb.session.add(tester)
        sqldb.session.commit()
        return redirect("https://pennlabs.org", code=302)
# ---- server/portal/creation.py ----
import os
import uuid
from datetime import date, datetime

import tinify
from flask import jsonify, request

from server import app, sqldb
from server.models import (Major, Post, PostAccount, PostAccountEmail,
                           PostFilter, PostStatus, PostTargetEmail, PostTester)


"""
Example: JSON Encoding
{
    'account_id': '7900fffd-0223-4381-a61d-9a16a24ca4b7',
    'image_url': 'https://i.imgur.com/CmhAG25.jpg',
    'image_url_cropped': 'https://i.imgur.com/CmhAG25.jpg',
    'post_url': 'https://pennlabs.org/',
    'source': 'Penn Labs',
    'subtitle': 'A small subtitle',
    'time_label': 'Today',
    'title': 'Testing a new feature!',
    'start_date': '2019-05-23T08:00:00',
    'end_date': '2019-05-24T00:00:00',
    'filters': [
        {'type': 'class', 'filter': '2020'},
        {'type': 'class', 'filter': '2021'},
        {'type': 'major', 'filter': 'CIS'}
    ],
    'testers': ['amyg@upenn.edu'],
    'emails': ['benfranklin@upenn.edu', 'elonmusk@upenn.edu'],
    'comments': 'This is a post to test Penn Mobile. Please approve!'
}
"""


"""
Endpoint: /portal/post/new
HTTP Methods: POST
Response Formats: JSON
Content-Type: application/json
Parameters: account_id, source, title, subtitle, time_label, post_url, image_url, filters, testers,
            emails, start_date, end_date, comments, image_url_cropped

Creates new post
If successful, returns post ID
"""


@app.route("/portal/post/new", methods=["POST"])
def create_post():
    """Create a post with its filters/testers/target emails.

    Fix: list(data.get("filters")) etc. raised TypeError (500) when the
    key was omitted, since only image/date fields were None-checked;
    missing lists now default to [].
    """
    data = request.get_json()

    try:
        account_id = data.get("account_id")
        account = PostAccount.get_account(account_id)
    except ValueError as e:
        return jsonify({"error": str(e)}), 400

    source = data.get("source")
    title = data.get("title")
    subtitle = data.get("subtitle")
    time_label = data.get("time_label")
    post_url = data.get("post_url")
    image_url = data.get("image_url")
    image_url_cropped = data.get("image_url_cropped")
    filters = list(data.get("filters") or [])
    testers = list(data.get("testers") or [])
    emails = list(data.get("emails") or [])

    start_date_str = data.get("start_date")
    end_date_str = data.get("end_date")

    if any(x is None for x in [image_url, start_date_str, end_date_str]):
        return jsonify({"error": "Parameter is missing"}), 400

    start_date = datetime.strptime(start_date_str, "%Y-%m-%dT%H:%M:%S")
    end_date = datetime.strptime(end_date_str, "%Y-%m-%dT%H:%M:%S")

    post = Post(
        account=account.id,
        source=source,
        title=title,
        subtitle=subtitle,
        time_label=time_label,
        post_url=post_url,
        image_url=image_url,
        image_url_cropped=image_url_cropped,
        start_date=start_date,
        end_date=end_date,
        # Boolean flags record whether each list was non-empty.
        filters=(True if filters else False),
        testers=(True if testers else False),
        emails=(True if emails else False),
    )
    sqldb.session.add(post)
    sqldb.session.commit()

    add_filters_testers_emails(account, post, filters, testers, emails)

    msg = data.get("comments")
    update_status(post, "Submitted", msg)

    return jsonify({"post_id": post.id})


"""
Endpoint: /portal/post/update
HTTP Methods: POST
Response Formats: JSON
Content-Type: application/json
Parameters: account_id, post_id, source, title, subtitle, time_label, post_url, image_url, filters,
            testers, emails, start_date, end_date, comments, image_url_cropped

Modifies existing post
If successful, returns post ID
"""


@app.route("/portal/post/update", methods=["POST"])
def update_post():
    """Overwrite a post's fields and rebuild its filter/tester/email rows.

    Same list(None) fix as create_post.
    """
    data = request.get_json()

    try:
        account_id = data.get("account_id")
        account = PostAccount.get_account(account_id)
        post_id = data.get("post_id")
        post = Post.get_post(post_id)
    except ValueError as e:
        return jsonify({"error": str(e)}), 400

    if post.account != account.id:
        return jsonify({"error": "Account not authorized to update this post."}), 400

    image_url = data.get("image_url")
    image_url_cropped = data.get("image_url_cropped")
    start_date_str = data.get("start_date")
    end_date_str = data.get("end_date")

    if any(x is None for x in [image_url, start_date_str, end_date_str]):
        return jsonify({"error": "Parameter is missing"}), 400

    post.source = data.get("source")
    post.title = data.get("title")
    post.subtitle = data.get("subtitle")
    post.time_label = data.get("time_label")
    post.post_url = data.get("post_url")
    post.image_url = image_url
    post.image_url_cropped = image_url_cropped

    post.start_date = datetime.strptime(start_date_str, "%Y-%m-%dT%H:%M:%S")
    post.end_date = datetime.strptime(end_date_str, "%Y-%m-%dT%H:%M:%S")

    filters = list(data.get("filters") or [])
    testers = list(data.get("testers") or [])
    emails = list(data.get("emails") or [])
    post.filters = True if filters else False
    post.testers = True if testers else False
    post.emails = True if emails else False

    # Replace, not merge: drop all existing rows before re-adding.
    PostFilter.query.filter_by(post=post.id).delete()
    PostTester.query.filter_by(post=post.id).delete()
    PostTargetEmail.query.filter_by(post=post.id).delete()

    add_filters_testers_emails(account, post, filters, testers, emails)

    msg = data.get("comments")
    update_status(post, "Updated", msg)

    return jsonify({"post_id": post.id})


"""
Endpoint: /portal/post/image
HTTP Methods: POST
Response Formats: JSON
Content-Type: multipart/form-data
Parameters: account, post id, image

Uploads image to server
If successful, returns image URL
"""


@app.route("/portal/post/image", methods=["POST"])
def save_image():
    """Compress an uploaded image via tinify and store it on S3."""
    if "image" not in request.files:
        return jsonify({"error": "No file passed to server"}), 400

    file = request.files["image"]
    if not file.filename:
        return jsonify({"error": "File must have a filename"}), 400

    # Validate account
    try:
        account_id = request.form.get("account")
        account = PostAccount.get_account(account_id)
    except ValueError as e:
        return jsonify({"error": str(e)}), 400

    now = datetime.now()
    timestamp = datetime.timestamp(now)

    # if request.args.original:
    #     s3.upload_fileobj(file, 'penn.mobile.portal/images/{}'.format(account.name), file.filename)

    source_data = file.read()
    read_image = tinify.from_buffer(source_data)  # .resize(method='cover', width=600, height=300)
    aws_url = read_image.store(
        service="s3",
        aws_access_key_id=os.environ.get("AWS_KEY"),
        aws_secret_access_key=os.environ.get("AWS_SECRET"),
        region="us-east-1",
        path="penn.mobile.portal/images/{}/{}-{}".format(account.name, timestamp, file.filename),
    ).location

    return jsonify({"image_url": aws_url})


"""
Endpoint: /portal/post/approve
HTTP Methods: POST
Response Formats: JSON
Content-Type: application/x-www-form-urlencoded
Parameters: account_id, post_id, approved, rejected, msg

Approve post for view
If successful, returns post ID
"""


@app.route("/portal/post/approve", methods=["POST"])
def approve_post():
    """Record an approval decision (approve / reject / request changes)."""
    try:
        account_id = request.form.get("account_id")
        account = PostAccount.get_account(account_id)
        post_id = request.form.get("post_id")
        post = Post.get_post(post_id)
    except ValueError as e:
        return jsonify({"error": str(e)}), 400

    # Verify that this account is Penn Labs
    if account.email != "pennappslabs@gmail.com":
        return (
            jsonify(
                {"error": "This account does not have permission to issue post approval decisions."}
            ),
            400,
        )

    # NOTE(review): bool("...") is True for ANY non-empty string, including
    # "false" — clients must omit the field rather than send "false".
    approved = bool(request.form.get("approved"))
    rejected = bool(request.form.get("rejected"))
    if approved:
        post.approved = True
        update_status(post, "Approved", None)
    elif rejected:
        # TODO: Send rejection email
        post.approved = False
        msg = request.form.get("msg")
        if not msg:
            return jsonify({"error": "Post rejections must include a reason"}), 400
        update_status(post, "Rejected", msg)
    else:
        # TODO: Send changes requested email
        post.approved = False
        msg = request.form.get("msg")
        if not msg:
            return jsonify({"error": "Requests for post changes must include a reason"}), 400
        update_status(post, "Changes", msg)

    return jsonify({"post_id": post.id})


"""
Endpoint: /portal/filters
HTTP Methods: GET
Response Formats: JSON

Returns post filters
"""


@app.route("/portal/filters", methods=["GET"])
def get_filters():
    """Return all audience filters grouped by type.

    Fix: user-facing typo "Undegraduate" -> "Undergraduate".
    """
    filters_by_type = {}
    filters_by_type["email-only"] = {"name": "Email-Filtering Only", "filter": "none"}
    filters_by_type["school"] = [
        {"name": "Wharton Undergraduate (WH)", "filter": "WH"},
        {"name": "College of Arts & Sciences (SAS)", "filter": "COL"},
        {"name": "Engineering & Applied Science (SEAS)", "filter": "EAS"},
        {"name": "Nursing Undergraduate (NURS)", "filter": "NURS"},
    ]

    today = date.today()
    senior_class_year = today.year
    if today.month >= 6:
        # If after May, current senior class will graduate in following year
        senior_class_year = senior_class_year + 1

    class_filters = []
    for i in range(4):
        name = "Class of {}".format(senior_class_year + i)
        class_value = str(senior_class_year + i)
        class_filters.append({"name": name, "filter": class_value})
    filters_by_type["class"] = class_filters

    major_filters = []
    majors = Major.query.all()
    for major in majors:
        major_filters.append({"name": major.name, "filter": major.code})
    filters_by_type["major"] = major_filters

    return jsonify({"filters_by_type": filters_by_type})


# Adds filters, testers, and emails to post. If tester is not verified, a verification email is sent and added later.
def add_filters_testers_emails(account, post, filters, testers, emails):
    for filter_obj_str in filters:
        filter_obj = dict(filter_obj_str)
        post_filter = PostFilter(post=post.id, type=filter_obj["type"], filter=filter_obj["filter"])
        sqldb.session.add(post_filter)

    verified_testers = PostAccountEmail.query.filter_by(account=account.id, verified=True).all()
    unverified_testers = PostAccountEmail.query.filter_by(account=account.id, verified=False).all()
    for tester in testers:
        if any(x.email == tester for x in verified_testers):
            post_tester = PostTester(post=post.id, email=tester)
            sqldb.session.add(post_tester)
        else:
            # TODO: send verification email
            token = str(uuid.uuid4())
            if any(tester == x.email for x in unverified_testers):
                # Re-issue the token on the existing unverified row.
                unverified_tester = next(x for x in unverified_testers if x.email == tester)
                unverified_tester.auth_token = token
            else:
                account_email = PostAccountEmail(account=account.id, email=tester, auth_token=token)
                sqldb.session.add(account_email)
            # print('Email {} with link: localhost:5000/portal/email/verify?token={}'.format(tester, token))

    for email in emails:
        post_email = PostTargetEmail(post=post.id, email=email)
        sqldb.session.add(post_email)

    sqldb.session.commit()


# Append an audit-trail row describing the post's latest status change.
def update_status(post, update, msg):
    status = PostStatus(post=post.id, status=update, msg=msg)
    sqldb.session.add(status)
    sqldb.session.commit()
# ---- server/privacy.py ----
from datetime import datetime

from flask import g, jsonify, request
from sqlalchemy.exc import IntegrityError

from server import app, sqldb
from server.auth import anonymous_auth, auth


class PrivacySetting(sqldb.Model):
    # One row per (account, setting-name) pair; enabled is the toggle.
    account = sqldb.Column(sqldb.VARCHAR(255), sqldb.ForeignKey("account.id"), primary_key=True)
    setting = sqldb.Column(sqldb.VARCHAR(255), primary_key=True)
    enabled = sqldb.Column(sqldb.Boolean)
    created_at = sqldb.Column(sqldb.DateTime, server_default=sqldb.func.now())
    updated_at = sqldb.Column(sqldb.DateTime, server_default=sqldb.func.now())


@app.route("/privacy/settings", methods=["POST"])
@auth()
def save_privacy_settings():
    """Upsert each {setting: enabled} pair for the authed account.

    Insert-first, fall back to update on IntegrityError (composite-PK
    conflict) so the common first-write path is a single statement.
    """
    settings = request.get_json()
    for setting in settings:
        enabled = settings[setting]
        privSetting = PrivacySetting(account=g.account.id, setting=setting, enabled=enabled)
        try:
            sqldb.session.add(privSetting)
            sqldb.session.commit()
        except IntegrityError:
            sqldb.session.rollback()
            privSetting = PrivacySetting.query.filter_by(
                account=g.account.id, setting=setting
            ).first()
            if privSetting.enabled != enabled:
                privSetting.enabled = enabled
                privSetting.updated_at = datetime.now()
                sqldb.session.commit()
    return jsonify({"success": True})


@app.route("/privacy/settings", methods=["GET"])
@auth()
def get_privacy_settings_endpoint():
    jsonArr = get_privacy_settings(g.account)
    return jsonify({"settings": jsonArr})


# Return {setting_name: enabled} for every stored setting of an account.
def get_privacy_settings(account):
    settings = PrivacySetting.query.filter_by(account=account.id).all()
    jsonArr = {}
    for setting in settings:
        jsonArr[setting.setting] = setting.enabled
    return jsonArr


@app.route("/privacy/anonymous/register", methods=["POST"])
@anonymous_auth
def register_device_key_password_hash():
    # Registration work happens in the anonymous_auth decorator; this
    # endpoint only acknowledges success.
    return jsonify({"success": True})


# ---- server/registrar.py ----
import datetime
import re
import string

from flask import request

from server import app
from server.base import cached_route
from server.penndata import depts, reg


# True when the keyword matches a known department code (case-insensitive).
def is_dept(keyword):
    return keyword.upper() in depts.keys()


# Project a raw course record onto the JSON-safe fields the API exposes.
def get_serializable_course(course):
    return {
        "_id": str(course.get("_id", "")),
        "dept": course.get("dept", ""),
        "title": course.get("title", ""),
        "courseNumber": course.get("courseNumber", ""),
        "credits": course.get("credits"),
        "sectionNumber": course.get("sectionNumber", ""),
        "type": course.get("type", ""),
        "times": course.get("times", ""),
        "days": course.get("days", ""),
        "hours": course.get("hours", ""),
        "building": course.get("building"),
        "roomNumber": course.get("roomNumber"),
        "prof": course.get("prof"),
    }


def search_course(course):
    """Build registrar query params from a parsed course dict and run it.

    Returns {"courses": [...]} or None when there is nothing to search.
    """
    params = dict()
    if len(course.get("dept", "")) > 0:
        # course_id looks like "cis", "cis-120", or "cis-120001".
        id_param = ""
        id_param += course.get("dept").lower()
        if len(course.get("courseNumber", "")) > 0:
            id_param += "-" + course.get("courseNumber").lower()
            if len(course.get("sectionNumber", "")) > 0:
                id_param += course.get("sectionNumber").lower()
        params["course_id"] = id_param

    if len(course["desc_search"]) > 0:
        params["description"] = course["desc_search"]

    if len(params) == 0:
        return None
    final_courses = reg.search(params)
    return {"courses": list(final_courses)}


def get_type_search(search_query):
    """Tokenize a free-text query into dept / course# / section# / description.

    Fix: the split regex used the character class [a-zA-z], which also
    matches the punctuation characters between 'Z' and 'a' -- corrected
    to [a-zA-Z] (behavior-equivalent here only because punctuation was
    already stripped above).
    """
    course = {"courseNumber": "", "sectionNumber": "", "dept": "", "desc_search": ""}
    # Replace punctuation with spaces so "CIS-120" splits like "CIS 120".
    search_punc = re.sub("[%s]" % re.escape(string.punctuation), " ", search_query)

    def repl(matchobj):
        # Insert a space at every letter<->digit boundary ("cis120" -> "cis 120").
        return matchobj.group(0)[0] + " " + matchobj.group(0)[1]

    search_presplit = re.sub("(\\d[a-zA-Z]|[a-zA-Z]\\d)", repl, search_punc)
    split = search_presplit.split()
    found_desc = False
    in_desc = False
    for s in split:
        s = s.strip()
        if s.isalpha() and is_dept(s.upper()):
            in_desc = False
            course["dept"] = s.upper()
        elif s.isdigit():
            in_desc = False
            if len(s) == 3:
                course["courseNumber"] = s
            if len(s) == 6:
                # Six digits = 3-digit course number + 3-digit section.
                course["courseNumber"] = s[:3]
                course["sectionNumber"] = s[-3:]
        else:
            # Consecutive non-dept words accumulate into the description.
            if not found_desc or in_desc:
                found_desc = True
                in_desc = True
                if len(course["desc_search"]) == 0:
                    course["desc_search"] = s
                else:
                    course["desc_search"] += " " + s
    return course


@app.route("/registrar/search", methods=["GET"])
def search():
    """Course search endpoint; results are cached for one day per query."""
    search_query = request.args["q"]

    def get_data():
        query_results = search_course(get_type_search(search_query))
        if query_results is None:
            return {"error": "The search query could not be processed."}
        else:
            return query_results

    return cached_route("registrar_query:%s" % search_query, datetime.timedelta(days=1), get_data)


@app.route("/registrar/search/instructor", methods=["GET"])
def search_instructor():
    """Instructor search endpoint; results are cached for one day per query."""
    query = request.args["q"]

    def get_data():
        results = reg.search({"instructor": query})
        if results is None:
            return {"error": "The search query could not be processed."}
        else:
            return {"courses": list(results)}

    return cached_route(
        "registrar_query_instructor:%s" % query, datetime.timedelta(days=1), get_data
    )
https://raw.githubusercontent.com/pennlabs/labs-api-server/4f4c3299594fb93e0c7894965db2a297a44ab35c/server/studyspaces/__init__.py -------------------------------------------------------------------------------- /server/studyspaces/availability.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | 3 | from flask import jsonify, request 4 | from penn.base import APIError 5 | 6 | from server import app, sqldb 7 | from server.base import cached_route 8 | from server.penndata import studyspaces, wharton 9 | from server.studyspaces.book import get_wharton_sessionid, save_wharton_sessionid 10 | from server.studyspaces.models import GSRRoomName 11 | 12 | 13 | @app.route("/studyspaces/availability/", methods=["GET"]) 14 | def parse_times_wrapper(building): 15 | """ 16 | Returns JSON containing all rooms for a given building. 17 | 18 | Usage: 19 | /studyspaces/availability/ gives all rooms for the next 24 hours 20 | /studyspaces/availability/?start=2018-25-01 gives all rooms in the start date 21 | /studyspaces/availability/?start=...&end=... 
gives all rooms between the two days 22 | """ 23 | if "date" in request.args: 24 | start = request.args.get("date") 25 | end = request.args.get("date") 26 | else: 27 | start = request.args.get("start") 28 | end = request.args.get("end") 29 | 30 | try: 31 | rooms = parse_times(building, start, end) 32 | except APIError as e: 33 | return jsonify({"error": str(e)}), 400 34 | return jsonify(rooms) 35 | 36 | 37 | def parse_times(lid, start=None, end=None): 38 | if lid == 1: 39 | sessionid = get_wharton_sessionid(public=True) 40 | rooms = wharton.get_wharton_gsrs(sessionid, date=start) 41 | rooms = wharton.switch_format(rooms) 42 | save_wharton_sessionid() 43 | else: 44 | rooms = studyspaces.get_rooms(lid, start, end) 45 | rooms["location_id"] = rooms["id"] 46 | rooms["rooms"] = [] 47 | for room_list in rooms["categories"]: 48 | for room in room_list["rooms"]: 49 | room["thumbnail"] = room["image"] 50 | del room["image"] 51 | room["room_id"] = room["id"] 52 | del room["id"] 53 | room["gid"] = room_list["cid"] 54 | room["lid"] = lid 55 | room["times"] = room["availability"] 56 | del room["availability"] 57 | for time in room["times"]: 58 | time["available"] = True 59 | time["start"] = time["from"] 60 | time["end"] = time["to"] 61 | del time["from"] 62 | del time["to"] 63 | rooms["rooms"].append(room) 64 | return rooms 65 | 66 | 67 | @app.route("/studyspaces/locations", methods=["GET"]) 68 | def display_id_pairs(): 69 | """ 70 | Returns JSON containing a list of buildings with their ids. 
71 | """ 72 | 73 | def get_data(): 74 | return { 75 | "locations": studyspaces.get_buildings() 76 | + [{"lid": 1, "name": "Huntsman Hall", "service": "wharton"}] 77 | } 78 | 79 | return cached_route("studyspaces:locations", datetime.timedelta(days=1), get_data) 80 | 81 | 82 | def get_room_name(lid, rid): 83 | """ 84 | Returns the name of a given room ID 85 | """ 86 | rooms = parse_times(lid) 87 | for room in rooms["rooms"]: 88 | if room["room_id"] == rid: 89 | new_name = GSRRoomName(lid=lid, gid=room["gid"], rid=rid, name=room["name"]) 90 | sqldb.session.add(new_name) 91 | sqldb.session.commit() 92 | return room["name"] 93 | return None 94 | -------------------------------------------------------------------------------- /server/studyspaces/book.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | from dateutil.parser import parse 4 | from flask import g, jsonify, request 5 | 6 | from server import app, db, sqldb 7 | from server.auth import auth 8 | from server.models import User 9 | from server.penndata import studyspaces, wharton 10 | from server.studyspaces.models import StudySpacesBooking 11 | from server.studyspaces.reservations import get_reservations 12 | 13 | 14 | def get_wharton_sessionid(public=False): 15 | """ Try to get a GSR session id. """ 16 | sessionid = request.args.get("sessionid") 17 | cache_key = "studyspaces:gsr:sessionid" 18 | 19 | if sessionid: 20 | return sessionid 21 | 22 | if public: 23 | if db.exists(cache_key): 24 | return db.get(cache_key).decode("utf8") 25 | 26 | return os.environ.get("GSR_SESSIONID") 27 | 28 | return None 29 | 30 | 31 | def save_wharton_sessionid(): 32 | sessionid = request.args.get("sessionid") 33 | cache_key = "studyspaces:gsr:sessionid" 34 | 35 | if sessionid: 36 | db.set(cache_key, sessionid, ex=604800) 37 | 38 | 39 | @app.route("/studyspaces/book", methods=["POST"]) 40 | @auth(nullable=True) 41 | def book_room(): 42 | """ 43 | Books a room. 
44 | """ 45 | try: 46 | room = int(request.form["room"]) 47 | except (KeyError, ValueError): 48 | return jsonify({"results": False, "error": "Please specify a correct room id!"}), 400 49 | 50 | try: 51 | start = parse(request.form["start"]) 52 | end = parse(request.form["end"]) 53 | except KeyError: 54 | return ( 55 | jsonify({"results": False, "error": "No start and end parameters passed to server!"}), 56 | 400, 57 | ) 58 | 59 | try: 60 | lid = int(request.form["lid"]) 61 | except (KeyError, ValueError): 62 | lid = None 63 | 64 | email = None 65 | 66 | if lid == 1: 67 | sessionid = request.form.get("sessionid") 68 | if not sessionid: 69 | return ( 70 | jsonify( 71 | { 72 | "results": False, 73 | "error": "You must pass a sessionid when booking a Wharton GSR!", 74 | } 75 | ), 76 | 400, 77 | ) 78 | resp = wharton.book_reservation(sessionid, room, start, end) 79 | resp["results"] = resp["success"] 80 | room_booked = resp["success"] 81 | del resp["success"] 82 | if room_booked: 83 | save_wharton_sessionid() 84 | booking_id = None 85 | 86 | # Look up the reservation to get the booking id 87 | reservations = get_reservations(None, sessionid, 0) 88 | for reservation in reservations: 89 | resStart = parse(reservation["fromDate"]) 90 | resEnd = parse(reservation["toDate"]) 91 | if start == resStart and end == resEnd: 92 | booking_id = reservation["booking_id"] 93 | break 94 | else: 95 | contact = {} 96 | for arg, field in [ 97 | ("fname", "firstname"), 98 | ("lname", "lastname"), 99 | ("email", "email"), 100 | ("nickname", "groupname"), 101 | ]: 102 | try: 103 | contact[arg] = request.form[field] 104 | except KeyError: 105 | return jsonify( 106 | {"results": False, "error": "'{}' is a required parameter!".format(field)} 107 | ) 108 | 109 | email = contact.get("email") 110 | contact["custom"] = {} 111 | contact["custom"]["q3699"] = get_affiliation(email) 112 | for arg, field in [ 113 | ("q2533", "phone"), 114 | ("q2555", "size"), 115 | ("q2537", "size"), 116 | ("q3699", 
def get_affiliation(email):
    """
    Guess a Penn school affiliation from an email address.

    Only the domain part of the address is inspected. The original
    implementation matched substrings anywhere in the address, so a local
    part such as "sas.fan@gmail.com" was misclassified as SAS; it also
    crashed on a missing email.

    :param email: email address string, or None.
    :returns: one of "Wharton", "SEAS", "SAS", or "Other".
    """
    if not email:
        return "Other"
    # Everything after the last "@" is the domain; lowercase for matching.
    domain = email.rsplit("@", 1)[-1].lower()
    if "wharton" in domain:
        return "Wharton"
    elif "seas" in domain:
        return "SEAS"
    elif "sas" in domain:
        return "SAS"
    else:
        return "Other"
15 | """ 16 | try: 17 | user = User.get_user() 18 | except ValueError as err: 19 | print(err) 20 | return jsonify({"error": str(err)}) 21 | 22 | booking_id = request.form.get("booking_id") 23 | if not booking_id: 24 | return jsonify({"error": "No booking id sent to server!"}) 25 | if "," in booking_id: 26 | return jsonify({"error": "Only one booking may be cancelled at a time."}) 27 | 28 | booking = StudySpacesBooking.query.filter_by(booking_id=booking_id).first() 29 | if booking: 30 | if (booking.user is not None) and (booking.user != user.id): 31 | return ( 32 | jsonify({"error": "Unauthorized: This reservation was booked by someone else."}), 33 | 400, 34 | ) 35 | if booking.is_cancelled: 36 | return jsonify({"error": "This reservation has already been cancelled."}), 400 37 | 38 | if booking_id.isdigit(): 39 | sessionid = request.form.get("sessionid") 40 | if not sessionid: 41 | return jsonify({"error": "No session id sent to server."}), 400 42 | try: 43 | wharton.delete_booking(sessionid, booking_id) 44 | save_wharton_sessionid() 45 | if booking: 46 | booking.is_cancelled = True 47 | sqldb.session.commit() 48 | else: 49 | save_booking( 50 | lid=1, email=user.email, booking_id=booking_id, is_cancelled=True, user=user.id 51 | ) 52 | return jsonify({"result": [{"booking_id": booking_id, "cancelled": True}]}) 53 | except APIError as e: 54 | return jsonify({"error": str(e)}), 400 55 | else: 56 | resp = studyspaces.cancel_room(booking_id) 57 | if "error" not in resp: 58 | if booking: 59 | booking.is_cancelled = True 60 | sqldb.session.commit() 61 | else: 62 | save_booking( 63 | email=user.email, booking_id=booking_id, is_cancelled=True, user=user.id 64 | ) 65 | return jsonify({"result": resp}) 66 | -------------------------------------------------------------------------------- /server/studyspaces/deprecated.py: -------------------------------------------------------------------------------- 1 | from flask import jsonify, request 2 | from penn.base import APIError 3 | 
@app.route("/studyspaces/gsr", methods=["GET"])
def get_wharton_gsrs_temp_route():
    """Deprecated: list Wharton GSR availability without authentication."""
    try:
        data = wharton.get_wharton_gsrs(
            get_wharton_sessionid(public=True), request.args.get("date")
        )
        save_wharton_sessionid()
        return jsonify(data)
    except APIError as error:
        return jsonify({"error": str(error)}), 400


@app.route("/studyspaces/gsr/reservations", methods=["GET"])
def get_wharton_gsr_reservations():
    """Deprecated: return the caller's Wharton GSR reservations."""
    sessionid = get_wharton_sessionid()
    if not sessionid:
        return jsonify({"error": "No Session ID provided."})

    try:
        reservations = wharton.get_reservations(sessionid)
        save_wharton_sessionid()
        return jsonify({"reservations": reservations})
    except APIError as e:
        return jsonify({"error": str(e)}), 400


@app.route("/studyspaces/gsr/delete", methods=["POST"])
def delete_wharton_gsr_reservation():
    """Deprecated: cancel a Wharton GSR reservation by booking id."""
    booking = request.form.get("booking")
    sessionid = request.form.get("sessionid")
    if not booking:
        return jsonify({"error": "No booking sent to server."})
    if not sessionid:
        return jsonify({"error": "No session id sent to server."})

    try:
        result = wharton.delete_booking(sessionid, booking)
        save_wharton_sessionid()
        return jsonify({"result": result})
    except APIError as e:
        return jsonify({"error": str(e)}), 400
def get_invites_for_account(account, timeout=5):
    """
    Fetch the current user's pending GSR-group invites from the Student Life
    API, including group details (color, name, group id).

    Auth comes from the incoming request's Authorization / X-Authorization
    headers; either one is sufficient. Returns None when neither is present.

    NOTE(review): `account` is currently unused — auth is header-based.

    :raises APIError: on an HTTP error response or a connection timeout.
    """
    x_authorization = request.headers.get("X-Authorization")
    authorization = request.headers.get("Authorization")

    # BUG FIX: the original guard used `or`, requiring BOTH headers even
    # though the fallback below clearly intends either one to suffice.
    if not authorization and not x_authorization:
        return None

    headers = {"Authorization": authorization if authorization else x_authorization}
    invite_url = "https://studentlife.pennlabs.org/users/me/invites"
    try:
        r = requests.get(url=invite_url, headers=headers, timeout=timeout)
        # requests does not raise HTTPError on its own; without this call the
        # original except-HTTPError branch was dead code.
        r.raise_for_status()
    except requests.exceptions.HTTPError as error:
        raise APIError("Server Error: {}".format(error))
    except requests.exceptions.ConnectTimeout:
        raise APIError("Timeout Error")

    return r.json()
@app.route("/studyspaces/reminders/send", methods=["POST"])
@internal_auth
def request_send_reminders():
    """Internal endpoint: run the GSR reminder job immediately."""
    run_query()
    return jsonify({"result": "success"})


def send_reminders():
    """Cron entry point; wraps the job in an app context so DB access works."""
    with app.app_context():
        run_query()


def run_query():
    """
    Send an "upcoming GSR" iOS push notification for every booking that:
      1) starts within the next 10 minutes,
      2) was booked more than 30 minutes before its start time,
      3) has not been cancelled,
      4) has not already been sent a reminder, and
      5) belongs to an account holding an iOS push token whose
         "upcomingStudyRoomReminder" setting is not disabled.
    """
    est = timezone("EST")
    now = datetime.now(est).replace(tzinfo=None)
    check_start_date = now + timedelta(minutes=10)
    get_gsr = (
        StudySpacesBooking.query.filter(StudySpacesBooking.start <= check_start_date)
        .filter(StudySpacesBooking.start > now)
        .filter(StudySpacesBooking.date < StudySpacesBooking.start - timedelta(minutes=30))
        .filter(not_(StudySpacesBooking.is_cancelled))
        .filter(not_(StudySpacesBooking.reminder_sent))
        # BUG FIX: `Column is not None` is a Python identity test that is
        # always True, so this filter was a no-op; isnot() emits IS NOT NULL.
        .filter(StudySpacesBooking.account.isnot(None))
        .subquery()
    )

    # BUG FIX: same identity-test issue as above.
    get_tokens = NotificationToken.query.filter(NotificationToken.ios_token.isnot(None)).subquery()

    # Accounts that explicitly disabled this notification setting.
    lacks_permission = (
        sqldb.session.query(NotificationSetting.account)
        .filter(NotificationSetting.setting == "upcomingStudyRoomReminder")
        .filter(NotificationSetting.enabled == 0)
        .subquery()
    )

    # Join bookings to tokens, optionally to cached room names, and drop any
    # rows belonging to opted-out accounts (anti-join via outer join + IS NULL).
    join_qry = (
        sqldb.session.query(
            get_gsr.c.id,
            get_gsr.c.lid,
            get_gsr.c.rid,
            get_gsr.c.booking_id,
            GSRRoomName.name,
            GSRRoomName.image_url,
            get_gsr.c.start,
            get_gsr.c.end,
            get_tokens.c.ios_token,
            get_tokens.c.dev,
        )
        .select_from(get_gsr)
        .join(get_tokens, get_gsr.c.account == get_tokens.c.account)
        .join(
            GSRRoomName,
            and_(get_gsr.c.lid == GSRRoomName.lid, get_gsr.c.rid == GSRRoomName.rid),
            isouter=True,
        )
        .join(lacks_permission, lacks_permission.c.account == get_gsr.c.account, isouter=True)
        .filter(lacks_permission.c.account.is_(None))
        .all()
    )

    reservation_ids = []
    notifications = []
    dev_notifications = []
    for res_id, lid, rid, bid, name, image_url, start, end, token, dev in join_qry:
        minutes_to_start = int(math.ceil((start - now).seconds / 60))
        title = "Upcoming GSR"
        if not name:
            # Fetch name from API if it does not already exist in the DB
            name = get_room_name(lid, rid)
        if name:
            body = "You have reserved {} starting in {} minutes".format(name, minutes_to_start)
        else:
            body = "You have a reservation starting in {} minutes".format(minutes_to_start)
        alert = {"title": title, "body": body}
        timezone_hours = wharton.get_dst_gmt_timezone()
        custom = {
            "reservation": {
                "room_name": name,
                "image_url": image_url,
                "start": "{}-{}".format(
                    datetime.strftime(start, "%Y-%m-%dT%H:%M:%S"), timezone_hours
                ),
                "end": "{}-{}".format(datetime.strftime(end, "%Y-%m-%dT%H:%M:%S"), timezone_hours),
                "booking_id": bid,
            }
        }
        payload = Payload(
            alert=alert,
            sound="default",
            badge=0,
            category="UPCOMING_GSR",
            mutable_content=True,
            custom=custom,
        )
        notification = Notification(token=token, payload=payload)
        if dev:
            dev_notifications.append(notification)
        else:
            notifications.append(notification)
        reservation_ids.append(res_id)

    if notifications:
        send_push_notification_batch(notifications, False)
    if dev_notifications:
        send_push_notification_batch(dev_notifications, True)

    # Flag each booking as SENT so that a duplicate notification is not
    # accidentally sent. Skip the query entirely when nothing was sent —
    # in_(()) would emit a degenerate (and warning-producing) clause.
    if reservation_ids:
        bookings = StudySpacesBooking.query.filter(
            StudySpacesBooking.id.in_(tuple(reservation_ids))
        ).all()
        for booking in bookings:
            booking.reminder_sent = True
        sqldb.session.commit()
@app.route("/studyspaces/reservations", methods=["GET"])
def get_reservations_endpoint():
    """
    Return the caller's GSR reservations, looked up by email (libcal) and/or
    Wharton session id. "libcal_search_span" bounds how many days ahead the
    libcal search runs (default 3).
    """
    email = request.args.get("email")
    sessionid = request.args.get("sessionid")
    if not email and not sessionid:
        return jsonify({"error": "A session id or email must be sent to server."}), 400

    span_arg = request.args.get("libcal_search_span")
    if span_arg:
        try:
            libcal_search_span = int(span_arg)
        except ValueError:
            return jsonify({"error": "Search span must be an integer."}), 400
    else:
        # Missing or empty argument falls back to a three-day window.
        libcal_search_span = 3

    try:
        reservations = get_reservations(email, sessionid, libcal_search_span)
    except APIError as e:
        return jsonify({"error": str(e)}), 400
    return jsonify({"reservations": reservations})
res["fromDate"] = "{}T{}-{}".format(date_str, start_str, timezone_hours) 70 | 71 | if res["endTime"] == "midnight": 72 | date += timedelta(days=1) 73 | date_str = datetime.strftime(date, "%Y-%m-%d") 74 | res["toDate"] = date_str + "T00:00:00-{}".format(timezone_hours) 75 | elif res["endTime"] == "noon": 76 | res["toDate"] = date_str + "T12:00:00-{}".format(timezone_hours) 77 | else: 78 | end_str = res["endTime"].replace(".", "").upper() 79 | try: 80 | end_time = datetime.strptime(end_str, "%I:%M %p") 81 | except ValueError: 82 | end_time = datetime.strptime(end_str, "%I %p") 83 | end_str = datetime.strftime(end_time, "%H:%M:%S") 84 | res["toDate"] = "{}T{}-{}".format(date_str, end_str, timezone_hours) 85 | 86 | del res["date"] 87 | del res["startTime"] 88 | del res["endTime"] 89 | 90 | reservations.extend(gsr_reservations) 91 | 92 | except APIError: 93 | pass 94 | 95 | if email: 96 | confirmed_reservations = [] 97 | try: 98 | 99 | def is_not_cancelled_in_db(booking_id): 100 | booking = StudySpacesBooking.query.filter_by(booking_id=booking_id).first() 101 | return not (booking and booking.is_cancelled) 102 | 103 | est = timezone("US/Eastern") 104 | now = datetime.now(est).replace(tzinfo=None) 105 | dateFormat = "%Y-%m-%d" 106 | i = 0 107 | while len(confirmed_reservations) == 0 and i < libcal_search_span: 108 | date = now + timedelta(days=i) 109 | dateStr = datetime.strftime(date, dateFormat) 110 | libcal_reservations = studyspaces.get_reservations(email, dateStr, timeout) 111 | confirmed_reservations = [ 112 | res 113 | for res in libcal_reservations 114 | if ( 115 | type(res) == dict 116 | and res["status"] == "Confirmed" 117 | and datetime.strptime(res["toDate"][:-6], "%Y-%m-%dT%H:%M:%S") >= now 118 | ) 119 | ] 120 | confirmed_reservations = [ 121 | res for res in confirmed_reservations if is_not_cancelled_in_db(res["bookId"]) 122 | ] 123 | i += 1 124 | 125 | except APIError: 126 | pass 127 | 128 | # Fetch reservations in database that are not being returned by 
API 129 | db_bookings = StudySpacesBooking.query.filter_by(email=email) 130 | db_booking_ids = [ 131 | str(x.booking_id) 132 | for x in db_bookings 133 | if x.end and x.end > now and not str(x.booking_id).isdigit() and not x.is_cancelled 134 | ] 135 | reservation_ids = [x["bookId"] for x in confirmed_reservations] 136 | missing_booking_ids = list(set(db_booking_ids) - set(reservation_ids)) 137 | if missing_booking_ids: 138 | missing_bookings_str = ",".join(missing_booking_ids) 139 | missing_reservations = studyspaces.get_reservations_for_booking_ids( 140 | missing_bookings_str 141 | ) 142 | confirmed_missing_reservations = [ 143 | res for res in missing_reservations if res["status"] == "Confirmed" 144 | ] 145 | confirmed_reservations.extend(confirmed_missing_reservations) 146 | 147 | for res in confirmed_reservations: 148 | res["service"] = "libcal" 149 | res["booking_id"] = res["bookId"] 150 | res["room_id"] = res["eid"] 151 | res["gid"] = res["cid"] 152 | del res["bookId"] 153 | del res["eid"] 154 | del res["cid"] 155 | del res["status"] 156 | del res["email"] 157 | del res["firstName"] 158 | del res["lastName"] 159 | 160 | room_ids = ",".join(list(set([str(x["room_id"]) for x in confirmed_reservations]))) 161 | if room_ids: 162 | rooms = studyspaces.get_room_info(room_ids) 163 | for room in rooms: 164 | room["thumbnail"] = room["image"] 165 | del room["image"] 166 | del room["formid"] 167 | 168 | for res in confirmed_reservations: 169 | room = [x for x in rooms if x["id"] == res["room_id"]][0] 170 | res["name"] = room["name"] 171 | res["info"] = room 172 | del res["room_id"] 173 | reservations.extend(confirmed_reservations) 174 | 175 | return reservations 176 | -------------------------------------------------------------------------------- /server/studyspaces/search.py: -------------------------------------------------------------------------------- 1 | from flask import jsonify, request 2 | from sqlalchemy import and_ 3 | 4 | from server import app 5 | from 
@app.route("/studyspaces/user/search", methods=["GET"])
def get_nam():
    """
    Search accounts by name, falling back to pennkey, and return matches
    ranked from most to least specific (duplicates removed, first hit wins).
    """
    query = request.args.get("query")
    if not query:
        return jsonify({"error": "Query argument not found."}), 400
    if len(query) <= 1:
        return jsonify({"error": "Query is too short. Minimum length is two characters."}), 400

    # A two-token query is treated as "<first> <last>".
    first = last = None
    if " " in query:
        tokens = query.split(" ")
        if len(tokens) >= 2:
            first, last = tokens[0], tokens[1]

    matches = []
    if first and last:
        # Most specific: prefix match on both names simultaneously.
        matches.extend(
            Account.query.filter(
                and_(
                    Account.first.like("{}%".format(first)),
                    Account.last.like("{}%".format(last)),
                )
            ).all()
        )

        if not matches:
            # Relax to first-initial + last name, then last name alone.
            matches.extend(
                Account.query.filter(
                    and_(
                        Account.first.like("{}%".format(first[:1])),
                        Account.last.like("{}%".format(last)),
                    )
                ).all()
            )
            by_last = Account.query.filter(Account.last.like("{}%".format(last))).all()
            matches.extend(sorted(by_last, key=lambda a: a.first))
    else:
        prefix = "{}%".format(query)
        anywhere = "%{}%".format(query)

        exact_first = Account.query.filter(Account.first.like(query)).all()
        prefix_first = Account.query.filter(Account.first.like(prefix)).all()

        # Exact first-name hits lead, then prefix hits, then broader matches.
        matches.extend(sorted(exact_first, key=lambda a: a.last))
        matches.extend(sorted(prefix_first, key=lambda a: a.last))
        matches.extend(Account.query.filter(Account.last.like(prefix)).all())
        matches.extend(Account.query.filter(Account.first.like(anywhere)).all())
        matches.extend(Account.query.filter(Account.last.like(anywhere)).all())

    if not matches:
        # If no users found by search first or last name, search the pennkey
        matches.extend(
            Account.query.filter(Account.pennkey.like("{}%".format(query))).all()
        )
        matches.extend(
            Account.query.filter(Account.pennkey.like("%{}%".format(query))).all()
        )

    # Deduplicate by pennkey while preserving ranking order.
    seen_pennkeys = set()
    results = []
    for account in matches:
        if account.pennkey in seen_pennkeys:
            continue
        seen_pennkeys.add(account.pennkey)
        results.append(
            {
                "first": account.first,
                "last": account.last,
                "pennkey": account.pennkey,
                "email": account.email,
            }
        )

    return jsonify({"results": results})
@app.route("/transit/routing", methods=["GET"])
def fastest_route():
    """
    Pick the Penn Transit route minimizing total walking distance between the
    caller's start (latFrom/lonFrom) and destination (latTo/lonTo).
    """
    now = datetime.datetime.today()
    midnight = datetime.datetime(now.year, now.month, now.day) + datetime.timedelta(days=1)

    def get_data():
        return {"result_data": routes_with_directions(populate_route_info())}

    # Retrieve routes from cache, regenerating via get_data if necessary.
    route_data = cache_get("transit:routes", midnight - now, get_data)["result_data"]

    latFrom, lonFrom = float(request.args["latFrom"]), float(request.args["lonFrom"])
    latTo, lonTo = float(request.args["latTo"]), float(request.args["lonTo"])

    # Walking straight there — the baseline any transit option must beat.
    bird_dist = haversine(lonFrom, latFrom, lonTo, latTo)

    candidates = []
    for route in route_data:
        stops = route["stops"]
        best_board = None  # (distance, stop) closest to the start point
        best_alight = None  # (distance, stop) closest to the destination
        for stop in stops:
            stop_lon = float(stop["Longitude"])
            stop_lat = float(stop["Latitude"])
            d_board = haversine(lonFrom, latFrom, stop_lon, stop_lat)
            d_alight = haversine(lonTo, latTo, stop_lon, stop_lat)
            if best_board is None or d_board < best_board[0]:
                best_board = (d_board, stop)
            if best_alight is None or d_alight < best_alight[0]:
                best_alight = (d_alight, stop)

        if best_board is None or best_alight is None:
            continue
        board_stop, alight_stop = best_board[1], best_alight[1]
        # The boarding stop must come before the alighting stop on this route.
        if board_stop["order"] >= alight_stop["order"]:
            continue

        def add_path_points(acc, stop):
            # Fold each stop's path_to waypoints into one drawable polyline;
            # the boarding stop contributes no lead-in segment.
            if stop["order"] == board_stop["order"]:
                return acc + [stop]
            waypoints = stop.pop("path_to")
            return acc + waypoints + [stop]

        path = reduce(
            add_path_points, stops[board_stop["order"]:alight_stop["order"] + 1], []
        )
        candidates.append(
            {
                "route_name": route["route_name"],
                "walkingDistanceBefore": best_board[0],
                "path": path,
                "walkingDistanceAfter": best_alight[0],
            }
        )

    if len(candidates) == 0:
        return jsonify(
            {"Error": "We couldn't find a helpful Penn Transit route for those locations."}
        )

    # Least combined walking wins.
    final_route = min(
        candidates, key=lambda x: x["walkingDistanceBefore"] + x["walkingDistanceAfter"]
    )

    if final_route["walkingDistanceBefore"] + final_route["walkingDistanceAfter"] > bird_dist:
        return jsonify(
            {"Error": "We couldn't find a helpful Penn Transit route for those locations."}
        )

    return jsonify({"result_data": final_route})
def populate_stop_info(stops):
    """
    Attach route membership to each stop using the transit configuration.

    Builds a name-keyed lookup of the supplied stops, then for every
    route/direction/stop in the configuration records the stop's order on
    that route under stop["routes"][route_key]. Returns the enriched stop
    list, or an error dict if the expected JSON structure is missing.
    """
    try:
        lookup = {
            entry["BusStopName"]: entry
            for entry in stops["result_data"]
            if "BusStopName" in entry
        }
        config = transit.configuration()
        for route in config["result_data"]["ConfigurationData"]["Route"]:
            for direction in route["Direction"]:
                for stop in direction["Stop"]:
                    entry = lookup.get(stop["title"])
                    if entry is None:
                        continue
                    # Lazily create the routes map on first membership.
                    entry.setdefault("routes", dict())[route["key"]] = int(stop["stopOrder"])
        return list(lookup.values())
    except KeyError:
        return {"error": "JSON error in building stops"}
217 | """ 218 | # retrieve from cache, or generate and store in cache 219 | stop_info = cache_get("transit:stops", datetime.timedelta(days=1), get_stop_info) 220 | 221 | routes = dict() 222 | 223 | for stop in stop_info["result_data"]: 224 | if "routes" in stop: 225 | items = stop["routes"].items() 226 | del stop["routes"] 227 | for route_name, val in items: 228 | to_insert = copy.deepcopy(stop) 229 | to_insert["order"] = val 230 | 231 | if route_name in routes: 232 | routes[route_name].append(to_insert) 233 | else: 234 | routes[route_name] = [to_insert] 235 | for route in routes: 236 | routes[route] = sorted(routes[route], key=lambda stop: stop["order"]) 237 | 238 | # Filter out bad routes 239 | good_routes = ["PennBUS East", "PennBUS West", "Campus Loop"] 240 | 241 | routes = [{"route_name": key, "stops": routes[key]} for key in routes if key in good_routes] 242 | 243 | return routes 244 | -------------------------------------------------------------------------------- /server/utils.py: -------------------------------------------------------------------------------- 1 | from math import asin, cos, radians, sin, sqrt 2 | 3 | 4 | def haversine(lon1, lat1, lon2, lat2): 5 | """ 6 | Calculate the great circle distance between two points 7 | on the earth (specified in decimal degrees) 8 | """ 9 | # convert decimal degrees to radians 10 | lon1, lat1, lon2, lat2 = map(radians, [lon1, lat1, lon2, lat2]) 11 | 12 | # haversine formula 13 | dlon = lon2 - lon1 14 | dlat = lat2 - lat1 15 | a = sin(dlat / 2) ** 2 + cos(lat1) * cos(lat2) * sin(dlon / 2) ** 2 16 | c = 2 * asin(sqrt(a)) 17 | r = 3956 # Radius of earth in miles. 
18 | return c * r 19 | -------------------------------------------------------------------------------- /server/weather.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | import os 3 | 4 | import requests 5 | 6 | from server import app 7 | from server.base import cached_route 8 | 9 | 10 | @app.route("/weather", methods=["GET"]) 11 | def retrieve_weather_data(): 12 | """Retrieves the current weather from the Open Weather Map API. 13 | Stores data in a cache whenever data is retrieved; cache is updated 14 | if it has not been updated within 10 minutes. 15 | """ 16 | OWM_API_KEY = os.getenv("OWM_API_KEY") 17 | 18 | def get_data(): 19 | url = ( 20 | "http://api.openweathermap.org/data/2.5/weather?q=Philadelphia&units=imperial&APPID=%s" 21 | % OWM_API_KEY 22 | ) 23 | json = requests.get(url).json() 24 | return {"weather_data": json} 25 | 26 | td = datetime.timedelta(seconds=600) 27 | 28 | return cached_route("weather", td, get_data) 29 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [flake8] 2 | ignore = W503 3 | max-line-length = 120 4 | exclude = .venv, venv 5 | inline-quotes = double 6 | 7 | [isort] 8 | balanced_wrapping = True 9 | default_section = THIRDPARTY 10 | known_first_party = server 11 | line_length = 120 12 | lines_after_imports = 2 13 | multi_line_output = 0 14 | 15 | [coverage:run] 16 | omit = */tests/*, */.venv/*, */virtualenvs/*, runserver.py 17 | source = . 
18 | 19 | [junit-xml] 20 | always-on = true 21 | path = test-results/nose2-junit.xml 22 | test_fullname = true 23 | 24 | [uwsgi] 25 | http-socket = :80 26 | chdir = /app/ 27 | module = wsgi:application 28 | master = true 29 | processes = 5 30 | threads = 2 31 | enable-threads = true 32 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pennlabs/labs-api-server/4f4c3299594fb93e0c7894965db2a297a44ab35c/tests/__init__.py -------------------------------------------------------------------------------- /tests/test_dining.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | import json 3 | import unittest 4 | 5 | import server 6 | from server.models import Account, DiningBalance, sqldb 7 | 8 | 9 | class DiningApiTests(unittest.TestCase): 10 | def setUp(self): 11 | server.app.config["TESTING"] = True 12 | 13 | def testDiningVenues(self): 14 | with server.app.test_request_context(): 15 | # Simple test. Did the request go through? 
16 | venue_data = server.dining.hours_menus.retrieve_venues() 17 | venue_dict = json.loads(venue_data.data.decode("utf8")) 18 | venues = venue_dict["document"]["venue"] 19 | self.assertTrue(len(venues[0]["venueType"]) > 0) 20 | 21 | def testDiningV2Venues(self): 22 | with server.app.test_request_context(): 23 | venue_res = server.dining.hours_menus.retrieve_venues_v2() 24 | venue_dict = json.loads(venue_res.data.decode("utf8")) 25 | self.assertEquals("1920 Commons", venue_dict["document"]["venue"][0]["name"]) 26 | 27 | def testDiningV2Menu(self): 28 | with server.app.test_request_context(): 29 | menu_res = server.dining.hours_menus.retrieve_menu_v2("593", "2016-02-08") 30 | menu_dict = json.loads(menu_res.data.decode("utf8")) 31 | self.assertTrue(len(menu_dict["days"][0]["cafes"]["593"]["dayparts"]) > 0) 32 | 33 | def testDiningV2Hours(self): 34 | with server.app.test_request_context(): 35 | hours_res = server.dining.hours_menus.retrieve_hours("593") 36 | hours_dict = json.loads(hours_res.data.decode("utf8")) 37 | self.assertEquals("1920 Commons", hours_dict["cafes"]["593"]["name"]) 38 | 39 | def testDiningV2Item(self): 40 | with server.app.test_request_context(): 41 | item_res = server.dining.hours_menus.retrieve_item_v2("3899220") 42 | item_dict = json.loads(item_res.data.decode("utf8")) 43 | self.assertEquals( 44 | "tomato tzatziki sauce and pita", item_dict["items"]["3899220"]["label"] 45 | ) 46 | 47 | def testDiningWeeklyMenu(self): 48 | with server.app.test_request_context(): 49 | menu_res = server.dining.hours_menus.retrieve_weekly_menu("593") 50 | menu_dict = json.loads(menu_res.data.decode("utf8")) 51 | self.assertTrue("1920 Commons" in menu_dict["Document"]["location"]) 52 | 53 | def testDiningDailyMenu(self): 54 | with server.app.test_request_context(): 55 | menu_res = server.dining.hours_menus.retrieve_daily_menu("593") 56 | menu_dict = json.loads(menu_res.data.decode("utf8")) 57 | self.assertEquals("1920 Commons", menu_dict["Document"]["location"]) 58 
| 59 | @classmethod 60 | def setUpClass(self): 61 | with server.app.test_request_context(): 62 | dollars = 200 63 | swipes = 20 64 | date = datetime.datetime.strptime("2018-09-01", "%Y-%m-%d") 65 | account = Account( 66 | id="12345", 67 | first="Carin", 68 | last="Gan", 69 | pennkey="12345", 70 | email="caringan@penn.edu", 71 | image_url="test", 72 | created_at=datetime.datetime.strptime("2018-08-01", "%Y-%m-%d"), 73 | ) 74 | sqldb.session.add(account) 75 | sqldb.session.commit() 76 | for x in range(0, 11): 77 | date = date + datetime.timedelta(days=7) 78 | item = DiningBalance( 79 | account_id="12345", 80 | dining_dollars=dollars, 81 | swipes=swipes, 82 | guest_swipes=1, 83 | created_at=date, 84 | ) 85 | sqldb.session.add(item) 86 | dollars -= 10 87 | swipes -= 1 88 | sqldb.session.commit() 89 | 90 | # def testDiningBalances(self): 91 | # with server.app.test_client() as c: 92 | # res = json.loads(c.get('/dining/balances', headers={'X-Account-ID': '12345'}).data.decode('utf8')) 93 | # self.assertEquals(len(res['balance']), 11) 94 | # self.assertEquals(res['balance'][0]['dining_dollars'], 200) 95 | # self.assertEquals(res['balance'][0]['swipes'], 20) 96 | # self.assertEquals(res['balance'][0]['guest_swipes'], 1) 97 | # self.assertEquals(res['balance'][9]['dining_dollars'], 110) 98 | # self.assertEquals(res['balance'][9]['swipes'], 11) 99 | 100 | # def testDiningBalancesWithParam(self): 101 | # with server.app.test_client() as c: 102 | # res = json.loads(c.get('/dining/balances?start_date=2018-09-08&end_date=2018-09-30', 103 | # headers={'X-Account-ID': '12345'}).data.decode('utf8')) 104 | # self.assertEquals(len(res['balance']), 4) 105 | # self.assertEquals(res['balance'][3]['dining_dollars'], 170) 106 | # self.assertEquals(res['balance'][3]['swipes'], 17) 107 | # self.assertEquals(res['balance'][3]['guest_swipes'], 1) 108 | 109 | # def testDiningProjection(self): 110 | # with server.app.test_client() as c: 111 | # res = 
json.loads(c.get('/dining/projection?date=2018-11-17', 112 | # headers={'X-Account-ID': '12345'}).data.decode('utf8')) 113 | # self.assertEquals(res['projection']['dining_dollars_day_left'], 71) 114 | # self.assertEquals(res['projection']['swipes_day_left'], 71) 115 | -------------------------------------------------------------------------------- /tests/test_general.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | import json 3 | import unittest 4 | 5 | import server 6 | 7 | 8 | class MobileAppApiTests(unittest.TestCase): 9 | def setUp(self): 10 | server.app.config["TESTING"] = True 11 | 12 | def testDirectorySearch(self): 13 | with server.app.test_request_context("/?name=Zdancewic"): 14 | res = server.directory.detail_search() 15 | steve = json.loads(res.data.decode("utf8")) 16 | self.assertEquals("stevez@cis.upenn.edu", steve["result_data"][0]["list_email"]) 17 | 18 | def testDirectoryPersonDetails(self): 19 | with server.app.test_request_context(): 20 | res = server.directory.person_details("aed1617a1508f282dee235fda2b8c170") 21 | person_data = json.loads(res.data.decode("utf8")) 22 | self.assertEquals("STEPHAN A ZDANCEWIC", person_data["detail_name"]) 23 | 24 | def testRegistarCourseSearch(self): 25 | with server.app.test_request_context("/?q=cis 110"): 26 | res = server.registrar.search() 27 | course_data = json.loads(res.data.decode("utf8")) 28 | for val in course_data["courses"]: 29 | self.assertEquals("110", val["course_number"]) 30 | 31 | def testRegistrarCourseSearchNoNumber(self): 32 | with server.app.test_request_context("/?q=cis"): 33 | res = server.registrar.search() 34 | course_data = json.loads(res.data.decode("utf8")) 35 | for val in course_data["courses"]: 36 | self.assertEquals("CIS", val["course_department"]) 37 | 38 | # def testTransitStopInventory(self): 39 | # with server.app.test_request_context(): 40 | # res = json.loads(server.transit.transit_stops().data.decode( 41 | # 'utf8')) 
42 | # self.assertTrue(len(res['result_data']) > 0) 43 | 44 | # def testTransitBasicRouting(self): 45 | # query = '/?latFrom=39.9529075495845&lonFrom=-75.1925700902939&latTo=39.9447689912513&lonTo=-75.1751947402954' 46 | # with server.app.test_request_context(query): 47 | # res = json.loads(server.transit.fastest_route().data.decode('utf8')) 48 | # self.assertEquals(res['Error'], 'We couldn't find a helpful Penn Transit route for those locations.') 49 | 50 | def testWeather(self): 51 | with server.app.test_request_context(): 52 | res = json.loads(server.weather.retrieve_weather_data().data.decode("utf8")) 53 | self.assertTrue(len(res) > 0) 54 | s = res["weather_data"] 55 | self.assertTrue("clouds" in s) 56 | self.assertTrue("name" in s) 57 | self.assertTrue("coord" in s) 58 | self.assertTrue("sys" in s) 59 | self.assertTrue("base" in s) 60 | self.assertTrue("visibility" in s) 61 | self.assertTrue("cod" in s) 62 | self.assertTrue("weather" in s) 63 | self.assertTrue("dt" in s) 64 | self.assertTrue("main" in s) 65 | self.assertTrue("id" in s) 66 | self.assertTrue("wind" in s) 67 | 68 | # def testFitness(self): 69 | # with server.app.test_request_context(): 70 | # resp = json.loads(server.fitness.fitness_usage().data.decode('utf8')) 71 | # self.assertTrue(len(resp['results']) > 0) 72 | # for location in resp['results']: 73 | # self.assertTrue('updated' in location) 74 | 75 | def testCalendarToday(self): 76 | with server.app.test_request_context(): 77 | res = json.loads(server.calendar3year.pull_today().data.decode("utf8")) 78 | s = res["calendar"] 79 | today = datetime.datetime.now().date() 80 | for event in s: 81 | self.assertTrue("end" in event) 82 | self.assertTrue("name" in event) 83 | self.assertTrue("start" in event) 84 | d = datetime.datetime.strptime(event["start"], "%Y-%m-%d").date() 85 | self.assertTrue((d - today).total_seconds() <= 1209600) 86 | 87 | def testCalendarDate(self): 88 | with server.app.test_request_context(): 89 | ind = "2017-01-01" 90 | 
chosen_date = datetime.date(2017, 1, 1) 91 | res = json.loads(server.calendar3year.pull_date(ind).data.decode("utf8")) 92 | s = res["calendar"] 93 | for event in s: 94 | self.assertTrue("end" in event) 95 | self.assertTrue("name" in event) 96 | self.assertTrue("start" in event) 97 | d = datetime.datetime.strptime(event["start"], "%Y-%m-%d").date() 98 | self.assertTrue((d - chosen_date).total_seconds() <= 1209600) 99 | -------------------------------------------------------------------------------- /tests/test_laundry.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | import json 3 | import unittest 4 | 5 | import mock 6 | 7 | import server 8 | from server.models import LaundrySnapshot, sqldb 9 | 10 | 11 | class LaundryApiTests(unittest.TestCase): 12 | def setUp(self): 13 | server.app.config["TESTING"] = True 14 | 15 | @classmethod 16 | def setUpClass(self): 17 | with server.app.test_request_context(): 18 | for x in range(0, 24 * 60, 60): 19 | item = LaundrySnapshot( 20 | date=datetime.date(2017, 1, 1), 21 | time=x, 22 | room=1, 23 | washers=3, 24 | dryers=3, 25 | total_washers=3, 26 | total_dryers=3, 27 | ) 28 | sqldb.session.add(item) 29 | for x in range(0, 24 * 60, 60): 30 | item = LaundrySnapshot( 31 | date=datetime.date(2017, 1, 1) - datetime.timedelta(days=7), 32 | time=x, 33 | room=1, 34 | washers=0, 35 | dryers=0, 36 | total_washers=3, 37 | total_dryers=3, 38 | ) 39 | sqldb.session.add(item) 40 | sqldb.session.commit() 41 | 42 | def fakeLaundryGet(url, *args, **kwargs): 43 | if "suds.kite.upenn.edu" in url: 44 | with open("tests/laundry_snapshot.html", "rb") as f: 45 | m = mock.MagicMock(content=f.read()) 46 | return m 47 | else: 48 | raise NotImplementedError 49 | 50 | @mock.patch("penn.laundry.requests.get", fakeLaundryGet) 51 | def testLaundryAllHalls(self): 52 | with server.app.test_request_context(): 53 | res = json.loads(server.laundry.all_halls().data.decode("utf8"))["halls"] 54 | 
self.assertTrue(len(res) > 45) 55 | self.assertTrue("English House" in res) 56 | for info in res.values(): 57 | for t in ["washers", "dryers"]: 58 | self.assertTrue(info[t]["running"] >= 0) 59 | self.assertTrue(info[t]["offline"] >= 0) 60 | self.assertTrue(info[t]["out_of_order"] >= 0) 61 | self.assertTrue(info[t]["open"] >= 0) 62 | 63 | @mock.patch("requests.get", fakeLaundryGet) 64 | def testLaundryOneHall(self): 65 | with server.app.test_request_context(): 66 | res = json.loads(server.laundry.hall(26).data.decode("utf8")) 67 | self.assertEquals(res["hall_name"], "Harrison Floor 20") 68 | 69 | def testLaundryUsage(self): 70 | with server.app.test_request_context(): 71 | request = server.laundry.usage(20, 2017, 1, 1) 72 | res = json.loads(request.data.decode("utf8")) 73 | self.assertEquals(res["hall_name"], "Harrison Floor 08") 74 | self.assertEquals(res["location"], "Harrison") 75 | self.assertEquals(res["day_of_week"], "Sunday") 76 | self.assertEquals(res["end_date"], "2017-01-01") 77 | self.assertEquals(len(res["washer_data"]), 27) 78 | self.assertEquals(len(res["dryer_data"]), 27) 79 | 80 | def testLaundryDatabase(self): 81 | with server.app.test_request_context(): 82 | request = server.laundry.usage(1, 2017, 1, 1) 83 | res = json.loads(request.data.decode("utf8")) 84 | self.assertEquals(res["total_number_of_washers"], 3) 85 | self.assertEquals(res["total_number_of_dryers"], 3) 86 | for x in range(0, 23): 87 | self.assertEquals(res["washer_data"][str(x)], 1.5) 88 | self.assertEquals(res["dryer_data"][str(x)], 1.5) 89 | 90 | def testLaundryPreferences(self): 91 | with server.app.test_client() as c: 92 | resp = json.loads( 93 | c.get("/laundry/preferences", headers={"X-Device-ID": "testing"}).data.decode( 94 | "utf8" 95 | ) 96 | ) 97 | self.assertEquals(resp["rooms"], []) 98 | 99 | c.post( 100 | "/laundry/preferences", 101 | headers={"X-Device-ID": "testing"}, 102 | data={"rooms": "1,2,3", "platform": "Android"}, 103 | ) 104 | 105 | resp = json.loads( 106 | 
c.get("/laundry/preferences", headers={"X-Device-ID": "testing"}).data.decode( 107 | "utf8" 108 | ) 109 | ) 110 | self.assertEquals(resp["rooms"], [1, 2, 3]) 111 | -------------------------------------------------------------------------------- /tests/test_studyspaces.py: -------------------------------------------------------------------------------- 1 | import json 2 | import unittest 3 | 4 | import mock 5 | 6 | import server 7 | from server.models import sqldb 8 | from server.studyspaces.models import StudySpacesBooking 9 | 10 | 11 | class StudySpacesApiTests(unittest.TestCase): 12 | def setUp(self): 13 | server.app.config["TESTING"] = True 14 | 15 | def testStudyspaceBooking(self): 16 | with server.app.test_client() as c: 17 | # fake the actual booking 18 | with mock.patch( 19 | "penn.studyspaces.StudySpaces.book_room", 20 | return_value={"success": "booking placed", "results": True}, 21 | ): 22 | resp = c.post( 23 | "/studyspaces/book", 24 | data={ 25 | "building": 1, 26 | "room": 1, 27 | "start": "2017-02-08 10:00:00", 28 | "end": "2017-02-08 10:30:00", 29 | "firstname": "Test", 30 | "lastname": "Test", 31 | "email": "test@example.com", 32 | "groupname": "Testing", 33 | "phone": "000-000-0000", 34 | "size": 1, 35 | }, 36 | headers={"X-Device-Id": "test"}, 37 | ) 38 | res = json.loads(resp.data.decode("utf8")) 39 | self.assertTrue(len(res) > 0) 40 | 41 | # make sure the booking is saved to the database 42 | self.assertEquals(sqldb.session.query(StudySpacesBooking).count(), 1) 43 | 44 | def testStudyspaceCancelFailure(self): 45 | """Booking cancellation should not succeed if it is not in our database.""" 46 | 47 | with server.app.test_client() as c: 48 | resp = c.post( 49 | "/studyspaces/cancel", data={"booking_id": "definitely_not_a_valid_booking_id_123"} 50 | ) 51 | res = json.loads(resp.data.decode("utf8")) 52 | self.assertTrue("error" in res) 53 | -------------------------------------------------------------------------------- /weather.json: 
-------------------------------------------------------------------------------- 1 | { 2 | "weather":{ 3 | "coord":{ 4 | "lon":-75.16, 5 | "lat":39.95 6 | }, 7 | "weather":[ 8 | { 9 | "id":800, 10 | "main":"Clear", 11 | "description":"clear sky", 12 | "icon":"01d" 13 | } 14 | ], 15 | "base":"stations", 16 | "main":{ 17 | "temp":62.49, 18 | "pressure":1022, 19 | "humidity":51, 20 | "temp_min":59, 21 | "temp_max":64.4 22 | }, 23 | "visibility":16093, 24 | "wind":{ 25 | "speed":9.01, 26 | "deg":300.001 27 | }, 28 | "clouds":{ 29 | "all":1 30 | }, 31 | "dt":1478382660, 32 | "sys":{ 33 | "type":1, 34 | "id":2361, 35 | "message":0.1837, 36 | "country":"US", 37 | "sunrise":1478345747, 38 | "sunset":1478382735 39 | }, 40 | "id":4560349, 41 | "name":"Philadelphia", 42 | "cod":200 43 | } 44 | } -------------------------------------------------------------------------------- /wsgi.py: -------------------------------------------------------------------------------- 1 | from server import app as application 2 | 3 | 4 | application.config["JSONIFY_PRETTYPRINT_REGULAR"] = True 5 | --------------------------------------------------------------------------------