├── .github
└── workflows
│ └── release-containers.yml
├── .gitignore
├── .holo
├── branches
│ └── helm-chart
│ │ └── _paws-data-pipeline.toml
└── config.toml
├── GettingStarted.md
├── LICENSE
├── README.md
├── documentation
├── Data Lake Conceptual Flows v2.jpg
├── Data Lake Conceptual Flows v2.pptx
├── Hack Night PAWS Presentation PDF.pdf
├── NARRATIVE.md
├── PAWS_Digital_Ecosystem_v3_for_C4P.jpg
├── PDP_UserFlow.png
├── SALESFORCE.md
├── documentation-images
│ ├── ReadMe.md
│ ├── SF-Pic2
│ ├── SF-Pic3
│ ├── SF-Pic4
│ ├── SF-Pic5
│ ├── SF-Pic6
│ ├── SF-Pic7
│ ├── SF-Pic8
│ ├── SF-pic-1
│ └── exec_status.svg
└── matching_rules.md
└── src
├── README.md
├── client
├── .dockerignore
├── .gitignore
├── Dockerfile
├── README.md
├── default.conf.template
├── nginx.conf
├── package-lock.json
├── package.json
├── public
│ ├── images
│ │ ├── logo.png
│ │ └── paws-logo-64.png
│ ├── index.html
│ └── robots.txt
└── src
│ ├── App.js
│ ├── App.test.js
│ ├── assets
│ ├── font
│ │ ├── NowayRound-Regular.woff
│ │ └── font.css
│ ├── header-logo.png
│ └── startImage.jpg
│ ├── components
│ ├── AlertBanner.jsx
│ ├── Header.js
│ ├── Refresh.js
│ ├── RefreshDlg.js
│ └── TabPanel.js
│ ├── contexts
│ └── AlertContext.jsx
│ ├── hooks
│ └── useAlert.js
│ ├── index.js
│ ├── pages
│ ├── About.js
│ ├── Admin
│ │ ├── Admin.js
│ │ └── Components
│ │ │ ├── AnalysisBox.jsx
│ │ │ ├── AnalysisTable.jsx
│ │ │ ├── Loading.jsx
│ │ │ ├── UploadBox.jsx
│ │ │ └── UploadsTable.jsx
│ ├── Check
│ │ └── Check.js
│ ├── DataView360
│ │ ├── Search
│ │ │ ├── Search.js
│ │ │ └── components
│ │ │ │ └── SearchBar.js
│ │ └── View
│ │ │ ├── View360.js
│ │ │ └── components
│ │ │ ├── Adoptions.js
│ │ │ ├── AnimalInfo.js
│ │ │ ├── CollapsibleTable.js
│ │ │ ├── ContactInfo.js
│ │ │ ├── DataTableHeader.js
│ │ │ ├── Donations.js
│ │ │ ├── EventsModal.js
│ │ │ ├── Fosters.js
│ │ │ ├── SupportOverview.js
│ │ │ ├── Volunteer.js
│ │ │ ├── VolunteerActivity.js
│ │ │ └── VolunteerHistory.js
│ ├── Home.js
│ ├── Login
│ │ ├── Login.js
│ │ ├── Logout.js
│ │ └── useToken.js
│ └── UserManagement
│ │ ├── Components
│ │ ├── Dialog
│ │ │ ├── ChangePasswordDialog.jsx
│ │ │ ├── NewUserDialog.jsx
│ │ │ ├── UpdateUserDialog.jsx
│ │ │ └── UserDialog.jsx
│ │ ├── RolesRadioGroup.jsx
│ │ └── UserRow.jsx
│ │ ├── UserManagement.jsx
│ │ └── Validations.js
│ ├── serviceWorker.js
│ ├── setupProxy.js
│ ├── setupTests.js
│ ├── theme
│ └── defaultTheme.js
│ └── utils
│ ├── api.js
│ └── utils.js
├── deploy_from_tar_docker-compose.sh
├── docker-compose.yml
├── helm-chart
├── .helmignore
├── Chart.yaml
├── templates
│ ├── NOTES.txt
│ ├── _helpers.tpl
│ ├── deployment.yaml
│ ├── hpa.yaml
│ ├── ingress.yaml
│ ├── pvc.yaml
│ ├── pvc_server.yaml
│ ├── service.yaml
│ └── serviceaccount.yaml
└── values.yaml
├── k8s_conf
├── _pv_claims.yaml
├── api_server.yaml
├── db_server.yaml
├── front_end.yaml
└── kind_w_reg.sh
├── package-lock.json
├── run_cluster.sh
├── scheduler
├── Dockerfile
└── cronfile
└── server
├── .dockerignore
├── Dockerfile
├── alembic.ini
├── alembic
├── README
├── env.py
├── generate_rfm_mapping.py
├── insert_rfm_edges.sql
├── populate_rfm_mapping.sql
├── script.py.mako
└── versions
│ ├── 05e0693f8cbb_key_value_table.py
│ ├── 36c4ecbfd11a_add_pdp_users_full_name.py
│ ├── 40be910424f0_update_rfm_mapping_remove_rfm_edges.py
│ ├── 41da831646e4_pdp_users_role_fk_from_roles.py
│ ├── 45a668fa6325_postgres_matching.py
│ ├── 494e064d69a3_tables_for_rfm_data.py
│ ├── 57b547e9b464_create_rfm_edges_table.py
│ ├── 6b8cf99be000_add_user_journal_table.py
│ ├── 7138d52f92d6_add_uniqueness_constraints.py
│ ├── 72d50d531bd5_fix_pdp_users_timestamp.py
│ ├── 783cabf889d9_inital_schema_setup.py
│ ├── 90f471ac445c_create_sl_events.py
│ ├── 9687db7928ee_shelterluv_animals.py
│ ├── a3ba63dee8f4_rmv_details_size_limit.py
│ ├── bfb1262d3195_create_execution_status_table.py
│ ├── d0841384d5d7_explicitly_create_vshifts.py
│ ├── d80cb6df0fa2_rmv_shifts_uniqueness_constraint.py
│ ├── e3ef522bd3d9_explicit_create_sfd.py
│ ├── f3d30db17bed_change_pdp_users_password_to_bytea.py
│ ├── fc7325372396_merge_heads.py
│ └── fd187937528b_create_pdp_contacts_table.py
├── api
├── API_ingest
│ ├── __init__.py
│ ├── dropbox_handler.py
│ ├── ingest_sources_from_api.py
│ ├── salesforce_contacts.py
│ ├── shelterluv_animals.py
│ ├── shelterluv_db.py
│ ├── shelterluv_people.py
│ ├── sl_animal_events.py
│ ├── updated_data.py
│ └── volgistics_db.py
├── __init__.py
├── admin_api.py
├── api.py
├── common_api.py
├── fake_data.py
├── file_uploader.py
├── internal_api.py
├── jwt_ops.py
├── pem.py
└── user_api.py
├── app.py
├── bin
├── export_secrets.sh
├── startServer.sh
└── uwsgi.ini
├── config.py
├── constants.py
├── db_setup
├── README.md
├── __init__.py
└── base_users.py
├── donations_importer.py
├── models.py
├── pipeline
├── __init__.py
├── flow_script.py
└── log_db.py
├── pub_sub
├── __init__.py
├── salesforce_message_publisher.py
└── stubs
│ ├── __init__.py
│ ├── pubsub_api_pb2.py
│ └── pubsub_api_pb2_grpc.py
├── requirements.txt
├── rfm-edges.txt
├── secrets_dict.py
├── test_api.py
├── volgistics_importer.py
└── wsgi.py
/.github/workflows/release-containers.yml:
--------------------------------------------------------------------------------
1 | name: Release Containers
2 |
3 | on:
4 | push:
5 | tags: [ 'v*' ]
6 |
7 |
8 | jobs:
9 | release-containers:
10 | name: Build and Push
11 | runs-on: ubuntu-latest
12 | steps:
13 |
14 | - uses: actions/checkout@v2
15 |
16 | - name: Login to ghcr.io Docker registry
17 | uses: docker/login-action@v1
18 | with:
19 | registry: ghcr.io
20 | username: ${{ github.repository_owner }}
21 | password: ${{ secrets.GITHUB_TOKEN }}
22 |
23 | - name: Compute Docker container image addresses
24 | run: |
25 | DOCKER_REPOSITORY="ghcr.io/${GITHUB_REPOSITORY,,}"
26 | DOCKER_TAG="${GITHUB_REF:11}"
27 | echo "DOCKER_REPOSITORY=${DOCKER_REPOSITORY}" >> $GITHUB_ENV
28 | echo "DOCKER_TAG=${DOCKER_TAG}" >> $GITHUB_ENV
29 | echo "Using: ${DOCKER_REPOSITORY}/*:${DOCKER_TAG}"
30 |
31 | - name: 'Pull latest existing Docker container image: server'
32 | run: docker pull "${DOCKER_REPOSITORY}/server:latest" || true
33 |
34 | - name: 'Pull latest existing Docker container image: client'
35 | run: docker pull "${DOCKER_REPOSITORY}/client:latest" || true
36 |
37 | - name: 'Build Docker container image: server'
38 | run: |
39 | docker build \
40 | --cache-from "${DOCKER_REPOSITORY}/server:latest" \
41 | --tag "${DOCKER_REPOSITORY}/server:latest" \
42 | --tag "${DOCKER_REPOSITORY}/server:${DOCKER_TAG}" \
43 | ./src/server
44 |
45 | - name: 'Push Docker container image: server:latest'
46 | run: docker push "${DOCKER_REPOSITORY}/server:latest"
47 |
48 | - name: 'Push Docker container image: server:v*'
49 | run: docker push "${DOCKER_REPOSITORY}/server:${DOCKER_TAG}"
50 |
51 | - name: 'Build Docker container image: client'
52 | run: |
53 | docker build \
54 | --cache-from "${DOCKER_REPOSITORY}/client:latest" \
55 | --tag "${DOCKER_REPOSITORY}/client:latest" \
56 | --tag "${DOCKER_REPOSITORY}/client:${DOCKER_TAG}" \
57 | ./src/client
58 |
59 | - name: 'Push Docker container image: client:latest'
60 | run: docker push "${DOCKER_REPOSITORY}/client:latest"
61 |
62 | - name: 'Push Docker container image: client:v*'
63 | run: docker push "${DOCKER_REPOSITORY}/client:${DOCKER_TAG}"
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | temp/
2 | /src.tar.gz
3 | db_data/
4 | /local_files/
5 | node_modules
6 | .vscode/
7 | *.pyc
8 | __pycache__
9 | .cache/
10 | venv
11 | /dist/
12 | src/server/pipeline/__pycache__
13 | src/server/api/__pycache__
14 | /src/client/.idea
15 | /.idea
16 | start_env.sh
17 | *.DS_Store
18 | /src/server/venv/
19 | /src/local_files/
20 | /src/server/secrets_dict.py
21 | /src/server/local_files/
22 | .mypy_cache/
23 | *secrets*
24 | *kustomization*
25 | src/.venv/
26 | src/server/secrets_dict.py
27 | *.pem
--------------------------------------------------------------------------------
/.holo/branches/helm-chart/_paws-data-pipeline.toml:
--------------------------------------------------------------------------------
1 | [holomapping]
2 | root = "src/helm-chart"
3 | files = "**"
--------------------------------------------------------------------------------
/.holo/config.toml:
--------------------------------------------------------------------------------
1 | [holospace]
2 | name = "paws-data-pipeline"
3 |
--------------------------------------------------------------------------------
/GettingStarted.md:
--------------------------------------------------------------------------------
1 | # Getting Started
2 |
3 | #### All of our code is in containers
4 | - Install Docker - `https://docs.docker.com/install`
5 | - Install Docker Compose - `https://docs.docker.com/compose/install/`
6 |
7 | ## Running everything (server and client)
8 | - navigate to src directory `cd .../PAWS-DATA-PIPELINE/src`
9 | - docker compose `docker-compose up`
10 | - access the client by going to `http://localhost:3000`
11 | ## Running the client (front-end) locally
12 | - navigate to src directory `cd .../PAWS-DATA-PIPELINE/src`
13 | - docker compose `docker-compose run server`
14 | - start the frontend with the proxy`npm run start:local`
15 |
16 | ## Running just server (back-end) locally
17 | - navigate to src directory `cd .../PAWS-DATA-PIPELINE/src`
18 | - set the PAWS_API_HOST=localhost or your ip in `docker-compose.yml`
19 | - docker compose `docker-compose up` and stop the server
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2021 PAWS Data Pipeline Developers
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # [The Philadelphia Animal Welfare Society (PAWS)](https://phillypaws.org)
2 |
3 | As the city's largest animal rescue partner and no-kill animal shelter,
4 | the [Philadelphia Animal Welfare Society (PAWS)](https://phillypaws.org) is working to make Philadelphia
5 | a place where every healthy and treatable pet is guaranteed a home. Since inception over 10 years ago,
6 | PAWS has rescued and placed 27,000+ animals in adoptive and foster homes, and has worked to prevent pet homelessness
7 | by providing 86,000+ low-cost spay/neuter services and affordable vet care to 227,000+
8 | clinic patients. PAWS is funded 100% through donations, with 91 cents of every dollar collected going
9 | directly to the animals. Therefore, PAWS' rescue work (including 3 shelters and all rescue and
10 | animal care programs), administration and development efforts are coordinated by only about
11 | 70 staff members complemented by over 1500 volunteers.
12 |
13 | ## [The Data Pipeline](https://codeforphilly.org/projects/paws_data_pipeline)
14 |
15 | Through all of its operational and service activities, PAWS accumulates data regarding donations,
16 | adoptions, fosters, volunteers, merchandise sales, event attendees (to name a few),
17 | each in their own system and/or manual tally. This vital data that can
18 | drive insights remains siloed and is usually difficult to extract, manipulate, and analyze.
19 |
20 | This project provides PAWS with an easy-to-use and easy-to-support tool to extract
21 | constituent data from multiple source systems, standardize extracted data, match constituents across data sources,
22 | load relevant data into Salesforce, and run an automation in Salesforce to produce an RFM score.
23 | Through these processes, the PAWS data pipeline has laid the groundwork for facilitating an up-to-date 360-degree view of PAWS constituents, and
24 | flexible ongoing data analysis and insights discovery.
25 |
26 | ## Uses
27 |
28 | - The pipeline can inform the PAWS development team of new constituents through volunteer or foster engagement
29 | - Instead of manually matching constituents from volunteering, donations and foster/adoptions, PAWS staff only need to upload the volunteer dataset into the pipeline, and the pipeline handles the matching
30 | - Volunteer and Foster data are automatically loaded into the constituent's SalesForce profile
31 | - An RFM score is calculated for each constituent using the most recent data
32 | - Data analyses can use the output of the PDP matching logic to join datasets from different sources; PAWS can benefit from such analyses in the following ways:
33 | - PAWS operations can be better informed and use data-driven decisions to guide programs and maximize effectiveness;
34 | - Supporters can be further engaged by suggesting additional opportunities for involvement based upon pattern analysis;
35 | - Multi-dimensional supporters can be consistently (and accurately) acknowledged for all the ways they support PAWS (i.e. a volunteer who donates and also fosters kittens), not to mention opportunities to further tap the potential of these enthusiastic supporters.
36 |
37 | ## [Code of Conduct](https://codeforphilly.org/pages/code_of_conduct)
38 |
39 | This is a Code for Philly project operating under their code of conduct.
40 |
41 | ## Links
42 |
43 | [Slack Channel](https://codeforphilly.org/chat?channel=paws_data_pipeline)
44 | [Wiki](https://github.com/CodeForPhilly/paws-data-pipeline/wiki)
45 |
--------------------------------------------------------------------------------
/documentation/Data Lake Conceptual Flows v2.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/CodeForPhilly/paws-data-pipeline/7ee6230405e9b2719b61cb2ce6a587db90f9d6e0/documentation/Data Lake Conceptual Flows v2.jpg
--------------------------------------------------------------------------------
/documentation/Data Lake Conceptual Flows v2.pptx:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/CodeForPhilly/paws-data-pipeline/7ee6230405e9b2719b61cb2ce6a587db90f9d6e0/documentation/Data Lake Conceptual Flows v2.pptx
--------------------------------------------------------------------------------
/documentation/Hack Night PAWS Presentation PDF.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/CodeForPhilly/paws-data-pipeline/7ee6230405e9b2719b61cb2ce6a587db90f9d6e0/documentation/Hack Night PAWS Presentation PDF.pdf
--------------------------------------------------------------------------------
/documentation/PAWS_Digital_Ecosystem_v3_for_C4P.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/CodeForPhilly/paws-data-pipeline/7ee6230405e9b2719b61cb2ce6a587db90f9d6e0/documentation/PAWS_Digital_Ecosystem_v3_for_C4P.jpg
--------------------------------------------------------------------------------
/documentation/PDP_UserFlow.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/CodeForPhilly/paws-data-pipeline/7ee6230405e9b2719b61cb2ce6a587db90f9d6e0/documentation/PDP_UserFlow.png
--------------------------------------------------------------------------------
/documentation/SALESFORCE.md:
--------------------------------------------------------------------------------
1 | # Getting into Chris's Salesforce Trailhead Sandbox
2 |
3 | Two years ago I mocked up a bunch of stuff in a Salesforce training (Trailhead) instance. They lasted 30 days back then. So it's gone. My purpose was to see how non-SFDC data from other PAWS systems can be imported and associated with constituents (Salesforce Contacts) in SFDC. Call it a Proof-of-Concept.
4 |
5 | I started another sandbox up a few months ago, and reloaded some of the data I used back then. I also re-applied a number of customizations / configurations (although I did not take very good notes back then!).
6 |
7 | This gets you into that sandbox.
8 |
9 | The Non-Profit Success Pack (NPSP) is a set of Salesforce customizations intended to better serve Non-Profits.
10 | Installation of that seems to be 50/50 in this sandbox (which is all done in an automated fashion by SFDC automation), but it serves the purpose until we revive the Salesforce thread of this project and spend quality time with Weston getting a correct sandbox in place.
11 |
12 | The rest of this is showing the results of my proof-of-concept this time around. I have some screenshots here to help you get in and look around. Follow along. I don't think you can mess anything up but please try not to anyway.
13 |
14 | Go to www.salesforce.com
15 | sign in as chris@brave-fox-riktj8.com Password is code1234
16 |
17 | You will come to a screen like this:
18 |
19 | 
20 |
21 | Click on that Rubik's Cube-looking thing to bring up the app chooser
22 |
23 | 
24 |
25 | Then pick View All (click where circled)
26 |
27 | 
28 |
29 | Scroll Down (by scrolling up if you use a mac)
30 |
31 | 
32 |
33 | Pick the Non Profit Success Pack NPSP - the one in the middle (it's the one with the Lightning Experience... cooler)
34 |
35 | 
36 |
37 | This is a two part step. First click on the Contacts tab at the top. That should bring up recently used Contacts. If not, click around to bring up recent contacts. It should be an option under the Contacts tab if you hover or click for the pull-down menu.
38 |
39 | Then click on our buddy Aaron. Fake name and address
40 |
41 | 
42 |
43 | This is Aaron's contact record. Note the two "Related" pieces of information. Volunteer info especially.
44 |
45 | In PAWS' real Salesforce instance, you'd see a ton of valuable info, including donation history, addresses, family relationships, etc.
46 |
47 | 
48 |
49 | If you hover over Volunteer Hours, it will pop up a list (it's a subset) of the shifts he's worked. These are Volunteer Shift instances which I pulled from REAL Volgistics data (but just associated it with "Aaron"). There is some background work to be done to set up the shift types. But it can be done once and in the very rare instances where a new shift shows up we can have a maintenance step to load the new Volunteer Shifts (and if needed Volunteer Campaigns).
50 |
51 | 
52 |
53 | If you go back and scroll around the Contact record, you'll see all sorts of good info. Donation History (which I haven't loaded any of), and the Volunteer info is at the bottom. This is a POC to show what it might look like for PAWS to see not only donation info but also Volunteer activity. And we can add Animals to this as well. Weston has a mock-up of that done, although he doesn't have the Volunteer hours. We can pull this together over time to get a real solid POC.
54 |
55 | I loaded all data via file import wizard available in Salesforce. API in and out of this Salesforce instance also works. It's a full developer edition. I set up the key etc and proved connection via Python. That code is in the SFDC-Playground folder
56 |
57 | There's also a bit of Salesforce screen and form modification I did to pull the Volunteer info in. Most of it happened automatically when I did it two years ago. Not sure of the differences.
58 |
--------------------------------------------------------------------------------
/documentation/documentation-images/ReadMe.md:
--------------------------------------------------------------------------------
1 | The real directory for my images
2 |
--------------------------------------------------------------------------------
/documentation/documentation-images/SF-Pic2:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/CodeForPhilly/paws-data-pipeline/7ee6230405e9b2719b61cb2ce6a587db90f9d6e0/documentation/documentation-images/SF-Pic2
--------------------------------------------------------------------------------
/documentation/documentation-images/SF-Pic3:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/CodeForPhilly/paws-data-pipeline/7ee6230405e9b2719b61cb2ce6a587db90f9d6e0/documentation/documentation-images/SF-Pic3
--------------------------------------------------------------------------------
/documentation/documentation-images/SF-Pic4:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/CodeForPhilly/paws-data-pipeline/7ee6230405e9b2719b61cb2ce6a587db90f9d6e0/documentation/documentation-images/SF-Pic4
--------------------------------------------------------------------------------
/documentation/documentation-images/SF-Pic5:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/CodeForPhilly/paws-data-pipeline/7ee6230405e9b2719b61cb2ce6a587db90f9d6e0/documentation/documentation-images/SF-Pic5
--------------------------------------------------------------------------------
/documentation/documentation-images/SF-Pic6:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/CodeForPhilly/paws-data-pipeline/7ee6230405e9b2719b61cb2ce6a587db90f9d6e0/documentation/documentation-images/SF-Pic6
--------------------------------------------------------------------------------
/documentation/documentation-images/SF-Pic7:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/CodeForPhilly/paws-data-pipeline/7ee6230405e9b2719b61cb2ce6a587db90f9d6e0/documentation/documentation-images/SF-Pic7
--------------------------------------------------------------------------------
/documentation/documentation-images/SF-Pic8:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/CodeForPhilly/paws-data-pipeline/7ee6230405e9b2719b61cb2ce6a587db90f9d6e0/documentation/documentation-images/SF-Pic8
--------------------------------------------------------------------------------
/documentation/documentation-images/SF-pic-1:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/CodeForPhilly/paws-data-pipeline/7ee6230405e9b2719b61cb2ce6a587db90f9d6e0/documentation/documentation-images/SF-pic-1
--------------------------------------------------------------------------------
/documentation/matching_rules.md:
--------------------------------------------------------------------------------
1 | The logic for looking at duplicates from Karla/Meg discussion:
2 |
3 | 1. Create Master from Salesforce
4 | 2. Compare Volgistics to Master: If [fuzzy match on name above threshold] and [match on email] → combine records in Master
5 | 3. Compare Master to PetPoint: If [fuzzy match on name above threshold] and [match on email] → combine records in Master
6 | 4. Compare Master to ClinicHQ: If [fuzzy match on name above threshold] and [match on phone number] → combine records in Master
7 |
8 | Trigger staff review: If [fuzzy match on name above threshold] and [no other matching data] → report for human review
9 |
10 | Thresholds are TBD but should be some level where we can be reasonably certain the fuzzy match is correct most of the time.
11 | Decided to trust name and email most. Addresses will likely create more problems with fuzzy matching, allowing people in the same household (but not the same person) to potentially be matched. Email is likely to be unique per person, though the same person *may* use multiple emails.
12 |
--------------------------------------------------------------------------------
/src/README.md:
--------------------------------------------------------------------------------
1 | Run the PAWS data pipeline locally
2 | ---------------------------------------
3 | #### Run local - when debugging
4 | - Install dependencies: `pip install -r requirements.txt`
5 | - Run docker compose (as explained below) in order to have the postgres docker running.
6 | - Set environment variable: `export IS_LOCAL=True`
7 | - If your docker IP is not localhost or you want to run postgres with a different user name etc
8 | - Set environment variable: `export LOCAL_DB_IP=postgresql://postgres:thispasswordisverysecure@[your_docker_ip]/postgres`
9 | - Working directory should be: `...paws-data-pipeline/src`
10 | - Set environment variable: `export FLASK_PORT=3333` we need it to be a different port than the one in the docker-compose
11 | - Run python3 app.py
12 | - Download the file `secrets.py` from the teams dropbox and place it under `src/server`.
13 | #### Run docker - before pushing your code
14 | - Install docker - `https://docs.docker.com/install`
15 |
16 | _Docker Compose instructions_
17 | - Install Docker Compose - `https://docs.docker.com/compose/install/`
18 | - Most package managers have it as `docker-compose` and it's largely just a shell script.
19 | - `docker-compose up -d` to bring up the application.
20 | - Scheduler docker will not start. To run the scheduler, use profile flag `production-only` as explained in the Production Environment section.
21 |
22 | #### Finally - Run The UI on http://localhost:80
23 |
24 | ---------------------------------------
25 | Production Environment
26 | ---------------------------------------
27 | - `docker-compose` should use the profile flag `production-only`. i.e: `docker-compose --profile production-only up
28 | ` and `docker-compose --profile production-only build`
29 |
30 | TBD
31 |
32 | --------------------------
33 | Troubleshooting
34 | ---------------------------------------
35 | See the [Troubleshooting page](https://github.com/CodeForPhilly/paws-data-pipeline/wiki/Troubleshooting) at the GitHub wiki.
--------------------------------------------------------------------------------
/src/client/.dockerignore:
--------------------------------------------------------------------------------
1 | node_modules
2 | build
3 | .dockerignore
4 | Dockerfile
5 |
--------------------------------------------------------------------------------
/src/client/.gitignore:
--------------------------------------------------------------------------------
1 | # See https://help.github.com/articles/ignoring-files/ for more about ignoring files.
2 |
3 | # dependencies
4 | /node_modules
5 | /.pnp
6 | .pnp.js
7 |
8 | # testing
9 | /coverage
10 |
11 | # production
12 | /build
13 |
14 | # misc
15 | .DS_Store
16 | .env.local
17 | .env.development.local
18 | .env.test.local
19 | .env.production.local
20 |
21 | npm-debug.log*
22 | yarn-debug.log*
23 | yarn-error.log*
--------------------------------------------------------------------------------
/src/client/Dockerfile:
--------------------------------------------------------------------------------
1 | # pull official base image
2 | FROM node:16-alpine as builder
3 |
4 | # set working directory
5 | WORKDIR /app
6 |
7 | # add `/app/node_modules/.bin` to $PATH
8 | ENV PATH /app/node_modules/.bin:$PATH
9 |
10 | # install app dependencies
11 | COPY package.json ./
12 | COPY package-lock.json ./
13 |
14 | RUN npm install --silent
15 |
16 | COPY public ./public
17 | COPY src ./src
18 |
19 | RUN npx browserslist@latest --update-db
20 | RUN npm run build
21 |
22 | # add app
23 | FROM nginx:latest AS host
24 |
25 | # COPY nginx-client.conf /etc/nginx/conf.d/default.conf
26 | COPY nginx.conf /etc/nginx/nginx.conf
27 | COPY default.conf.template /etc/nginx/templates/
28 |
29 | COPY --from=builder /app/build/ /usr/share/nginx/html
--------------------------------------------------------------------------------
/src/client/README.md:
--------------------------------------------------------------------------------
1 | This project was bootstrapped with [Create React App](https://github.com/facebook/create-react-app).
2 |
3 | ## Proxy Setting
4 | Proxy setting uses http-proxy-middleware and can be found at `/setupProxy.js`
5 |
6 | ## Run with docker-compose
7 | The app will be loaded when the docker-compose is run.
8 | Open [http://localhost:3000](http://localhost:3000) to view it in the browser.
9 | In order to view changes, rerun the docker or the whole docker-compose
10 |
11 | ## Run locally
12 | Make sure proxy is set to the port you are running your backend.
13 | In the project directory, you can run:
14 |
15 | ### `npm start`
16 | This will run locally against the docker container backend, which is running on port 5000
17 |
18 | ### `npm run start:local`
19 | This will run locally against your local backend which you should run on port 3333.
20 |
21 | ## Learn More
22 | You can learn more in the [Create React App documentation](https://facebook.github.io/create-react-app/docs/getting-started).
23 | To learn React, check out the [React documentation](https://reactjs.org/).
24 |
--------------------------------------------------------------------------------
/src/client/default.conf.template:
--------------------------------------------------------------------------------
1 | upstream backend {
2 | #server paws-compose-server:5000;
3 | server server:5000;
4 | }
5 |
6 | server {
7 | listen 80;
8 | server_name localhost;
9 | client_max_body_size 100M;
10 |
11 | # This needs to be first location block
12 | location ^~ /api/internal { # Blocks external access to /api/internal/*
13 | return 404;
14 | }
15 |
16 |
17 |
18 | location / {
19 | root /usr/share/nginx/html;
20 | index index.html index.htm;
21 | try_files $uri /index.html; # forward all requests to the index.html for react
22 | }
23 |
24 | # $uri does not refer to Mr. Rotem but 'Uniform Resource Identifier'
25 |
26 | location /api {
27 | try_files $uri @backend;
28 | }
29 |
30 | location @backend {
31 | proxy_pass http://backend;
32 | proxy_set_header X-Real-IP $remote_addr;
33 | proxy_set_header Host $host;
34 | proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
35 | proxy_read_timeout 3600;
36 | proxy_connect_timeout 3600;
37 | proxy_send_timeout 3600;
38 | send_timeout 3600;
39 | # Following is necessary for Websocket support
40 | # proxy_http_version 1.1;
41 | # proxy_set_header Upgrade $http_upgrade;
42 | # proxy_set_header Connection "upgrade";
43 | }
44 | }
--------------------------------------------------------------------------------
/src/client/nginx.conf:
--------------------------------------------------------------------------------
user nginx;
worker_processes auto;

error_log /var/log/nginx/error.log notice;
pid /var/run/nginx.pid;


events {
    worker_connections 1024;
}

http {
    include /etc/nginx/mime.types;
    default_type application/octet-stream;

    # Structured access log so log shippers can parse each entry as JSON.
    log_format json_combined escape=json
        '{'
        '"timestamp":"$msec",'
        '"address":"$remote_addr",'
        '"request":"$request",'
        '"body_bytes_sent":$body_bytes_sent,'
        '"response_status":$status,'
        '"http_user_agent":"$http_user_agent"'
        '}';

    access_log /var/log/nginx/access.log json_combined;

    sendfile on;

    keepalive_timeout 65;

    # The virtual host (upstream "backend" + the server listening on port 80)
    # lives in /etc/nginx/conf.d/default.conf, rendered from
    # default.conf.template by the nginx Docker entrypoint.
    #
    # A duplicate inline copy of that server block used to follow this include.
    # Because it had the same listen/server_name pair, nginx ignored it with a
    # "conflicting server name" warning, and it referenced the "backend"
    # upstream that only the included file defines — dead config that drifted
    # out of sync with the template, so it has been removed.
    include /etc/nginx/conf.d/*.conf;
}
--------------------------------------------------------------------------------
/src/client/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "client",
3 | "version": "0.1.0",
4 | "private": true,
5 | "dependencies": {
6 | "@hookform/resolvers": "^3.1.0",
7 | "@material-ui/core": "^4.9.14",
8 | "@material-ui/icons": "^4.9.1",
9 | "@material-ui/lab": "^4.0.0-alpha.53",
10 | "@material-ui/styles": "^4.11.4",
11 | "@testing-library/jest-dom": "^4.2.4",
12 | "@testing-library/react": "^9.5.0",
13 | "@testing-library/user-event": "^7.2.1",
14 | "jsonwebtoken": "^9.0.0",
15 | "lodash": "^4.17.20",
16 | "moment-timezone": "^0.5.43",
17 | "react": "^16.13.1",
18 | "react-dom": "^16.13.1",
19 | "react-hook-form": "^7.43.9",
20 | "react-router-dom": "^5.2.0",
21 | "react-scripts": "3.4.1",
22 | "yup": "^1.1.1"
23 | },
24 | "scripts": {
25 | "start": "IS_LOCAL=false react-scripts start",
26 | "start:local": "IS_LOCAL=true react-scripts start",
27 | "start-api": "cd api && ../../venv/bin/flask run --no-debugger",
28 | "build": "react-scripts build",
29 | "test": "react-scripts test",
30 | "eject": "react-scripts eject"
31 | },
32 | "eslintConfig": {
33 | "extends": "react-app"
34 | },
35 | "browserslist": {
36 | "production": [
37 | ">0.2%",
38 | "not dead",
39 | "not op_mini all"
40 | ],
41 | "development": [
42 | "last 1 chrome version",
43 | "last 1 firefox version",
44 | "last 1 safari version"
45 | ]
46 | }
47 | }
48 |
--------------------------------------------------------------------------------
/src/client/public/images/logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/CodeForPhilly/paws-data-pipeline/7ee6230405e9b2719b61cb2ce6a587db90f9d6e0/src/client/public/images/logo.png
--------------------------------------------------------------------------------
/src/client/public/images/paws-logo-64.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/CodeForPhilly/paws-data-pipeline/7ee6230405e9b2719b61cb2ce6a587db90f9d6e0/src/client/public/images/paws-logo-64.png
--------------------------------------------------------------------------------
/src/client/public/index.html:
--------------------------------------------------------------------------------
<!-- NOTE(review): this copy of index.html has lost all of its markup tags
     in extraction; only text nodes remain (the title "PAWS Best Friend"
     and the noscript message). Restore the real markup from version
     control before editing — do not hand-reconstruct it from this file. -->




PAWS Best Friend


You need to enable JavaScript to run this app.


--------------------------------------------------------------------------------
/src/client/public/robots.txt:
--------------------------------------------------------------------------------
1 | # https://www.robotstxt.org/robotstxt.html
2 | User-agent: *
3 | Disallow:
4 |
--------------------------------------------------------------------------------
/src/client/src/App.test.js:
--------------------------------------------------------------------------------
import React from 'react';
import { render } from '@testing-library/react';
import App from './App';

// Default create-react-app smoke test.
// NOTE(review): the JSX argument to render() was lost in this copy
// (render( ) renders nothing), and nothing visible in this app renders a
// "learn react" link — this test looks stale. Confirm against version
// control and either restore render(<App />) with a real assertion or
// remove the test.
test('renders learn react link', () => {
  const { getByText } = render( );
  const linkElement = getByText(/learn react/i);
  expect(linkElement).toBeInTheDocument();
});
--------------------------------------------------------------------------------
/src/client/src/assets/font/NowayRound-Regular.woff:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/CodeForPhilly/paws-data-pipeline/7ee6230405e9b2719b61cb2ce6a587db90f9d6e0/src/client/src/assets/font/NowayRound-Regular.woff
--------------------------------------------------------------------------------
/src/client/src/assets/font/font.css:
--------------------------------------------------------------------------------
/* PAWS brand font. Prefers a locally installed copy via local() before
   downloading the bundled .woff; font-display: swap shows fallback text
   until the font file loads. */
@font-face {
    font-family: 'Noway Round Regular';
    font-style: normal;
    font-weight: normal;
    font-display: swap;
    src: local('Noway Round Regular'), url('./NowayRound-Regular.woff') format('woff');
}
--------------------------------------------------------------------------------
/src/client/src/assets/header-logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/CodeForPhilly/paws-data-pipeline/7ee6230405e9b2719b61cb2ce6a587db90f9d6e0/src/client/src/assets/header-logo.png
--------------------------------------------------------------------------------
/src/client/src/assets/startImage.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/CodeForPhilly/paws-data-pipeline/7ee6230405e9b2719b61cb2ce6a587db90f9d6e0/src/client/src/assets/startImage.jpg
--------------------------------------------------------------------------------
/src/client/src/components/AlertBanner.jsx:
--------------------------------------------------------------------------------
import React from "react";

import { Alert, AlertTitle } from "@material-ui/lab";
import useAlert from "../hooks/useAlert";
import { Typography } from "@material-ui/core";
import _ from "lodash";

// Dismissible banner shown whenever an alert is active in AlertContext.
// The alert's `type` doubles as the MUI severity and (start-cased) as the
// title; dismissing the banner calls clearAlert(). Renders nothing when no
// alert is set.
// NOTE(review): the JSX in this copy has lost its opening element tags
// (e.g. the <Alert ...> wrapper); recover the markup from version control
// before editing.
const AlertBanner = () => {
    const { text, type, clearAlert } = useAlert();

    if (text && type) {
        return (
            clearAlert()} severity={type} spacing={2} >

                {_.startCase(type)}

            {text}

        );
    } else {
        return <>>;
    }
};

export default AlertBanner;
26 |
--------------------------------------------------------------------------------
/src/client/src/components/Header.js:
--------------------------------------------------------------------------------
import React, {Component} from "react";
import {Link as RouterLink} from "react-router-dom";
import {AppBar, Button, Toolbar, Typography} from "@material-ui/core";
import logo from '../assets/header-logo.png';
import Grid from "@material-ui/core/Grid";

// Top navigation bar. Links rendered depend on props.headerType:
//   'Admin' -> additionally shows the Admin and Users links
//   'Login' -> hides the DataView360 and Log Out links
// "About us" is always shown.
// NOTE(review): the JSX markup in this copy has lost its element tags;
// restore from version control before editing.
class Header extends Component {
    render() {
        return (




                            PAWS Data Pipeline




                    {this.props.headerType === 'Admin' &&
                        Admin }

                    {this.props.headerType === 'Admin' &&
                        Users }

                    {this.props.headerType !== 'Login' && 360
                        DataView
                    }
                    About us
                    {this.props.headerType !== 'Login' &&
                        Log Out }




        );
    }

}

export default Header;
48 |
49 |
--------------------------------------------------------------------------------
/src/client/src/components/Refresh.js:
--------------------------------------------------------------------------------
// Exchange a still-valid access token for a fresh one.
//
// Params:
//   old_token - the current JWT access token string.
// Returns:
//   The parsed JSON body from GET /api/user/refresh on success (an object
//   carrying the new access token), or an empty object {} on failure —
//   most likely because the old token has already expired.
export default async function Refresh(old_token) {

    // Use existing token to get a new fresh token
    const new_at = await fetch('/api/user/refresh',
        {
            method: 'GET',
            headers: {
                'Content-Type': 'application/json',
                'Authorization': 'Bearer ' + old_token
            }
        })

        .then((response) => {
            if (!response.ok) {
                // Throw the whole Response so status info reaches the catch
                throw (response)
            }
            return response.json()
        })

        .catch((e) => {
            // If it failed there's not much to do; probably got here after
            // expiration. Return an empty object — previously this returned
            // the string '{}', which the success path (response.json())
            // never produces, handing callers an inconsistent type.
            return {}
        });


    return (new_at);

}
31 |
32 |
--------------------------------------------------------------------------------
/src/client/src/components/RefreshDlg.js:
--------------------------------------------------------------------------------
import React from 'react';
import Button from '@material-ui/core/Button';
import Dialog from '@material-ui/core/Dialog';
import DialogActions from '@material-ui/core/DialogActions';
import DialogContent from '@material-ui/core/DialogContent';
import DialogContentText from '@material-ui/core/DialogContentText';
import DialogTitle from '@material-ui/core/DialogTitle';

import useToken from '../pages/Login/useToken';

import Refresh from './Refresh';
import defaultTheme from "../theme/defaultTheme";


// Confirmation dialog shown shortly before the session expires.
// "Yes" (or any truthy close reason) calls Refresh() with the current
// token and pushes the new token up via props.setToken; "No"/outclick
// just closes the dialog and lets the session lapse.
// NOTE(review): the Dialog JSX in this copy has lost its element tags;
// restore from version control before editing.
export default function RefreshDlg(props) {
    const [open, setOpen] = React.useState(props.shouldOpen);
    const {access_token} = useToken(); // We want to use the passed-in top-level setToken

    const handleClose = async (shouldRefresh) => {
        // Could be closed with Yes, No, outclick (which equals No)
        setOpen(false);
        if (shouldRefresh) {
            const new_at = await Refresh(access_token);
            props.setToken(new_at);
        }
    };


    return (


            You are about to be logged out!


            Stay logged in to keep working?



            handleClose(false)} color="primary">
                No

            handleClose(true)} color="primary"
                autoFocus>
                Yes



    );
}
61 |
--------------------------------------------------------------------------------
/src/client/src/components/TabPanel.js:
--------------------------------------------------------------------------------
import React from 'react';

/* Handles the visibility of each tab. By checking index against selected value in parent component */
// NOTE(review): the wrapper element's JSX tags were lost in this copy
// (presumably a container hidden when value !== index); restore the
// markup from version control before editing.
function TabPanel (props) {
    const { children, value, index } = props;

    return (

            {children}


    )

}

export default TabPanel;
--------------------------------------------------------------------------------
/src/client/src/contexts/AlertContext.jsx:
--------------------------------------------------------------------------------
import React from 'react';

// How long (ms) a non-error alert stays on screen before auto-dismissing.
const ALERT_TIME = 6000;
const initialState = {
    text: "",
    type: "",
};

// Context carrying the current alert ({ text, type }) plus setAlert/clearAlert.
const AlertContext = React.createContext({
    ...initialState,
    setAlert: () => {},
});

// Provider managing a single app-wide alert. Non-error alerts auto-clear
// after ALERT_TIME; alerts with type "error" persist until clearAlert()
// is called (e.g. by the user dismissing the banner).
// NOTE(review): the <AlertContext.Provider value={...}> JSX was lost in
// this copy; restore from version control before editing.
export const AlertProvider = ({ children }) => {
    const [text, setText] = React.useState("");
    const [type, setType] = React.useState("");
    // Holds the pending auto-dismiss timer so a newer alert can cancel it.
    const timerRef = React.useRef(null);

    const setAlert = ({ type, text }) => {
        setType(type);
        setText(text);

        // A new alert supersedes any pending auto-dismiss.
        if (timerRef.current) {
            clearTimeout(timerRef.current);
        }

        // Errors stay visible until explicitly cleared.
        if (type !== "error") {
            timerRef.current = setTimeout(() => {
                setText("");
                setType("");
            }, ALERT_TIME);
        }
    };

    const clearAlert = () => {
        if (timerRef.current) {
            clearTimeout(timerRef.current);
        }

        setType("");
        setText("");
    }

    return (

            {children}

    );
};

export default AlertContext;
59 |
--------------------------------------------------------------------------------
/src/client/src/hooks/useAlert.js:
--------------------------------------------------------------------------------
import { useContext } from "react";
import AlertContext from "../contexts/AlertContext";

// Convenience hook exposing the current alert state ({ text, type }) and
// the setAlert/clearAlert actions provided by AlertContext.
function useAlert() {
  return useContext(AlertContext);
}

export default useAlert;
7 |
--------------------------------------------------------------------------------
/src/client/src/index.js:
--------------------------------------------------------------------------------
import React from 'react';
import ReactDOM from 'react-dom';
import App from './App';
import { ThemeProvider } from '@material-ui/core/styles';
import * as serviceWorker from './serviceWorker';
import "./assets/font/font.css";
import defaultTheme from './theme/defaultTheme';
import { AlertProvider } from './contexts/AlertContext';


// App entry point: mounts <App/> under the MUI ThemeProvider and the
// AlertProvider into the #root element.
// NOTE(review): the JSX tree passed to ReactDOM.render was lost in this
// copy; restore from version control before editing.
ReactDOM.render(




            ,
    document.getElementById('root')
);

// If you want your app to work offline and load faster, you can change
// unregister() to register() below. Note this comes with some pitfalls.
// Learn more about service workers: https://bit.ly/CRA-PWA
serviceWorker.unregister();
26 |
--------------------------------------------------------------------------------
/src/client/src/pages/About.js:
--------------------------------------------------------------------------------
1 | import React from 'react';
2 | import {Container, Divider} from '@material-ui/core';
3 | import _ from 'lodash';
4 | import Typography from "@material-ui/core/Typography";
5 | import Grid from "@material-ui/core/Grid";
6 |
// Contributor rosters rendered on the About page; the headline counts are
// derived from the sizes of these arrays.
const DEVELOPERS = [
    "Uri Rotem",
    "Cris Simpson",
    "Ben Bucior",
    "Stephen Poserina",
    "Mike Crnkovich",
    "Mike Damert",
    "Dave Salorio",
    "Mike Bailey",
    "Donna St. Louis",
    "Joe Illuminati",
    "Andrew Bishop",
    "Akshat Vas",
    "Dan Kelley",
    "Osman Sabahat",
    "Stephen Carroll",
    "Diego Delgado",
    "Carlos Dominguez",
    "Benjamin Deck",
    "Sam Lufi",
    "Ruthie Fields",
]

const PROJECT_MANAGERS = [
    "JW Truver",
    "Daniel Romero",
    "Eudora Linde",
    "Meg Niman"
]

const PROJECT_LEADS = [
    "Karla Fettich",
    "Chris Kohl"
]

const EXTERNAL_COLLABORATORS = [
    "Weston Welch",
    "Tan Tan Chen",
    "Faith Benamy",
    "Jesse",
    "Chris Alfano",
    "Josephine Dru",
]

const SUPPORTERS = [
    "Code for Philly",
    "Linode"
]
55 |
// Render one name list as a column of items; `listName` is one of the
// roster arrays above.
// NOTE(review): the per-item JSX tags were lost in this copy; restore
// from version control before editing.
const getList = (listName) => {
    return (

        {_.map(listName, item => {
            return (

                {item}


            )
        })}

    );
};
70 |
71 |
// About page: project blurb plus contributor rosters with headline counts
// computed from the roster array sizes.
// NOTE(review): most layout JSX tags were lost in this copy; restore from
// version control before editing.
export default function About() {
    return (





                The PAWS Data Pipeline




                The PAWS data pipeline (PDP) is community-driven and developed software that serves the
                Philadelphia Animal Welfare Society (PAWS), Philadelphia’s largest animal rescue partner
                and no-kill animal shelter. It is a project that began on Nov 24, 2019 and is being built
                through a volunteer effort coordinated by Code for Philly. PDP is free and open source
                software. The volunteers that have worked on this project come from diverse backgrounds,
                but are connected through a shared love for animals and a passion for technology.





                {_.size(DEVELOPERS) + _.size(PROJECT_MANAGERS) + _.size(PROJECT_LEADS) + _.size(EXTERNAL_COLLABORATORS)} individuals and {_.size(SUPPORTERS)} organisations supported and contributed to the PDP between
                2019/11/24 and 2023/06/13:


                Developers

                {getList(DEVELOPERS)}

                Project Managers

                {getList(PROJECT_MANAGERS)}
                Project leads
                {getList(PROJECT_LEADS)}
                External collaborators and supporters
                {getList(EXTERNAL_COLLABORATORS)}
                Organisations providing support
                {getList(SUPPORTERS)}



    );
}
--------------------------------------------------------------------------------
/src/client/src/pages/Admin/Components/AnalysisBox.jsx:
--------------------------------------------------------------------------------
import React from "react";
import {
    Button,
    Grid,
    Paper,
    Typography,
    CardContent,
} from "@material-ui/core";
import _ from "lodash";
import AnalysisTable from "./AnalysisTable";

// Admin panel section for triggering a new analysis run (handleExecute)
// and, when `statistics` is non-empty, showing the results of the last
// execution via AnalysisTable.
// NOTE(review): the JSX tags were lost in this copy; restore from version
// control before editing.
export default function AnalysisBox(props) {
    const { handleExecute, lastExecution, statistics } = props;

    return (



            Run New Analysis



        {!_.isEmpty(statistics) && (

        )}


    );
}
42 |
--------------------------------------------------------------------------------
/src/client/src/pages/Admin/Components/AnalysisTable.jsx:
--------------------------------------------------------------------------------
import React from "react";
import {
    Grid,
    TableRow,
    TableCell,
    TableBody,
    Table,
    Divider,
} from "@material-ui/core";
import { formatTimestamp } from "../../../utils/utils";

// Table of last-analysis statistics. `lastExecution` is a timestamp
// formatted via formatTimestamp; `tableData` is an array of [label, value]
// pairs rendered one per row.
// NOTE(review): the table JSX tags were lost in this copy; restore from
// version control before editing.
export default function AnalysisTable(props) {
    const { tableData, lastExecution } = props;

    return (




                Last Analysis


                {formatTimestamp(lastExecution)}


            {tableData.map((row, index) => (

                {row[0]}

                {row[1]}

            ))}



    );
}
43 |
--------------------------------------------------------------------------------
/src/client/src/pages/Admin/Components/Loading.jsx:
--------------------------------------------------------------------------------
import React from "react";
import {
    Box,
    Backdrop,
    CircularProgress,
    Typography,
} from "@material-ui/core";

// Full-screen busy indicator with an animated ellipsis: every `speed` ms
// the shown text cycles text. -> text.. -> text... -> text.
// The interval is torn down on unmount or when text/speed change.
// NOTE(review): the Backdrop/spinner JSX tags were lost in this copy;
// restore from version control before editing.
export default function Loading({ text, speed = 300 }) {
    const [content, setContent] = React.useState(text);

    React.useEffect(() => {
        const id = window.setInterval(() => {
            setContent((content) => {
                // Wrap back to the bare text after three dots.
                return content === `${text}...` ? text : `${content}.`;
            });
        }, speed);

        return () => window.clearInterval(id);
    }, [text, speed]);

    return (


        {text &&


            {content}


        }


    );
}
--------------------------------------------------------------------------------
/src/client/src/pages/Admin/Components/UploadBox.jsx:
--------------------------------------------------------------------------------
import React from "react";
import {
    Button,
    Grid,
    Paper,
    Typography,
    CardContent,
} from "@material-ui/core";
import _ from "lodash";
import UploadsTable from "./UploadsTable";

// Admin panel section for uploading Volgistics data files (filesInput /
// handleUpload) and, when `lastUploads` is non-empty, listing previous
// uploads via UploadsTable.
// NOTE(review): the JSX tags were lost in this copy; restore from version
// control before editing.
export default function UploadBox(props) {
    const { filesInput, handleUpload, lastUploads } = props;

    return (


            Upload Files
            Note: This upload feature now only accepts Volgistics data files. Other data is uploaded automatically.

        {!_.isEmpty(lastUploads) &&

        }


    );
}
42 |
--------------------------------------------------------------------------------
/src/client/src/pages/Admin/Components/UploadsTable.jsx:
--------------------------------------------------------------------------------
import React from "react";
import {
    Grid,
    TableRow,
    TableCell,
    TableBody,
    Table,
    Divider,
} from "@material-ui/core";
import _ from "lodash";
import { formatTimestamp, formatUploadType } from "../../../utils/utils";

// Table of previous uploads. Each row of `tableData` is a single-key
// object mapping upload type -> last-execution timestamp, formatted via
// formatUploadType / formatTimestamp.
// NOTE(review): the table JSX tags were lost in this copy; restore from
// version control before editing.
export default function UploadsTable(props) {
    const { tableData } = props;

    return (




                Upload Type


                Last Execution


            {_.map(tableData, (row, index) => (

                {formatUploadType(Object.keys(row)[0])}


                {formatTimestamp(Object.values(row)[0])}


            ))}



    );
}
56 |
--------------------------------------------------------------------------------
/src/client/src/pages/Check/Check.js:
--------------------------------------------------------------------------------
import React from 'react';
import { useHistory } from "react-router-dom";
import useToken from '../../pages/Login/useToken';
var jwt = require('jsonwebtoken');

// const { DateTime } = require("luxon"); /* Enable if you enable console logging below */


// Diagnostic page that verifies the current access token against
// /api/user/test_auth and displays the decoded JWT claims (user, role,
// seconds to expiry). On an auth failure it clears the stored token and
// redirects to the login page ('/').
// NOTE(review): the JSX in this component was garbled in this copy
// (element tags lost, statements split across lines); restore from
// version control before editing.
export default function Check({access_token}) {

    const { setToken } = useToken();

    const [processStatus, setProcessStatus] = React.useState('loading');
    const [error, setError] = React.useState('');
    const [data, setData] = React.useState('');

    let history = useHistory()


    // get the decoded payload and header (decode only — no signature
    // verification happens client-side)
    var decoded = jwt.decode(access_token, { complete: true });
    const userName = decoded?.payload.sub
    const userRole = decoded?.payload.role
    const expTime = decoded?.payload.exp
    // console.log('User: ' + userName + ' / Role:' + userRole + '->' + processStatus + ' @ ' + DateTime.local().toFormat('HH:mm:ss.SSS'))


    React.useEffect(() => {

        // Hit the backend's auth-check endpoint with the bearer token; on
        // failure, clear the token and bounce to the login route.
        function authCheck() {
            // console.log('authCheck startfetch @ ' + DateTime.local().toFormat('HH:mm:ss.SSS'))
            fetch('/api/user/test_auth',
                {
                    method: 'GET',
                    headers: {
                        'Content-Type': 'application/json',
                        'Authorization': 'Bearer ' + access_token
                    }
                })

                .then((response) => {
                    // console.log('authCheck handle response @ ' + DateTime.local().toFormat('HH:mm:ss.SSS'))
                    if (!response.ok) {
                        //throw (String(response.status + ':' + response.statusText))
                        throw (response)
                    }
                    return response.json()
                } )
                .then((data) => {
                    // console.log('authCheck data @ ' + DateTime.local().toFormat('HH:mm:ss.SSS'))
                    setProcessStatus('complete');
                    setData(data);
                })
                .catch((e) => {
                    let errCode = e.status
                    let errText = e.statusText

                    setToken(null) // Clear the token to force login again
                    let errStr = String(e)
                    setProcessStatus('error');
                    setError(errStr);
                    console.log(errCode + ':' + errText)
                    history.push('/')
                    return e
                });

        } //


        if (! access_token){
            console.log("In Check w/o a token")
        }

        // console.log('Running authCheck @ ' + DateTime.local().toFormat('HH:mm:ss.SSS'))
        authCheck();
        // console.log('After authCheck @ ' + DateTime.local().toFormat('HH:mm:ss.SSS'))


    },
    // eslint-disable-next-line
    [ processStatus, access_token, history ]);

    // if (processStatus === 'loading') {
    // console.log('Check: if pc=l loading...')
    // return loading..
;
    // }

    if (processStatus === 'error') {
        console.log('error')
        return ERROR: {error}
;
    }


    // console.log("About to return")
    return (

Check


        User: {userName}
        Role: {userRole}
        JWT expires: {-(Date.now()/1000 - expTime).toFixed(1)} secs
        {data}


        {userRole === 'admin' &&
Welcome, admin!
        }

    );


};
133 |
134 |
--------------------------------------------------------------------------------
/src/client/src/pages/DataView360/Search/components/SearchBar.js:
--------------------------------------------------------------------------------
import React, {Component} from 'react';
import {Button, TextField, IconButton, Grid, Paper} from '@material-ui/core';
import SearchIcon from '@material-ui/icons/Search';

import _ from 'lodash';
import {withStyles} from "@material-ui/core/styles";

const customStyles = theme => ({
    paper: {
        padding: theme.spacing(2)
    }
});


// Participant search box for DataView360. Tracks the search string, shows
// a minimum-characters hint (alertMinChars) until each space-separated
// name part has at least 2 characters, and forwards non-empty submissions
// to props.handleSearchChange.
// NOTE(review): the form JSX in searchParticipant()/render() lost its
// element tags in this copy; restore from version control before editing.
class SearchBar extends Component {
    constructor(props) {
        super(props);

        this.state = {
            alertMinChars: true,
            participantSearch: '',
            isSearchBusy: false
        }

        this.handleParticipantSearch = this.handleParticipantSearch.bind(this);
        this.handleParticipantKeyStroke = this.handleParticipantKeyStroke.bind(this);

    }

    // Update the hint on every keystroke: flag the entry as too short when
    // the first (and, if present, second) whitespace-separated part has
    // fewer than 2 characters.
    handleParticipantKeyStroke(event) {
        let searchStr = _.get(event, 'target.value');

        if (_.isEmpty(searchStr) !== true) {
            const searchStrSplitted = searchStr.split(' ');
            let shouldShowAlert = false;

            if (_.size(searchStrSplitted) === 2) {
                shouldShowAlert = _.size(searchStrSplitted[0]) < 2 || _.size(searchStrSplitted[1]) < 2;
            } else if (_.size(searchStrSplitted) === 1) {
                shouldShowAlert = _.size(searchStrSplitted[0]) < 2;
            }

            this.setState({alertMinChars: shouldShowAlert});
        }
        this.setState({participantSearch: searchStr});
    }

    // Builds the search form markup (garbled in this copy — see note above).
    searchParticipant(event) {
        return (


        );
    }

    // Submit handler: suppress the default form POST and forward non-empty
    // queries to the parent.
    async handleParticipantSearch(event) {
        event.preventDefault();
        if (_.isEmpty(this.state.participantSearch) !== true) {
            this.props.handleSearchChange(this.state.participantSearch);
        }
    };

    render() {
        const {classes} = this.props;

        return (



                {this.searchParticipant()}




        )
    }
}

export default withStyles(customStyles)(SearchBar);
--------------------------------------------------------------------------------
/src/client/src/pages/DataView360/View/components/AnimalInfo.js:
--------------------------------------------------------------------------------
import React, { Component } from 'react';
import {
    Paper,
    Container,
    IconButton
} from '@material-ui/core';
import LinkIcon from '@material-ui/icons/Link';
import _ from 'lodash';
import Grid from "@material-ui/core/Grid";
import PetsIcon from "@material-ui/icons/Pets";

import CollapsibleTable from './CollapsibleTable';
import DataTableHeader from './DataTableHeader';


// Maximum number of pets shown in the table.
const PET_COUNT = 5;

// Pets panel for the 360 view: merges per-pet event lists into the pet
// records, keeps only pets that have events, orders them by most recent
// event, and shows the top PET_COUNT with a link to the person's
// ShelterLuv page.
// NOTE(review): the render() JSX lost its element tags in this copy;
// restore from version control before editing.
class AnimalInfo extends Component {

    // Merge `events` into `petObject` (each pet gains an 'Events' list
    // sorted newest-first), drop pets with no events, then return the
    // PET_COUNT pets whose latest event is most recent.
    // Note: _.mergeWith mutates petObject (the props.pets object) in place.
    getLatestPets(petObject, events) {

        function customizer(objValue, srcValue) {
            if (_.isObject(objValue) && _.isObject(srcValue)) {
                // sort according to date of most recent event
                return _.set(objValue, 'Events', _.orderBy(srcValue, ['Time'], ['desc']));
            }
        }

        let result = _.mergeWith(petObject, events, customizer);
        let nonEmptyEvents = _.filter(result, function(pet) { return pet["Events"] && _.size(pet["Events"]) > 0 });
        result = [..._.orderBy(nonEmptyEvents, ['Events[0].Time'], ['desc'])]
        return result.slice(0, PET_COUNT);
    }

    render() {
        const numOfPets = _.size(this.props.pets);
        const events = this.props.events;
        const latestPets = this.getLatestPets(this.props.pets, events);
        const headerText = this.props.headerText;
        const headerAddition = (numOfPets > PET_COUNT) ? " (Showing " + PET_COUNT + " Pets out of " + numOfPets + ")" : ""
        const shelterLuvPersonURL = `https://www.shelterluv.com/phlp-p-${this.props.shelterluv_id}`;

        return (

            }
            >








        );
    }
}


export default AnimalInfo;
--------------------------------------------------------------------------------
/src/client/src/pages/DataView360/View/components/DataTableHeader.js:
--------------------------------------------------------------------------------
import React from 'react';
import {
    Typography,
} from '@material-ui/core';
import Grid from "@material-ui/core/Grid";
import Box from "@material-ui/core/Box";


// Shared section header for the 360-view data tables: an icon
// (`emojiIcon`), the `headerText` title, and any extra children (e.g. an
// external-link button) laid out in a row.
// NOTE(review): the JSX element tags were lost in this copy; restore from
// version control before editing.
function DataTableHeader(props) {
    const { headerText, emojiIcon } = props;
    return (




                {emojiIcon}



                {headerText}


            {props.children}




    );
}

export default DataTableHeader;
--------------------------------------------------------------------------------
/src/client/src/pages/DataView360/View/components/Donations.js:
--------------------------------------------------------------------------------
import React, {Component} from 'react';
import {
    Paper,
    Table,
    TableContainer,
    TableHead,
    TableBody,
    TableRow,
    TableCell,
    Container
} from '@material-ui/core';
import _ from 'lodash';
import moment from 'moment-timezone';
import AttachMoneyIcon from '@material-ui/icons/AttachMoney';
import DataTableHeader from "./DataTableHeader";

// Number of most-recent donations displayed.
const ROWS_TO_SHOW = 3

// Financial-support panel for the 360 view: sorts props.donations by
// close_date (newest first) and renders the ROWS_TO_SHOW most recent as
// table rows (date, amount, type, campaign source).
// NOTE(review): the table JSX lost its element tags in this copy; restore
// from version control before editing.
class Donations extends Component {
    constructor(props) {
        super(props);

        this.createRows = this.createRows.bind(this);
    }

    // Build table rows for the ROWS_TO_SHOW most recent donations.
    createRows(donations) {
        // Sort ascending by normalized date, then reverse -> newest first.
        const donationsSorted = _.sortBy(donations, donation => {
            return new moment(donation.close_date).format("YYYY-MM-DD");
        }).reverse();

        const latestDonations = donationsSorted.slice(0, ROWS_TO_SHOW);

        const result = _.map(latestDonations, (donation, index) => {
            return (
                {donation.close_date}
                ${donation.amount.toFixed(2)}
                {donation.type}
                {donation.primary_campaign_source}
            );
        });

        return result;
    }

    render() {
        const headerText = `Financial Support Activity (Most Recent ${ROWS_TO_SHOW})`
        return (

            }
            />




                    Date of Donation
                    Amount
                    Donation Type
                    Primary Campaign Source


                {this.props.donations && this.createRows(this.props.donations)}




        );
    }
}


export default Donations;
--------------------------------------------------------------------------------
/src/client/src/pages/DataView360/View/components/EventsModal.js:
--------------------------------------------------------------------------------
import React, { Component, Fragment } from 'react';
import {
    Paper,
    Table,
    TableContainer,
    TableHead,
    TableBody,
    TableRow,
    TableCell,
} from '@material-ui/core';
import { withStyles } from '@material-ui/core/styles';
import _ from 'lodash';


const customStyles = theme => ({
    spaceIcon: {
        marginTop: 3,
        marginRight: 3
    },
    headerCell: {
        fontWeight: "bold",
    },
    paper: {
        position: 'absolute',
        width: 400,
        backgroundColor: theme.palette.background.paper,
        border: '2px solid #000',
        boxShadow: theme.shadows[5],
        padding: theme.spacing(2, 4, 3),
    }
});

// Modal table listing animal events from props.data; each entry's
// Subtype/Time/Type/User fields become one row.
// NOTE(review): the table JSX lost its element tags in this copy; restore
// from version control before editing.
class EventsModal extends Component {

    render() {

        return (




                    Subtype
                    Time
                    Type
                    User


                {_.map(this.props.data, (adoptionInfo, index) => {
                    return
                        {adoptionInfo["Subtype"]}
                        {adoptionInfo["Time"]}
                        {adoptionInfo["Type"]}
                        {adoptionInfo["User"]}

                })}



        )
    }
}

export default withStyles(customStyles)(EventsModal);
--------------------------------------------------------------------------------
/src/client/src/pages/DataView360/View/components/Fosters.js:
--------------------------------------------------------------------------------
1 | import React, { Component } from 'react';
2 | import {
3 | IconButton,
4 | Paper,
5 | Table,
6 | TableContainer,
7 | TableHead,
8 | TableBody,
9 | TableRow,
10 | TableCell,
11 | Container
12 | } from '@material-ui/core';
13 | import LinkIcon from '@material-ui/icons/Link';
14 | import _ from 'lodash';
15 | import moment from "moment-timezone";
16 | import Grid from "@material-ui/core/Grid";
17 | import PetsIcon from "@material-ui/icons/Pets";
18 |
19 | import DataTableHeader from './DataTableHeader';
20 | import { showAnimalAge } from '../../../../utils/utils'
21 |
22 | const PET_COUNT = 3;
23 |
24 | class Fosters extends Component {
25 |
26 | getLatestPets(animals) {
27 | const latestPets = _.sortBy(animals, animal => {
28 | return animal.Events.Time
29 | }).reverse()
30 |
31 | return latestPets.slice(0, PET_COUNT)
32 | }
33 |
34 | combineAnimalAndEvents(animals, events) {
35 | let combined = {}
36 | for (const id in animals) {
37 | if (_.includes(_.keys(events), id)) {
38 | let sortedEvents = _.sortBy(events[id], ['Time'])
39 | combined[id] = { ...animals[id], "Events": sortedEvents }
40 | }
41 | }
42 | return combined
43 | }
44 |
// Build one table row per foster animal: name, type, foster start/end
// dates, age, and photo.
// NOTE(review): JSX markup appears stripped in this dump — only the cell
// expressions survive below.
// NOTE(review): Events[0]/Events[1] are treated as start/end dates and
// Photos[0] as the image; rows with fewer than two events or no photos
// would hit undefined — confirm upstream guarantees.
createRows(data) {
const result = _.map(data, (row, index) => {
const photo = row.Photos[0]
return (

{row.Name}
{row.Type}
{moment.unix(row.Events[0].Time).format("DD MMM YYYY")}
{moment.unix(row.Events[1].Time).format("DD MMM YYYY")}
{showAnimalAge(row.DOBUnixTime)}
{ }

);
});

return result;
}
62 |
63 | render() {
64 | const { pets, events, headerText, shelterluvShortId } = this.props;
65 | const combined = this.combineAnimalAndEvents(pets, events)
66 | const numOfPets = _.size(combined);
67 | const latestPets = this.getLatestPets(combined);
68 | const headerAddition = (numOfPets > PET_COUNT) ? " (Most Recent " + PET_COUNT + ")" : ""
69 | const shelterLuvPersonURL = `https://www.shelterluv.com/phlp-p-${shelterluvShortId}`;
70 |
71 | return (
72 |
73 | }
76 | >
77 |
78 |
79 |
80 |
81 |
82 |
83 |
84 |
85 |
86 |
87 | Name
88 | Animal Type
89 | Start Date
90 | End Date
91 | Age
92 | Photo
93 |
94 |
95 |
96 | {latestPets && this.createRows(latestPets)}
97 |
98 |
99 |
100 |
101 | );
102 | }
103 | }
104 |
105 |
106 | export default Fosters;
--------------------------------------------------------------------------------
/src/client/src/pages/DataView360/View/components/SupportOverview.js:
--------------------------------------------------------------------------------
1 | import React, { Component } from 'react';
2 | import { Box, Container, Divider, Paper, Typography } from '@material-ui/core';
3 | import { withStyles } from '@material-ui/core/styles';
4 |
5 | import Grid from "@material-ui/core/Grid";
6 |
7 | const customStyles = theme => ({
8 | spacingRows: {
9 | padding: 2
10 | },
11 | spaceIcon: {
12 | marginTop: 2
13 | }
14 | });
15 |
// Donor summary card: derives {title, value} display rows from the `data`
// prop and renders them in a two-column layout.
// NOTE(review): JSX markup appears stripped in this dump — only expressions
// and text labels survive inside the render methods.
class SupportOverview extends Component {

// Build [{title, value}] rows from the donor record. Non-donors (no first
// donation date and zero gifts) collapse to a single "N/A" row.
// NOTE(review): assumes first_gift_amount / total_giving / largest_gift are
// numbers whenever isDonor is true — .toFixed would throw otherwise; confirm.
createRowData(data) {
const isDonor = data.first_donation_date || data.number_of_gifts > 0
if (!isDonor) {
return [{"title": "First Gift Date", "value": "N/A"}]
}
const rows = [
{ "title": "First Gift Date", "value": data.first_donation_date },
{ "title": "First Gift Amount", "value": `$${data.first_gift_amount.toFixed(2)}`},
{ "title": "Lifetime Giving", "value": `$${data.total_giving.toFixed(2)}`},
{ "title": "Total # of Gifts", "value": data.number_of_gifts},
{ "title": "Largest Gift", "value": `$${data.largest_gift.toFixed(2)}`},
{ "title": "Recurring Donor?", "value": data.is_recurring ? "Yes" : "No"},
// { "title": "PAWS Legacy Society?", "value": "test" }
]
return rows;
}

// Render one title/value grid row per entry produced by createRowData.
createRows(classes, data) {
return data.map((row) => (



{row.title}




{row.value}



));
}

render() {
const { classes, data } = this.props;
const rows = this.createRowData(data);

return (






Support Overview







{this.createRows(classes, rows)}




);
}
}
78 |
79 | export default withStyles(customStyles)(SupportOverview);
--------------------------------------------------------------------------------
/src/client/src/pages/DataView360/View/components/Volunteer.js:
--------------------------------------------------------------------------------
1 | import React, { Component } from 'react';
2 | import _ from 'lodash';
3 | import VolunteerActivity from './VolunteerActivity';
4 | import VolunteerHistory from './VolunteerHistory';
5 |
6 |
// Thin wrapper composing the two volunteer panels (VolunteerActivity and
// VolunteerHistory — see imports above).
// NOTE(review): JSX markup appears stripped in this dump.
class Volunteer extends Component {

render() {
return (




);
}
}
18 |
19 | export default Volunteer;
--------------------------------------------------------------------------------
/src/client/src/pages/DataView360/View/components/VolunteerActivity.js:
--------------------------------------------------------------------------------
1 | import React, { Component } from 'react';
2 | import {
3 | Paper,
4 | Table,
5 | TableContainer,
6 | TableHead,
7 | TableBody,
8 | TableRow,
9 | TableCell,
10 | Container,
11 | } from '@material-ui/core';
12 | import EmojiPeopleIcon from '@material-ui/icons/EmojiPeople';
13 | import DataTableHeader from './DataTableHeader';
14 |
15 |
16 | class VolunteerActivity extends Component {
17 |
18 | render() {
19 | return (
20 |
21 |
22 | }
24 | />
25 |
26 |
27 |
28 |
29 | Volunteer activity start
30 | Life hours
31 | YTD hours
32 |
33 |
34 |
35 | { this.props.volunteer && (
36 |
37 | {(this.props.volunteer.start_date) ? this.props.volunteer.start_date : "N/A"}
38 | {(this.props.volunteer.life_hours) ? this.props.volunteer.life_hours.toFixed(2) : 0}
39 | {(this.props.volunteer.ytd_hours) ? this.props.volunteer.ytd_hours.toFixed(2) : 0}
40 |
41 | )}
42 |
43 |
44 |
45 |
46 |
47 | );
48 | }
49 | }
50 |
51 |
52 | export default VolunteerActivity;
--------------------------------------------------------------------------------
/src/client/src/pages/DataView360/View/components/VolunteerHistory.js:
--------------------------------------------------------------------------------
1 | import React, { Component } from 'react';
2 | import {
3 | Paper,
4 | Table,
5 | TableContainer,
6 | TableHead,
7 | TableBody,
8 | TableRow,
9 | TableCell,
10 | Container,
11 | } from '@material-ui/core';
12 | import _ from 'lodash';
13 | import moment from 'moment-timezone';
14 | import TimelineIcon from '@material-ui/icons/Timeline';
15 | import DataTableHeader from './DataTableHeader';
16 |
17 |
18 | const SHIFTS_TO_SHOW = 3;
19 |
20 | class VolunteerHistory extends Component {
21 |
// Render rows for the SHIFTS_TO_SHOW most recent volunteer shifts,
// newest first.
// NOTE(review): JSX markup appears stripped in this dump — only cell
// expressions survive in the returned rows.
createShiftRows(shifts) {
// NOTE(review): filters on `s.from` but sorts/renders `shift.from_date` —
// confirm both fields exist on shift records, otherwise this filter is a no-op.
const shiftsFiltered = _.filter(shifts, function(s) { return s.from !== "Invalid date"});
const shiftsSorted = _.sortBy(shiftsFiltered, shift => {
return new moment(shift.from_date).format("YYYY-MM-DD");
}).reverse();

const lastShifts = shiftsSorted.slice(0, SHIFTS_TO_SHOW)

const result = _.map(lastShifts, (shift, index) => {
// NOTE(review): mutates the shift object from props in place — consider
// formatting into a local variable instead of reassigning shift.from_date.
shift.from_date = moment.utc(shift.from_date).format("YYYY-MM-DD")
return(
{shift.from_date}
{shift.assignment}
);

});

return result;
}
41 |
42 | render() {
43 |
44 | return (
45 |
46 |
47 | }
49 | />
50 |
51 |
52 |
53 |
54 | Date
55 | Assignment
56 |
57 |
58 |
59 | { this.props.volunteerShifts && this.createShiftRows(this.props.volunteerShifts) }
60 |
61 |
62 |
63 |
64 |
65 | );
66 | }
67 | }
68 |
69 |
70 | export default VolunteerHistory;
--------------------------------------------------------------------------------
/src/client/src/pages/Home.js:
--------------------------------------------------------------------------------
1 | import React, { Component } from 'react';
2 | import startImage from '../assets/startImage.jpg';
3 | import Grid from '@material-ui/core/Grid';
4 | import {Typography} from "@material-ui/core";
5 |
6 |
// Landing page: welcome headline plus the splash image imported above.
// NOTE(review): JSX markup appears stripped in this dump.
class HomePage extends Component {
render() {
return (


Welcome PAWS User





);
}
}
21 |
22 | export default HomePage;
--------------------------------------------------------------------------------
/src/client/src/pages/Login/Login.js:
--------------------------------------------------------------------------------
1 | import React, {useState} from 'react';
2 | import PropTypes from 'prop-types';
3 | import {CardContent, Paper, TextField, Typography} from "@material-ui/core";
4 | import Grid from "@material-ui/core/Grid";
5 | import Button from "@material-ui/core/Button";
6 |
// Reject failed login responses so callers can surface a friendly message.
// Returns the response unchanged on success; throws on any non-ok response.
function checkLoginResponse(response) {
    if (!response.ok) {
        throw new Error("Unable to log in - check username and password");
    }
    return response;
}
14 |
15 |
// POST credentials to the login endpoint and resolve with the parsed token
// payload. On any failure (network error or non-ok status rejected by
// checkLoginResponse) the error text is shown in the #loginErrorMsg element
// and the promise resolves with undefined — callers must handle a missing token.
async function loginUser(credentials) {
    return fetch('/api/user/login', {
        method: 'POST',
        headers: {
            'Content-Type': 'application/json'
        },
        body: JSON.stringify(credentials)
    })
        .then(checkLoginResponse)
        .then(data => data.json())
        // Bug fix: use textContent instead of innerHTML so the error text can
        // never be interpreted as HTML (avoids markup injection into the page).
        // String coercion of the Error object is unchanged.
        .catch(error => document.getElementById('loginErrorMsg').textContent = error)

}
29 |
30 | export default function Login({setToken}) {
31 | const [username, setUserName] = useState();
32 | const [password, setPassword] = useState();
33 | //const [authcode, setAuthCode] = useState(); // For future use
34 |
35 | const handleSubmit = async e => {
36 | e.preventDefault();
37 | const access_token = await loginUser({
38 | username,
39 | password
40 | });
41 | setToken(access_token);
42 | }
43 |
44 | return (
45 |
46 |
47 | Please Log In
48 |
49 |
50 |
51 |
52 |
68 |
69 |
70 |
71 |
72 |
73 |
74 | )
75 | }
76 |
77 | Login.propTypes = {
78 | setToken: PropTypes.func.isRequired
79 | };
--------------------------------------------------------------------------------
/src/client/src/pages/Login/Logout.js:
--------------------------------------------------------------------------------
1 | import React from 'react';
2 | import PropTypes from 'prop-types';
3 | import { useHistory } from "react-router-dom";
4 |
5 |
// Clears the saved auth token and immediately navigates back to "/".
// NOTE(review): both side effects run during render — React expects these
// in a useEffect; confirm before relying on render-time behavior.
// NOTE(review): the JSX return value appears stripped in this dump.
export default function Logout({ setToken }) {

setToken(null);
let history = useHistory();
history.push('/')

return (

)
}
16 |
17 | Logout.propTypes = {
18 | setToken: PropTypes.func.isRequired
19 | };
--------------------------------------------------------------------------------
/src/client/src/pages/Login/useToken.js:
--------------------------------------------------------------------------------
1 | import { useState } from 'react';
2 |
// Hook managing the session-scoped auth token.
// Exposes the current access_token string and a setter that persists the
// full token payload to sessionStorage before updating state.
export default function useToken() {
    // Read the persisted token payload (if any) back out of sessionStorage.
    const readStoredToken = () => {
        const raw = sessionStorage.getItem('access_token');
        const parsed = JSON.parse(raw);
        return parsed?.access_token
    };

    const [access_token, setToken] = useState(readStoredToken());

    // Persist the whole payload, expose just its access_token via state.
    const saveToken = userToken => {
        sessionStorage.setItem('access_token', JSON.stringify(userToken));
        setToken(userToken?.access_token);
    };

    return {
        setToken: saveToken,
        access_token
    }
}
--------------------------------------------------------------------------------
/src/client/src/pages/UserManagement/Components/Dialog/ChangePasswordDialog.jsx:
--------------------------------------------------------------------------------
1 | import { yupResolver } from "@hookform/resolvers/yup";
2 | import {
3 | Button,
4 | Dialog,
5 | DialogActions,
6 | DialogContent,
7 | DialogTitle,
8 | TextField,
9 | Typography
10 | } from '@material-ui/core';
11 | import React from 'react';
12 | import { useForm } from 'react-hook-form';
13 | import * as Yup from 'yup';
14 | import { updateUser } from "../../../../utils/api";
15 | import { buildPasswordValidation } from '../../Validations';
16 | import useAlert from "../../../../hooks/useAlert";
17 |
18 |
19 | export default function ChangePasswordDialog(props) {
20 | const {
21 | onClose,
22 | token,
23 | user
24 | } = props;
25 | const { username } = user;
26 | const { setAlert } = useAlert();
27 |
28 | const validationSchema = Yup.object().shape({
29 | password: buildPasswordValidation(username),
30 | confirmPassword: Yup.string().oneOf([Yup.ref("password")], "Passwords must match"),
31 | });
32 |
33 | const { register, handleSubmit, formState: { errors }, trigger } = useForm({
34 | resolver: yupResolver(validationSchema),
35 | });
36 |
37 | const onSubmitHandler = (data) => {
38 | const { password } = data;
39 |
40 | updateUser({ username, password }, token)
41 | .then((res) => {
42 | if (res === "Updated") {
43 | setAlert({ type: "success", text: `Password for user ${username} successfully changed!` });
44 | } else {
45 | setAlert({ type: "error", text: res });
46 | }
47 | })
48 | .catch(e => {
49 | console.warn(e)
50 | setAlert({ type: "error", text: e })
51 | });
52 | onClose();
53 | }
54 |
55 | return (
56 |
61 | Change Password
62 |
106 |
107 | )
108 | }
109 |
--------------------------------------------------------------------------------
/src/client/src/pages/UserManagement/Components/Dialog/UserDialog.jsx:
--------------------------------------------------------------------------------
1 | import React from 'react';
2 | import ChangePasswordDialog from './ChangePasswordDialog';
3 | import NewUserDialog from './NewUserDialog';
4 | import UpdateUserDialog from './UpdateUserDialog';
5 |
// Discriminators selecting which dialog UserDialog renders.
export const DialogTypes = {
NewUser: 'new-user',
UpdateUser: 'update-user',
ChangePassword: 'change-password',
}
11 |
12 | export default function UserDialog(props) {
13 | const {
14 | onClose,
15 | selectedUser,
16 | type,
17 | token,
18 | updateUsers
19 | } = props;
20 |
21 | switch (type) {
22 | case DialogTypes.NewUser:
23 | return (
24 |
29 | )
30 | case DialogTypes.UpdateUser:
31 | return (
32 |
38 | )
39 | case DialogTypes.ChangePassword:
40 | return (
41 |
46 | )
47 | default:
48 | return null
49 | }
50 | }
51 |
--------------------------------------------------------------------------------
/src/client/src/pages/UserManagement/Components/RolesRadioGroup.jsx:
--------------------------------------------------------------------------------
1 | import {
2 | FormControlLabel,
3 | Radio,
4 | RadioGroup,
5 | } from "@material-ui/core";
6 | import React from "react";
7 |
// Role identifiers as persisted on the server.
const UserRoles = {
Admin: "admin",
User: "user",
};

// Radio-button options: `label` is displayed, `value` is submitted as the role.
const options = [
{
label: "Admin",
value: UserRoles.Admin,
},
{
label: "User",
value: UserRoles.User,
},
];
23 |
24 | export default function RolesRadioGroup(props) {
25 | const { register, user } = props;
26 | const [selectedRole, setSelectedRole] = React.useState(user ? user.role : undefined);
27 |
28 | React.useEffect(() => {
29 | setSelectedRole(user ? user.role : null);
30 | }, [user]);
31 |
32 | const generateRadioOptions = () => {
33 | return options.map((option) => (
34 | }
39 | checked={selectedRole === option.value}
40 | onClick={(() => setSelectedRole(option.value))}
41 | {...register("role")}
42 | />
43 | ));
44 | };
45 |
46 | return (
47 |
52 | {generateRadioOptions()}
53 |
54 | );
55 | };
56 |
--------------------------------------------------------------------------------
/src/client/src/pages/UserManagement/Components/UserRow.jsx:
--------------------------------------------------------------------------------
1 | import {
2 | Button,
3 | TableCell,
4 | TableRow
5 | } from "@material-ui/core";
6 | import React from 'react';
7 | import { DialogTypes } from "./Dialog/UserDialog";
8 |
// One table row per user: name, username, role, active flag, plus buttons
// that open the update-user and change-password dialogs via props.openDialog.
// NOTE(review): JSX markup appears stripped in this dump — only cell
// expressions and button labels survive below.
export default function UserRow(props) {
const { active, full_name: fullName, role, username } = props.user;
const openDialog = props.openDialog;

return (

{fullName}
{username}
{role}
{active === 'Y' ? 'Yes' : 'No'}

openDialog({ type: DialogTypes.UpdateUser, user: props.user })}>
Update User



openDialog({ type: DialogTypes.ChangePassword, user: props.user })}>
Change Password



)
}
32 |
--------------------------------------------------------------------------------
/src/client/src/pages/UserManagement/Validations.js:
--------------------------------------------------------------------------------
1 | import * as Yup from 'yup';
2 |
// Substrings never allowed in a password (compared case-insensitively,
// alongside the user's own username, in buildPasswordValidation).
export const DISALLOWED_WORDS = [
'cat',
'dog',
'password',
'philly',
'paws',
]
10 |
// Yup schema for display names: 2–50 characters, letters and single
// (non-consecutive) internal spaces only, and required.
export const buildNameValidation = () => {
    // Letters/spaces only, with a lookahead rejecting runs of 2+ spaces.
    const NAME_PATTERN = /^(?!.*\s{2,})[a-zA-Z ]+$/;
    return Yup.string()
        .trim()
        .min(2, "Name must be at least 2 characters")
        .max(50, "Name cannot be more than 50 characters")
        .matches(NAME_PATTERN, "Name must only contain letters and non-consecutive internal spaces")
        .required("Name is required")
}
19 |
// Yup schema for usernames: 2–50 chars, alphanumeric plus single
// (non-consecutive) underscores, starting and ending alphanumeric.
// NOTE(review): unlike the other builders this one has no .required() —
// confirm that is intentional.
export const buildUsernameValidation = () => {
    const STARTS_ALNUM = /^[a-zA-Z0-9].*$/;
    const ENDS_ALNUM = /^.*[a-zA-Z0-9]$/;
    const BODY_PATTERN = /^(?!.*?__)[a-zA-Z0-9_]+$/;
    return Yup.string()
        .trim()
        .min(2, "Username must be at least 2 characters")
        .max(50, "Username cannot be more than 50 characters")
        .matches(STARTS_ALNUM, "Username must begin with a letter or number")
        .matches(ENDS_ALNUM, "Username must end with a letter or number")
        .matches(BODY_PATTERN, "Username must contain only alphanumeric characters and non-consecutive underscores")
}
29 |
// Yup schema for roles: exactly "user" or "admin" (after trimming), required.
export const buildRoleValidation = () => {
    const ALLOWED_ROLES = ["user", "admin"];
    return Yup.string()
        .trim()
        .oneOf(ALLOWED_ROLES, "Role must be one of the following: user/admin")
        .required("Role is required")
}
36 |
// Yup schema for passwords: 12–36 characters from a restricted character
// set, and rejecting any password containing a disallowed word or the
// user's own username (case-insensitively).
//
// `username` may be passed explicitly (e.g. the change-password dialog);
// when omitted, it is read from the sibling `username` form field via
// the validation context.
export const buildPasswordValidation = (username) => {
    return Yup.string()
        .required("Password is required")
        .test(
            "no-disallowed-words",
            "Password cannot include 'dog', 'cat', 'password', 'philly', 'paws', or your username",
            (value, context) => {
                if (!value) {
                    return true;
                }

                const lowercasePassword = value.toLowerCase();
                // Bug fix: the explicit `username` argument was previously
                // compared without lowercasing, so mixed-case usernames
                // slipped past the check. Also guard a missing username,
                // which would otherwise throw (or, when empty, reject every
                // password via ''.includes('')).
                const lowercaseUsername = (username ?? context.parent.username ?? "").toLowerCase();
                const forbidden = lowercaseUsername
                    ? [...DISALLOWED_WORDS, lowercaseUsername]
                    : [...DISALLOWED_WORDS];
                return forbidden.every((word) => !lowercasePassword.includes(word))
            })
        .matches(/^[a-zA-Z0-9!@#$%^*]+$/, "Password can only contain numbers, letters, and the following symbols: !@#$%^*")
        .min(12, "Password must contain at least 12 characters")
        .max(36, "Password must be 36 characters or less")
}
56 |
--------------------------------------------------------------------------------
/src/client/src/setupProxy.js:
--------------------------------------------------------------------------------
1 | const proxy = require('http-proxy-middleware');
2 |
3 |
4 | module.exports = function(app) {
5 | app.use(proxy('/api/**', {
6 | target: process.env.IS_LOCAL === 'true' ? 'http://localhost:3333' : 'http://server:5000'
7 | }
8 | ));
9 | }
--------------------------------------------------------------------------------
/src/client/src/setupTests.js:
--------------------------------------------------------------------------------
1 | // jest-dom adds custom jest matchers for asserting on DOM nodes.
2 | // allows you to do things like:
3 | // expect(element).toHaveTextContent(/react/i)
4 | // learn more: https://github.com/testing-library/jest-dom
5 | import '@testing-library/jest-dom/extend-expect';
6 |
--------------------------------------------------------------------------------
/src/client/src/theme/defaultTheme.js:
--------------------------------------------------------------------------------
1 | import {createMuiTheme} from '@material-ui/core/styles';
2 |
3 |
// Global Material-UI theme: light-blue primary / lilac secondary palette
// and a Roboto type scale where every heading weight is bold (700).
const defaultTheme = createMuiTheme({
palette: {
primary: {
main: '#90caf9',
contrastText: '#fff',
},
secondary: {
main: '#d9adfa',
contrastText: '#000',
},

},
typography: {
fontFamily: 'Roboto',
htmlFontSize: 16,
h1: {
fontSize: '3em',
fontWeight: 700
},
h2: {
fontSize: '2.5em',
fontWeight: 700
},
h3: {
fontSize: '2em',
fontWeight: 700
},
h4: {
fontSize: '1.7em',
fontWeight: 700
},
h5: {
fontSize: '1.5em',
fontWeight: 700
},
h6: {
fontSize: '1em',
fontWeight: 700
},
subtitle1: {
fontSize: '1.5em',
},
button: {
fontSize: '0.8em',
fontWeight: 700
},
}
});

// Component overrides are attached after creation so they can reference
// computed theme values (e.g. defaultTheme.zIndex.drawer).
defaultTheme.overrides = {
MuiSvgIcon: {
root: {
padding: 5
}
},
MuiBackdrop: {
root: {
zIndex: defaultTheme.zIndex.drawer + 1,
color: '#fff',
}
},
MuiTableCell: {
head: {
fontWeight: "bold"
}
},
};
71 |
72 | export default defaultTheme;
--------------------------------------------------------------------------------
/src/client/src/utils/api.js:
--------------------------------------------------------------------------------
/**
 * GET the full user list from the admin API.
 * Resolves with the parsed JSON body; logs and rethrows (wrapped) on failure.
 */
export const fetchUsers = async ({ token }) => {
    const requestOptions = {
        method: 'GET',
        headers: {
            'Authorization': 'Bearer ' + token
        },
    };

    try {
        const res = await fetch("/api/admin/user/get_users", requestOptions);
        return await res.json();
    } catch (e) {
        console.warn(e);
        throw new Error(e);
    }
}
17 |
// POST a new user record to the admin API.
// `userData` is serialized as JSON; resolves with the parsed response body.
// Logs and rethrows (wrapped) on network/parse failure.
export const createUser = async (userData, token) => {
    const api = "/api/admin/user/create";
    const requestOptions = {
        method: 'POST',
        headers: {
            'Authorization': 'Bearer ' + token,
            // Bug fix: declare the JSON body so the server parses it
            // (the login request in Login.js already sends this header).
            'Content-Type': 'application/json'
        },
        body: JSON.stringify(userData)
    };

    return fetch(api, requestOptions)
        .then((res) => res.json())
        .catch((e) => {
            console.warn(e)
            throw new Error(e);
        })
}
35 |
// POST updated fields for an existing user to the admin API.
// `userData` is serialized as JSON; resolves with the parsed response body
// (callers compare it against "Updated"). Logs and rethrows on failure.
export const updateUser = async (userData, token) => {
    const api = "/api/admin/user/update";
    const requestOptions = {
        method: 'POST',
        headers: {
            'Authorization': 'Bearer ' + token,
            // Bug fix: declare the JSON body so the server parses it
            // (the login request in Login.js already sends this header).
            'Content-Type': 'application/json'
        },
        body: JSON.stringify(userData)
    }

    return fetch(api, requestOptions)
        .then((res) => res.json())
        .catch((e) => {
            console.warn(e)
            throw new Error(e);
        })
}
53 |
--------------------------------------------------------------------------------
/src/client/src/utils/utils.js:
--------------------------------------------------------------------------------
1 | import moment from 'moment-timezone';
2 |
// Format a 10-digit string as "(AAA) BBB-CCCC".
// Returns undefined when the input is falsy or not exactly 10 digits.
export function formatPhoneNumber(phoneNumberString) {
    if (!phoneNumberString) {
        return undefined;
    }

    const match = phoneNumberString.match(/^(\d{3})(\d{3})(\d{4})$/);
    if (!match) {
        return undefined;
    }

    const [, area, prefix, line] = match;
    return `(${area}) ${prefix}-${line}`;
}
16 |
// Whole years elapsed since the given date of birth.
// `epochTime` is in unix SECONDS; moment() expects milliseconds.
export function getAnimalAge(epochTime) {
    return moment().diff(moment(epochTime * 1000), 'years');
}
21 |
// Human-readable age with a singular/plural unit, e.g. "1 year" / "3 years".
export function showAnimalAge(epochTime) {
    const age = getAnimalAge(epochTime);
    const unit = age === 1 ? 'year' : 'years';
    return `${age} ${unit}`;
}
26 |
// Render a UTC timestamp in US Eastern local time,
// e.g. "March 3rd 2021, 4:05:06 pm".
export function formatTimestamp(timestamp) {
    return moment.utc(timestamp).tz("America/New_York").format("MMMM Do YYYY, h:mm:ss a");
}
31 |
// Map an upload-timestamp key to its display name.
// Unknown keys yield undefined, matching the original switch fall-through.
export function formatUploadType(data) {
    const labels = new Map([
        ['last_volgistics_update', 'Volgistics'],
        ['last_shelterluv_update', 'Shelterluv'],
        ['last_salesforce_update', 'Salesforce'],
    ]);
    return labels.get(data);
}
--------------------------------------------------------------------------------
/src/deploy_from_tar_docker-compose.sh:
--------------------------------------------------------------------------------
1 | docker-compose down -v
2 | docker-compose build
3 | docker-compose up
--------------------------------------------------------------------------------
/src/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: '3.7'
2 |
3 | services:
4 |
5 | db:
6 | container_name: paws-compose-db
7 | ports:
8 | - "5432:5432"
9 | image: postgres:15.4-alpine
10 | volumes:
11 | - postgres15:/var/lib/postgresql/data
12 | environment:
13 | POSTGRES_DB: paws
14 | POSTGRES_PASSWORD: thispasswordisverysecure
15 | # command: ["postgres", "-c", "log_statement=all"]
16 |
17 |
18 | server:
19 | container_name: paws-compose-server
20 | build: ./server
21 | ports:
22 | - "5000:5000"
23 | depends_on:
24 | - db
25 | volumes:
26 | - src_archive:/app/static/raw_data
27 |
28 | environment:
29 | - FLASK_ENV=development
30 |
31 | client:
32 | build: ./client
33 | container_name: paws-compose-client
34 | ports:
35 | - "80:80"
36 | depends_on:
37 | - server
38 | environment:
39 | - CHOKIDAR_USEPOLLING=true
40 | - PAWS_API_HOST=server # paws-compose-server
41 | stdin_open: true
42 |
43 | scheduler:
44 | build: ./scheduler
45 | container_name: paws-scheduler
46 | ports:
47 | - "6000:6000"
48 | depends_on:
49 | - server
50 | profiles: ["production-only"]
51 |
52 |
53 |
#using named volumes fixes a Windows Docker bug relating to container permissions
55 | #https://stackoverflow.com/questions/49148754/docker-container-shuts-down-giving-data-directory-has-wrong-ownership-error-wh
56 | volumes:
57 | postgres15:
58 | src_archive:
59 | server_logs:
60 |
--------------------------------------------------------------------------------
/src/helm-chart/.helmignore:
--------------------------------------------------------------------------------
1 | # Patterns to ignore when building packages.
2 | # This supports shell glob matching, relative path matching, and
3 | # negation (prefixed with !). Only one pattern per line.
4 | .DS_Store
5 | # Common VCS dirs
6 | .git/
7 | .gitignore
8 | .bzr/
9 | .bzrignore
10 | .hg/
11 | .hgignore
12 | .svn/
13 | # Common backup files
14 | *.swp
15 | *.bak
16 | *.tmp
17 | *.orig
18 | *~
19 | # Various IDEs
20 | .project
21 | .idea/
22 | *.tmproj
23 | .vscode/
24 |
--------------------------------------------------------------------------------
/src/helm-chart/Chart.yaml:
--------------------------------------------------------------------------------
1 | apiVersion: v2
2 | name: paws-datapipeline
3 | description: A Helm chart for Kubernetes
4 |
5 | # A chart can be either an 'application' or a 'library' chart.
6 | #
7 | # Application charts are a collection of templates that can be packaged into versioned archives
8 | # to be deployed.
9 | #
10 | # Library charts provide useful utilities or functions for the chart developer. They're included as
11 | # a dependency of application charts to inject those utilities and functions into the rendering
12 | # pipeline. Library charts do not define any templates and therefore cannot be deployed.
13 | type: application
14 |
15 | # This is the chart version. This version number should be incremented each time you make changes
16 | # to the chart and its templates, including the app version.
17 | # Versions are expected to follow Semantic Versioning (https://semver.org/)
18 | version: 0.1.0
19 |
20 | # This is the version number of the application being deployed. This version number should be
21 | # incremented each time you make changes to the application. Versions are not expected to
22 | # follow Semantic Versioning. They should reflect the version the application is using.
23 | appVersion: 0.8.0
24 |
--------------------------------------------------------------------------------
/src/helm-chart/templates/NOTES.txt:
--------------------------------------------------------------------------------
1 | 1. Get the application URL by running these commands:
2 | {{- if .Values.ingress.enabled }}
3 | {{- range $host := .Values.ingress.hosts }}
4 | {{- range .paths }}
5 | http{{ if $.Values.ingress.tls }}s{{ end }}://{{ $host.host }}{{ . }}
6 | {{- end }}
7 | {{- end }}
8 | {{- else if contains "NodePort" .Values.service.type }}
9 | export NODE_PORT=$(kubectl get --namespace {{ .Release.Namespace }} -o jsonpath="{.spec.ports[0].nodePort}" services {{ include "helm-chart.fullname" . }})
10 | export NODE_IP=$(kubectl get nodes --namespace {{ .Release.Namespace }} -o jsonpath="{.items[0].status.addresses[0].address}")
11 | echo http://$NODE_IP:$NODE_PORT
12 | {{- else if contains "LoadBalancer" .Values.service.type }}
13 | NOTE: It may take a few minutes for the LoadBalancer IP to be available.
You can watch the status of it by running 'kubectl get --namespace {{ .Release.Namespace }} svc -w {{ include "helm-chart.fullname" . }}'
15 | export SERVICE_IP=$(kubectl get svc --namespace {{ .Release.Namespace }} {{ include "helm-chart.fullname" . }} --template "{{"{{ range (index .status.loadBalancer.ingress 0) }}{{.}}{{ end }}"}}")
16 | echo http://$SERVICE_IP:{{ .Values.service.port }}
17 | {{- else if contains "ClusterIP" .Values.service.type }}
18 | export POD_NAME=$(kubectl get pods --namespace {{ .Release.Namespace }} -l "app.kubernetes.io/name={{ include "helm-chart.name" . }},app.kubernetes.io/instance={{ .Release.Name }}" -o jsonpath="{.items[0].metadata.name}")
19 | export CONTAINER_PORT=$(kubectl get pod --namespace {{ .Release.Namespace }} $POD_NAME -o jsonpath="{.spec.containers[0].ports[0].containerPort}")
20 | echo "Visit http://127.0.0.1:8080 to use your application"
21 | kubectl --namespace {{ .Release.Namespace }} port-forward $POD_NAME 8080:$CONTAINER_PORT
22 | {{- end }}
23 |
--------------------------------------------------------------------------------
/src/helm-chart/templates/_helpers.tpl:
--------------------------------------------------------------------------------
1 | {{/*
2 | Expand the name of the chart.
3 | */}}
4 | {{- define "helm-chart.name" -}}
5 | {{- default .Chart.Name .Values.nameOverride | trunc 63 | trimSuffix "-" }}
6 | {{- end }}
7 |
8 | {{/*
9 | Create a default fully qualified app name.
10 | We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec).
11 | If release name contains chart name it will be used as a full name.
12 | */}}
13 | {{- define "helm-chart.fullname" -}}
14 | {{- if .Values.fullnameOverride }}
15 | {{- .Values.fullnameOverride | trunc 63 | trimSuffix "-" }}
16 | {{- else }}
17 | {{- $name := default .Chart.Name .Values.nameOverride }}
18 | {{- if contains $name .Release.Name }}
19 | {{- .Release.Name | trunc 63 | trimSuffix "-" }}
20 | {{- else }}
21 | {{- printf "%s-%s" .Release.Name $name | trunc 63 | trimSuffix "-" }}
22 | {{- end }}
23 | {{- end }}
24 | {{- end }}
25 |
26 | {{/*
27 | Create chart name and version as used by the chart label.
28 | */}}
29 | {{- define "helm-chart.chart" -}}
30 | {{- printf "%s-%s" .Chart.Name .Chart.Version | replace "+" "_" | trunc 63 | trimSuffix "-" }}
31 | {{- end }}
32 |
33 | {{/*
34 | Common labels
35 | */}}
36 | {{- define "helm-chart.labels" -}}
37 | helm.sh/chart: {{ include "helm-chart.chart" . }}
38 | {{ include "helm-chart.selectorLabels" . }}
39 | {{- if .Chart.AppVersion }}
40 | app.kubernetes.io/version: {{ .Chart.AppVersion | quote }}
41 | {{- end }}
42 | app.kubernetes.io/managed-by: {{ .Release.Service }}
43 | {{- end }}
44 |
45 | {{/*
46 | Selector labels
47 | */}}
48 | {{- define "helm-chart.selectorLabels" -}}
49 | app.kubernetes.io/name: {{ include "helm-chart.name" . }}
50 | app.kubernetes.io/instance: {{ .Release.Name }}
51 | {{- end }}
52 |
53 | {{/*
54 | Create the name of the service account to use
55 | */}}
56 | {{- define "helm-chart.serviceAccountName" -}}
57 | {{- if .Values.serviceAccount.create }}
58 | {{- default (include "helm-chart.fullname" .) .Values.serviceAccount.name }}
59 | {{- else }}
60 | {{- default "default" .Values.serviceAccount.name }}
61 | {{- end }}
62 | {{- end }}
63 |
--------------------------------------------------------------------------------
/src/helm-chart/templates/deployment.yaml:
--------------------------------------------------------------------------------
apiVersion: apps/v1
kind: Deployment
metadata:
  name: {{ include "helm-chart.fullname" . }}
  labels:
    {{- include "helm-chart.labels" . | nindent 4 }}
spec:
  {{- if not .Values.autoscaling.enabled }}
  replicas: {{ .Values.replicaCount }}
  {{- end }}
  # Recreate: the db and server containers mount ReadWriteOnce PVCs, so
  # old and new pods must never overlap during a rollout.
  strategy:
    type: Recreate
  selector:
    matchLabels:
      {{- include "helm-chart.selectorLabels" . | nindent 6 }}
  template:
    metadata:
      {{- with .Values.podAnnotations }}
      annotations:
        {{- toYaml . | nindent 8 }}
      {{- end }}
      labels:
        {{- include "helm-chart.selectorLabels" . | nindent 8 }}
    spec:
      {{- with .Values.imagePullSecrets }}
      imagePullSecrets:
        {{- toYaml . | nindent 8 }}
      {{- end }}
      {{- with .Values.hostAliases }}
      {{/* Guarded with `with` so a values file omitting hostAliases still renders. */}}
      hostAliases:
        {{- toYaml . | nindent 8 }}
      {{- end }}
      volumes:
        - name: {{ .Release.Name }}-postgresql-data
          persistentVolumeClaim:
            claimName: {{ .Release.Name }}-postgresql-data
        - name: {{ .Release.Name }}-server-data
          persistentVolumeClaim:
            claimName: {{ .Release.Name }}-server-data
      serviceAccountName: {{ include "helm-chart.serviceAccountName" . }}
      securityContext:
        {{- toYaml .Values.podSecurityContext | nindent 8 }}
      containers:
        {{- with .Values.db }}
        - name: {{ $.Chart.Name }}-db
          image: "{{ .image.repository }}:{{ .image.tag | default $.Chart.AppVersion }}"
          imagePullPolicy: {{ .image.pullPolicy }}
          {{/* securityContext/resources come from values.yaml; previously declared there but never rendered. */}}
          securityContext:
            {{- toYaml $.Values.securityContext | nindent 12 }}
          resources:
            {{- toYaml $.Values.resources | nindent 12 }}
          env:
            - name: POSTGRES_DB
              value: paws
          envFrom:
            - secretRef:
                name: db-pass
          ports:
            - name: postgresql
              containerPort: 5432
              protocol: TCP
          volumeMounts:
            - name: {{ $.Release.Name }}-postgresql-data
              mountPath: /var/lib/postgresql/data
              subPath: postgresql-data
        {{- end }}
        {{- with .Values.server }}
        - name: {{ $.Chart.Name }}-server
          image: "{{ .image.repository }}:{{ .image.tag | default $.Chart.AppVersion }}"
          imagePullPolicy: {{ .image.pullPolicy }}
          securityContext:
            {{- toYaml $.Values.securityContext | nindent 12 }}
          resources:
            {{- toYaml $.Values.resources | nindent 12 }}
          envFrom:
            - secretRef:
                name: api-server-pws
            - secretRef:
                name: db-pass
          ports:
            - containerPort: 5000
              protocol: TCP
          volumeMounts:
            - name: {{ $.Release.Name }}-server-data
              mountPath: /var/lib/server/data
              subPath: server-data
          livenessProbe:
            httpGet:
              path: /api/user/test
              port: 5000
          readinessProbe:
            httpGet:
              path: /api/user/test
              port: 5000
        {{- end }}
        {{- with .Values.client }}
        - name: {{ $.Chart.Name }}-client
          image: "{{ .image.repository }}:{{ .image.tag | default $.Chart.AppVersion }}"
          imagePullPolicy: {{ .image.pullPolicy }}
          securityContext:
            {{- toYaml $.Values.securityContext | nindent 12 }}
          resources:
            {{- toYaml $.Values.resources | nindent 12 }}
          ports:
            - name: http
              containerPort: 80
              protocol: TCP
          livenessProbe:
            httpGet:
              path: /
              port: http
          readinessProbe:
            httpGet:
              path: /
              port: http
        {{- end }}
      {{- with .Values.nodeSelector }}
      nodeSelector:
        {{- toYaml . | nindent 8 }}
      {{- end }}
      {{- with .Values.affinity }}
      affinity:
        {{- toYaml . | nindent 8 }}
      {{- end }}
      {{- with .Values.tolerations }}
      tolerations:
        {{- toYaml . | nindent 8 }}
      {{- end }}
--------------------------------------------------------------------------------
/src/helm-chart/templates/hpa.yaml:
--------------------------------------------------------------------------------
{{- if .Values.autoscaling.enabled }}
{{/*
HorizontalPodAutoscaler for the main Deployment.
autoscaling/v2 replaces v2beta1, which was removed in Kubernetes 1.25;
the v2 API expresses utilization targets as target.type/averageUtilization
instead of the flat targetAverageUtilization field.
*/}}
apiVersion: autoscaling/v2
kind: HorizontalPodAutoscaler
metadata:
  name: {{ include "helm-chart.fullname" . }}
  labels:
    {{- include "helm-chart.labels" . | nindent 4 }}
spec:
  scaleTargetRef:
    apiVersion: apps/v1
    kind: Deployment
    name: {{ include "helm-chart.fullname" . }}
  minReplicas: {{ .Values.autoscaling.minReplicas }}
  maxReplicas: {{ .Values.autoscaling.maxReplicas }}
  metrics:
    {{- if .Values.autoscaling.targetCPUUtilizationPercentage }}
    - type: Resource
      resource:
        name: cpu
        target:
          type: Utilization
          averageUtilization: {{ .Values.autoscaling.targetCPUUtilizationPercentage }}
    {{- end }}
    {{- if .Values.autoscaling.targetMemoryUtilizationPercentage }}
    - type: Resource
      resource:
        name: memory
        target:
          type: Utilization
          averageUtilization: {{ .Values.autoscaling.targetMemoryUtilizationPercentage }}
    {{- end }}
{{- end }}
29 |
--------------------------------------------------------------------------------
/src/helm-chart/templates/ingress.yaml:
--------------------------------------------------------------------------------
1 | {{- if .Values.ingress.enabled -}}
2 | {{- $fullName := include "helm-chart.fullname" . -}}
3 | {{- $svcPort := .Values.service.port -}}
4 | apiVersion: networking.k8s.io/v1
5 | kind: Ingress
6 | metadata:
7 | name: {{ $fullName }}
8 | labels:
9 | {{- include "helm-chart.labels" . | nindent 4 }}
10 | {{- with .Values.ingress.annotations }}
11 | annotations:
12 | {{- toYaml . | nindent 4 }}
13 | {{- end }}
14 | spec:
15 | {{- if .Values.ingress.tls }}
16 | tls:
17 | {{- range .Values.ingress.tls }}
18 | - hosts:
19 | {{- range .hosts }}
20 | - {{ . | quote }}
21 | {{- end }}
22 | secretName: {{ .secretName }}
23 | {{- end }}
24 | {{- end }}
25 | rules:
26 | {{- range .Values.ingress.hosts }}
27 | - host: {{ .host | quote }}
28 | http:
29 | paths:
30 | {{- range .paths }}
31 | - path: {{ . }}
32 | pathType: Prefix
33 | backend:
34 | service:
35 | name: {{ $fullName }}
36 | port:
37 | number: {{ $svcPort }}
38 | {{- end }}
39 | {{- end }}
40 | {{- end }}
41 |
--------------------------------------------------------------------------------
/src/helm-chart/templates/pvc.yaml:
--------------------------------------------------------------------------------
1 | apiVersion: v1
2 | kind: PersistentVolumeClaim
3 | metadata:
4 | name: {{ .Release.Name }}-postgresql-data
5 | namespace: {{ .Release.Namespace }}
6 | labels:
7 | {{- include "helm-chart.selectorLabels" . | nindent 4 }}
8 | spec:
9 | accessModes:
10 | - ReadWriteOnce
11 | resources:
12 | requests:
13 | storage: {{ .Values.storage.pgdb }}
14 |
15 |
--------------------------------------------------------------------------------
/src/helm-chart/templates/pvc_server.yaml:
--------------------------------------------------------------------------------
1 | apiVersion: v1
2 | kind: PersistentVolumeClaim
3 | metadata:
4 | name: {{ .Release.Name }}-server-data
5 | namespace: {{ .Release.Namespace }}
6 | labels:
7 | {{- include "helm-chart.selectorLabels" . | nindent 4 }}
8 | spec:
9 | accessModes:
10 | - ReadWriteOnce
11 | resources:
12 | requests:
13 | storage: {{ .Values.storage.server }}
14 |
--------------------------------------------------------------------------------
/src/helm-chart/templates/service.yaml:
--------------------------------------------------------------------------------
1 | apiVersion: v1
2 | kind: Service
3 | metadata:
4 | name: {{ include "helm-chart.fullname" . }}
5 | labels:
6 | {{- include "helm-chart.labels" . | nindent 4 }}
7 | spec:
8 | type: {{ .Values.service.type }}
9 | ports:
10 | - port: {{ .Values.service.port }}
11 | targetPort: http
12 | protocol: TCP
13 | name: http
14 | selector:
15 | {{- include "helm-chart.selectorLabels" . | nindent 4 }}
16 |
--------------------------------------------------------------------------------
/src/helm-chart/templates/serviceaccount.yaml:
--------------------------------------------------------------------------------
1 | {{- if .Values.serviceAccount.create -}}
2 | apiVersion: v1
3 | kind: ServiceAccount
4 | metadata:
5 | name: {{ include "helm-chart.serviceAccountName" . }}
6 | labels:
7 | {{- include "helm-chart.labels" . | nindent 4 }}
8 | {{- with .Values.serviceAccount.annotations }}
9 | annotations:
10 | {{- toYaml . | nindent 4 }}
11 | {{- end }}
12 | {{- end }}
13 |
--------------------------------------------------------------------------------
/src/helm-chart/values.yaml:
--------------------------------------------------------------------------------
1 | # Default values for helm-chart.
2 | # This is a YAML-formatted file.
3 | # Declare variables to be passed into your templates.
4 |
5 | replicaCount: 1
6 |
7 | server:
8 | image:
9 | repository: ghcr.io/codeforphilly/paws-data-pipeline/server
10 | pullPolicy: Always
11 | # Overrides the image tag whose default is the chart appVersion.
12 | tag: "2.50"
13 |
14 | client:
15 | image:
16 | repository: ghcr.io/codeforphilly/paws-data-pipeline/client
17 | pullPolicy: Always
18 | # Overrides the image tag whose default is the chart appVersion.
19 | tag: "2.50"
20 |
21 | db:
22 | image:
23 | repository: postgres
24 | pullPolicy: IfNotPresent
25 | # Overrides the image tag whose default is the chart appVersion.
26 | tag: "15.4-alpine"
27 |
28 |
29 | storage:
30 | pgdb: 1Gi
31 | server: 1Gi
32 |
33 | imagePullSecrets:
34 | - name: regcred
35 | nameOverride: "paws-datapipeline"
36 | fullnameOverride: "paws-dp-chart"
37 |
38 | serviceAccount:
39 | # Specifies whether a service account should be created
40 | create: true
41 | # Annotations to add to the service account
42 | annotations: {}
43 | # The name of the service account to use.
44 | # If not set and create is true, a name is generated using the fullname template
45 | name: ""
46 |
47 | podAnnotations: {}
48 |
49 | podSecurityContext: {}
50 | # fsGroup: 2000
51 |
52 | securityContext: {}
53 | # capabilities:
54 | # drop:
55 | # - ALL
56 | # readOnlyRootFilesystem: true
57 | # runAsNonRoot: true
58 | # runAsUser: 1000
59 | hostAliases:
60 | - ip: "127.0.0.1"
61 | hostnames:
62 | - "server"
63 | - "paws-compose-db"
64 | - "client"
65 |
66 | service:
67 | type: NodePort #ClusterIP
68 | port: 80
69 |
70 | ingress:
71 | enabled: false
72 | annotations: {}
73 | # kubernetes.io/ingress.class: nginx
74 | # kubernetes.io/tls-acme: "true"
75 | hosts:
76 | - host: chart-example.local
77 | paths: []
78 | tls: []
79 | # - secretName: chart-example-tls
80 | # hosts:
81 | # - chart-example.local
82 |
83 | resources: {}
84 | # We usually recommend not to specify default resources and to leave this as a conscious
85 | # choice for the user. This also increases chances charts run on environments with little
86 | # resources, such as Minikube. If you do want to specify resources, uncomment the following
87 | # lines, adjust them as necessary, and remove the curly braces after 'resources:'.
88 | # limits:
89 | # cpu: 100m
90 | # memory: 128Mi
91 | # requests:
92 | # cpu: 100m
93 | # memory: 128Mi
94 |
95 | autoscaling:
96 | enabled: false
97 | minReplicas: 1
98 | maxReplicas: 100
99 | targetCPUUtilizationPercentage: 80
100 | # targetMemoryUtilizationPercentage: 80
101 |
102 | nodeSelector: {}
103 |
104 | tolerations: []
105 |
106 | affinity: {}
107 |
jobs:
  - name: minute-cron
    concurrencyPolicy: Forbid
    schedule: "*/1 * * * *"
    image: curlimages/curl
    imagePullPolicy: IfNotPresent
    # curlimages/curl is Alpine-based and ships only /bin/sh — there is
    # no bash in the image, so a /bin/bash command would fail at startup.
    command: ["/bin/sh"]

    args:
      - "-c"
      - "curl http://paws-compose-server:5000/api/internal/test"
    restartPolicy: OnFailure
--------------------------------------------------------------------------------
/src/k8s_conf/_pv_claims.yaml:
--------------------------------------------------------------------------------
1 | apiVersion: v1
2 | kind: PersistentVolumeClaim
3 | metadata:
4 | creationTimestamp: null
5 | labels:
6 | io.kompose.service: src-archive
7 | name: src-archive
8 | spec:
9 | accessModes:
10 | - ReadWriteOnce
11 | resources:
12 | requests:
13 | storage: 100Mi
14 | status: {}
15 |
16 | ---
17 | apiVersion: v1
18 | kind: PersistentVolumeClaim
19 | metadata:
20 | creationTimestamp: null
21 | labels:
22 | io.kompose.service: postgres
23 | name: postgres
24 | spec:
25 | accessModes:
26 | - ReadWriteOnce
27 | resources:
28 | requests:
29 | storage: 100Mi
30 | status: {}
31 |
--------------------------------------------------------------------------------
/src/k8s_conf/api_server.yaml:
--------------------------------------------------------------------------------
1 | apiVersion: v1
2 | kind: Service
3 | metadata:
4 | annotations:
5 | kompose.cmd: \temp\kompose -f docker-compose.yml convert
6 | kompose.version: 1.21.0 (992df58d8)
7 | creationTimestamp: null
8 | labels:
9 | io.kompose.service: server
10 | name: server
11 | spec:
12 | ports:
13 | - name: "5000"
14 | port: 5000
15 | targetPort: 5000
16 | selector:
17 | io.kompose.service: server
18 | status:
19 | loadBalancer: {}
20 |
21 | ---
22 | apiVersion: apps/v1
23 | kind: Deployment
24 | metadata:
25 | annotations:
26 | kompose.cmd: \temp\kompose -f docker-compose.yml convert
27 | kompose.version: 1.21.0 (992df58d8)
28 | creationTimestamp: null
29 | labels:
30 | io.kompose.service: server
31 | name: server
32 | spec:
33 | replicas: 1
34 | selector:
35 | matchLabels:
36 | io.kompose.service: server
37 | strategy:
38 | type: Recreate
39 | template:
40 | metadata:
41 | annotations:
42 | kompose.cmd: \temp\kompose -f docker-compose.yml convert
43 | kompose.version: 1.21.0 (992df58d8)
44 | creationTimestamp: null
45 | labels:
46 | io.kompose.service: server
47 | spec:
48 | initContainers:
49 | - name: wait-for-db
50 | image: groundnuty/k8s-wait-for:v1.3
51 | imagePullPolicy: Always
52 | args:
53 | - "service"
54 | - "paws-compose-db"
55 |
56 | containers:
57 | - env:
58 | - name: FLASK_ENV
59 | value: development
60 | image: localhost:5000/src-server:latest
61 | imagePullPolicy: "IfNotPresent"
62 | name: server #paws-compose-server
63 | ports:
64 | - containerPort: 5000
65 | resources: {}
66 | volumeMounts:
67 | - mountPath: /app/static/raw_data
68 | name: src-archive
69 | envFrom:
70 | - secretRef:
71 | name: api-server-pws-7c66bb2cf7
72 | - secretRef:
73 | name: db-pass-762dk6hmhm
74 | restartPolicy: Always
75 | serviceAccountName: ""
76 | imagePullSecrets:
77 | - name: regcred
78 | volumes:
79 | - name: src-archive
80 | persistentVolumeClaim:
81 | claimName: src-archive
82 | status: {}
83 |
--------------------------------------------------------------------------------
/src/k8s_conf/db_server.yaml:
--------------------------------------------------------------------------------
1 | apiVersion: v1
2 | kind: Service
3 | metadata:
4 | annotations:
5 | kompose.cmd: \temp\kompose -f docker-compose.yml convert
6 | kompose.version: 1.21.0 (992df58d8)
7 | creationTimestamp: null
8 | labels:
9 | io.kompose.service: paws-compose-db
10 | name: paws-compose-db
11 | spec:
12 | ports:
13 | - name: "5432"
14 | port: 5432
15 | targetPort: 5432
16 | selector:
17 | io.kompose.service: paws-compose-db
18 | status:
19 | loadBalancer: {}
20 |
21 | ---
22 | apiVersion: v1
23 | kind: ConfigMap
24 | metadata:
25 | name: postgres-configuration
26 | labels:
27 | app: postgres
28 | data:
29 | POSTGRES_DB: paws
30 | POSTGRES_USER: postgres
31 |
32 | ---
33 |
34 | apiVersion: apps/v1
35 | kind: Deployment
36 | metadata:
37 | annotations:
38 | kompose.cmd: \temp\kompose -f docker-compose.yml convert
39 | kompose.version: 1.21.0 (992df58d8)
40 | creationTimestamp: null
41 | labels:
42 | io.kompose.service: paws-compose-db
43 | name: paws-compose-db
44 | spec:
45 | replicas: 1
46 | selector:
47 | matchLabels:
48 | io.kompose.service: paws-compose-db
49 | strategy:
50 | type: Recreate
51 | template:
52 | metadata:
53 | annotations:
54 | kompose.cmd: \temp\kompose -f docker-compose.yml convert
55 | kompose.version: 1.21.0 (992df58d8)
56 | creationTimestamp: null
57 | labels:
58 | io.kompose.service: paws-compose-db
59 | spec:
60 | containers:
61 | - env:
62 | - name: POSTGRES_DB
63 | value: paws
64 | envFrom:
65 | - secretRef:
66 | name: db-pass-762dk6hmhm
67 | image: postgres:11-alpine # localhost:5000/postgres:11.3-alpine
68 | imagePullPolicy: ""
69 | name: paws-compose-db
70 | ports:
71 | - containerPort: 5432
72 | resources: {}
73 | volumeMounts:
74 | - mountPath: /var/lib/postgresql/data
75 | name: postgres
76 | restartPolicy: Always
77 | serviceAccountName: ""
78 | volumes:
79 | - name: postgres
80 | persistentVolumeClaim:
81 | claimName: postgres
82 | status: {}
83 |
--------------------------------------------------------------------------------
/src/k8s_conf/front_end.yaml:
--------------------------------------------------------------------------------
apiVersion: v1
kind: Service
metadata:
  annotations:
    kompose.cmd: \temp\kompose -f docker-compose.yml convert
    kompose.version: 1.21.0 (992df58d8)
  creationTimestamp: null
  labels:
    io.kompose.service: client
  name: client
spec:
  ports:
  - name: "80"
    port: 80
    # NOTE(review): the client Deployment in this file exposes
    # containerPort 3000, so targetPort 80 only works if the container
    # also listens on 80 (e.g. an nginx build) — TODO confirm which
    # port the src-client image actually serves.
    targetPort: 80
  selector:
    io.kompose.service: client
status:
  loadBalancer: {}
20 |
21 | ---
22 | apiVersion: apps/v1
23 | kind: Deployment
24 | metadata:
25 | annotations:
26 | kompose.cmd: \temp\kompose -f docker-compose.yml convert
27 | kompose.version: 1.21.0 (992df58d8)
28 | creationTimestamp: null
29 | labels:
30 | io.kompose.service: client
31 | name: client
32 | spec:
33 | replicas: 1
34 | selector:
35 | matchLabels:
36 | io.kompose.service: client
37 | strategy: {}
38 | template:
39 | metadata:
40 | annotations:
41 | kompose.cmd: \temp\kompose -f docker-compose.yml convert
42 | kompose.version: 1.21.0 (992df58d8)
43 | creationTimestamp: null
44 | labels:
45 | io.kompose.service: client
46 | spec:
47 | containers:
48 | - env:
49 | - name: CHOKIDAR_USEPOLLING
50 | value: "true"
51 | - name: PAWS_API_HOST
52 | value: "server"
53 | image: localhost:5000/src-client:latest
54 | imagePullPolicy: "IfNotPresent"
55 | name: paws-compose-client
56 | ports:
57 | - containerPort: 3000
58 | resources: {}
59 | stdin: true
60 | restartPolicy: Always
61 | serviceAccountName: ""
62 | volumes: null
63 | imagePullSecrets:
64 | - name: regcred
65 | status: {}
66 |
--------------------------------------------------------------------------------
/src/k8s_conf/kind_w_reg.sh:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 | #
3 | # Adapted from:
4 | # https://github.com/kubernetes-sigs/kind/commits/master/site/static/examples/kind-with-registry.sh
5 | #
6 | # Copyright 2020 The Kubernetes Project
7 | #
8 | # Licensed under the Apache License, Version 2.0 (the "License");
9 | # you may not use this file except in compliance with the License.
10 | # You may obtain a copy of the License at
11 | #
12 | # http://www.apache.org/licenses/LICENSE-2.0
13 | #
14 | # Unless required by applicable law or agreed to in writing, software
15 | # distributed under the License is distributed on an "AS IS" BASIS,
16 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
17 | # See the License for the specific language governing permissions and
18 | # limitations under the License.
19 |
20 | set -o errexit
21 |
22 | # desired cluster name; default is "kind"
23 | KIND_CLUSTER_NAME="${KIND_CLUSTER_NAME:-kind}"
24 |
25 | kind_version=$(kind version)
26 | kind_network='kind'
27 | reg_name='kind-registry'
28 | reg_port='5000'
29 | case "${kind_version}" in
30 | "kind v0.7."* | "kind v0.6."* | "kind v0.5."*)
31 | kind_network='bridge'
32 | ;;
33 | esac
34 |
35 | # create registry container unless it already exists
36 | running="$(docker inspect -f '{{.State.Running}}' "${reg_name}" 2>/dev/null || true)"
37 | if [ "${running}" != 'true' ]; then
38 | docker run \
39 | -d --restart=always -p "${reg_port}:5000" --name "${reg_name}" \
40 | registry:2
41 | fi
42 |
43 | reg_host="${reg_name}"
44 | if [ "${kind_network}" = "bridge" ]; then
45 | reg_host="$(docker inspect -f '{{.NetworkSettings.IPAddress}}' "${reg_name}")"
46 | fi
47 | echo "Registry Host: ${reg_host}"
48 |
49 | # create a cluster with the local registry enabled in containerd
50 | cat <>>>>>>>>>>>>>>>> Creating cluster. Now's a good time to go get coffee >>>>>>>>>>>>>>>>>"
6 | kind create cluster
7 |
8 | # Assumes kustomization.yaml lives in server/secets
9 | echo " "; echo ">>>>>>>>>>>>>>>>> Create and add secrets to k8s environment >>>>>>>>>>>>>>>>>"
10 | kubectl apply -k server/secrets
11 |
12 | echo " "; echo ">>>>>>>>>>>>>>>>> Build images >>>>>>>>>>>>>>>>>"
13 | docker-compose build
14 |
15 | # So pods, specifically 'wait_for', have read access to API
16 | echo " "; echo ">>>>>>>>>>>>>>>>> Give pods access to k8s API >>>>>>>>>>>>>>>>>"
17 | kubectl create role pod-reader --verb=get --verb=list --verb=watch --resource=pods,services,deployments
18 | kubectl create rolebinding default-pod-reader --role=pod-reader --serviceaccount=default:default --namespace=default
19 |
20 | echo " "; echo ">>>>>>>>>>>>>>>>> Tag and push client container image >>>>>>>>>>>>>>>>>"
21 | docker tag src_client localhost:5000/src-client:latest
22 | kind load docker-image localhost:5000/src-client:latest
23 |
24 | echo " "; echo ">>>>>>>>>>>>>>>>> Tag and push server container image >>>>>>>>>>>>>>>>>"
25 | docker tag src_server localhost:5000/src-server:latest
26 | kind load docker-image localhost:5000/src-server:latest
27 |
28 | echo " "; echo ">>>>>>>>>>>>>>>>> Apply k8s deployment files to launch containers >>>>>>>>>>>>>>>>>"
29 | kubectl apply -f k8s_conf
30 |
31 | echo " "; echo ">>>>>>>>>>>>>>>>> Wait 20s in hopes that client service/container is live >>>>>>>>>>>>>>>>>"
32 | sleep 20
33 |
34 | echo " "; echo ">>>>>>>>>>>>>>>>> Forwarding port 80 from cluster to localhost >>>>>>>>>>>>>>>>>"
35 | echo "'>>>>>>>>>>>>>>>>> Forwarding...' means it's working and will forward until ^C "
36 | echo ">>>>>>>>>>>>>>>>> To restart port-forwarding, 'kubectl port-forward service/client 80:80 '"
37 | kubectl port-forward service/client 80:80
38 | echo " "; echo ">>>>>>>>>>>>>>>>> ^- Failed? Try running 'kubectl port-forward service/client 80:80' >>>>>>>>>>>>>>>>>"
--------------------------------------------------------------------------------
/src/scheduler/Dockerfile:
--------------------------------------------------------------------------------
# Cron sidecar: runs the schedule in ./cronfile via BusyBox crond.
# NOTE(review): alpine:3.6 is long past end-of-life and receives no
# security updates — consider moving to a supported Alpine release;
# TODO confirm curl/crond behavior is unchanged before bumping.
FROM alpine:3.6

# copy crontabs for root user
COPY cronfile /etc/crontabs/root
RUN apk --no-cache add curl

# start crond with log level 8 in foreground, output to stderr
CMD ["crond", "-f", "-d", "8"]
--------------------------------------------------------------------------------
/src/scheduler/cronfile:
--------------------------------------------------------------------------------
# Hit the server's ingestRawData endpoint at 01:00 every Saturday; the commented-out line below runs every minute for testing.
2 | #* * * * * echo "hello cron" >> /proc/1/fd/1 2> /proc/1/fd/2 #testing
3 | 0 1 * * 6 curl http://paws-compose-server:5000/api/ingestRawData
4 | # Don't remove the empty line at the end of this file. It is required to run the cron job
5 |
6 |
--------------------------------------------------------------------------------
/src/server/.dockerignore:
--------------------------------------------------------------------------------
1 | Dockerfile
2 |
--------------------------------------------------------------------------------
/src/server/Dockerfile:
--------------------------------------------------------------------------------
FROM python:3.10.9-bullseye

WORKDIR /app

# Don't write .pyc files; flush stdout/stderr immediately so container
# logs stream in real time.
ENV PYTHONDONTWRITEBYTECODE=1
ENV PYTHONUNBUFFERED=1

# One apt layer: previously `apt-get update` ran twice in separate
# layers, risking a stale package index between update and install.
RUN apt-get update && apt-get install -y \
        vim \
        python3-dev \
        uwsgi \
        uwsgi-src \
        libcap-dev \
        uwsgi-plugin-python3 \
        libpcre3-dev

RUN pip install --upgrade pip

COPY requirements.txt /

RUN pip install --no-cache-dir -r /requirements.txt

# Build the uWSGI python3.10 plugin. This must be ENV, not
# `RUN export ...`: an exported shell variable does not survive past its
# own RUN layer, so the previous export was a no-op.
ENV PYTHON=python3.10
RUN uwsgi --build-plugin "/usr/src/uwsgi/plugins/python python310"
RUN mv python310_plugin.so /usr/lib/uwsgi/plugins/python310_plugin.so
RUN chmod 666 /usr/lib/uwsgi/plugins/python310_plugin.so

COPY . .

RUN mkdir -p /app/static \
             /app/static/raw_data \
             /app/static/logs \
             /app/static/zipped

# Seed an empty JSON doc. The `A || B` form succeeds either way; the old
# `[ ! -f ... ] && printf` exited non-zero (failing the build) whenever
# the file had been copied in with the source tree.
RUN [ -f /app/static/logs/last_execution.json ] || printf '{}' > /app/static/logs/last_execution.json

RUN chmod -R 777 /app/static

RUN chmod +x bin/startServer.sh
# RUN ufw allow 5000
WORKDIR /app

# Run as a non-root user; pytest cache dir must be writable by it.
RUN useradd -m pawsapp
RUN mkdir -p /app/.pytest_cache/v/cache
RUN chown -R pawsapp:pawsapp /app/.pytest_cache/v/cache
USER pawsapp

CMD bin/startServer.sh
#>> start.log 2>&1
--------------------------------------------------------------------------------
/src/server/alembic.ini:
--------------------------------------------------------------------------------
1 | # A generic, single database configuration.
2 |
3 | [alembic]
4 | # path to migration scripts
5 | script_location = alembic
6 |
7 | # template used to generate migration files
8 | # file_template = %%(rev)s_%%(slug)s
9 |
10 | # timezone to use when rendering the date
11 | # within the migration file as well as the filename.
12 | # string value is passed to dateutil.tz.gettz()
13 | # leave blank for localtime
14 | # timezone =
15 |
16 | # max length of characters to apply to the
17 | # "slug" field
18 | # truncate_slug_length = 40
19 |
20 | # set to 'true' to run the environment during
21 | # the 'revision' command, regardless of autogenerate
22 | # revision_environment = false
23 |
24 | # set to 'true' to allow .pyc and .pyo files without
25 | # a source .py file to be detected as revisions in the
26 | # versions/ directory
27 | # sourceless = false
28 |
29 | # version location specification; this defaults
30 | # to alembic/versions. When using multiple version
31 | # directories, initial revisions must be specified with --version-path
32 | # version_locations = %(here)s/bar %(here)s/bat alembic/versions
33 |
34 | # the output encoding used when revision files
35 | # are written from script.py.mako
36 | # output_encoding = utf-8
37 |
38 | # Container
39 | sqlalchemy.url = postgresql://postgres:PASSWORD@paws-compose-db/paws
40 |
41 | # Local
42 | # sqlalchemy.url = postgresql://postgres:thispasswordisverysecure@localhost/paws
43 |
44 |
45 | [post_write_hooks]
46 | # post_write_hooks defines scripts or Python functions that are run
47 | # on newly generated revision scripts. See the documentation for further
48 | # detail and examples
49 |
50 | # format using "black" - use the console_scripts runner, against the "black" entrypoint
51 | # hooks=black
52 | # black.type=console_scripts
53 | # black.entrypoint=black
54 | # black.options=-l 79
55 |
56 | # Logging configuration
57 | [loggers]
58 | keys = root,sqlalchemy,alembic
59 |
60 | [handlers]
61 | keys = console
62 |
63 | [formatters]
64 | keys = generic
65 |
66 | [logger_root]
67 | level = WARN
68 | handlers = console
69 | qualname =
70 |
71 | [logger_sqlalchemy]
72 | level = WARN
73 | handlers =
74 | qualname = sqlalchemy.engine
75 |
76 | [logger_alembic]
77 | level = INFO
78 | handlers =
79 | qualname = alembic
80 |
81 | [handler_console]
82 | class = StreamHandler
83 | args = (sys.stderr,)
84 | level = NOTSET
85 | formatter = generic
86 |
87 | [formatter_generic]
88 | format = %(levelname)-5.5s [%(name)s] %(message)s
89 | datefmt = %H:%M:%S
90 |
--------------------------------------------------------------------------------
/src/server/alembic/README:
--------------------------------------------------------------------------------
1 | Generic single-database configuration.
--------------------------------------------------------------------------------
/src/server/alembic/env.py:
--------------------------------------------------------------------------------
1 | from logging.config import fileConfig
2 |
3 | from sqlalchemy import engine_from_config
4 | from sqlalchemy import pool
5 |
6 | from os import environ
7 |
8 | from alembic import context
9 |
10 | # this is the Alembic Config object, which provides
11 | # access to the values within the .ini file in use.
12 | config = context.config
13 |
14 | # Interpret the config file for Python logging.
15 | # This line sets up loggers basically.
16 | fileConfig(config.config_file_name)
17 |
18 | # add your model's MetaData object here
19 | # for 'autogenerate' support
20 | # from myapp import mymodel
21 | # target_metadata = mymodel.Base.metadata
22 | target_metadata = None
23 |
24 | # other values from the config, defined by the needs of env.py,
25 | # can be acquired:
26 | # my_important_option = config.get_main_option("my_important_option")
27 | # ... etc.
28 |
29 |
# Build the DB URL from the environment rather than alembic.ini so the
# password never lives in a checked-in file. quote_plus is required: a
# password containing '@', '/' or ':' would otherwise corrupt the URL.
from urllib.parse import quote_plus

PG_URL = (
    'postgresql://postgres:'
    + quote_plus(environ['POSTGRES_PASSWORD'])  # KeyError here means the env var is unset
    + '@paws-compose-db/paws'
)
35 |
36 |
def run_migrations_offline():
    """Run migrations in 'offline' mode.

    Configures the context with just a URL and no Engine, so no DBAPI
    needs to be available. Calls to context.execute() emit the given
    string to the script output instead of hitting a database.
    """
    # Gather the configuration in one mapping, then apply it.
    offline_options = {
        "url": PG_URL,  # built from the environment above, not alembic.ini
        "target_metadata": target_metadata,
        "literal_binds": True,
        "dialect_opts": {"paramstyle": "named"},
    }
    context.configure(**offline_options)

    with context.begin_transaction():
        context.run_migrations()
60 |
61 |
def run_migrations_online():
    """Run migrations in 'online' mode.

    Creates a real Engine and associates a live connection with the
    migration context.
    """
    engine = engine_from_config(
        config.get_section(config.config_ini_section),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
        url=PG_URL,  # overrides any sqlalchemy.url from alembic.ini
    )

    with engine.connect() as connection:
        context.configure(connection=connection, target_metadata=target_metadata)
        with context.begin_transaction():
            context.run_migrations()
83 |
84 |
# Module entry point: alembic imports this file and the mode chosen on
# the command line decides which migration path runs.
if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
89 |
--------------------------------------------------------------------------------
/src/server/alembic/generate_rfm_mapping.py:
--------------------------------------------------------------------------------
1 | import itertools
2 | import structlog
3 | logger = structlog.get_logger()
4 |
def get_all_combinations(chars, repeat=3):
    """Yield every length-`repeat` tuple drawn from `chars` (Cartesian power).

    Args:
        chars: iterable of candidate characters, e.g. '12345'.
        repeat: tuple length. Defaults to 3, preserving the original
            hard-coded R/F/M triple behavior for existing callers.

    Yields:
        Tuples of length `repeat`, in itertools.product order.
    """
    yield from itertools.product(chars, repeat=repeat)
7 |
8 |
def convertTuple(tup):
    """Concatenate a tuple of strings into a single string.

    Replaces the original manual loop, which shadowed the `str` builtin
    and rebuilt the string on every iteration (quadratic time).
    """
    return ''.join(tup)
14 |
15 |
def start():
    """Write populate_rfm_mapping.sql: one INSERT per 3-digit RFM score.

    Every score over digits 1-5 is labeled by recency (first digit) and
    the mean of the frequency and monetary digits (last two), each
    segment carrying a background/text color pair. Output file is
    created in the current working directory.
    """
    mapping_rows = [
        '''-- Run this script in your SQL query tool
-- Run truncate command if this table is already populated
-- TRUNCATE TABLE rfm_mapping;
-- BEGIN;
-- Fields are (rfm_score, label, (background) color, text color)
'''
    ]

    # All 125 three-character scores: '111', '112', ..., '555'.
    combinations = [convertTuple(x) for x in get_all_combinations('12345')]

    for rfm_score in combinations:
        label = ''
        background_color = ''
        color_text = ''
        # Recency stands alone; frequency and monetary are averaged.
        r_m_average = (int(rfm_score[1]) + (int(rfm_score[2]))) / 2
        r = int(rfm_score[0])

        # Segment grid: rows = recency band, columns = impact band.
        if r == 5 and (3 < r_m_average <= 5):
            label = 'High impact, engaged'
            background_color = '#034858'
            color_text = '#ffffff'
        elif r == 5 and (1 <= r_m_average <= 3):
            label = 'Low impact, engaged'
            background_color = '#47b8a7'
            color_text = '#000000'
        elif (3 <= r <= 4) and (3 < r_m_average <= 5):
            label = 'High impact, slipping'
            background_color = '#990000'
            color_text = '#ffffff'
        elif (3 <= r <= 4) and (1 <= r_m_average <= 3):
            label = 'Low impact, slipping'
            background_color = '#f77d4e'
            color_text = '#000000'
        elif (1 <= r <= 2) and (3 < r_m_average <= 5):
            label = 'High impact, disengaged'
            background_color = '#cf3030'
            color_text = '#ffffff'
        elif (1 <= r <= 2) and (1 <= r_m_average <= 3):
            label = 'Low impact, disengaged'
            background_color = '#eed0aa'
            color_text = '#000000'

        mapping_rows.append(
            "insert into rfm_mapping values('{}', '{}','{}', '{}');".format(rfm_score, label, background_color,
                                                                            color_text))

    mapping_rows.append('-- COMMIT;')

    with open('populate_rfm_mapping.sql', 'w') as f:
        for item in mapping_rows:
            f.write("%s\n" % item)

    logger.debug('Completed generate_rfm_mapping')
77 |
78 | start()
79 |
--------------------------------------------------------------------------------
/src/server/alembic/insert_rfm_edges.sql:
--------------------------------------------------------------------------------
-- Seed the kv_unique key/value store with the RFM bucket edges.
-- Each component ("r" recency, "f" frequency, "m" monetary) maps a 1-5
-- score to a threshold value -- presumably the lower edge of that score's
-- bucket; NOTE(review): confirm against the RFM scoring code before changing.
INSERT INTO "public"."kv_unique"( "keycol", "valcol") VALUES
( 'rfm_edges',
'{
"r":{"5": 0, "4": 262, "3": 1097, "2": 1910, "1": 2851},
"f": {"1": 0, "2": 1, "3": 2, "4": 3, "5": 4},
"m": {"1": 0.0, "2": 50.0, "3": 75.0, "4": 100.0, "5": 210.0}
}'
);
9 |
--------------------------------------------------------------------------------
/src/server/alembic/script.py.mako:
--------------------------------------------------------------------------------
1 | """${message}
2 |
3 | Revision ID: ${up_revision}
4 | Revises: ${down_revision | comma,n}
5 | Create Date: ${create_date}
6 |
7 | """
8 | from alembic import op
9 | import sqlalchemy as sa
10 | ${imports if imports else ""}
11 |
12 | # revision identifiers, used by Alembic.
13 | revision = ${repr(up_revision)}
14 | down_revision = ${repr(down_revision)}
15 | branch_labels = ${repr(branch_labels)}
16 | depends_on = ${repr(depends_on)}
17 |
18 |
19 | def upgrade():
20 | ${upgrades if upgrades else "pass"}
21 |
22 |
23 | def downgrade():
24 | ${downgrades if downgrades else "pass"}
25 |
--------------------------------------------------------------------------------
/src/server/alembic/versions/05e0693f8cbb_key_value_table.py:
--------------------------------------------------------------------------------
1 | """key/value table
2 |
3 | Revision ID: 05e0693f8cbb
4 | Revises: 6b8cf99be000
5 | Create Date: 2021-03-18 11:35:43.512082
6 |
7 | """
8 | from alembic import op
9 | import sqlalchemy as sa
10 |
11 |
12 | # revision identifiers, used by Alembic.
13 | revision = '05e0693f8cbb'
14 | down_revision = '6b8cf99be000'
15 | branch_labels = None
16 | depends_on = None
17 |
18 |
def upgrade():
    """Create kv_unique, a generic key/value store with unique keys."""
    kv_columns = (
        sa.Column('_id', sa.Integer, primary_key=True),
        sa.Column('keycol', sa.String(50), nullable=False, unique=True),
        sa.Column('valcol', sa.String(65536), nullable=True),
    )
    op.create_table('kv_unique', *kv_columns)

    # op.create_index('kvk_ix', 'kv_unique', ['key'], unique=True)
28 |
29 |
def downgrade():
    # Reverse upgrade(): drop the key/value table.
    op.drop_table('kv_unique')
32 |
--------------------------------------------------------------------------------
/src/server/alembic/versions/36c4ecbfd11a_add_pdp_users_full_name.py:
--------------------------------------------------------------------------------
1 | """Add pdp_users full_name
2 |
3 | Revision ID: 36c4ecbfd11a
4 | Revises: 7138d52f92d6
5 | Create Date: 2020-12-18 15:28:17.367718
6 |
7 | """
8 | from alembic import op
9 | import sqlalchemy as sa
10 |
11 |
12 | # revision identifiers, used by Alembic.
13 | revision = "36c4ecbfd11a"
14 | down_revision = "7138d52f92d6"
15 | branch_labels = None
16 | depends_on = None
17 |
18 |
def upgrade():
    # Add an unbounded full_name string column to pdp_users.
    op.add_column("pdp_users", sa.Column("full_name", sa.String))
21 |
22 |
def downgrade():
    # Reverse upgrade(): remove the full_name column.
    op.drop_column("pdp_users", "full_name")
25 |
--------------------------------------------------------------------------------
/src/server/alembic/versions/40be910424f0_update_rfm_mapping_remove_rfm_edges.py:
--------------------------------------------------------------------------------
1 | """Update rfm_mapping, remove rfm_edges
2 |
3 | Revision ID: 40be910424f0
4 | Revises: 57b547e9b464
5 | Create Date: 2021-08-08 17:26:40.622536
6 |
7 | """
8 | from alembic import op
9 | import sqlalchemy as sa
10 |
11 |
12 | # revision identifiers, used by Alembic.
13 | revision = '40be910424f0'
14 | down_revision = '57b547e9b464'
15 | branch_labels = None
16 | depends_on = None
17 |
18 |
def upgrade():
    # The rfm_edges table was never used; edges live in kv_unique instead
    # (see insert_rfm_edges.sql).
    op.drop_table("rfm_edges")  # Unneeded, unused
    # Text color to use when rendering each RFM label.
    op.add_column('rfm_mapping',
                  sa.Column('rfm_text_color', sa.String())
                  )
24 |
25 |
def downgrade():
    """Reverse upgrade(): remove the added text-color column and recreate rfm_edges.

    The original downgrade recreated rfm_edges but forgot to drop the
    rfm_text_color column added by upgrade(), leaving the schema inconsistent
    after a round trip.
    """
    op.drop_column("rfm_mapping", "rfm_text_color")
    op.create_table(
        "rfm_edges",
        sa.Column("component", sa.String(), primary_key=True),
        sa.Column("edge_string", sa.String(), nullable=False)
    )
32 |
--------------------------------------------------------------------------------
/src/server/alembic/versions/41da831646e4_pdp_users_role_fk_from_roles.py:
--------------------------------------------------------------------------------
1 | """pdp_users.role FK from roles
2 |
3 | Revision ID: 41da831646e4
4 | Revises: 72d50d531bd5
5 | Create Date: 2020-12-16 15:53:28.514053
6 |
7 | """
8 | from alembic import op
9 | import sqlalchemy as sa
10 |
11 |
12 | # revision identifiers, used by Alembic.
13 | revision = "41da831646e4"
14 | down_revision = "72d50d531bd5"
15 | branch_labels = None
16 | depends_on = None
17 |
18 |
def upgrade():
    # Replace the plain-string role column with an integer FK into
    # pdp_user_roles.  Existing role values are discarded (drop + re-add).
    op.drop_column("pdp_users", "role")
    op.add_column(
        "pdp_users", sa.Column("role", sa.Integer, sa.ForeignKey("pdp_user_roles._id"))
    )
24 |
25 |
def downgrade():
    # NOTE(review): irreversible as written.  upgrade() discards the original
    # string values, and re-adding a NOT NULL string column to a populated
    # table would fail, so this appears to be a deliberate no-op -- confirm
    # before relying on downgrade past this revision.
    pass
28 |
--------------------------------------------------------------------------------
/src/server/alembic/versions/494e064d69a3_tables_for_rfm_data.py:
--------------------------------------------------------------------------------
1 | """Tables for RFM data
2 |
3 | Revision ID: 494e064d69a3
4 | Revises: d0841384d5d7
5 | Create Date: 2021-07-20 19:45:29.418756
6 |
7 | """
8 | from alembic import op
9 | import sqlalchemy as sa
10 |
11 |
12 | # revision identifiers, used by Alembic.
13 | revision = '494e064d69a3'
14 | down_revision = 'd0841384d5d7'
15 | branch_labels = None
16 | depends_on = None
17 |
18 |
def upgrade():
    """Create the two RFM tables: per-contact scores and score -> label/color map."""
    # One row per matched contact, holding its 3-character RFM score.
    score_columns = (
        sa.Column("matching_id", sa.Integer, primary_key=True),
        sa.Column("rfm_score", sa.String(3), nullable=False),
    )
    op.create_table("rfm_scores", *score_columns)

    # Lookup from an RFM value to its display label and color.
    mapping_columns = (
        sa.Column("rfm_value", sa.String(3), primary_key=True),
        sa.Column("rfm_label", sa.String(), nullable=True),
        sa.Column("rfm_color", sa.String(), nullable=True, default='0xe0e0e0'),
    )
    op.create_table("rfm_mapping", *mapping_columns)
32 |
33 |
def downgrade():
    # Drop both RFM tables created by upgrade().
    op.drop_table("rfm_scores")
    op.drop_table("rfm_mapping")
37 |
--------------------------------------------------------------------------------
/src/server/alembic/versions/57b547e9b464_create_rfm_edges_table.py:
--------------------------------------------------------------------------------
1 | """Create RFM edges table
2 |
3 | Revision ID: 57b547e9b464
4 | Revises: 494e064d69a3
5 | Create Date: 2021-07-20 21:39:00.438116
6 |
7 | """
8 | from alembic import op
9 | import sqlalchemy as sa
10 |
11 |
12 | # revision identifiers, used by Alembic.
13 | revision = '57b547e9b464'
14 | down_revision = '494e064d69a3'
15 | branch_labels = None
16 | depends_on = None
17 |
18 |
def upgrade():
    """Create rfm_edges, holding the edge definitions for each RFM component."""
    edge_columns = (
        sa.Column("component", sa.String(), primary_key=True),
        sa.Column("edge_string", sa.String(), nullable=False),
    )
    op.create_table("rfm_edges", *edge_columns)
25 |
26 |
def downgrade():
    # Reverse upgrade(): drop the edges table.
    op.drop_table("rfm_edges")
29 |
--------------------------------------------------------------------------------
/src/server/alembic/versions/6b8cf99be000_add_user_journal_table.py:
--------------------------------------------------------------------------------
1 | """Add user journal table
2 |
3 | Revision ID: 6b8cf99be000
4 | Revises: 36c4ecbfd11a
5 | Create Date: 2020-12-21 15:08:07.784568
6 |
7 | """
8 | from alembic import op
9 | import sqlalchemy as sa
10 | from sqlalchemy.sql import func
11 |
12 |
13 | # revision identifiers, used by Alembic.
14 | revision = "6b8cf99be000"
15 | down_revision = "36c4ecbfd11a"
16 | branch_labels = None
17 | depends_on = None
18 |
19 |
def upgrade():
    """Create pdp_user_journal, an audit-style log of user events."""
    # "stamp" defaults to the insertion time on the database server.
    journal_columns = (
        sa.Column("_id", sa.Integer, primary_key=True),
        sa.Column("stamp", sa.DateTime, nullable=False, server_default=func.now()),
        sa.Column("username", sa.String(50), nullable=False),
        sa.Column("event_type", sa.String(50)),
        sa.Column("detail", sa.String(120)),
    )
    op.create_table("pdp_user_journal", *journal_columns)
29 |
30 |
def downgrade():
    # Reverse upgrade(): drop the journal table.
    op.drop_table('pdp_user_journal')
33 |
--------------------------------------------------------------------------------
/src/server/alembic/versions/7138d52f92d6_add_uniqueness_constraints.py:
--------------------------------------------------------------------------------
1 | """add uniqueness constraints
2 |
3 | Revision ID: 7138d52f92d6
4 | Revises: f3d30db17bed
5 | Create Date: 2020-12-17 17:31:29.154789
6 |
7 | """
8 | from alembic import op
9 | import sqlalchemy as sa
10 |
11 |
12 | # revision identifiers, used by Alembic.
13 | revision = "7138d52f92d6"
14 | down_revision = "f3d30db17bed"
15 | branch_labels = None
16 | depends_on = None
17 |
18 |
def upgrade():
    # Usernames and role names must be unique.
    op.create_unique_constraint("uq_username", "pdp_users", ["username"])
    op.create_unique_constraint("uq_role", "pdp_user_roles", ["role"])
22 |
23 |
def downgrade():
    """Reverse upgrade() by dropping the two uniqueness constraints.

    The original downgrade was a no-op even though these constraints are
    cleanly reversible.
    """
    op.drop_constraint("uq_username", "pdp_users")
    op.drop_constraint("uq_role", "pdp_user_roles")
26 |
--------------------------------------------------------------------------------
/src/server/alembic/versions/72d50d531bd5_fix_pdp_users_timestamp.py:
--------------------------------------------------------------------------------
1 | """Fix pdp_users timestamp
2 |
3 | Revision ID: 72d50d531bd5
4 | Revises: 783cabf889d9
5 | Create Date: 2020-12-16 15:22:54.734670
6 |
7 | """
8 | from alembic import op
9 | import sqlalchemy as sa
10 | from sqlalchemy.sql import func
11 |
12 | # revision identifiers, used by Alembic.
13 | revision = "72d50d531bd5"
14 | down_revision = "783cabf889d9"
15 | branch_labels = None
16 | depends_on = None
17 |
18 |
def upgrade():
    # Recreate "created" so its default is a real server-side now() function
    # rather than the string default used by the initial schema.
    op.drop_column("pdp_users", "created")
    op.add_column(
        "pdp_users",
        sa.Column("created", sa.DateTime, nullable=False, server_default=func.now()),
    )
25 |
26 |
def downgrade():
    """Restore the pre-upgrade column definition (string server default).

    The original body was a bare ``sa.Column(...)`` expression -- a silent
    no-op that modified nothing.  Actually drop and re-add the column the
    way upgrade() does, with the original string default.
    """
    op.drop_column("pdp_users", "created")
    op.add_column(
        "pdp_users",
        sa.Column("created", sa.DateTime, nullable=False, server_default="now()"),
    )
29 |
--------------------------------------------------------------------------------
/src/server/alembic/versions/783cabf889d9_inital_schema_setup.py:
--------------------------------------------------------------------------------
1 | """inital schema setup
2 |
3 | Revision ID: 783cabf889d9
4 | Revises:
5 | Create Date: 2020-12-16 01:47:43.686881
6 |
7 | """
8 | from sqlalchemy.sql.expression import null
9 | from alembic import op
10 | import sqlalchemy as sa
11 |
12 |
13 |
14 | # revision identifiers, used by Alembic.
15 | revision = '783cabf889d9'
16 | down_revision = None
17 | branch_labels = None
18 | depends_on = None
19 |
20 |
def upgrade():
    """Initial schema: role lookup table plus the application users table."""
    # Role lookup table.
    op.create_table(
        'pdp_user_roles',
        sa.Column('_id', sa.Integer, primary_key=True),
        sa.Column('role', sa.String(50), nullable=False),
    )

    # Application users.  "role" is a plain string here; revision 41da831646e4
    # later replaces it with a foreign key into pdp_user_roles.
    user_columns = (
        sa.Column('_id', sa.Integer, primary_key=True),
        sa.Column('username', sa.String(50), nullable=False),
        sa.Column('role', sa.String(50), nullable=False),
        sa.Column('password', sa.String(50), nullable=False),
        sa.Column('active', sa.String(50), nullable=False),
        sa.Column('created', sa.DateTime, nullable=False, server_default='now()'),
    )
    op.create_table('pdp_users', *user_columns)
37 |
def downgrade():
    """Drop the tables created by upgrade(), in reverse creation order.

    The original downgrade was a no-op, which left both tables behind when
    downgrading to base.
    """
    op.drop_table('pdp_users')
    op.drop_table('pdp_user_roles')
--------------------------------------------------------------------------------
/src/server/alembic/versions/90f471ac445c_create_sl_events.py:
--------------------------------------------------------------------------------
1 | """Shelterluv animal events table
2 |
3 | Revision ID: 90f471ac445c
4 | Revises: 9687db7928ee
5 | Create Date: 2022-09-04 17:21:51.511030
6 |
7 | """
8 | from alembic import op
9 | import sqlalchemy as sa
10 |
11 |
12 | # revision identifiers, used by Alembic.
13 | revision = '90f471ac445c'
14 | down_revision = '9687db7928ee'
15 | branch_labels = None
16 | depends_on = None
17 |
18 |
def upgrade():
    """Create Shelterluv event tables: event-type lookup plus per-animal events."""
    # Lookup table of event-type names.
    op.create_table(
        "sl_event_types",
        sa.Column("id", sa.Integer, autoincrement=True, primary_key=True),
        sa.Column("event_name", sa.Text, nullable=False),
    )

    # One row per animal event; "time" is stored as a big integer.
    event_columns = (
        sa.Column("id", sa.Integer, autoincrement=True, primary_key=True),
        sa.Column("person_id", sa.Integer, nullable=False),
        sa.Column("animal_id", sa.Integer, nullable=False),
        sa.Column("event_type", sa.Integer, sa.ForeignKey('sl_event_types.id')),
        sa.Column("time", sa.BigInteger, nullable=False),
    )
    op.create_table("sl_animal_events", *event_columns)

    # Events are queried by person, so index person_id.
    op.create_index('sla_idx', 'sl_animal_events', ['person_id'])
36 |
37 |
38 |
def downgrade():
    # Drop dependent table first (sl_animal_events has an FK to sl_event_types).
    op.drop_table("sl_animal_events")
    op.drop_table("sl_event_types")
--------------------------------------------------------------------------------
/src/server/alembic/versions/9687db7928ee_shelterluv_animals.py:
--------------------------------------------------------------------------------
1 | """Create SL_animals table
2 |
3 | Revision ID: 9687db7928ee
4 | Revises: 45a668fa6325
5 | Create Date: 2021-12-24 21:15:33.399197
6 |
7 | """
8 | from alembic import op
9 | import sqlalchemy as sa
10 |
11 |
12 | # revision identifiers, used by Alembic.
13 | revision = '9687db7928ee'
14 | down_revision = '45a668fa6325'
15 | branch_labels = None
16 | depends_on = None
17 |
18 |
def upgrade():
    """Create shelterluv_animals, a local copy of Shelterluv animal records."""
    animal_columns = (
        sa.Column("id", sa.BigInteger, primary_key=True),
        sa.Column("local_id", sa.BigInteger, nullable=False),
        sa.Column("name", sa.Text, nullable=False),
        sa.Column("type", sa.Text, nullable=False),
        sa.Column("dob", sa.BigInteger, nullable=False),
        sa.Column("update_stamp", sa.BigInteger, nullable=False),
        sa.Column("photo", sa.Text, nullable=False),
    )
    op.create_table("shelterluv_animals", *animal_columns)
30 |
31 |
def downgrade():
    # Reverse upgrade(): drop the animals table.
    op.drop_table("shelterluv_animals")
34 |
--------------------------------------------------------------------------------
/src/server/alembic/versions/a3ba63dee8f4_rmv_details_size_limit.py:
--------------------------------------------------------------------------------
1 | """Remove execution_status.details field size limit
2 |
3 | Revision ID: a3ba63dee8f4
4 | Revises: 40be910424f0
5 | Create Date: 2021-09-18 18:14:48.044985
6 |
7 | """
8 | from alembic import op
9 | import sqlalchemy as sa
10 |
11 |
12 | # revision identifiers, used by Alembic.
13 | revision = 'a3ba63dee8f4'
14 | down_revision = '40be910424f0'
15 | branch_labels = None
16 | depends_on = None
17 |
18 |
def upgrade():
    # String(None) renders as an unbounded VARCHAR, removing the 128-char cap.
    op.alter_column('execution_status', "details", type_=sa.String(None))
21 |
22 |
23 |
def downgrade():
    # Restore the original 128-character limit (may fail if longer rows exist).
    op.alter_column('execution_status', "details", type_=sa.String(128))
26 |
27 |
--------------------------------------------------------------------------------
/src/server/alembic/versions/bfb1262d3195_create_execution_status_table.py:
--------------------------------------------------------------------------------
1 | """create execution status table
2 |
3 | Revision ID: bfb1262d3195
4 | Revises: 05e0693f8cbb
5 | Create Date: 2021-05-28 16:12:40.561829
6 |
7 | """
8 | from alembic import op
9 | import sqlalchemy as sa
10 | from sqlalchemy.sql.sqltypes import Integer
11 | from sqlalchemy.sql import func
12 |
13 | # revision identifiers, used by Alembic.
14 | revision = 'bfb1262d3195'
15 | down_revision = '05e0693f8cbb'
16 | branch_labels = None
17 | depends_on = None
18 |
19 |
def upgrade():
    # Tracks the state of pipeline jobs; one row per job_id (see uq_job_id below).
    op.create_table(
        "execution_status",
        sa.Column("_id", sa.Integer, primary_key=True),
        sa.Column("job_id", sa.Integer, nullable=False),
        sa.Column("stage", sa.String(32), nullable=False),
        sa.Column("status", sa.String(32), nullable=False),
        sa.Column("details", sa.String(128), nullable=False),
        sa.Column("update_stamp", sa.DateTime, nullable=False, server_default=func.now())
    )

    # Trigger function that refreshes update_stamp on every insert/update.
    op.execute("""CREATE FUNCTION last_upd_trig() RETURNS trigger
    LANGUAGE plpgsql AS
    $$BEGIN
    NEW.update_stamp := current_timestamp;
    RETURN NEW;
    END;$$;""")

    op.execute("""CREATE TRIGGER last_upd_trigger
    BEFORE INSERT OR UPDATE ON execution_status
    FOR EACH ROW
    EXECUTE PROCEDURE last_upd_trig();"""
    )  # Postgres-specific, obviously

    # One status row per job.
    op.create_unique_constraint("uq_job_id", "execution_status", ["job_id"])
45 |
def downgrade():
    # Dropping the table also drops its trigger; the trigger function itself
    # must be dropped separately.
    op.drop_table("execution_status")
    op.execute("DROP FUNCTION last_upd_trig()")
--------------------------------------------------------------------------------
/src/server/alembic/versions/d0841384d5d7_explicitly_create_vshifts.py:
--------------------------------------------------------------------------------
1 | """Explicitly create vshifts
2 |
3 | Revision ID: d0841384d5d7
4 | Revises: e3ef522bd3d9
5 | Create Date: 2021-07-05 22:05:52.743905
6 |
7 | """
8 | from alembic import op
9 | import sqlalchemy as sa
10 |
11 |
12 | # revision identifiers, used by Alembic.
13 | revision = 'd0841384d5d7'
14 | down_revision = 'e3ef522bd3d9'
15 | branch_labels = None
16 | depends_on = None
17 |
18 |
def upgrade():
    # Volgistics volunteer shifts.
    op.create_table(
        "volgisticsshifts",
        sa.Column("_id", sa.Integer, primary_key=True),
        sa.Column("volg_id", sa.Integer, nullable=False),
        sa.Column("assignment", sa.String(), nullable=True),
        sa.Column("site", sa.String(), nullable=True),
        sa.Column("from_date", sa.Date, nullable=False),
        sa.Column("hours", sa.DECIMAL, nullable=False)
    )

    # Shifts are looked up by volunteer id.
    op.execute("""CREATE INDEX vs_volg_id_idx
    ON public.volgisticsshifts USING btree (volg_id);"""
    )

    # Guard against duplicate shift rows (later removed by revision d80cb6df0fa2).
    op.create_unique_constraint("uq_shift", "volgisticsshifts", ["volg_id", "assignment", "from_date", "hours"])
35 |
36 |
def downgrade():
    # Dropping the table removes its index and constraint with it.
    op.drop_table("volgisticsshifts")
39 |
--------------------------------------------------------------------------------
/src/server/alembic/versions/d80cb6df0fa2_rmv_shifts_uniqueness_constraint.py:
--------------------------------------------------------------------------------
1 | """rmv shifts uniqueness constraint
2 |
3 | Revision ID: d80cb6df0fa2
4 | Revises: 90f471ac445c
5 | Create Date: 2023-03-18 16:22:23.282568
6 |
7 | """
8 | from alembic import op
9 | import sqlalchemy as sa
10 |
11 |
12 | # revision identifiers, used by Alembic.
13 | revision = 'd80cb6df0fa2'
14 | down_revision = '90f471ac445c'
15 | branch_labels = None
16 | depends_on = None
17 |
# It's probably more likely that a duplicate row is actually a real shift with a bad (duplicated)
# day entry -- e.g. Saturday, Saturday instead of Saturday, Sunday.
# We really care about the last shift, so this is not critical.
21 |
def upgrade():
    # Remove the uniqueness guard; see the module comment above for rationale.
    op.drop_constraint("uq_shift", "volgisticsshifts")
24 |
def downgrade():
    # op.create_unique_constraint( "uq_shift", "volgisticsshifts", ["volg_id", "assignment", "from_date", "hours"] )
    # Restoring the constraint will fail if any duplicate rows exist.  Running
    #   ALTER TABLE "public"."volgisticsshifts" ADD CONSTRAINT "uq_shift" UNIQUE( "volg_id", "assignment", "from_date", "hours" );
    # directly will report the duplicates so they can be fixed first.
    pass
33 |
--------------------------------------------------------------------------------
/src/server/alembic/versions/e3ef522bd3d9_explicit_create_sfd.py:
--------------------------------------------------------------------------------
1 | """Explicit creation for salesforcedonations
2 |
3 | Revision ID: e3ef522bd3d9
4 | Revises: bfb1262d3195
5 | Create Date: 2021-06-18 21:55:56.651101
6 |
7 | """
8 | from alembic import op
9 | import sqlalchemy as sa
10 |
11 |
12 | # revision identifiers, used by Alembic.
13 | revision = 'e3ef522bd3d9'
14 | down_revision = 'bfb1262d3195'
15 | branch_labels = None
16 | depends_on = None
17 |
18 |
def upgrade():
    # Salesforce donation records.
    op.create_table(
        "salesforcedonations",
        sa.Column("_id", sa.Integer, primary_key=True),
        sa.Column("opp_id", sa.String(), nullable=False),
        sa.Column("recurring_donor", sa.Boolean, nullable=False),
        sa.Column("primary_contact", sa.String(), nullable=True),
        sa.Column("contact_id", sa.String(), nullable=False),
        sa.Column("amount", sa.DECIMAL, nullable=False),
        sa.Column("close_date", sa.Date, nullable=False),
        sa.Column("donation_type", sa.String(), nullable=True),
        sa.Column("primary_campaign_source", sa.String(), nullable=True)
    )

    # Donations are looked up by contact.
    op.execute("""CREATE INDEX sfd_contact_id_idx
    ON public.salesforcedonations USING btree (contact_id);"""
    )

    # Guard against ingesting the same donation twice.
    op.create_unique_constraint("uq_donation", "salesforcedonations", ["opp_id", "contact_id", "close_date", "amount"])
38 |
39 |
def downgrade():
    # Dropping the table removes its index and constraint with it.
    op.drop_table("salesforcedonations")
--------------------------------------------------------------------------------
/src/server/alembic/versions/f3d30db17bed_change_pdp_users_password_to_bytea.py:
--------------------------------------------------------------------------------
1 | """Change pdp_users.password to bytea
2 |
3 | Revision ID: f3d30db17bed
4 | Revises: 41da831646e4
5 | Create Date: 2020-12-16 21:26:08.548724
6 |
7 | """
8 | from alembic import op
9 | import sqlalchemy as sa
10 |
11 |
12 | # revision identifiers, used by Alembic.
13 | revision = "f3d30db17bed"
14 | down_revision = "41da831646e4"
15 | branch_labels = None
16 | depends_on = None
17 |
18 |
def upgrade():
    # Password storage becomes binary; existing string values are discarded
    # (column is dropped, then re-added as bytea).
    op.drop_column("pdp_users", "password")
    op.add_column("pdp_users", sa.Column("password", sa.LargeBinary, nullable=False))
22 |
23 |
def downgrade():
    """Revert the bytea password column back to the original String(50) column.

    The original code passed the Column pieces as separate positional args to
    ``op.add_column`` (not the Alembic API -- it takes a table name and a
    single Column) and carried a stray trailing comma; it would raise at
    runtime rather than restore the column.
    """
    op.drop_column("pdp_users", "password")
    op.add_column("pdp_users", sa.Column("password", sa.String(50), nullable=False))
27 |
--------------------------------------------------------------------------------
/src/server/alembic/versions/fc7325372396_merge_heads.py:
--------------------------------------------------------------------------------
1 | """Merges heads '8f4, '28b
2 |
3 | Revision ID: fc7325372396
4 | Revises: a3ba63dee8f4, fd187937528b
5 | Create Date: 2022-01-17 22:05:05.824901
6 |
7 | """
8 | from alembic import op
9 | import sqlalchemy as sa
10 |
11 |
12 | # revision identifiers, used by Alembic.
13 | revision = 'fc7325372396'
14 | down_revision = ('a3ba63dee8f4', 'fd187937528b')
15 | branch_labels = None
16 | depends_on = None
17 |
18 |
def upgrade():
    # Merge-only revision (joins heads a3ba63dee8f4 and fd187937528b): nothing to apply.
    pass
21 |
22 |
def downgrade():
    # Merge-only revision: nothing to revert.
    pass
25 |
--------------------------------------------------------------------------------
/src/server/alembic/versions/fd187937528b_create_pdp_contacts_table.py:
--------------------------------------------------------------------------------
1 | """create pdp_contacts table
2 |
3 | Revision ID: fd187937528b
4 | Revises: 57b547e9b464
5 | Create Date: 2021-08-10 20:16:54.169168
6 |
7 | """
8 | from alembic import op
9 | import sqlalchemy as sa
10 | from sqlalchemy.dialects.postgresql import JSONB
11 | import datetime
12 |
13 | # revision identifiers, used by Alembic.
14 | revision = 'fd187937528b'
15 | down_revision = '57b547e9b464'
16 | branch_labels = None
17 | depends_on = None
18 |
19 |
def upgrade():

    # Unified contact table populated from the per-source tables.
    # NOTE(review): both _id and matching_id are marked primary_key, which
    # yields a composite primary key -- confirm that is intended rather than
    # _id alone.
    op.create_table('pdp_contacts',
        sa.Column('_id', sa.Integer, primary_key=True, autoincrement=True),
        sa.Column('matching_id', sa.Integer, primary_key=True),
        sa.Column('source_type', sa.String, nullable=False),
        sa.Column('source_id', sa.String, nullable=False),
        sa.Column('is_organization', sa.Boolean),
        sa.Column('first_name', sa.String),
        sa.Column('last_name', sa.String),
        sa.Column('email', sa.String),
        sa.Column('mobile', sa.String),
        sa.Column('street_and_number', sa.String),
        sa.Column('apartment', sa.String),
        sa.Column('city', sa.String),
        sa.Column('state', sa.String),
        sa.Column('zip', sa.String),
        sa.Column('json', JSONB),  # presumably the raw source payload -- confirm with ingest code
        sa.Column('created_date', sa.DateTime, default=datetime.datetime.utcnow),
        sa.Column('archived_date', sa.DateTime, default=None)
    )
41 |
def downgrade():
    """Drop only what upgrade() created.

    The original downgrade also dropped "pdp_contact_types", a table this
    revision never creates; that made downgrade fail (or destroy another
    revision's table) depending on database state.
    """
    op.drop_table("pdp_contacts")
46 |
--------------------------------------------------------------------------------
/src/server/api/API_ingest/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/CodeForPhilly/paws-data-pipeline/7ee6230405e9b2719b61cb2ce6a587db90f9d6e0/src/server/api/API_ingest/__init__.py
--------------------------------------------------------------------------------
/src/server/api/API_ingest/dropbox_handler.py:
--------------------------------------------------------------------------------
1 | import dropbox
2 | import structlog
3 | logger = structlog.get_logger()
4 |
5 | try:
6 | from secrets_dict import DROPBOX_APP
7 | except ImportError:
8 | # Not running locally
9 | logger.debug("Couldn't get DROPBOX_APP from file, trying environment **********")
10 | from os import environ
11 |
12 | try:
13 | DROPBOX_APP = environ['DROPBOX_APP']
14 | except KeyError:
15 | # Not in environment
16 | # You're SOL for now
17 | logger.error("Couldn't get DROPBOX_APP from file or environment")
18 |
19 |
class TransferData:
    """Thin wrapper around a Dropbox access token that can upload local files."""

    def __init__(self, access_token):
        # Only the token is stored; a Dropbox client is built per upload call.
        self.access_token = access_token

    def upload_file(self, file_from, file_to):
        """Upload the local file at `file_from` to the Dropbox path `file_to`."""
        client = dropbox.Dropbox(self.access_token)
        with open(file_from, 'rb') as source:
            client.files_upload(source.read(), file_to)
29 |
30 |
def upload_file_to_dropbox(file_path, upload_path):
    """Upload `file_path` to Dropbox at `upload_path`.

    `upload_path` is the full destination path, including the file name.
    Uses the module-level DROPBOX_APP token.
    """
    TransferData(DROPBOX_APP).upload_file(file_path, upload_path)
--------------------------------------------------------------------------------
/src/server/api/API_ingest/ingest_sources_from_api.py:
--------------------------------------------------------------------------------
1 | from api.API_ingest import shelterluv_people, salesforce_contacts, sl_animal_events
2 | import structlog
3 |
4 | from pipeline.log_db import log_shelterluv_update
5 | logger = structlog.get_logger()
6 |
def start():
    """Fetch and store raw data from each external API source, in a fixed order."""
    logger.debug("Start Fetching raw data from different API sources")

    logger.debug(" Fetching Salesforce contacts")
    salesforce_contacts.store_contacts_all()
    logger.debug(" Finished fetching Salesforce contacts")

    logger.debug(" Fetching Shelterluv people")
    people_count = shelterluv_people.store_shelterluv_people_all()
    logger.debug(" Finished fetching Shelterluv people - %d records", people_count)

    logger.debug(" Fetching Shelterluv events")
    event_count = sl_animal_events.store_all_animals_and_events()
    logger.debug(" Finished fetching Shelterluv events - %d records", event_count)
    log_shelterluv_update()

    logger.debug("Finished fetching raw data from different API sources")
24 |
25 |
--------------------------------------------------------------------------------
/src/server/api/API_ingest/salesforce_contacts.py:
--------------------------------------------------------------------------------
1 | import os
2 |
3 | import structlog
4 | from simple_salesforce import Salesforce
5 | from sqlalchemy.orm import sessionmaker
6 |
7 | from config import engine
8 | from models import SalesForceContacts
9 |
10 | from api import pem
11 |
12 | logger = structlog.get_logger()
13 |
14 | TEST_MODE = os.getenv("TEST_MODE") # if not present, has value None
15 |
16 | DOMAIN = os.getenv("SALESFORCE_DOMAIN")
17 | CONSUMER_KEY = os.getenv('SALESFORCE_CONSUMER_KEY')
18 | USERNAME = os.getenv('SALESFORCE_USERNAME')
19 |
def store_contacts_all():
    """Refresh the salesforcecontacts table from the Salesforce Contact object.

    Truncates the table, then pages through a SOQL query, adding one
    SalesForceContacts row per Salesforce contact, and commits everything at
    the end.  If no JWT private-key PEM file is found, logs an error and
    returns without pulling data.  In TEST_MODE only the first page of
    results is downloaded.
    """
    Session = sessionmaker(engine)
    with Session() as session:

        logger.debug("truncating table salesforcecontacts")
        session.execute("TRUNCATE TABLE salesforcecontacts")

        logger.debug("retrieving the latest salesforce contacts data")

        pem_file = pem.find_pem_file()

        if pem_file == '':
            # Without the key we cannot authenticate; skip the pull entirely.
            logger.error("Missing salesforce jwt private key pem file, skipping data pull")
            return

        # JWT bearer-token auth against the configured Salesforce domain.
        sf = Salesforce(username=USERNAME, consumer_key=CONSUMER_KEY,
                        privatekey_file=pem_file, domain=DOMAIN)
        results = sf.query("SELECT Contact_ID_18__c, FirstName, LastName, Contact.Account.Name, MailingCountry, MailingStreet, MailingCity, MailingState, MailingPostalCode, Phone, MobilePhone, Email FROM Contact")
        logger.debug("%d total Salesforce contact records", results['totalSize'])
        if TEST_MODE:
            logger.debug("running in test mode so only downloading first page of Salesforce contacts")

        total_records = 0
        done = False
        while not done:
            total_records += len(results['records'])
            logger.debug("Query returned %d Salesforce contact records, total %d", len(results['records']), total_records)
            for row in results['records']:
                # Account can be null for contacts not attached to an account.
                account_name = row['Account']['Name'] if row['Account'] is not None else None
                contact = SalesForceContacts(contact_id=row['Contact_ID_18__c'],
                                             first_name=row['FirstName'],
                                             last_name=row['LastName'],
                                             account_name=account_name,
                                             mailing_country=row['MailingCountry'],
                                             mailing_street=row['MailingStreet'],
                                             mailing_city=row['MailingCity'],
                                             mailing_state_province=row['MailingState'],
                                             mailing_zip_postal_code=row['MailingPostalCode'],
                                             phone=row['Phone'],
                                             mobile=row['MobilePhone'],
                                             email=row['Email'])
                session.add(contact)
            # if in test mode only return first page of results
            done = results['done'] if not TEST_MODE else True
            if not done:
                results = sf.query_more(results['nextRecordsUrl'], True)
        # Single commit at the end: the truncate and all inserts go together.
        logger.debug("Committing downloaded contact records")
        session.commit()
        logger.info("finished downloading latest salesforce contacts data")
70 |
--------------------------------------------------------------------------------
/src/server/api/API_ingest/shelterluv_people.py:
--------------------------------------------------------------------------------
1 | import requests, os
2 | from models import ShelterluvPeople
3 | from config import engine
4 | from sqlalchemy.orm import sessionmaker
5 | import structlog
6 | logger = structlog.get_logger()
7 |
8 | try:
9 | from secrets_dict import SHELTERLUV_SECRET_TOKEN
10 | except ImportError:
11 | # Not running locally
12 | logger.debug("Couldn't get SHELTERLUV_SECRET_TOKEN from file, trying environment **********")
13 | from os import environ
14 |
15 | try:
16 | SHELTERLUV_SECRET_TOKEN = environ['SHELTERLUV_SECRET_TOKEN']
17 | except KeyError:
18 | # Not in environment
19 | # You're SOL for now
20 | logger.error("Couldn't get SHELTERLUV_SECRET_TOKEN from file or environment")
21 |
22 |
23 |
24 | TEST_MODE=os.getenv("TEST_MODE") # if not present, has value None
25 | LIMIT = 100
26 | #################################
27 | # This script is used to fetch data from shelterluv API.
28 | # Please be mindful of your usage.
29 | # example: /people will fetch the data of all people. and send approximately 300 requests.
30 | # https://help.shelterluv.com/hc/en-us/articles/115000580127-Shelterluv-API-Overview
31 | #################################
32 |
33 | ######## Insights ###############
34 | # Max result items is 100 - even though it's not specifically specified in the above reference
35 | # /people has all the data. it seems that /person/:id isn't used
36 | #################################
37 |
''' Iterate over all Shelterluv people and store them in the shelterluvpeople table.
We fetch 100 items in each request, since that is the limit based on our research '''
def store_shelterluv_people_all():
    """Refresh the shelterluvpeople table from the Shelterluv /people endpoint.

    Truncates the table, then pages through the API LIMIT records at a time,
    adding one ShelterluvPeople row per person, and commits at the end.
    In TEST_MODE at most 1000 records are fetched.

    Returns the final offset (a multiple of LIMIT) -- an upper bound on, not
    an exact count of, the records stored.
    """
    offset = 0
    has_more = True
    Session = sessionmaker(engine)

    with Session() as session:
        logger.debug("Truncating table shelterluvpeople")

        session.execute("TRUNCATE TABLE shelterluvpeople")

        logger.debug("Start getting shelterluv contacts from people table")

        while has_more:
            # NOTE(review): plain http -- confirm whether this should be https.
            r = requests.get("http://shelterluv.com/api/v1/people?limit={}&offset={}".format(LIMIT, offset),
                             headers={"x-api-key": SHELTERLUV_SECRET_TOKEN})
            response = r.json()
            for person in response["people"]:
                #todo: Does this need more "null checks"?
                session.add(ShelterluvPeople(firstname=person["Firstname"],
                                             lastname=person["Lastname"],
                                             id=person["ID"] if "ID" in person else None,
                                             internal_id=person["Internal-ID"],
                                             associated=person["Associated"],
                                             street=person["Street"],
                                             apartment=person["Apartment"],
                                             city=person["City"],
                                             state=person["State"],
                                             zip=person["Zip"],
                                             email=person["Email"],
                                             phone=person["Phone"],
                                             animal_ids=person["Animal_ids"]))
            offset += LIMIT
            # TEST_MODE caps the pull at 1000 records.
            has_more = response["has_more"] if not TEST_MODE else response["has_more"] and offset < 1000
            if offset % 1000 == 0:
                logger.debug("Reading offset %s", str(offset))
        session.commit()

    logger.debug("Finished getting shelterluv contacts from people table")
    return offset
79 |
80 |
--------------------------------------------------------------------------------
/src/server/api/API_ingest/updated_data.py:
--------------------------------------------------------------------------------
1 |
2 | import structlog
3 | from sqlalchemy.orm import sessionmaker
4 |
5 | from config import engine
6 |
7 | logger = structlog.get_logger()
8 |
9 |
def get_updated_contact_data():
    """ Build the contact-update payload for Salesforce.

    Runs one aggregate query that, for every matched Salesforce contact,
    collects its Shelterluv ids, foster activity dates (from sl_animal_events),
    and volunteer stats (from volgisticsshifts), and returns the whole result
    as a single json_agg list (or None when the query matches nothing).
    """
    Session = sessionmaker(engine)

    # NOTE(review): epoch values are multiplied by 1000 throughout, i.e. the
    # payload carries millisecond timestamps - presumably what the Salesforce
    # consumer expects; confirm against the subscriber.
    qry = """ -- Collect latest foster/volunteer dates
        select json_agg (upd) as "cd"
        from (
        select
            sf.source_id as "Id" , -- long salesforce string
            array_agg(sl.source_id) filter (where sl.source_id is not null) as "Person_Id__c", -- short PAWS-local shelterluv id
            case
            when
                (extract(epoch from now())::bigint - max(foster_out) < 365*86400) -- foster out in last year
                or (extract(epoch from now())::bigint - max(foster_return) < 365*86400) -- foster return
            then 'Active'
            else 'Inactive'
            end as "Foster_Activity__c",
            max(foster_out) as "Foster_Start_Date__c",
            max(foster_return) as "Foster_End_Date__c",
            min(vol.first_date) "First_volunteer_date__c",
            max(vol.last_date) "Last_volunteer_date__c",
            sum(vol.hours) as "Total_volunteer_hours__c",
            array_agg(vc.source_id::integer) filter(where vc.source_id is not null) as "Volgistics_Id__c"
        from (
            select source_id, matching_id from pdp_contacts sf
            where sf.source_type = 'salesforcecontacts'
        ) sf
        left join pdp_contacts sl on sl.matching_id = sf.matching_id and sl.source_type = 'shelterluvpeople'
        left join (
            select
                person_id,
                max(case when event_type=1 then time else null end) * 1000 adopt,
                max(case when event_type=2 then time else null end) * 1000 foster_out,
                -- max(case when event_type=3 then time else null end) rto,
                max(case when event_type=5 then time else null end) * 1000 foster_return
            from sl_animal_events
            group by person_id
        ) sle on sle.person_id::text = sl.source_id
        left join pdp_contacts vc on vc.matching_id = sf.matching_id and vc.source_type = 'volgistics'
        left join (
            select
                volg_id,
                sum(hours) as hours,
                extract(epoch from min(from_date)) * 1000 as first_date,
                extract(epoch from max(from_date)) * 1000 as last_date
            from volgisticsshifts
            group by volg_id
        ) vol on vol.volg_id::text = vc.source_id
        where sl.matching_id is not null or vc.matching_id is not null
        group by sf.source_id
        ) upd;
        """

    with Session() as session:
        result = session.execute(qry)
        # json_agg returns one row with one column: the aggregated list.
        sfdata = result.fetchone()[0]
        if sfdata:
            logger.debug(sfdata)
            logger.debug("Query for Salesforce update returned %d records", len(sfdata))
        return sfdata
--------------------------------------------------------------------------------
/src/server/api/API_ingest/volgistics_db.py:
--------------------------------------------------------------------------------
1 | from sqlalchemy import Table, MetaData
2 | from sqlalchemy.orm import sessionmaker
3 |
4 | from config import engine
5 |
6 | import structlog
7 | logger = structlog.get_logger()
8 |
def insert_volgistics_people(row_list):
    """ Replace the contents of the volgistics table with row_list.

    Truncates the table, bulk-inserts the supplied rows, and commits.
    Returns the number of rows inserted, or 0 on any failure.
    """
    row_count = 0
    try:
        Session = sessionmaker(engine)
        metadata = MetaData()
        volg_table = Table("volgistics", metadata, autoload=True, autoload_with=engine)

        # Context manager guarantees the session is closed even when an
        # exception is raised (the previous explicit close() was skipped
        # on the error path, leaking the session/connection).
        with Session() as session:
            session.execute("TRUNCATE table volgistics;")
            ret = session.execute(volg_table.insert(row_list))

            row_count = ret.rowcount

            session.commit()  # Commit all inserted rows
    except Exception as e:
        row_count = 0
        logger.error("Exception inserting volgistics people")
        logger.exception(e)
    return row_count
30 |
31 |
def insert_volgistics_shifts(row_list):
    """ Replace the contents of the volgisticsshifts table with row_list.

    Truncates the table, bulk-inserts the supplied rows, and commits.
    Returns the number of rows inserted, or 0 on any failure.
    """
    row_count = 0
    try:
        Session = sessionmaker(engine)
        metadata = MetaData()
        volg_table = Table("volgisticsshifts", metadata, autoload=True, autoload_with=engine)

        # Context manager guarantees the session is closed even on error
        # (previously the session leaked on the exception path).
        with Session() as session:
            session.execute("TRUNCATE table volgisticsshifts;")
            ret = session.execute(volg_table.insert(row_list))

            row_count = ret.rowcount

            session.commit()  # Commit all inserted rows
    except Exception as e:
        row_count = 0
        logger.error("Exception inserting volgistics shifts")
        # Log the exception itself: the old `e.pgerror` raised AttributeError
        # for any exception that was not a psycopg2 error.
        logger.exception(e)
    return row_count
53 |
--------------------------------------------------------------------------------
/src/server/api/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/CodeForPhilly/paws-data-pipeline/7ee6230405e9b2719b61cb2ce6a587db90f9d6e0/src/server/api/__init__.py
--------------------------------------------------------------------------------
/src/server/api/api.py:
--------------------------------------------------------------------------------
from flask import Blueprint
from flask_cors import CORS

# One blueprint per audience; all four are registered on the Flask app in app.py.
admin_api = Blueprint("admin_api", __name__)
common_api = Blueprint("common_api", __name__)
user_api = Blueprint("user_api", __name__)
internal_api = Blueprint("internal_api", __name__)


# TODO: SECURITY - CORS is wide open for development, needs to be limited for production
# NOTE(review): internal_api is deliberately excluded from CORS below -
# presumably because it is only reachable from inside the cluster; confirm.
CORS(user_api)
CORS(common_api)
CORS(admin_api)
--------------------------------------------------------------------------------
/src/server/api/fake_data.py:
--------------------------------------------------------------------------------
1 | """ Fake data that can be returned when an API token is missing for local
2 | development, or for running pytest
3 |
4 | Shelterluv Data contains:
5 | Matched: Animal & Event End point
6 | """
7 |
# Canned Shelterluv responses, keyed by endpoint name.
shelterluv_data = {
    'animals': {
        "animal_details": {
            '12345': {
                "Age": 24,
                "DOBUnixTime": 1568480456,
                "Name": "Lola aka Fake Cat",
                "Type": "Cat",
                "Photos":
                    ["https://images.unsplash.com/photo-1456926631375-92c8ce872def?ixlib=rb-1.2.1&ixid=MnwxMjA3fDB8MHxzZWFyY2h8OHx8YW5pbWFsfGVufDB8fDB8fA%3D%3D&w=1000&q=80"],
                "Status": "Healthy In Home",
            },
        },
        "person_details": {
            "shelterluv_short_id": 2,
        },
    },
    'events': {
        '12345': [
            {
                'AssociatedRecords': [
                    {'Id': 12345, 'Type': 'Animal'},
                    {'Id': 12345, 'Type': 'Person'},
                ],
                'Subtype': 'Foster Home',
                'Time': '1602694822',
                'Type': 'Outcome.Adoption',
                'User': 'Fake User',
            },
        ]
    },
}


def sl_mock_data(end_point: str)-> dict:
    """ Shelterluv mock data.
    Takes the end_point as a str of `animals` or `events` and returns
    a dict representing a test data for that end_point.
    Unknown endpoints yield None.
    """
    if end_point in shelterluv_data:
        return shelterluv_data[end_point]
    return None
49 |
--------------------------------------------------------------------------------
/src/server/api/file_uploader.py:
--------------------------------------------------------------------------------
1 | from config import engine
2 | from donations_importer import validate_import_sfd
3 | from flask import current_app
4 | from models import ManualMatches, SalesForceContacts, ShelterluvPeople, Volgistics
5 | from pipeline.log_db import log_volgistics_update
6 | from volgistics_importer import open_volgistics, validate_import_vs, volgistics_people_import
7 | from werkzeug.utils import secure_filename
8 | import structlog
9 | logger = structlog.get_logger()
10 |
11 | SUCCESS_MSG = "Uploaded Successfully!"
12 |
13 |
def validate_and_arrange_upload(file):
    """ Sanitize the uploaded file's name and dispatch it for processing
    inside a single database transaction. """
    logger.info("Start uploading file: %s ", file.filename)
    safe_name = secure_filename(file.filename)
    extension = safe_name.rpartition(".")[2]
    with engine.begin() as conn:
        determine_upload_type(file, extension, conn)
20 |
21 |
def determine_upload_type(file, file_extension, conn):
    """ Route an uploaded file to the right importer based on its extension.

    Yes, this method of discovering what kind of file we have by looking at
    the extension and columns is silly. We'd like to get more of our data from
    automatically pulling from vendor APIs directly, in which case we'd know
    what kind of data we had.
    """
    if file_extension == "csv":
        logger.warn("%s: We no longer support CSV files", file.filename)
        return

    if file_extension != "xlsx":
        logger.error("Don't know how to process file: %s", file.filename)
        return

    # Assume it's Volgistics
    workbook = open_volgistics(file)
    if workbook:
        validate_import_vs(workbook)
        volgistics_people_import(workbook)
        workbook.close()
        log_volgistics_update()
--------------------------------------------------------------------------------
/src/server/api/internal_api.py:
--------------------------------------------------------------------------------
1 | from datetime import datetime
2 |
3 | import structlog
4 | from flask import jsonify
5 |
6 | from api.API_ingest import ingest_sources_from_api
7 | from api.API_ingest import updated_data
8 | from api.api import internal_api
9 |
10 | from pipeline import flow_script
11 | from pub_sub import salesforce_message_publisher
12 |
13 |
14 | logger = structlog.get_logger()
15 |
16 | ### Internal API endpoints can only be accessed from inside the cluster;
17 | ### they are blocked by location rule in NGINX config
18 |
19 |
20 | # Verify that this can only be accessed from within cluster
@internal_api.route("/api/internal/test", methods=["GET"])
def user_test():
    """ Liveness test, does not require JWT """
    logger.debug("Liveness test")
    now = str(datetime.now())
    return jsonify("OK from INTERNAL Test @ " + now)
26 |
27 |
@internal_api.route("/api/internal/test/test", methods=["GET"])
def user_test2():
    """ Liveness test, does not require JWT """
    now = str(datetime.now())
    return jsonify("OK from INTERNAL test/test @ " + now)
@internal_api.route("/api/internal/ingestRawData", methods=["GET"])
def ingest_raw_data():
    """ Pull fresh data from the vendor APIs into the raw tables.

    NOTE(review): failures are logged but the endpoint still responds
    200/'OK' - confirm callers do not need an error status here.
    """
    try:
        ingest_sources_from_api.start()
    except Exception as e:
        # logger.exception records the stack trace, which logger.error(e) lost.
        logger.exception(e)

    return jsonify({'outcome': 'OK'}), 200
@internal_api.route("/api/internal/get_updated_data", methods=["GET"])
def get_contact_data():
    """ Build the updated-contact payload and log how many records it holds. """
    logger.debug("Calling get_updated_contact_data()")
    contact_json = updated_data.get_updated_contact_data()
    if not contact_json:
        logger.debug("No contact records found")
    else:
        logger.debug("Returning %d contact records", len(contact_json))
    return jsonify({'outcome': 'OK'}), 200
54 |
@internal_api.route("/api/internal/start_flow", methods=["GET"])
def run_flow():
    """ Run only the matching stage of the pipeline. """
    logger.debug("Calling flow_script.start_flow()")
    flow_script.start_flow()
    logger.debug("Flow processing complete")
    response = jsonify({'outcome': 'OK'})
    return response, 200
61 |
62 |
@internal_api.route("/api/internal/send_salesforce_platform_message", methods=["GET"])
def send_salesforce_platform_message():
    """ Fetch the updated-contact payload and publish it to Salesforce, if any. """
    contact_list = updated_data.get_updated_contact_data()
    if not contact_list:
        logger.debug("No contact records found")
    else:
        logger.debug("Returning %d contact records", len(contact_list))
        salesforce_message_publisher.send_pipeline_update_messages(contact_list)
    return jsonify({'outcome': 'OK'}), 200
@internal_api.route("/api/internal/full_flow", methods=["GET"])
def start_flow():
    """ Run the whole pipeline end to end: ingest, match, then notify Salesforce. """
    logger.info("Downloading data from APIs")
    ingest_sources_from_api.start()
    logger.info("Starting pipeline matching")
    flow_script.start_flow()
    logger.info("Building updated data payload")
    contacts = updated_data.get_updated_contact_data()
    if not contacts:
        logger.info("No contacts to update")
    else:
        logger.info("Sending Salesforce platform messages")
        salesforce_message_publisher.send_pipeline_update_messages(contacts)

    return jsonify({'outcome': 'OK'}), 200
--------------------------------------------------------------------------------
/src/server/api/jwt_ops.py:
--------------------------------------------------------------------------------
1 | from functools import wraps
2 | from flask import Flask, jsonify, request, current_app
3 | from flask_jwt_extended import (
4 | JWTManager,
5 | jwt_required,
6 | create_access_token,
7 | get_jwt_identity,
8 | verify_jwt_in_request,
9 | get_jwt
10 |
11 | )
12 |
13 | from app import app, jwt
14 |
15 | # Wraps funcs to require admin role to execute
def admin_required(fn):
    """ Decorator: reject the request with 403 unless the verified JWT
    carries role == 'admin'. """
    @wraps(fn)
    def wrapper(*args, **kwargs):
        verify_jwt_in_request()
        if get_jwt()["role"] == "admin":
            return fn(*args, **kwargs)
        return jsonify(msg="Admins only!"), 403

    return wrapper
27 |
def create_token(username, accesslevel):
    """ Create a JWT *access* token for the specified user ('sub:') and role ('role:').
    """
    # Identity can be any data that is json serializable, we just use username
    extra_claims = {'role': accesslevel}
    token = create_access_token(identity=username, additional_claims=extra_claims)
    return jsonify(access_token=token)
35 |
36 |
def validate_decode_jwt():
    """ If valid, return jwt fields as a dictionary, else None """
    jwtdict = None
    try:
        # verify_jwt_in_request() returns (header, payload); we want the payload.
        jwtdict = verify_jwt_in_request()[1]
    except Exception:
        # Wasn't valid - either expired or failed validation.
        # (Catch Exception, not a bare except, so SystemExit and
        # KeyboardInterrupt are not swallowed.)
        pass

    return jwtdict
46 |
--------------------------------------------------------------------------------
/src/server/api/pem.py:
--------------------------------------------------------------------------------
1 | from os import path
2 |
3 | import structlog
4 |
5 | logger = structlog.get_logger()
6 |
7 |
def find_pem_file():
    """
    Search likely places for the .pem file needed for SalesForce operations,
    returning filename if found or empty string if not.
    """
    locations = ['server/bin', 'bin', 'pem', '/app/pem', 'server/pem']
    file_name = 'connected-app-secrets.pem'

    # Return the first location that actually contains the file.
    for directory in locations:
        candidate = path.join(directory, file_name)
        if path.exists(candidate):
            found = path.normpath(candidate)
            logger.info("Found pem file at %s ", found)
            return found

    return ''
--------------------------------------------------------------------------------
/src/server/app.py:
--------------------------------------------------------------------------------
import os

import structlog
logger = structlog.get_logger()

from flask import Flask

from flask_jwt_extended import JWTManager

# Secrets come from a local secrets_dict.py when developing; in deployment
# they are expected in the environment instead.
try:
    from secrets_dict import JWT_SECRET, APP_SECRET_KEY
except ImportError:
    # Not running locally
    logger.info("Could not get secrets from file, trying environment **********")
    from os import environ

    try:
        JWT_SECRET = environ['JWT_SECRET']
        APP_SECRET_KEY = environ['APP_SECRET_KEY']
    except KeyError:
        # Nor in environment
        # You're SOL for now
        logger.critical("Couldn't get secrets from file or environment")



# logger = structlog.get_logger()

# Send all Flask logs to structlog
structlog.configure(
    processors=[
        structlog.contextvars.merge_contextvars,
        structlog.processors.KeyValueRenderer(
            key_order=["event", "view", "peer"]
        ),
    ],
    logger_factory=structlog.stdlib.LoggerFactory(),
)



app = Flask(__name__)


app.config["JWT_SECRET_KEY"] = JWT_SECRET
app.config["JWT_MAX_TIMEOUT"] = 30*60 #Seconds

# We'll use max for default but can be reduced for testing
app.config["JWT_ACCESS_TOKEN_EXPIRES"] = app.config["JWT_MAX_TIMEOUT"]

jwt = JWTManager(app)


app.secret_key = APP_SECRET_KEY
app.config["MAX_CONTENT_LENGTH"] = 500 * 1024 * 1024  # 500 Megs
app.config["SEND_FILE_MAX_AGE_DEFAULT"] = 0

# Blueprints are imported late so that `app` and `jwt` above already exist
# when the api modules do `from app import app, jwt`.
from api.admin_api import admin_api
from api.common_api import common_api
from api.user_api import user_api
from api.internal_api import internal_api


app.register_blueprint(admin_api)
app.register_blueprint(common_api)
app.register_blueprint(user_api)
app.register_blueprint(internal_api)


# init_db_schema.start(connection)
# Emit a log entry at each level as a smoke test of the logging setup.
logger.debug("Log sample - debug")
logger.info("Log sample - info")
logger.warn("Log sample - warn")
logger.error("Log sample - error")
logger.critical("Log sample - critical")

if __name__ == "__main__":
    FLASK_PORT = os.getenv("FLASK_PORT", None)

    # create_app()
    # NOTE(review): debug=True must not reach production - this entry point
    # appears to be for local runs only (uwsgi serves wsgi:app); confirm.
    app.run(host="0.0.0.0", debug=True, port=FLASK_PORT)
--------------------------------------------------------------------------------
/src/server/bin/export_secrets.sh:
--------------------------------------------------------------------------------
1 | set -o allexport
2 | source bin/secrets_dict.py
3 | set +o allexport
4 |
--------------------------------------------------------------------------------
/src/server/bin/startServer.sh:
--------------------------------------------------------------------------------
#!/bin/bash

# Container entry point: configure environment, run DB migrations, then hand
# off to uWSGI.
# we may want to switch this to a script which logs output, etc?
echo "------------STARTING `date` ------------------"
# Bug fix: `set FLASK_APP=...` assigns the positional parameter $1 in bash,
# not a variable, so FLASK_APP was exported empty. Plain assignment is correct.
FLASK_APP=server/app.py
export FLASK_APP
source bin/export_secrets.sh
# This abomination ensures that the PG server has finished its restart cycle
echo "SLEEPING.. WAITING FOR DB"; sleep 5; echo "WAKING"; alembic upgrade head; alembic current; echo "DB SETUP";
#; python -m flask run --host=0.0.0.0 --no-reload

# --no-reload prevents Flask restart, which usually happens in middle of create_base_users()
#TODO: SECURITY - ensure we are not running in debug mode in production
uwsgi bin/uwsgi.ini
--------------------------------------------------------------------------------
/src/server/bin/uwsgi.ini:
--------------------------------------------------------------------------------
[uwsgi]
# Serve HTTP directly on port 5000.
http-socket = :5000
plugin = python310
module = wsgi:app
chdir = /app
pythonpath = .
processes = 2
threads = 4
# Routine request logging is disabled, but 4xx/5xx responses are still
# logged, formatted as the JSON structure below.
log-4xx = true
log-5xx = true
disable-logging = true
logformat = {"timestamp": "%(tmsecs)", "address": "%(addr)", "method": "%(method)", "protocol": "%(proto)", "resp_size": "%(size)", "request_body_size": "%(cl)", "response_status": "%(status)", "response_time": "%(secs)", "uri": "%(uri)"}
logformat-strftime = true
--------------------------------------------------------------------------------
/src/server/config.py:
--------------------------------------------------------------------------------
1 | import os
2 | import sys
3 | import sqlalchemy as db
4 | import models
5 | from constants import IS_LOCAL, BASE_PATH, RAW_DATA_PATH, OUTPUT_PATH, LOGS_PATH, REPORT_PATH, ZIPPED_FILES
6 |
7 | import logging
8 | import structlog
9 | from structlog.processors import CallsiteParameter
10 |
11 |
# structlog setup for complete app

# Formatters
shared_processors=[
    structlog.contextvars.merge_contextvars,
    structlog.processors.add_log_level,
    structlog.processors.StackInfoRenderer(),
    structlog.dev.set_exc_info,
    structlog.processors.TimeStamper(fmt=None, utc=True ),
    structlog.processors.CallsiteParameterAdder(
        [
            CallsiteParameter.FILENAME,
            CallsiteParameter.FUNC_NAME,
            CallsiteParameter.LINENO,
        ])
]

# Select output processor depending if running locally/interactively or not
if sys.stderr.isatty(): # Pretty-print
    processors = shared_processors + [structlog.dev.ConsoleRenderer(), ]
else: # Emit structured/JSON
    processors = shared_processors + [ structlog.processors.dict_tracebacks, structlog.processors.JSONRenderer(), ]

structlog.configure(
    processors=processors,
    wrapper_class=structlog.make_filtering_bound_logger(logging.NOTSET),
    context_class=dict,
    logger_factory=structlog.PrintLoggerFactory(),
    cache_logger_on_first_use=False
)
logger = structlog.get_logger()


# Initiate postgres DB
# best practices is to have only one engine per application process
# https://docs.sqlalchemy.org/en/13/core/connections.html
# NOTE(review): the fallback password below is a development default -
# confirm production always supplies POSTGRES_PASSWORD via the environment.
POSTGRES_PASSWORD = os.getenv("POSTGRES_PASSWORD", "thispasswordisverysecure")
POSTGRES_DATABASE = os.getenv("POSTGRES_DATABASE", "paws")
POSTGRES_USER = os.getenv("POSTGRES_USER", "postgres")

# Local runs talk to localhost (or LOCAL_DB_IP when set); containerized runs
# use the docker-compose service name as the host.
if IS_LOCAL == "True":
    DB = os.getenv(
        "LOCAL_DB_IP",
        "postgresql://postgres:"
        + POSTGRES_PASSWORD
        + "@localhost:5432/"
        + POSTGRES_DATABASE,
    )
else:
    DB = (
        "postgresql://"
        + POSTGRES_USER
        + ":"
        + POSTGRES_PASSWORD
        + "@paws-compose-db/"
        + POSTGRES_DATABASE
    )

engine = db.create_engine(DB)

# Run Alembic to create managed tables
# from alembic.config import Config
# from alembic import command

# alembic_cfg = Config("alembic.ini")
# command.stamp(alembic_cfg, "head")

# logger.warn("Testing")

# Seed roles/users/lookup data at import time; each helper is a no-op when
# the data already exists.
with engine.connect() as connection:
    import db_setup.base_users
    db_setup.base_users.create_base_roles() # IFF there are no roles already
    db_setup.base_users.create_base_users() # IFF there are no users already
    db_setup.base_users.populate_sl_event_types() # IFF there are no event types already
    db_setup.base_users.populate_rfm_mapping_table() # Set to True to force loading latest version of populate script
    # found in the server/alembic directory
88 |
# Create these directories only one time - when initializing
if not os.path.isdir(BASE_PATH):
    # Loop instead of repeated calls; the old code created RAW_DATA_PATH
    # twice. exist_ok also guards against a concurrent worker racing us.
    for directory in (
        BASE_PATH,
        RAW_DATA_PATH,
        OUTPUT_PATH,
        LOGS_PATH,
        REPORT_PATH,
        ZIPPED_FILES,
    ):
        os.makedirs(directory, exist_ok=True)
98 |
--------------------------------------------------------------------------------
/src/server/constants.py:
--------------------------------------------------------------------------------
import os

# Determine if the app is run from docker or locally by testing the env var "IS_LOCAL"
IS_LOCAL = os.getenv("IS_LOCAL")
if IS_LOCAL == "True":
    BASE_PATH = "../local_files/"
else:
    BASE_PATH = "/app/static/"


# Initiate local file system
RAW_DATA_PATH = f"{BASE_PATH}raw_data/"
OUTPUT_PATH = f"{BASE_PATH}output/"
LOGS_PATH = f"{BASE_PATH}logs/"
REPORT_PATH = f"{OUTPUT_PATH}reports/"
ZIPPED_FILES = f"{BASE_PATH}zipped/"
--------------------------------------------------------------------------------
/src/server/db_setup/README.md:
--------------------------------------------------------------------------------
1 | User Management API
2 | ----------
3 |
4 |
5 | *** Create user record
6 |
7 | Requires admin role
8 |
9 | Form POST Parameters
10 | ----------
11 | username : str
12 | full_name : str
13 | password : str
14 | role : str, one of `user`, `admin`
15 |
16 | Returns
17 | ----------
18 | User created: 201 + username
19 | Invalid role: 422 + "Bad role"
20 | Duplicate user: 409 + DB error
21 |
22 |
23 | *** Get users
24 |
25 |
26 | Returns
27 | ----------
28 | One header row of field names, one row per user
29 | "['username', 'full_name', 'active', 'role'],
30 | ['admin', None, 'Y', 'admin'],
31 | ['steve11', 'Steve the User', 'Y', 'admin'],
32 | ['user', None, 'Y', 'user'],"
--------------------------------------------------------------------------------
/src/server/db_setup/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/CodeForPhilly/paws-data-pipeline/7ee6230405e9b2719b61cb2ce6a587db90f9d6e0/src/server/db_setup/__init__.py
--------------------------------------------------------------------------------
/src/server/pipeline/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/CodeForPhilly/paws-data-pipeline/7ee6230405e9b2719b61cb2ce6a587db90f9d6e0/src/server/pipeline/__init__.py
--------------------------------------------------------------------------------
/src/server/pipeline/log_db.py:
--------------------------------------------------------------------------------
1 | from datetime import datetime
2 | import json
3 | from sqlalchemy.sql import text
4 | from flask import current_app
5 | import time
6 |
7 | from sqlalchemy.dialects.postgresql import insert
8 | from sqlalchemy import Table, Column, Integer, String, MetaData, ForeignKey, exc, select
9 |
10 | from config import engine
11 | import structlog
12 | logger = structlog.get_logger()
13 |
metadata = MetaData()

# Reflect the two tables this module writes to from the live DB schema at
# import time (so the database must be reachable when log_db is imported).
ex_stat = Table("execution_status", metadata, autoload=True, autoload_with=engine)
kvt = Table("kv_unique", metadata, autoload=True, autoload_with=engine)
18 |
19 |
20 | # Alembic version bfb1262d3195
21 |
22 | # CREATE TABLE public.execution_status (
23 | # "_id" serial NOT NULL,
24 | # job_id int4 NOT NULL,
25 | # stage varchar(32) NOT NULL,
26 | # status varchar(32) NOT NULL,
27 | # details varchar(128) NOT NULL,
28 | # update_stamp timestamp NOT NULL DEFAULT now(),
29 | # CONSTRAINT execution_status_pkey null
30 | # );
31 |
32 |
33 |
def log_exec_status(job_id: str, exec_stage: str, exec_status: str, job_details: str):
    """Log execution status (job_id, status, job_details) to DB """
    # NOTE(review): job_id is annotated str but the execution_status.job_id
    # column is int4 (see DDL comment above) - confirm callers pass a number.

    with engine.connect() as connection:
        ins_stmt = insert(ex_stat).values( # Postgres-specific insert() supporting ON CONFLICT
            job_id = job_id,
            stage = exec_stage,
            status = exec_status,
            # job_details is JSON-encoded here, so callers should pass a plain
            # value, not something already run through json.dumps().
            details = json.dumps(job_details)
        )

        # If key already present in DB, do update instead
        upsert = ins_stmt.on_conflict_do_update(
            constraint='uq_job_id',
            set_=dict( stage = exec_stage, status = exec_status, details = json.dumps(job_details))
        )

        try:
            connection.execute(upsert)
        except Exception as e:
            # Failure is logged but not re-raised: status bookkeeping must not
            # abort the job being tracked.
            logger.error("Insert/Update failed, Execution status")
            logger.error(e)
57 |
def _upsert_last_update(key, error_msg):
    """ Upsert the current timestamp into kv_unique under `key`.

    Shared implementation for the last-update markers below. On failure the
    supplied error_msg is logged; the exception is not re-raised, so a stats
    bookkeeping failure never aborts the data update itself.
    """
    timestamp = datetime.now().ctime()

    with engine.connect() as connection:
        ins_stmt = insert(kvt).values(
            keycol = key,
            valcol = timestamp,
        )
        # If key already present in DB, do update instead
        upsert = ins_stmt.on_conflict_do_update(
            constraint='kv_unique_keycol_key',
            set_=dict(valcol=timestamp)
        )

        try:
            connection.execute(upsert)
        except Exception as e:
            logger.error(error_msg)
            logger.error(e)


def log_volgistics_update():
    """Log Volgistics data update"""
    _upsert_last_update('last_volgistics_update', "Insert/Update failed on Volgistics stats")


def log_shelterluv_update():
    """Log Shelterluv data update"""
    _upsert_last_update('last_shelterluv_update', "Insert/Update failed on Shelterluv stats")


def log_salesforce_update():
    """Log SalesForce data update"""
    # (Also fixes the old "SalseForce" typo in the failure message.)
    _upsert_last_update('last_salesforce_update', "Insert/Update failed on SalesForce stats")
--------------------------------------------------------------------------------
/src/server/pub_sub/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/CodeForPhilly/paws-data-pipeline/7ee6230405e9b2719b61cb2ce6a587db90f9d6e0/src/server/pub_sub/__init__.py
--------------------------------------------------------------------------------
/src/server/pub_sub/salesforce_message_publisher.py:
--------------------------------------------------------------------------------
1 | import json
2 | import time
3 | import jwt
4 | import os
5 | import requests
6 | import certifi
7 | import grpc
8 | import pub_sub.stubs.pubsub_api_pb2_grpc as pb2_grpc
9 | import pub_sub.stubs.pubsub_api_pb2 as pb2
10 | import avro.io
11 | import io
12 | import structlog
13 | from datetime import datetime
14 |
15 | from api import pem
16 |
17 | logger = structlog.get_logger()
18 |
# Salesforce connected-app / Pub/Sub configuration, all from the environment.
ISSUER = os.getenv("SALESFORCE_CONSUMER_KEY")
DOMAIN = os.getenv("SALESFORCE_DOMAIN")
SUBJECT = os.getenv("SALESFORCE_USERNAME")
CREATOR_CONTACT_ID = os.getenv("CREATOR_CONTACT_ID")
INSTANCE_URL = os.getenv("INSTANCE_URL")
TENANT_ID = os.getenv("TENANT_ID")
# os.getenv returns a *string* when the variable is set; without int() the
# batch-size comparison and slicing below (len(contacts_list) > BATCH_SIZE,
# contacts_list[:BATCH_SIZE]) would raise TypeError whenever BATCH_SIZE
# came from the environment.
BATCH_SIZE = int(os.getenv("BATCH_SIZE", 400))

UPDATE_TOPIC = "/event/updated_contacts_batched__e"
28 |
def send_pipeline_update_messages(contacts_list):
    """ Publish updated contact records to Salesforce in batches.

    Authenticates with a JWT-bearer OAuth flow using the local PEM key, looks
    up the Avro schema for UPDATE_TOPIC over the Salesforce Pub/Sub gRPC API,
    then publishes the contacts in BATCH_SIZE chunks as a single Publish call.

    Note: consumes `contacts_list` in place (elements are deleted as they are
    batched), so the caller's list is empty afterwards.
    """
    pem_file = pem.find_pem_file() #TODO: Could we get here (and get errors) if we didn't have a pem file?
    with open(pem_file) as fd:
        private_key = fd.read()
    logger.info('Loaded PEM certificate')

    # Short-lived (5 minute) JWT-bearer claim for the OAuth token exchange.
    claim = {
        'iss': ISSUER,
        'exp': int(time.time()) + 300,
        'aud': 'https://{}.salesforce.com'.format(DOMAIN),
        'sub': SUBJECT,
    }
    assertion = jwt.encode(claim, private_key, algorithm='RS256', headers={'alg': 'RS256'})
    logger.info('Generated JWT')

    # NOTE(review): assumes the token call succeeds - r.json()['access_token']
    # raises KeyError on an OAuth error response; confirm that is acceptable.
    r = requests.post('https://{}.salesforce.com/services/oauth2/token'.format(DOMAIN), data={
        'grant_type': 'urn:ietf:params:oauth:grant-type:jwt-bearer',
        'assertion': assertion,
    })
    access_token = r.json()['access_token']
    logger.info('Made OAuth call to get access token')

    with open(certifi.where(), 'rb') as f:
        creds = grpc.ssl_channel_credentials(f.read())
    with grpc.secure_channel('api.pubsub.salesforce.com:7443', creds) as channel:
        # Per-call metadata required by the Salesforce Pub/Sub API.
        auth_meta_data = (('accesstoken', access_token),
                          ('instanceurl', INSTANCE_URL),
                          ('tenantid', TENANT_ID))


        stub = pb2_grpc.PubSubStub(channel)
        schema_id = stub.GetTopic(pb2.TopicRequest(topic_name=UPDATE_TOPIC), metadata=auth_meta_data).schema_id
        schema = stub.GetSchema(pb2.SchemaRequest(schema_id=schema_id), metadata=auth_meta_data).schema_json

        payloads = []
        # Drain contacts_list into BATCH_SIZE chunks, encoding each chunk as
        # one Avro-serialized platform event.
        while len(contacts_list) > 0:
            if len(contacts_list) > BATCH_SIZE:
                current_batch = contacts_list[:BATCH_SIZE]
                del contacts_list[:BATCH_SIZE]
            else:
                current_batch = contacts_list
                contacts_list = []

            root_object = {
                "updatedContactsJson" : current_batch
            }
            message = {
                "CreatedById": CREATOR_CONTACT_ID,
                "CreatedDate": int(datetime.now().timestamp()),
                "updated_contacts_json__c": json.dumps(root_object)
            }
            buf = io.BytesIO()
            encoder = avro.io.BinaryEncoder(buf)
            # NOTE(review): avro.schema is reached through `import avro.io`
            # only - confirm that submodule import keeps working, or import
            # avro.schema explicitly.
            writer = avro.io.DatumWriter(avro.schema.parse(schema))
            writer.write(message, encoder)
            payload = {
                "schema_id": schema_id,
                "payload": buf.getvalue()
            }
            payloads.append(payload)

        # One Publish call carries every batched event.
        stub.Publish(pb2.PublishRequest(topic_name=UPDATE_TOPIC, events=payloads), metadata=auth_meta_data)

        logger.info("%s total pipeline update messages sent", len(payloads))
93 |
94 |
--------------------------------------------------------------------------------
/src/server/pub_sub/stubs/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/CodeForPhilly/paws-data-pipeline/7ee6230405e9b2719b61cb2ce6a587db90f9d6e0/src/server/pub_sub/stubs/__init__.py
--------------------------------------------------------------------------------
/src/server/requirements.txt:
--------------------------------------------------------------------------------
1 | Flask==3.0.1
2 | sqlalchemy==1.4.15
3 | psycopg2-binary==2.9.1
4 | openpyxl
5 | requests
6 | pytest
7 | flask-jwt-extended==4.6.0
8 | alembic
9 | flask-cors
10 | dropbox
11 | jellyfish
12 | networkx
13 | Jinja2>=3.1.2
14 | itsdangerous>=2.1.2
15 | simple-salesforce
16 | werkzeug==3.0.1
17 | structlog
18 | pyjwt
19 | cryptography
20 | grpcio
21 | google-api-python-client
22 | avro
--------------------------------------------------------------------------------
/src/server/rfm-edges.txt:
--------------------------------------------------------------------------------
1 | -- Seed the RFM (recency/frequency/monetary) score edges; if an 'rfm_edges' row is already present, run the DELETE below first.
2 | -- DELETE FROM kv_unique WHERE keycol = 'rfm_edges';
3 |
4 | INSERT INTO "public"."kv_unique"(
5 | "keycol",
6 | "valcol"
7 | ) VALUES (
8 | 'rfm_edges', -- lookup key in the kv_unique key/value store
9 | '{ "r":{"5": 0, "4": 90, "3": 181, "2": 273, "1": 364},
10 | "f": {"1": 0, "2": 1, "3": 3, "4": 11, "5": 17},
11 | "m": {"1": 0, "2": 49.99, "3": 74.99, "4": 99.99, "5": 209.99}}' -- JSON bucket edges: r presumably days, f counts, m dollar amounts — TODO confirm units
12 | );
13 |
--------------------------------------------------------------------------------
/src/server/secrets_dict.py:
--------------------------------------------------------------------------------
1 | SD_COMMENT="This is for local development" # Placeholder secrets for local dev only — never use these values in production
2 | APP_SECRET_KEY="ASKASK"
3 | JWT_SECRET="JWTSECRET" # presumably the signing secret for flask-jwt-extended tokens — confirm against app config
4 | POSTGRES_PASSWORD="thispasswordisverysecure"
5 | BASEUSER_PW="basepw"
6 | BASEEDITOR_PW="editorpw"
7 | BASEADMIN_PW="basepw" # NOTE(review): identical to BASEUSER_PW — confirm this duplication is intentional for local dev
8 | DROPBOX_APP="DBAPPPW"
9 |
10 | SALESFORCE_USERNAME='' # Salesforce integration settings — empty by default; fill in to exercise the pub/sub code locally
11 | SALESFORCE_CONSUMER_KEY=''
12 | SALESFORCE_DOMAIN=''
13 | TENANT_ID=''
14 | INSTANCE_URL=''
15 | CREATOR_CONTACT_ID=''
16 |
--------------------------------------------------------------------------------
/src/server/wsgi.py:
--------------------------------------------------------------------------------
1 | from app import app # Flask application object exposed for WSGI servers (e.g. gunicorn wsgi:app)
2 |
3 | if __name__ == "__main__": # Dev convenience: run the built-in server when executed directly
4 | app.run()
--------------------------------------------------------------------------------