├── backend
├── LICENSE
├── dbt
│ ├── data
│ │ └── .gitkeep
│ ├── macros
│ │ └── .gitkeep
│ ├── tests
│ │ └── .gitkeep
│ ├── analysis
│ │ └── .gitkeep
│ ├── snapshots
│ │ └── .gitkeep
│ ├── .gitignore
│ ├── .user.yml
│ ├── models
│ │ └── stripe
│ │ │ ├── mrr_facts_by_email.sql
│ │ │ ├── customers.sql
│ │ │ ├── events.sql
│ │ │ ├── plans.sql
│ │ │ ├── customer_emails.sql
│ │ │ ├── invoice_items.sql
│ │ │ ├── subscriptions.sql
│ │ │ ├── invoice_facts.sql
│ │ │ ├── invoices_tiered.sql
│ │ │ ├── plan_tiers.sql
│ │ │ ├── invoices.sql
│ │ │ ├── models.yml
│ │ │ └── mrr_facts.sql
│ ├── run_dbt.sh
│ ├── profiles.yml
│ ├── README.md
│ └── dbt_project.yml
├── static
│ └── something.json
├── runlocal.sh
├── pints
│ ├── __init__.py
│ ├── utils.py
│ ├── cabinet.py
│ ├── modeling.py
│ ├── yaml2sql.py
│ ├── stripe.py
│ ├── sheets.py
│ ├── scheduler.py
│ ├── slack.py
│ └── postgres.py
├── logger.py
├── requirements.txt
├── .gitignore
├── Dockerfile
├── metrics
│ └── metrics.py
└── app.py
├── .gitignore
├── frontend
├── public
│ ├── favicon.ico
│ ├── paper-logo.png
│ ├── paper-logo-text.png
│ ├── paper-white-logo.png
│ ├── stripeApiKeyCreation.png
│ ├── index.html
│ └── sign-in-with-google.svg
├── babel.config.js
├── postcss.config.js
├── .gitignore
├── Dockerfile-Dev
├── src
│ ├── main.js
│ ├── components
│ │ ├── PaperCurrency.vue
│ │ ├── PaperToggle.vue
│ │ ├── PaperMenu.vue
│ │ └── PaperSelect.vue
│ ├── assets
│ │ └── styles
│ │ │ └── tailwind.css
│ ├── router
│ │ └── index.js
│ ├── store.js
│ ├── views
│ │ ├── Slack2.vue
│ │ ├── Logout.vue
│ │ ├── Callback.vue
│ │ └── Login.vue
│ └── App.vue
├── README.md
├── tailwind.config.js
└── package.json
├── docker-compose.yml
├── LICENSE
├── render.yaml
└── README.md
/backend/LICENSE:
--------------------------------------------------------------------------------
1 | #TODO
--------------------------------------------------------------------------------
/backend/dbt/data/.gitkeep:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/backend/dbt/macros/.gitkeep:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/backend/dbt/tests/.gitkeep:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/backend/dbt/analysis/.gitkeep:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/backend/dbt/snapshots/.gitkeep:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | .DS_Store
2 | DS_Store
3 | .env
--------------------------------------------------------------------------------
/backend/static/something.json:
--------------------------------------------------------------------------------
1 | {"blah": 2}
--------------------------------------------------------------------------------
/backend/dbt/.gitignore:
--------------------------------------------------------------------------------
1 |
2 | target/
3 | dbt_modules/
4 | logs/
5 |
--------------------------------------------------------------------------------
/backend/dbt/.user.yml:
--------------------------------------------------------------------------------
1 | id: bb5150a1-ddce-4812-9bea-fec82777690f
2 |
--------------------------------------------------------------------------------
/frontend/public/favicon.ico:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mike-paper/pulse/HEAD/frontend/public/favicon.ico
--------------------------------------------------------------------------------
/frontend/public/paper-logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mike-paper/pulse/HEAD/frontend/public/paper-logo.png
--------------------------------------------------------------------------------
/frontend/babel.config.js:
--------------------------------------------------------------------------------
1 | module.exports = {
2 | presets: [
3 | '@vue/cli-plugin-babel/preset'
4 | ]
5 | }
6 |
--------------------------------------------------------------------------------
/frontend/public/paper-logo-text.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mike-paper/pulse/HEAD/frontend/public/paper-logo-text.png
--------------------------------------------------------------------------------
/frontend/public/paper-white-logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mike-paper/pulse/HEAD/frontend/public/paper-white-logo.png
--------------------------------------------------------------------------------
/frontend/public/stripeApiKeyCreation.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mike-paper/pulse/HEAD/frontend/public/stripeApiKeyCreation.png
--------------------------------------------------------------------------------
/backend/dbt/models/stripe/mrr_facts_by_email.sql:
--------------------------------------------------------------------------------
1 | with mrr as (
2 | select *
3 | from {{ref('mrr_facts')}}
4 | )
5 | select * from mrr
--------------------------------------------------------------------------------
/frontend/postcss.config.js:
--------------------------------------------------------------------------------
1 | module.exports = {
2 | plugins: {
3 | // tailwindcss: {},
4 | autoprefixer: {},
5 | },
6 | }
7 |
8 |
--------------------------------------------------------------------------------
/backend/dbt/run_dbt.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Run the dbt project with JSON log output (parsed by the backend).
# Expects to be invoked from the backend root, with the project in ./dbt.
# Fail fast: without this, a failed `cd dbt` would silently run dbt against
# whatever directory we happen to be in.
set -euo pipefail
# echo $PAPER_DBT_SCHEMA
cd dbt
# dbt --log-format json run --profiles-dir . --model mrr_facts
dbt --log-format json run --profiles-dir .
--------------------------------------------------------------------------------
/backend/runlocal.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Build the backend image and run it locally on $PORT with env vars from .env.
source ~/.bash_profile
# Assign PORT up front instead of threading `PORT=5000` through the && chain
# (an assignment always succeeds, so behavior is unchanged but clearer).
PORT=5000
docker build -t paperapi . && \
docker run -it -p ${PORT}:${PORT} \
	--env-file .env \
	-e PORT=${PORT} \
	-e WORKERS=4 \
	paperapi:latest
--------------------------------------------------------------------------------
/backend/pints/__init__.py:
--------------------------------------------------------------------------------
1 | from .slack import *
2 | from .cabinet import *
3 | from .scheduler import *
4 | from .sheets import *
5 | from .stripe import getAll, getObject
6 | from .postgres import *
7 | from .yaml2sql import *
8 | from .utils import *
9 | from .modeling import *
--------------------------------------------------------------------------------
/backend/logger.py:
--------------------------------------------------------------------------------
"""Shared logging setup: stream everything to stdout (container-friendly)."""
import logging
import sys

# FORMAT = '%(asctime)-15s %(clientip)s %(user)-8s %(message)s'
# Root config: INFO level, stdout stream, default format.
logging.basicConfig(stream=sys.stdout, level=logging.INFO)
# Let APScheduler's internal job/trigger records through at DEBUG; the root
# handler has no level of its own, so these records are still emitted.
logging.getLogger('apscheduler').setLevel(logging.DEBUG)
# Shared module-level logger for the backend.
logger = logging.getLogger(__name__)
# NOTE(review): this logger inherits the root INFO level, so this DEBUG line
# is filtered out and prints nothing — presumably just a smoke test; confirm.
logger.debug('debugging...')
--------------------------------------------------------------------------------
/backend/dbt/models/stripe/customers.sql:
--------------------------------------------------------------------------------
1 | with customers as (
2 | select
3 | c.details ->> 'id' as customer_id,
4 | c.details ->> 'email' as email,
5 | to_timestamp((c.details ->> 'created')::int) as "created_on"
6 | from
7 | public.stripe_customers as c
8 | where c.team_id = {{ env_var('PAPER_DBT_TEAM_ID') }}
9 | )
10 | select * from customers
--------------------------------------------------------------------------------
/frontend/.gitignore:
--------------------------------------------------------------------------------
1 | .DS_Store
2 | node_modules
3 | /dist
4 |
5 |
6 | # local env files
7 | .env
8 | .env.local
9 | .env.*.local
10 |
11 | # Log files
12 | npm-debug.log*
13 | yarn-debug.log*
14 | yarn-error.log*
15 | pnpm-debug.log*
16 |
17 | # Editor directories and files
18 | .idea
19 | .vscode
20 | *.suo
21 | *.ntvs*
22 | *.njsproj
23 | *.sln
24 | *.sw?
25 |
--------------------------------------------------------------------------------
/frontend/Dockerfile-Dev:
--------------------------------------------------------------------------------
1 | FROM node:lts-alpine
2 |
3 | # RUN npm install -g http-server
4 |
5 | WORKDIR /src/app/frontend
6 |
7 | COPY package*.json ./
8 |
9 | RUN npm install
10 |
11 | COPY . .
12 |
13 | # RUN npm run build
14 |
15 | EXPOSE 8080
16 |
17 | # CMD ["npm", "run", "serve"]
18 | # CMD [ "http-server", "dist" ]
19 | CMD ["yarn", "serve"]
20 |
21 |
--------------------------------------------------------------------------------
/backend/pints/utils.py:
--------------------------------------------------------------------------------
1 | import os
2 | from cryptography.fernet import Fernet
3 |
4 | FERNET_KEY = os.environ.get('PAPER_FERNET_KEY')
5 |
def encrypt(e):
    """Encrypt string *e* with the shared Fernet key; return the token as str."""
    return Fernet(FERNET_KEY).encrypt(e.encode("utf-8")).decode()
10 |
def decrypt(d):
    """Decrypt Fernet token *d* (str) back to the original plaintext string."""
    token = d.encode("utf-8")
    return Fernet(FERNET_KEY).decrypt(token).decode()
--------------------------------------------------------------------------------
/frontend/src/main.js:
--------------------------------------------------------------------------------
// App entry point: global styles must load before the app mounts.
import '@/assets/styles/tailwind.css'
// import Vue from 'vue'
import { createApp } from 'vue'
import App from './App.vue'
import router from './router'
// mitt is a tiny event bus (Vue 3 removed $on/$emit from the instance).
import mitt from 'mitt';
const emitter = mitt();

const app = createApp(App)
app.use(router)
// NOTE(review): productionTip is a Vue 2 config option; Vue 3 appears to
// ignore it — confirm and consider removing.
app.config.productionTip = false
// Expose the event bus to every component as `this.emitter`.
app.config.globalProperties.emitter = emitter
app.mount('#app')
14 |
15 |
16 |
--------------------------------------------------------------------------------
/frontend/README.md:
--------------------------------------------------------------------------------
1 | # pulse frontend
2 |
3 | ## Project setup
4 | ```
5 | yarn install
6 | ```
7 |
8 | ### Compiles and hot-reloads for development
9 | ```
10 | yarn serve
11 | ```
12 |
13 | ### Compiles and minifies for production
14 | ```
15 | yarn build
16 | ```
17 |
18 | ### Lints and fixes files
19 | ```
20 | yarn lint
21 | ```
22 |
23 | ### Customize configuration
24 | See [Configuration Reference](https://cli.vuejs.org/config/).
25 |
--------------------------------------------------------------------------------
/frontend/src/components/PaperCurrency.vue:
--------------------------------------------------------------------------------
1 |
2 |
6 |
7 |
8 |
--------------------------------------------------------------------------------
/backend/requirements.txt:
--------------------------------------------------------------------------------
1 | Flask
2 | Gunicorn
3 | Flask-Cors
4 | Flask-SQLAlchemy
5 | psycopg2
6 | pandas
7 | slack_sdk==3.5.1
8 | altair==4.1.0
9 | altair_saver==0.5.0
10 | selenium==3.141.0
11 | cloudstorage[amazon]==0.11.0
12 | # python-magic-bin==0.4.14
13 | stripe==2.56.0
14 | pyyaml==5.4.1
15 | # dbt==0.18.1
16 | dbt-postgres==0.19.1
17 | cryptography==3.4.7
18 | apscheduler==3.7.0
19 | magic-admin==0.0.5
20 |
21 | # GOOGLE STUFF
22 | # oauth2client==4.1.2
23 | google
24 | google-auth
25 | google-api-core
26 | google-api-python-client
27 | httplib2
28 |
--------------------------------------------------------------------------------
/backend/dbt/models/stripe/events.sql:
--------------------------------------------------------------------------------
1 | with events as (
2 | select
3 | c.details ->> 'id' as event_id,
4 | c.details ->> 'type' as event_type,
5 | to_timestamp((c.details ->> 'created')::int) as "created_on",
6 | c.details -> 'data' -> 'object' as event,
7 | c.details -> 'data' -> 'object' ->> 'customer' as customer_id
8 | from
9 | public.stripe_events as c
10 | where c.team_id = {{ env_var('PAPER_DBT_TEAM_ID') }}
11 | )
12 | select
13 | c.email,
14 | e.*
15 | from events as e left join
16 | {{ref('customers')}} as c on e.customer_id = c.customer_id
--------------------------------------------------------------------------------
/backend/dbt/profiles.yml:
--------------------------------------------------------------------------------
1 | default:
2 | outputs:
3 | dev:
4 | type: postgres
5 | threads: 8
6 | host: oregon-postgres.render.com
7 | port: 5432
8 | user: admin
9 | pass: "{{ env_var('PAPER_DBT_PASSWORD') }}"
10 | dbname: paperdb
11 | schema: "{{ env_var('PAPER_DBT_SCHEMA') }}"
12 | prod:
13 | type: postgres
14 | threads: 8
15 | host: oregon-postgres.render.com
16 | port: 5432
17 | user: admin
18 | pass: "{{ env_var('PAPER_DBT_PASSWORD') }}"
19 | dbname: paperdb
20 | schema: "{{ env_var('PAPER_DBT_SCHEMA') }}"
21 | target: dev
22 |
--------------------------------------------------------------------------------
/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: "3.9" # optional since v1.27.0
2 | services:
3 | db:
4 | image: 'postgres:13-alpine'
5 | container_name: postgres
6 | ports:
7 | - "5432:5432"
8 | frontend:
9 | build:
10 | context: ./frontend
11 | dockerfile: Dockerfile-Dev
12 | ports:
13 | - "8080:8080"
14 | container_name: frontend
15 | backend:
16 | build:
17 | context: ./backend
18 | dockerfile: Dockerfile
19 | ports:
20 | - "5000:5000"
21 | env_file: ./backend/.env
22 | environment:
23 | - "PORT:5000"
24 | - "WORKERS:4"
25 | volumes:
26 | logvolume01: {}
--------------------------------------------------------------------------------
/backend/dbt/README.md:
--------------------------------------------------------------------------------
1 | Welcome to your new dbt project!
2 |
3 | ### Using the starter project
4 |
5 | Try running the following commands:
6 | - dbt run
7 | - dbt test
8 |
9 |
10 | ### Resources:
11 | - Learn more about dbt [in the docs](https://docs.getdbt.com/docs/introduction)
12 | - Check out [Discourse](https://discourse.getdbt.com/) for commonly asked questions and answers
13 | - Join the [chat](http://slack.getdbt.com/) on Slack for live discussions and support
14 | - Find [dbt events](https://events.getdbt.com) near you
15 | - Check out [the blog](https://blog.getdbt.com/) for the latest news on dbt's development and best practices
16 |
--------------------------------------------------------------------------------
/backend/dbt/models/stripe/plans.sql:
--------------------------------------------------------------------------------
1 | with plans as (
2 | select
3 | details ->> 'id' as plan_id,
4 | details ->> 'name' as "name",
5 | to_timestamp((p.details ->> 'created')::int) as "created_on",
6 | details ->> 'currency' as "currency",
7 | details ->> 'amount' as "amount",
8 | details ->> 'product' as "product",
9 | details ->> 'active' as "active",
10 | details ->> 'billing_scheme' as "billing_scheme",
11 | details ->> 'trial_period_days' as "trial_period_days",
12 | details -> 'tiers' as "tiers",
13 | details ->> 'interval' as "interval"
14 | from public.stripe_plans as p
15 | where team_id = {{ env_var('PAPER_DBT_TEAM_ID') }}
16 | )
17 |
18 | select * from plans
--------------------------------------------------------------------------------
/frontend/tailwind.config.js:
--------------------------------------------------------------------------------
1 | module.exports = {
2 | future: {
3 | // removeDeprecatedGapUtilities: true,
4 | // purgeLayersByDefault: true,
5 | },
6 | mode: 'jit',
7 | purge: [
8 | './src/**/*.html',
9 | './src/**/*.vue',
10 | './src/**/*.jsx',
11 | ],
12 | theme: {
13 | extend: {
14 | boxShadow: {
15 | blue: '0 4px 14px 0 rgba(19, 51, 81, 0.39)',
16 | },
17 | },
18 | },
19 | variants: {
20 | extend: {
21 | opacity: ['disabled', 'group-hover'],
22 | }
23 | },
24 | plugins: [
25 | require('@tailwindcss/forms'),
26 | require('@tailwindcss/typography'),
27 | require('@tailwindcss/aspect-ratio'),
28 | ]
29 | }
30 |
--------------------------------------------------------------------------------
/backend/dbt/models/stripe/customer_emails.sql:
--------------------------------------------------------------------------------
1 |
2 | with emails as (
3 | select
4 | details ->> 'customer_email' as email,
5 | details ->> 'customer' as customer_id,
6 | details ->> 'created' as created
7 | from public.stripe_invoices
8 | where details ->> 'customer_email' is not null
9 | and team_id = {{ env_var('PAPER_DBT_TEAM_ID') }}
10 | ), max_email as (
11 | select
12 | customer_id,
13 | max(created) as created
14 | from
15 | emails
16 | group by
17 | customer_id
18 | )
19 |
20 | select
21 | e.customer_id,
22 | e.email,
23 | to_timestamp((e.created)::int) as "created_on"
24 | from emails as e inner join
25 | max_email as me on e.customer_id = me.customer_id and e.created = me.created
--------------------------------------------------------------------------------
/backend/pints/cabinet.py:
--------------------------------------------------------------------------------
1 | import os
2 | from cloudstorage.drivers.amazon import S3Driver
3 |
4 | AWS_ACCESS_KEY_ID = os.environ.get('PAPER_AWS_ACCESS_KEY_ID')
5 | AWS_SECRET_ACCESS_KEY = os.environ.get('PAPER_AWS_SECRET_ACCESS_KEY')
6 | PAPER_STORAGE_CONTAINER = os.environ.get('PAPER_STORAGE_CONTAINER', 'paper-metrics')
7 |
8 | if AWS_ACCESS_KEY_ID:
9 | storage = S3Driver(key=AWS_ACCESS_KEY_ID, secret=AWS_SECRET_ACCESS_KEY)
10 |
def file(filename):
    """Upload *filename* to the configured S3 container and return a public URL.

    NOTE(review): shadows the builtin name ``file``; kept for caller
    compatibility.
    """
    # Fix: honor the PAPER_STORAGE_CONTAINER setting (default 'paper-metrics')
    # declared above instead of a second hard-coded literal.
    container = storage.get_container(PAPER_STORAGE_CONTAINER)
    blob = container.upload_blob(filename, acl='public-read')
    url = blob.generate_download_url()
    # The blob is public-read, so drop the presigned-auth query string to get
    # a stable, shareable link.
    if '?' in url:
        url = url.split('?')[0]
    return url
18 |
--------------------------------------------------------------------------------
/backend/dbt/models/stripe/invoice_items.sql:
--------------------------------------------------------------------------------
1 | with invoice_lines as (
2 | select *,
3 | jsonb_array_elements(lines_data) as lines
4 | from {{ref('invoices')}} as i
5 | ), invoice_lines2 as (
6 | select
7 | il.customer_id,
8 | il.invoice_id,
9 | il.created_on,
10 | lines ->> 'id' as line_id, -- this is the subscription_id if it is a subscription item
11 | lines -> 'plan' ->> 'billing_scheme' as billing_scheme,
12 | lines -> 'plan' ->> 'id' as plan_id,
13 | (lines -> 'plan' ->> 'amount')::integer as plan_amount,
14 | lines -> 'plan' ->> 'interval' as plan_interval,
15 | lines ->> 'amount' as invoice_amount,
16 | (lines ->> 'quantity')::int as quantity,
17 | (lines ->> 'proration')::bool as proration,
18 | lines -> 'plan' ->> 'tiers' as plan_tiers
19 | from invoice_lines as il
20 | )
21 | select i.*
22 | from
23 | invoice_lines2 as i
24 | -- order by i.created_on desc
--------------------------------------------------------------------------------
/frontend/src/assets/styles/tailwind.css:
--------------------------------------------------------------------------------
1 | /* purgecss start ignore */
2 | @tailwind base;
3 |
4 | @tailwind components;
5 |
6 | .funders-table {
7 | @apply min-w-full;
8 | }
9 |
10 | .funders-th {
11 | @apply px-6 py-3 bg-gray-50 text-left text-xs font-medium text-gray-500 uppercase tracking-wider;
12 | }
13 |
14 | .funders-tbody {
15 | @apply bg-white;
16 | }
17 |
18 | .funders-td {
19 | @apply px-6 py-4 text-sm font-medium text-gray-900;
20 | }
21 |
22 | @tailwind utilities;
23 |
24 | /* purgecss end ignore */
25 |
26 | .w-28 {
27 | width: 7rem;
28 | }
29 |
30 | .tooltip .tooltip-text {
31 | visibility: hidden;
32 | text-align: center;
33 | padding: 2px 6px;
34 | position: absolute;
35 | z-index: 100;
36 | /* margin-left: -10px; */
37 | }
38 | .tooltip:hover .tooltip-text {
39 | visibility: visible;
40 | }
41 |
42 | .w-11 {
43 | width: 2.75rem;
44 | }
--------------------------------------------------------------------------------
/frontend/public/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
16 |
17 | Paper
18 |
19 |
20 |
23 |
24 |
25 |
26 |
27 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2021 Paper Financial
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
--------------------------------------------------------------------------------
/backend/dbt/models/stripe/subscriptions.sql:
--------------------------------------------------------------------------------
1 | select
2 | s.details ->> 'id' as "subscription_id",
3 | s.details ->> 'customer' as "customer_id",
4 | c.email,
5 | s.details ->> 'status' as "status",
6 | (s.details -> 'plan' ->> 'amount')::integer as "plan_amount",
7 | (s.details -> 'plan' ->> 'interval') as "plan_interval",
8 | s.details -> 'items' -> 'data' -> 0 -> 'quantity' as "quantity",
9 | s.details -> 'items' -> 'data' -> 0 -> 'plan' -> 'tiers' as "tiers",
10 | s.details -> 'discount' -> 'coupon' -> 'percent_off' as "percent_off",
11 | s.details -> 'discount' -> 'coupon' -> 'amount_off' as "amount_off",
12 | s.details -> 'discount' -> 'coupon' -> 'duration' as "discount_duration",
13 | to_timestamp((s.details ->> 'canceled_at')::int) as "canceled_dt",
14 | to_timestamp((s.details ->> 'start_date')::int) as "start_dt",
15 | to_timestamp((s.details ->> 'created')::int) as "created_on",
16 | (s.details ->> 'created')::int as "created",
17 | (s.details ->> 'canceled_at')::int as "canceled_at"
18 | from
19 | public.stripe_subscriptions as s left join
20 | {{ref('customers')}} as c on (s.details ->> 'customer') = c.customer_id
21 | where team_id = {{ env_var('PAPER_DBT_TEAM_ID') }}
--------------------------------------------------------------------------------
/backend/dbt/models/stripe/invoice_facts.sql:
--------------------------------------------------------------------------------
-- Combine tiered and untiered invoice line amounts and normalize each
-- invoice's total to a monthly recurring figure (base_mrr).
with untiered as (
    select
        i.*,
        plan_amount as amount,
        line_id as subscription_id
    from {{ref('invoice_items')}} as i
    -- proration is cast to boolean upstream; Postgres accepts the literal
    -- 'no' as false, so this excludes proration line items
    where proration = 'no'
    and billing_scheme != 'tiered'
), tier_and_untiered as (
    select a.*
    from
    (
        -- tiered plans: per-tier amounts already computed in invoices_tiered
        select
            invoice_id,
            customer_id,
            subscription_id,
            plan_id,
            plan_interval,
            amount,
            quantity
        from {{ref('invoices_tiered')}} as tiered
        union all
        -- untiered plans: per-unit plan amount times quantity
        select
            invoice_id,
            customer_id,
            subscription_id,
            plan_id,
            plan_interval,
            amount*quantity as amount,
            quantity
        from untiered as untiered
        where quantity > 0
    ) as a
), invoices2 as (
    select
        i.*,
        -- normalize: divide by 100 (amounts arrive in cents) and by 12 for
        -- annual plans, yielding a monthly figure
        case
            when plan_interval = 'year' then il.amount / (12*100)
            else il.amount / 100
        end as base_mrr,
        il.quantity,
        il.plan_interval,
        il.plan_id,
        il.subscription_id
    from
        {{ref('invoices')}} as i left join
        tier_and_untiered as il on i.invoice_id = il.invoice_id
)

select i.*
from
    invoices2 as i
--------------------------------------------------------------------------------
/backend/dbt/models/stripe/invoices_tiered.sql:
--------------------------------------------------------------------------------
1 |
2 | with sum_invoice_items as (
3 | select
4 | customer_id,
5 | invoice_id,
6 | line_id as subscription_id,
7 | plan_id,
8 | plan_interval,
9 | sum(i.quantity) as quantity
10 | from {{ref('invoice_items')}} as i
11 | where proration = 'no'
12 | and billing_scheme = 'tiered'
13 | group by
14 | customer_id,
15 | invoice_id,
16 | line_id,
17 | plan_id,
18 | plan_interval
19 | ), invoice_items as (
20 | select
21 | customer_id,
22 | invoice_id,
23 | subscription_id,
24 | plan_id,
25 | plan_interval,
26 | generate_series(1, i.quantity) as quantity
27 | from sum_invoice_items as i
28 | ), invoice_items2 as (
29 | select
30 | customer_id,
31 | invoice_id,
32 | subscription_id,
33 | i.plan_id,
34 | i.plan_interval,
35 | sum(pt.amount) as amount,
36 | sum(quantity) as quantity
37 | from
38 | invoice_items as i left join
39 | {{ref('plan_tiers')}} as pt on i.plan_id = pt.plan_id and
40 | i.quantity >= pt.lower_tier and i.quantity <= pt.upper_tier
41 | group by
42 | customer_id,
43 | invoice_id,
44 | subscription_id,
45 | i.plan_id,
46 | i.plan_interval
47 | )
48 |
49 | select i.*
50 | from
51 | invoice_items2 as i
--------------------------------------------------------------------------------
/backend/dbt/models/stripe/plan_tiers.sql:
--------------------------------------------------------------------------------
-- Expand each tiered plan's `tiers` JSON array into one row per tier with
-- explicit [lower_tier, upper_tier] quantity bounds, so invoice quantities
-- can be joined to the tier they fall into.
with plans as (
    select
        plan_id,
        -- one row per tier object
        jsonb_array_elements(tiers) as tiers
    from {{ref('plans')}} as p
    -- tiers is extracted with `->` upstream, so untiered plans carry a JSON
    -- null, which serializes as the literal 'null'
    where p.tiers != 'null'
), plans2 as (
    select
        (tiers ->> 'up_to')::int as up_to,
        (tiers ->> 'flat_amount')::float as flat_amount,
        (tiers ->> 'amount')::float as amount,
        *
    from plans
), plans3 as (
    select
        plan_id,
        -- a tier prices either a flat amount for the whole tier or a
        -- per-unit amount; prefer the flat amount when present
        coalesce(flat_amount, amount) as amount,
        -- the final tier has up_to = null ("no upper limit"); substitute a
        -- large sentinel so ordering and the boundary arithmetic below work
        coalesce(up_to, 999999999999) as up_to,
        -- NOTE(review): row_number() is ordered but not partitioned by
        -- plan_id — verify tiers of one plan always get consecutive rnum
        row_number() OVER (order by plan_id desc, up_to) as rnum
    from plans2
    order by plan_id desc, up_to asc
), plans4 as (
    -- p2 = next tier (rnum + 1), p3 = previous tier (rnum - 1) of the same
    -- plan. NOTE(review): this treats each tier's up_to as the LOWER bound
    -- of its range (upper bound = next tier's up_to - 1), with the sentinel
    -- row starting just past the previous tier — confirm this matches the
    -- intended Stripe tier semantics.
    select
        p.plan_id,
        p.amount,
        -- p.up_to as lower_tier,
        case when p.up_to = 999999999999 then p3.up_to + 1 else p.up_to end as lower_tier,
        case
            when p.up_to = 999999999999 then p.up_to
            when p2.up_to = 999999999999 then p.up_to else p2.up_to - 1 end as upper_tier
        -- p.up_to,
        -- p2.up_to as p2_up_to,
        -- p3.up_to as past
    from plans3 as p left join
        plans3 as p2 on p.plan_id = p2.plan_id and p.rnum = p2.rnum - 1 left join
        plans3 as p3 on p.plan_id = p3.plan_id and p.rnum = p3.rnum + 1
)

select * from plans4
--------------------------------------------------------------------------------
/backend/dbt/models/stripe/invoices.sql:
--------------------------------------------------------------------------------
--TODO: support multiple invoices for a single customer
-- Parse raw Stripe invoice JSON for this team and keep only the latest
-- invoice per customer per calendar month (see TODO above).
with invoices as (
    select
        details ->> 'customer' as customer_id,
        details ->> 'id' as invoice_id,
        to_timestamp((details ->> 'created')::int) as created_on,
        date_trunc('month', to_timestamp((details ->> 'created')::int)) as created_month,
        details ->> 'total' as total,
        details ->> 'amount_due' as amount_due,
        (details -> 'lines' -> 'data') as lines_data,
        -- discount is extracted as text, so recast to jsonb before drilling
        -- into the coupon object
        (((details ->> 'discount')::jsonb) -> 'coupon' ->> 'amount_off')::float as amount_off,
        (((details ->> 'discount')::jsonb) -> 'coupon' ->> 'percent_off_precise')::float as percent_off_precise,
        (((details ->> 'discount')::jsonb) -> 'coupon' ->> 'duration')::text as coupon_duration
    from
        public.stripe_invoices as si
    where 1=1
        and team_id = {{ env_var('PAPER_DBT_TEAM_ID') }}
), max_in_month as (
    -- newest invoice timestamp per customer per month
    select customer_id, created_month, max(created_on) as created_on
    from
        invoices as i
    group by customer_id, created_month
), invoices2 as (
    -- keep only the rows matching that newest timestamp
    select
        i.*
    from invoices as i inner join
        max_in_month as m on i.customer_id = m.customer_id and i.created_on = m.created_on
)
select * from invoices2
--------------------------------------------------------------------------------
/render.yaml:
--------------------------------------------------------------------------------
1 | services:
2 | # A Docker web service
3 | - type: web
4 | name: pulse-backend
5 | env: docker
6 | repo: https://github.com/mike-paper/pulse.git # optional
7 | region: oregon # optional (defaults to oregon)
8 | plan: starter plus # optional (defaults to starter)
9 | branch: main # optional (uses repo default)
10 | dockerfilePath: ./backend/Dockerfile
11 | # dockerCommand: .
12 | dockerContext: ./backend
13 | numInstances: 1 # optional (defaults to 1)
14 | healthCheckPath: /ping
15 | envVars:
16 | - key: WORKERS
17 | value: 4
18 | - key: PORT
19 | value: 5000
20 | - fromGroup: pulse
21 | # A static site
22 | - type: web
23 | env: static
24 | name: pulse-frontend
25 | repo: https://github.com/mike-paper/pulse.git
26 | branch: main # optional (uses repo default)
27 | buildCommand: cd frontend && yarn add @vue/cli-service && yarn build
28 | staticPublishPath: frontend/dist
29 | pullRequestPreviewsEnabled: true # optional
30 | domains:
31 | - pulse.trypaper.io
32 | routes:
33 | - type: rewrite
34 | source: /*
35 | destination: /index.html
36 | - type: redirect
37 | source: /
38 | destination: /metrics
--------------------------------------------------------------------------------
/backend/dbt/dbt_project.yml:
--------------------------------------------------------------------------------
1 |
2 | # Name your project! Project names should contain only lowercase characters
3 | # and underscores. A good package name should reflect your organization's
4 | # name or the intended use of these models
5 | name: 'paper'
6 | version: '1.0.1'
7 | config-version: 2
8 |
9 | # This setting configures which "profile" dbt uses for this project.
10 | profile: 'default'
11 |
12 | # These configurations specify where dbt should look for different types of files.
13 | # The `source-paths` config, for example, states that models in this project can be
14 | # found in the "models/" directory. You probably won't need to change these!
15 | source-paths: ["models"]
16 | analysis-paths: ["analysis"]
17 | test-paths: ["tests"]
18 | data-paths: ["data"]
19 | macro-paths: ["macros"]
20 | snapshot-paths: ["snapshots"]
21 |
22 | target-path: "target" # directory which will store compiled SQL files
23 | clean-targets: # directories to be removed by `dbt clean`
24 | - "target"
25 | - "dbt_modules"
26 |
27 |
28 | # Configuring models
29 | # Full documentation: https://docs.getdbt.com/docs/configuring-models
30 |
31 | # In this example config, we tell dbt to build all models in the example/ directory
32 | # as tables. These settings can be overridden in the individual model files
33 | # using the `{{ config(...) }}` macro.
34 | models:
35 | paper:
36 | stripe:
37 | materialized: table
38 | schema: stripe
39 |
40 |
--------------------------------------------------------------------------------
/frontend/src/components/PaperToggle.vue:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
11 |
15 |
16 |
17 |
18 |
19 |
20 |
50 |
--------------------------------------------------------------------------------
/frontend/src/router/index.js:
--------------------------------------------------------------------------------
1 | import {createRouter, createWebHistory} from 'vue-router'
2 | import Metrics from '../views/Metrics.vue'
3 | import Analyze from '../views/Analyze.vue'
4 | import Settings from '../views/Settings.vue'
5 | import Login from '../views/Login.vue'
6 | import Callback from '../views/Callback.vue'
7 | import Slack2 from '../views/Slack2.vue'
8 | import Logout from '../views/Logout.vue'
9 |
10 | // Vue.use(VueRouter)
11 |
const routerHistory = createWebHistory()

// Route table. Metrics doubles as the landing page ('/') and Settings
// also backs the '/team' path.
const routes = [
  { path: '/', name: 'Landing', component: Metrics },
  { path: '/metrics', name: 'Metrics', component: Metrics },
  { path: '/analyze', name: 'Analyze', component: Analyze },
  { path: '/settings', name: 'Settings', component: Settings },
  { path: '/team', name: 'Team', component: Settings },
  { path: '/login', name: 'Login', component: Login },
  { path: '/slack2', name: 'Slack2', component: Slack2 },
  { path: '/callback', name: 'Callback', component: Callback },
  { path: '/logout', name: 'Logout', component: Logout },
]

// vue-router 4: createRouter is a plain factory function (no `new`), and the
// vue-router 3 options `mode` / `base` no longer exist — history mode comes
// from createWebHistory() above (which also accepts a base path if needed).
const router = createRouter({
  history: routerHistory,
  routes,
})

export default router
70 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Pulse - Open source SaaS metrics
2 |
3 |
4 |
5 | \
6 |
7 |
8 | - **Transparency** - Easily drill down and see how metrics are calculated at the customer level
- **Open source & Private** - The entire app is open source and easy to self-host
- **Extensible & Hackable** - Have some weird edge case you need to exclude? Easily update your data model with a little SQL.
11 | - **Push first** - Metrics can be pushed to Slack, Sheets, and email so you don't need to check yet another dashboard
12 |
13 | \
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 | \
22 |
23 | \
24 |
25 |
26 | ### There is a [free hosted version here](https://pulse.trypaper.io/?ref=github)
27 |
28 | ## Self-Hosted
29 |
30 | * Docker - See [docker-compose.yml](docker-compose.yml)
31 | * Render - See [render.yaml](render.yaml) as a guide (you'll need to remove the domain)
32 | * GCP - TODO
33 | * AWS - TODO
34 |
35 | ## Google Sheets
36 |
37 | 1. Go to the Sheet you want to connect to Pulse
38 | 2. Click "Share" in the top right
39 | 3. Share the sheet with paperbot@paperfinancial.iam.gserviceaccount.com
40 | 4. Add the Spreadsheet ID (long ID in the Sheet URL) and sheet name to https://pulse.trypaper.io/settings
41 |
42 | 
43 |
44 |
45 |
--------------------------------------------------------------------------------
/frontend/src/store.js:
--------------------------------------------------------------------------------
1 | // import axios from 'axios';
2 |
// Global shared app state: a plain object mutated directly by components
// (no Vuex). Flags below gate initial data loads and UI visibility.
export const store = {
  state: {
    showDebugStuffNow: false,   // toggles debug-only UI elements
    isLoggedIn: false,
    checkedLogin: false,        // true once the initial auth check completed
    gotUserData: false,
    gotDbt: false,
    hideSidebar: false,
    gotMetrics: false,
    gotEvents: false,
    metricData: {},
    events: {data: []},
    // Current user/session info returned by the backend.
    user: {
      ok: false,
      oauth: false,
      hasStripe: false,         // true once a Stripe API key is connected
      settings: {}
    },
    slackCode: false,           // OAuth code captured during Slack install flow
    // settings: {
    //   notifications: {
    //     alerts: {
    //       slack: true,
    //       email: false,
    //     },
    //     weekly: {
    //       slack: true,
    //       email: true,
    //     },
    //     monthly: {
    //       slack: true,
    //       email: true,
    //     },
    //   }
    // },
    dbt: {},                    // parsed dbt models.yml metadata
    jobStatuses: {},
    // State for the ad-hoc SQL "Analyze" screen.
    analysis: {
      uuid: false,
      mode: 'search',
      code: 'select *\nfrom customers as c',
      results: {
        rows: [],
        cols: [],
      },
      // Vega-lite style visualization config for query results.
      viz: {
        type: 'grid',
        encoding: {
          "x": {"field": false, "type": "ordinal"},
          "y": {"field": false, "type": "quantitative"},
        }
      }
    },
    userData: {
      savedFunders: []
    },
    // Transient toast/notification message shown in the UI.
    msg: {
      show: false,
      primary: '',
      secondary: '',
      icon: '',
      type: '',
      time: 8000,               // display duration in ms
    }
  }
}
--------------------------------------------------------------------------------
/backend/.gitignore:
--------------------------------------------------------------------------------
1 | .DS_Store
2 |
3 | # Byte-compiled / optimized / DLL files
4 | __pycache__/
5 | *.py[cod]
6 | *$py.class
7 |
8 | # C extensions
9 | *.so
10 |
11 | # Distribution / packaging
12 | .Python
13 | build/
14 | develop-eggs/
15 | dist/
16 | downloads/
17 | eggs/
18 | .eggs/
19 | lib/
20 | lib64/
21 | parts/
22 | sdist/
23 | var/
24 | wheels/
25 | *.egg-info/
26 | .installed.cfg
27 | *.egg
28 | MANIFEST
29 |
30 | # PyInstaller
31 | # Usually these files are written by a python script from a template
32 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
33 | *.manifest
34 | *.spec
35 |
36 | # Installer logs
37 | pip-log.txt
38 | pip-delete-this-directory.txt
39 |
40 | # Unit test / coverage reports
41 | htmlcov/
42 | .tox/
43 | .coverage
44 | .coverage.*
45 | .cache
46 | nosetests.xml
47 | coverage.xml
48 | *.cover
49 | .hypothesis/
50 | .pytest_cache/
51 |
52 | # Translations
53 | *.mo
54 | *.pot
55 |
56 | # Django stuff:
57 | *.log
58 | local_settings.py
59 | db.sqlite3
60 |
61 | # Flask stuff:
62 | instance/
63 | .webassets-cache
64 |
65 | # Scrapy stuff:
66 | .scrapy
67 |
68 | # Sphinx documentation
69 | docs/_build/
70 |
71 | # PyBuilder
72 | target/
73 |
74 | # Jupyter Notebook
75 | .ipynb_checkpoints
76 |
77 | # pyenv
78 | .python-version
79 |
80 | # celery beat schedule file
81 | celerybeat-schedule
82 |
83 | # SageMath parsed files
84 | *.sage.py
85 |
86 | # Environments
87 | .env
88 | .venv
89 | env/
90 | venv/
91 | ENV/
92 | env.bak/
93 | venv.bak/
94 |
95 | # Spyder project settings
96 | .spyderproject
97 | .spyproject
98 |
99 | # Rope project settings
100 | .ropeproject
101 |
102 | # mkdocs documentation
103 | /site
104 |
105 | # mypy
106 | .mypy_cache/
107 |
--------------------------------------------------------------------------------
/frontend/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "paper",
3 | "version": "0.2.0",
4 | "private": true,
5 | "license": "BUSL-1.1",
6 | "scripts": {
7 | "serve": "vue-cli-service serve",
8 | "build": "vue-cli-service build",
9 | "lint": "vue-cli-service lint"
10 | },
11 | "dependencies": {
12 | "@headlessui/vue": "^1.2.0",
13 | "@heroicons/vue": "^1.0.1",
14 | "@magic-ext/oauth": "^0.7.0",
15 | "@tailwindcss/aspect-ratio": "^0.2.0",
16 | "@tailwindcss/forms": "^0.2.1",
17 | "@tailwindcss/typography": "^0.4.0",
18 | "axios": "^0.21.0",
19 | "core-js": "^3.6.5",
20 | "gridjs": "^5.0.0",
21 | "magic-sdk": "^4.2.1",
22 | "mitt": "^2.1.0",
23 | "mousetrap": "^1.6.5",
24 | "postcss": "^8.2.6",
25 | "ssf": "^0.11.2",
26 | "tailwindcss": "^2.1.0",
27 | "typed.js": "^2.0.11",
28 | "vega": "^5.20.2",
29 | "vega-embed": "^6.18.1",
30 | "vega-lite": "^5.1.0",
31 | "vue": "^3.0.5",
32 | "vue-currency-input": "2.0.0-rc.1",
33 | "vue-router": "4",
34 | "vue3-popper": "^1.2.0"
35 | },
36 | "devDependencies": {
37 | "@vue/cli-plugin-babel": "~4.5.0",
38 | "@vue/cli-plugin-eslint": "~4.5.0",
39 | "@vue/cli-plugin-router": "~4.5.0",
40 | "@vue/cli-service": "~4.5.0",
41 | "@vue/compiler-sfc": "^3.0.5",
42 | "autoprefixer": "^9.0.0",
43 | "babel-eslint": "^10.1.0",
44 | "eslint": "^6.7.2",
45 | "eslint-plugin-vue": "^6.2.2"
46 | },
47 | "eslintConfig": {
48 | "root": true,
49 | "env": {
50 | "node": true
51 | },
52 | "extends": [
53 | "plugin:vue/essential",
54 | "eslint:recommended"
55 | ],
56 | "parserOptions": {
57 | "parser": "babel-eslint"
58 | },
59 | "rules": {
60 | "no-unused-vars": "off",
61 | "vue/valid-v-model": "off"
62 | }
63 | },
64 | "browserslist": [
65 | "> 1%",
66 | "last 2 versions",
67 | "not dead"
68 | ]
69 | }
70 |
--------------------------------------------------------------------------------
/backend/pints/modeling.py:
--------------------------------------------------------------------------------
1 | import os
2 | import subprocess
3 | import json
4 | import yaml
5 |
6 | from logger import logger
7 |
def getDbt():
    """Read and parse the stripe dbt model definitions from models.yml."""
    with open(r'./dbt/models/stripe/models.yml') as fh:
        parsed = yaml.safe_load(fh)
    logger.info(f'getDbt: {parsed}')
    return parsed
13 |
def getCols(sql, cols):
    """Pair each column name with its display format from sql['selected'].

    Columns without a matching selected entry (or without a 'format' key)
    get format False.
    """
    enriched = []
    for name in cols:
        match = next((s for s in sql['selected'] if s['alias'] == name), None)
        fmt = match.get('format', False) if match is not None else False
        enriched.append({'name': name, 'format': fmt})
    return enriched
29 |
def runDbt(teamId):
    """Run dbt for one team via run_dbt.sh and parse its JSON log output.

    Returns a tuple (dbtLogs, dbtErrors): table-creation log records and
    ERROR-level records, each as the parsed JSON dict.
    """
    logger.info(f"run_dbt user: {teamId}")
    # The dbt profile reads these env vars to target the per-team schema;
    # Popen inherits the current environment, so setting them here is enough.
    os.environ['PAPER_DBT_SCHEMA'] = f"team_{teamId}"
    os.environ['PAPER_DBT_TEAM_ID'] = f"{teamId}"
    session = subprocess.Popen(['./dbt/run_dbt.sh'],
        stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout, stderr = session.communicate()
    logger.info(f'run_dbt stdout: {stdout}')
    dbtLogs = []
    dbtErrors = []
    if stderr:
        logger.error(f'run_dbt stderr: {stderr}')
    try:
        # stdout is bytes; json.loads accepts bytes lines directly.
        # Each stdout line is expected to be one JSON log record.
        s = stdout.strip()
        lines = s.splitlines()
        for line in lines:
            j = json.loads(line)
            if j['levelname'] == 'ERROR':
                logger.error(f"run_dbt error: {j['message']}")
                dbtErrors.append(j)
            if 'OK created table' in j['message']:
                # Message looks like "... OK created table ... SELECT <n><ESC>..."
                # — pull the row count out from between 'SELECT' and the ANSI
                # escape that follows it. TODO confirm against dbt's log format.
                count = int(j['message'].split('SELECT')[1].split('\x1b')[0])
                # unique_id is dot-separated; the last segment is the table name.
                table = j['extra']['unique_id']
                table = table.split('.')
                table = table[len(table)-1]
                print(table, count)
                j['table'] = table
                j['count'] = count
                dbtLogs.append(j)
    except Exception as e:
        # Best-effort parsing: a malformed log line aborts parsing but the
        # records collected so far are still returned.
        logger.error(f'run_dbt log parse error: {e}')
    return dbtLogs, dbtErrors
--------------------------------------------------------------------------------
/backend/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM python:3.9
2 |
# Set up the Chrome apt repository so google-chrome-stable can be installed
# (the actual package-list update and install happen below)
5 | RUN wget -q -O - https://dl-ssl.google.com/linux/linux_signing_key.pub | apt-key add -
6 | RUN echo "deb http://dl.google.com/linux/chrome/deb/ stable main" >> /etc/apt/sources.list.d/google.list
7 |
8 | # Update the package list and install chrome
9 | RUN apt-get update -y
10 | RUN apt-get install -y libglib2.0-0 \
11 | libnss3 \
12 | libgconf-2-4 \
13 | libfontconfig1
14 | ENV CHROME_VERSION 90.0.4430.212
15 | # => was getting ERROR [ 6/18] RUN apt-get update -y && apt-get install -y google-chrome-stable=91.0.4472.164-1
16 | # upgraded to 92.0.4515.107, might need to downgrade to 91.0.4472.164 if something breaks in Slack
17 | # RUN apt-get update -y && apt-get install -y google-chrome-stable=$CHROME_VERSION-1
18 | # RUN apt-get install -y google-chrome-stable=$CHROME_VERSION-1
19 |
20 | # Check available versions here: https://www.ubuntuupdates.org/package/google_chrome/stable/main/base/google-chrome-stable
21 | # ARG CHROME_VERSION="81.0.4044.113-1"
22 | RUN wget --no-verbose -O /tmp/chrome.deb https://dl.google.com/linux/chrome/deb/pool/main/g/google-chrome-stable/google-chrome-stable_${CHROME_VERSION}-1_amd64.deb \
23 | && apt install -y /tmp/chrome.deb \
24 | && rm /tmp/chrome.deb
25 |
26 | # Set up Chromedriver Environment variables
27 | ENV CHROMEDRIVER_VERSION 90.0.4430.24
28 | ENV CHROMEDRIVER_DIR /chromedriver
29 | RUN mkdir $CHROMEDRIVER_DIR
30 |
31 | # Download and install Chromedriver
32 | RUN wget -q --continue -P $CHROMEDRIVER_DIR "http://chromedriver.storage.googleapis.com/$CHROMEDRIVER_VERSION/chromedriver_linux64.zip"
33 | RUN unzip $CHROMEDRIVER_DIR/chromedriver* -d $CHROMEDRIVER_DIR
34 |
35 | # Put Chromedriver into the PATH
36 | ENV PATH $CHROMEDRIVER_DIR:$PATH
37 | # update config to not run as root
38 | RUN sed -i 's|exec -a "$0" "$HERE/chrome" "$@"|exec -a "$0" "$HERE/chrome" "$@" --no-sandbox --user-data-dir |g' /opt/google/chrome/google-chrome
39 |
40 | COPY requirements.txt /app/
41 | RUN pip3 --default-timeout=600 install -r /app/requirements.txt
42 |
43 | EXPOSE 22
44 | EXPOSE 5000/tcp
45 |
46 | ENV PORT 5000
47 |
48 | COPY app.py /app/
49 | COPY logger.py /app/
50 | ADD pints /app/pints
51 | ADD metrics /app/metrics
52 | ADD static /app/static
53 | ADD dbt /app/dbt
54 |
55 | CMD cd app && exec gunicorn -t 900 --bind :$PORT --workers $WORKERS --threads 1 --preload app:app
--------------------------------------------------------------------------------
/frontend/src/views/Slack2.vue:
--------------------------------------------------------------------------------
1 |
2 |
9 |
10 |
11 |
84 |
85 |
--------------------------------------------------------------------------------
/frontend/src/views/Logout.vue:
--------------------------------------------------------------------------------
1 |
2 |
18 |
26 |
27 |
28 |
89 |
90 |
--------------------------------------------------------------------------------
/frontend/src/components/PaperMenu.vue:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
62 |
63 |
64 |
65 |
66 |
82 |
--------------------------------------------------------------------------------
/frontend/src/views/Callback.vue:
--------------------------------------------------------------------------------
1 |
2 |
10 |
11 |
12 |
104 |
105 |
--------------------------------------------------------------------------------
/backend/dbt/models/stripe/models.yml:
--------------------------------------------------------------------------------
1 | version: 2
2 |
3 | models:
4 | - name: mrr_facts
5 | description: >
6 | MRR (monthly recurring revenue) by month at customer level
7 | docs:
8 | show: true
9 | meta:
10 | label: Revenue History
11 | joins:
12 | - join: customers
13 | sql_on: mrr_facts.email = customers.email
14 | columns:
15 | - name: email
16 | description: Billing email for customer (get from Invoice if possible)
17 | meta:
18 | dimension:
19 | label: Email
20 | measures:
21 | customers:
22 | label: Customers
23 | type: count
24 | - name: plan_interval
25 | description: Monthly or Yearly (Annual)
26 | tests:
27 | - accepted_values:
28 | values: ['month', 'year']
29 | meta:
30 | dimension:
31 | label: Plan Interval
32 | - name: mrr_month_dt
33 | description: Data as of date (month)
34 | meta:
35 | dimension:
36 | label: Month
37 | timeframes: [week, month, year]
38 | - name: current_month
39 | description: Churned MRR (monthly recurring revenue)
40 | meta:
41 | dimension:
42 | label: Current Month (Flag)
43 | - name: mrr
44 | description: MRR (monthly recurring revenue)
45 | meta:
46 | dimension:
47 | label: MRR
48 | format: $#,##0
49 | measures:
50 | mrr_sum:
51 | label: Total MRR
52 | type: sum
53 | format: $#,##0
54 | mrr_avg:
55 | label: Avg MRR
56 | type: avg
57 | format: $#,##0
58 | - name: churned_mrr
59 | description: Churned MRR (monthly recurring revenue)
60 | meta:
61 | dimension:
62 | label: Churned MRR
63 | measures:
64 | churned_mrr_sum:
65 | label: Churned MRR
66 | type: sum
67 | - name: customers
68 | description: >
69 | Customers https://stripe.com/docs/api/customers
70 | docs:
71 | show: true
72 | meta:
73 | measures:
74 | total:
75 | type: count
76 | label: Count
77 | uniques:
78 | sql: count(distinct(email))
79 | label: Unique
80 | columns:
81 | - name: customer_id
82 | description: Customer ID
83 | meta:
84 | dimension:
85 | label: Customer ID
86 | - name: email
87 | description: Customer email
88 | meta:
89 | dimension:
90 | label: Email
91 | measures:
92 | customers:
93 | label: Customers
94 | type: count
95 | - name: created_on
96 | description: Customer created on
97 | meta:
98 | dimension:
99 | label: Customer Start Date
100 | timeframes: [week, month, year]
101 | # - name: plans
102 | # description: >
103 | # Plans https://stripe.com/docs/api/plans
104 | # docs:
105 | # show: true
106 | # columns:
107 | # - name: plan_id
108 | # description: Plan ID
109 | # meta:
110 | # dimension:
111 | # label: Plan ID
112 | # measures:
113 | # plans:
114 | # label: Plans
115 | # type: count
116 | # - name: name
117 | # description: Name
118 | # meta:
119 | # dimension:
120 | # label: Name
121 | # measures:
122 | # customers:
123 | # label: Customers
124 | # type: count
125 | # - name: created_on
126 | # description: Plan created on
127 | # meta:
128 | # dimension:
129 | # label: Created On
130 | # timeframes: [week, month, year]
--------------------------------------------------------------------------------
/backend/pints/yaml2sql.py:
--------------------------------------------------------------------------------
1 | import yaml
2 | from logger import logger
3 |
def getYaml(s):
    """Parse a YAML string into Python data using the safe loader."""
    parsed = yaml.safe_load(s)
    return parsed
6 |
def getAlias(column, dimOrMeas):
    """Return the display label for a column's dimension/measure entry.

    Falls back to the raw column name both when meta[dimOrMeas] is missing
    entirely and when it exists but has no 'label' key (the latter used to
    raise KeyError).
    """
    entry = column.get('meta', {}).get(dimOrMeas, {})
    a = entry.get('label', column['name'])
    print('alias:', a)
    return a
11 |
def getMeasAlias(measure, name):
    """Prefer the measure's configured label; otherwise use its key name."""
    if 'label' in measure:
        return measure['label']
    return name
14 |
def getDimSql(model, selected, column):
    """Build the select-list SQL fragment for a dimension column."""
    logger.info(f'column: {column}')
    dimension = column.get('meta', {}).get('dimension', {})
    # NOTE(review): models.yml declares `timeframes` (plural list); this reads
    # a singular `timeframe` key — presumably set by the caller when a single
    # timeframe is chosen. Confirm against the frontend.
    timeframe = dimension.get('timeframe', False)
    colFormat = dimension.get('format', False)
    qualified = f"{model['name']}.{column['name']}"
    if timeframe in ('year', 'month', 'day'):
        # Cast to date then text so grouping keys serialize cleanly.
        addSql = f"date_trunc('{timeframe}', {qualified})::date::text"
    elif timeframe:
        addSql = f"date_trunc('{timeframe}', {qualified})"
    else:
        addSql = qualified
    # Any column after the first needs a leading comma in the select list.
    if selected:
        addSql = f'\n,{addSql}'
    return {
        'sql': addSql,
        'format': colFormat,
        'column': column,
        'dimOrMeas': 'dimension'
    }
34 |
def getMeasSql(model, selected, measure, name, column):
    """Build the select-list SQL fragment for a measure (aggregated column).

    A custom `sql` expression takes precedence over a `type` aggregation.
    When the measure declares neither, the bare column is selected —
    previously that case left addSql unbound and raised UnboundLocalError.
    """
    alias = getMeasAlias(measure, name)
    agg = measure.get('type', False)
    colFormat = measure.get('format', False)
    # Fallback: no aggregation configured -> select the raw column.
    addSql = f'''{model['name']}.{column['name']} as "{alias}"'''
    if agg:
        addSql = f'''{agg}({model['name']}.{column['name']}) as "{alias}"'''
    # Custom sql aggregations (e.g. count(distinct(email))) override `type`.
    sqlAgg = measure.get('sql', False)
    if sqlAgg:
        addSql = f'''{sqlAgg} as "{alias}"'''
    # Any column after the first needs a leading comma in the select list.
    if len(selected) > 0:
        addSql = f'\n,{addSql}'
    return {
        'sql': addSql,
        'format': colFormat,
        'alias': alias,
        'column': name,
        'dimOrMeas': 'measure'
    }
57 |
def dbt2Sql(d, schema):
    """Compile selected dimensions/measures from dbt model metadata into SQL.

    d: parsed models.yml dict (with 'selected' flags set on dimensions and
       measures by the caller); schema: schema name to qualify table refs.
    Returns {'sql': <query string>, 'selected': <selected column descriptors>}.
    """
    sql = 'select\n'
    selected = []
    tables = []
    tableNames = []
    groupBy = '\ngroup by\n'
    orderBy = '\norder by\n'
    hasOrder = False
    hasGroup = False
    # Pass 1: selected dimensions — added to select list AND group by.
    for model in d['models']:
        for column in model.get('columns', []):
            dimension = column.get('meta', {}).get('dimension', {})
            if dimension.get('selected', False):
                alias = getAlias(column, 'dimension')
                dim = getDimSql(model, selected, column)
                dim['alias'] = alias
                addSql = dim['sql']
                # Group by uses the bare expression (no "as alias" suffix).
                groupBy+=addSql
                hasGroup = True
                if dimension.get('order_by', False):
                    hasOrder = True
                    orderBy+= f"{addSql} {dimension.get('order_by', 'asc')}"
                addSql = f'{addSql} as "{alias}"'
                sql+=addSql
                selected.append(dim)
                # Track each model once so it appears in the from clause.
                if model['name'] not in tableNames:
                    tables.append(model)
                    tableNames.append(model['name'])
    # Pass 2: selected measures — select list only (aggregated).
    for model in d['models']:
        for column in model.get('columns', []):
            measures = column.get('meta', {}).get('measures', {})
            for key, measure in measures.items():
                if measure.get('selected', False):
                    meas = getMeasSql(model, selected, measure, key, column)
                    addSql = meas['sql']
                    selected.append(meas)
                    sql+=addSql
                    if model['name'] not in tableNames:
                        tables.append(model)
                        tableNames.append(model['name'])
    sql+= '\nfrom\n'
    # `join` carries the previous table's sql_on condition forward: it is
    # emitted as " on ..." right after the NEXT table name is appended.
    join = False
    for idx, table in enumerate(tables):
        logger.info(f'table: {table}')
        sql+= f"{schema}.{table['name']} as {table['name']}"
        if join:
            sql+= f" on {join}"
            join = False
        if idx < len(tables)-1:
            # Fall back to a cross-join-like condition when no join is defined.
            defaultJoin = [{'sql_on': '1=1'}]
            join = table['meta'].get('joins', defaultJoin)[0]['sql_on']
            # TODO: support multiple joins
            sql+= f" left join \n"
            logger.info(f'adding join: {join}')

    if hasGroup:
        sql+= groupBy
    if hasOrder:
        sql+= orderBy
    return {
        'sql': sql,
        'selected': selected
    }
121 |
--------------------------------------------------------------------------------
/backend/dbt/models/stripe/mrr_facts.sql:
--------------------------------------------------------------------------------

-- mrr_facts: one row per customer per month with MRR and churn/expansion
-- deltas. Annual invoices are spread across up to 12 monthly rows; a
-- synthetic "pending" row is added for the current month.
with annuals as (
    -- Expand each annual invoice into monthly timestamps, capped at the
    -- earliest of +11 months, cancellation date, or today.
    select
        generate_series(
            i.created_on,
            (
                select last_day_of_month --needs to be capped at a year
                from public.d_date as d
                where d.date_actual = (
                    least(
                        (i.created_on::timestamp + interval '11 months')::date,
                        s.canceled_dt::date,
                        current_timestamp::date
                    )
                )
            ),
            '1 month'::interval
        ) as monthly,
        i.invoice_id
    from {{ref('invoice_facts')}} as i left join
        {{ref('subscriptions')}} as s on i.subscription_id = s.subscription_id
    where i.plan_interval = 'year'
), annuals2 as (
    -- Re-attach full invoice facts to each generated monthly row.
    select
        i.*,
        a.monthly as mrr_dt
    from
        {{ref('invoice_facts')}} as i inner join
        annuals as a on i.invoice_id = a.invoice_id
), mrr as (
    -- Monthly invoices as-is plus expanded annuals, with coupon discounts.
    select
        mrr.customer_id ,
        mrr.invoice_id ,
        mrr.subscription_id ,
        mrr.mrr_dt,
        mrr.created_on ,
        mrr.total ,
        mrr.amount_due ,
        mrr.amount_off ,
        mrr.percent_off_precise ,
        mrr.coupon_duration ,
        mrr.plan_interval ,
        mrr.plan_id ,
        mrr.base_mrr,
        s.canceled_dt,
        -- TODO: flat amount discounts; a customer is using these now
        case
            when coupon_duration = 'forever' then
                (base_mrr * (1 - percent_off_precise / 100))
            else base_mrr
        end as discounted_mrr,
        case
            when percent_off_precise is not null then
                (base_mrr * (1 - percent_off_precise / 100))
            else base_mrr
        end as fully_discounted_mrr
    from
        (
            select *, i.created_on as mrr_dt
            from {{ref('invoice_facts')}} as i
            where plan_interval != 'year'
            union all
            select *
            from annuals2
        ) as mrr left join
        {{ref('subscriptions')}} as s on mrr.subscription_id = s.subscription_id
), max_mrr as ( --get the most recent mrr record not in the current month
    select
        customer_id,
        max(mrr_dt) as mrr_dt
    from mrr as mrr
    where canceled_dt is null
    group by customer_id
), current_mrr as (
    -- Project last month's active MRR forward as a synthetic current-month row.
    select
        mrr.customer_id ,
        invoice_id ,
        subscription_id ,
        current_timestamp as mrr_dt,
        current_timestamp as created_on,
        total ,
        amount_due ,
        amount_off ,
        percent_off_precise ,
        coupon_duration ,
        plan_interval ,
        plan_id ,
        base_mrr,
        canceled_dt,
        discounted_mrr,
        fully_discounted_mrr
    from
        mrr as mrr inner join
        max_mrr as max_mrr on mrr.customer_id = max_mrr.customer_id and mrr.mrr_dt = max_mrr.mrr_dt
    where date_trunc('month', mrr.mrr_dt) = date_trunc('month', current_date - interval '1 month')
), mrr2 as (
    -- Union real ("sent") rows with the projected ("pending") rows.
    select
        *
    from (
        select *, 'sent' as invoice_status from mrr
        union all
        select *, 'pending' as invoice_status from current_mrr
    ) as mrr
), mrr3 as (
    -- Attach customer email/start date; zero out MRR in the cancellation month.
    select
        ce.email,
        c.created_on as customer_created_on,
        mrr.*,
        case
            when date_trunc('month', mrr.canceled_dt) = date_trunc('month', mrr.mrr_dt) then 0
            else discounted_mrr end as mrr
    from mrr2 as mrr left join
        {{ref('customers')}} as c on mrr.customer_id = c.customer_id left join
        {{ref('customer_emails')}} as ce on mrr.customer_id = ce.customer_id
), mrr4 as (
    -- Self-join to the prior month to classify new/expansion/churn/same and
    -- compute the month-over-month MRR deltas.
    select mrr.*,
        date_trunc('month', mrr.mrr_dt) as mrr_month_dt,
        prev_mrr.mrr as prev_mrr,
        case
            when prev_mrr.mrr is null then 'new'
            when mrr.mrr > prev_mrr.mrr then 'expansion'
            when mrr.mrr < prev_mrr.mrr then 'churn'
            when date_trunc('month', mrr.canceled_dt) = date_trunc('month', mrr.mrr_dt) then 'churn'
            else 'same' end as mrr_status,
        case
            when prev_mrr.mrr is null then 0
            when date_trunc('month', mrr.canceled_dt) = date_trunc('month', mrr.mrr_dt) then mrr.mrr
            when mrr.mrr < prev_mrr.mrr then prev_mrr.mrr - mrr.mrr
            else 0 end as churned_mrr,
        case
            when prev_mrr.mrr is null then 0
            when mrr.mrr > prev_mrr.mrr then mrr.mrr - prev_mrr.mrr
            else 0 end as expansion_mrr,
        case
            when prev_mrr.mrr is null then mrr.mrr
            else 0 end as new_mrr,
        prev_mrr.mrr_dt as prev_mrr_dt
    from mrr3 as mrr left join
        mrr3 as prev_mrr on mrr.customer_id = prev_mrr.customer_id and
        date_trunc('month', mrr.mrr_dt) = date_trunc('month', prev_mrr.mrr_dt + interval '1 month')
)

-- Final select: add cohort (vintage) fields, current-month flag, and
-- within-month MRR percentile/decile rankings.
select
    mrr.*,
    date_trunc('month', mrr.customer_created_on)::date::text as vintage,
    (extract(year from age(mrr.mrr_month_dt, date_trunc('month', mrr.customer_created_on))) * 12 +
        extract(month from age(mrr.mrr_month_dt, date_trunc('month', mrr.customer_created_on)))) as vintage_age,
    case
        when mrr.mrr_month_dt = max_month.mrr_month_dt then 1
        else 0 end as current_month,
    cume_dist() OVER (PARTITION BY mrr.mrr_month_dt ORDER BY mrr.mrr) as mrr_percentile,
    ntile(10) OVER (PARTITION BY mrr.mrr_month_dt ORDER BY mrr.mrr) as mrr_rank
from mrr4 as mrr left join
    (select max(mrr4.mrr_month_dt) as mrr_month_dt from mrr4) as max_month on mrr.mrr_month_dt = max_month.mrr_month_dt
--------------------------------------------------------------------------------
/frontend/src/components/PaperSelect.vue:
--------------------------------------------------------------------------------
1 |
2 |
3 |
9 |
10 | {{label}}
11 |
12 |
13 |
14 |
17 |
18 | {{ selectedd.name || selectedd.label }}
19 |
20 |
23 |
36 |
37 |
38 |
39 |
40 |
45 |
49 |
53 |
59 |
64 |
69 | {{ option.name || option.label }}
70 |
71 |
77 |
89 |
90 |
91 |
92 |
93 |
94 |
95 |
96 |
97 |
98 |
99 |
100 |
--------------------------------------------------------------------------------
/backend/pints/stripe.py:
--------------------------------------------------------------------------------
1 | import pandas as pd
2 | pd.set_option("display.max_rows", 1000)
3 | pd.set_option("display.max_columns", 200)
4 | pd.set_option("display.max_colwidth", 2000)
5 | import datetime
6 | import os
7 | import sys
8 | import time
9 | import requests
10 | import json
11 | import io
12 | import uuid
13 | import concurrent.futures
14 | import pdb
15 | import collections
16 | import pints
17 | import stripe
18 | from logger import logger
19 |
# Shared pool for background Stripe sync jobs. max_workers=None lets
# ThreadPoolExecutor choose its default based on CPU count.
longrun = concurrent.futures.ThreadPoolExecutor(max_workers=None)

# "Lite" mode skips syncing subscriptions entirely (see deletion below).
PAPER_STRIPE_LITE = int(os.environ.get('PAPER_STRIPE_LITE', 0)) > 0

# Registry of Stripe objects to sync. Key doubles as the stripe_<key> table
# suffix; 'api' is the Stripe resource class, 'expand' the list-call
# expansions, 'all' forces a full re-sync (delete + reload), and 'types'
# (events only) lists the event types fetched separately.
objs = {
    'coupons': {
        'api': stripe.Coupon,
        'expand': []
    },
    'customers': {
        'api': stripe.Customer,
        'expand': []
    },
    'subscriptions': {
        'api': stripe.Subscription,
        'expand': [],
        'all': True,
    },
    'plans': {
        'api': stripe.Plan,
        'expand': []
    },
    'events': {
        'api': stripe.Event,
        'expand': [],
        'types': [
            'customer.subscription.deleted',
            'customer.subscription.created',
            'invoice.payment_action_required',
            'invoice.voided',
            'invoice.payment_failed'
        ]
    },
    'invoices': {
        'api': stripe.Invoice,
        'expand': ['data.discounts']
    }
}

if PAPER_STRIPE_LITE:
    del objs['subscriptions']
61 |
def testKey(apiKey):
    """Validate a Stripe API key with a minimal one-customer list call."""
    try:
        stripe.Customer.list(limit=1, api_key=apiKey)
    except Exception as e:
        return {'ok': False, 'error': str(e)}
    return {'ok': True}
68 |
def getAll(engine, teamId):
    """Submit background fetch jobs for every registered Stripe object type.

    Events get one job per event type. Returns the list of job uuids, or
    False when the team has no Stripe API key configured.
    """
    if not pints.postgres.getStripeApiKey(engine, teamId):
        return False
    jobUuids = []
    for key, obj in objs.items():
        logger.info(f'getAll: {key}')
        eventTypes = obj['types'] if key == 'events' else [None]
        for t in eventTypes:
            if t is not None:
                logger.info(f'event: {t}')
            jobUuid = uuid.uuid4().hex
            jobUuids.append(jobUuid)
            longrun.submit(getObject, engine, teamId, jobUuid, key, eventType=t)
    return jobUuids
87 |
def clearAll(engine, teamId):
    """Delete all synced Stripe rows for a team, one object type at a time."""
    for name, spec in objs.items():
        tableName = f'stripe_{name}'
        logger.info(f'clearAll: {name} {spec} {tableName}')
        pints.postgres.deleteRows(engine, tableName, teamId)
    return True
94 |
def getObject(engine, teamId, jobUuid, obj, eventType=None):
    """Fetch one Stripe object type for a team and load it into Postgres.

    Tracks progress in a job record; returns the fetched rows on success or
    the job dict (status 'error') on failure. Triggers a dbt run when this
    was the team's last running sync job.
    """
    logger.info(f'getObject: {obj}')
    job = {
        'type': 'stripe',
        'operation': 'getObject',
        'obj': obj,
        'status': 'running'
    }
    # jobId False => create a new job row; subsequent calls update it.
    jobId = pints.postgres.updateJob(engine, teamId, False, jobUuid, job)
    ls = []
    try:
        apiKey = pints.postgres.getStripeApiKey(engine, teamId)
        table = f'stripe_{obj}'
        # 'all' objects are always fully re-synced (no incremental watermark).
        getAll = objs[obj].get('all', False)
        if getAll:
            mr = False
        else:
            # Incremental sync: only fetch records created after the max
            # record already stored for this table.
            mr = pints.postgres.getMaxRecord(engine, table, teamId)
        logger.info(f'{obj} mr: {mr}')
        if eventType:
            logger.info(f'{obj} with mr: {mr}')
            temp = objs[obj]['api'].list(limit=100, api_key=apiKey, created={'gt': mr}, type=eventType)
        elif mr:
            logger.info(f'{obj} with mr: {mr}')
            temp = objs[obj]['api'].list(limit=100, api_key=apiKey, created={'gt': mr}, expand=objs[obj]['expand'])
        elif getAll:
            # status='all' includes canceled subscriptions etc.
            temp = objs[obj]['api'].list(limit=100, status='all', api_key=apiKey, expand=objs[obj]['expand'])
        else:
            logger.info(f'{obj} without mr: {mr}')
            temp = objs[obj]['api'].list(limit=100, api_key=apiKey, expand=objs[obj]['expand'])
        # auto_paging_iter walks every page of the Stripe list response.
        for t in temp.auto_paging_iter():
            ls.append(t)
        logger.info(f'done getting {obj}, got {len(ls)} after {mr}')
        if getAll:
            # Full re-sync: clear existing rows before re-inserting.
            pints.postgres.deleteRows(engine, table, teamId)
            logger.info(f'done deleteRows for {obj} ({len(ls)} rows)')
        pints.postgres.insertManyRows(engine, table, ls, teamId)
        logger.info(f'done inserting rows for {obj} ({len(ls)} rows)')
    except Exception as e:
        logger.error(f'getObject error {str(e)}')
        job['status'] = 'error'
        job['error'] = str(e)
        jobId = pints.postgres.updateJob(engine, teamId, jobId, jobUuid, job)
        return job
    job['status'] = 'complete'
    job['rows'] = len(ls)
    jobId = pints.postgres.updateJob(engine, teamId, jobId, jobUuid, job)
    # Run dbt if this was the last outstanding stripe job for the team.
    checkDbtRun(engine, teamId)
    return ls
144 |
def checkDbtRun(engine, teamId):
    """Kick off a dbt run once no Stripe sync jobs remain running for the team."""
    logger.info(f'checkDbtRun {teamId}')
    summary = pints.postgres.getJobSummary(engine, teamId)
    stillRunning = summary[(summary.status == 'running') & (summary.type == 'stripe')]
    logger.info(f'checkDbtRun, {len(stillRunning)} still running...')
    if len(stillRunning) > 0:
        logger.info(f'not running dbt yet...')
        return
    try:
        logger.info(f'running dbt...')
        pints.modeling.runDbt(teamId)
    except Exception as e:
        logger.error(f'checkDbtRun error {str(e)}')
--------------------------------------------------------------------------------
/backend/metrics/metrics.py:
--------------------------------------------------------------------------------
1 | import json
2 | import uuid
3 | import altair
4 | import altair_saver
5 | import datetime
6 | import pandas as pd
7 | import pints
8 | from logger import logger
9 |
10 | from selenium import webdriver
# Headless Chrome instance used by altair_saver to render Vega-Lite charts
# to PNG. Built once at import time and shared by every render in the process.
chrome_options = webdriver.ChromeOptions()
chrome_options.add_argument('--no-sandbox')
chrome_options.add_argument("--disable-setuid-sandbox")
# chrome_options.add_argument('--window-size=1420,1080')
chrome_options.add_argument('--headless')
chrome_options.add_argument("--remote-debugging-port=9222")
chrome_options.add_argument('--disable-infobars')
chrome_options.add_argument('--disable-gpu')
chrome_options.add_argument("--disable-dev-shm-usage")
chrome_options.add_argument("--disable-extensions")
# options.binary_location = '/opt/google/chrome/google-chrome'
# NOTE(review): .format('app') is a no-op — the literal has no '{}' placeholder.
service_log_path = "chromedriver.log".format('app')
service_args = ['--verbose']
# Chromedriver binary path is baked into the Docker image — TODO confirm.
driver = webdriver.Chrome('/chromedriver/chromedriver',
    chrome_options=chrome_options,
    service_args=service_args,
    service_log_path=service_log_path)

# Shared Vega-Lite config: hide the outer chart frame and the axis grid.
vegaConfig = {
    "view": {
        "stroke": "transparent"
    },
    "axis": {
        "grid": False
    }
}

# Base Vega-Lite spec for the weekly charts (line of sum(mrr) by month).
# NOTE(review): this dict is mutated in place by getMrrChart/getCustomerChart,
# so state can leak between chart renders — see those functions.
vegaSpec = {
    "$schema": "https://vega.github.io/schema/vega-lite/v5.json",
    "config": vegaConfig,
    "description": "MRR by Month",
    "mark": {
        "type": "line",
        "tooltip": False,
        "fill": None,
        "stroke": "#010101",
        "point": {"color": "#010101"},
    },
    "encoding": {
        "x": {
            "field": "mrr_month_dt",
            "timeUnit": "yearmonth",
            "type": "temporal",
            "title": None,
            # "axis": {
            #     "values": [
            #         {"year": 2019, "month": "may", "date": 1},
            #         {"year": 2021, "month": "may", "date": 1}
            #     ]
            # }
        },
        "y": {
            "field": "mrr",
            "aggregate": "sum",
            "type": "quantitative",
            "title": None
        },
    },
    "data": {"values": []},
}
71 |
def getMrrFacts(engine, teamId):
    """Load the team's mrr_facts rows and a monthly pivot of mrr/active/churn.

    Returns a dict with 'df' (raw rows, ordered by mrr_dt) and 'summary'
    (pivot_table summed by mrr_month_dt).
    """
    logger.info(f'getMrrFacts... {teamId}')
    sql = '''
    select
        *,
        1 as active
    from
        "team_{teamId}_stripe".mrr_facts as mrr
    order by mrr.mrr_dt asc
    '''.format(teamId=teamId)
    facts = pd.read_sql(sql, engine)
    monthly = facts.pivot_table(
        index='mrr_month_dt',
        values=['mrr', 'active', 'churned_mrr'],
        aggfunc='sum',
    )
    return {
        'df': facts,
        'summary': monthly,
    }
89 |
def getMrrChart(d):
    """Render the MRR-by-month line chart to a PNG under ./static.

    d: list of row dicts (the mrr_facts records) fed into the Vega-Lite spec.
    Returns {'ok': True, 'chartId': <hex>, 'filename': <png path>}.
    """
    chartId = uuid.uuid4().hex
    # Bug fix: previously this mutated the shared module-level vegaSpec, and
    # getCustomerChart rebinds its y-field to 'active' — so an MRR chart built
    # after a customer chart silently plotted the wrong measure. Work on a
    # deep copy (json round-trip; the spec is plain JSON data) and pin the
    # fields this chart needs.
    spec = json.loads(json.dumps(vegaSpec))
    spec['data']['values'] = d
    spec['description'] = 'MRR by Month'
    spec['encoding']['y']['field'] = 'mrr'
    chart = altair.Chart.from_dict(spec)
    filename = f'./static/{chartId}.png'
    altair_saver.save(chart, filename, method='selenium', webdriver=driver)
    return {'ok': True, 'chartId': chartId, 'filename': filename}
98 |
def getCustomerChart(d):
    """Render the customers-by-month line chart to a PNG under ./static.

    d: list of row dicts (the mrr_facts records; 'active' is summed per month).
    Returns {'ok': True, 'chartId': <hex>, 'filename': <png path>}.
    """
    chartId = uuid.uuid4().hex
    # Bug fix: previously this mutated the shared module-level vegaSpec in
    # place (description and y-field), permanently polluting it for any later
    # chart built from the same dict. Deep-copy via a json round-trip (the
    # spec is plain JSON data) and mutate only the copy.
    spec = json.loads(json.dumps(vegaSpec))
    spec['data']['values'] = d
    spec['description'] = 'Customers by Month'
    spec['encoding']['y']['field'] = 'active'
    chart = altair.Chart.from_dict(spec)
    filename = f'./static/{chartId}.png'
    altair_saver.save(chart, filename, method='selenium', webdriver=driver)
    return {'ok': True, 'chartId': chartId, 'filename': filename}
108 |
def getSummary(last3):
    """Build the weekly Slack summary stats from the last three monthly rows.

    last3: sequence of at least 3 dicts (months, oldest first) each carrying
           'mrr' and 'active' sums; last3[2] is the current month, last3[1]
           the previous one.
    Returns a dict of MRR / customer growth figures plus pre-formatted Slack
    message strings ('mrrMsg', 'customerMsg').
    """
    logger.info(f'getSummary... {last3}')
    summary = {}
    dt = datetime.datetime.utcnow()
    curr = last3[2]
    prev = last3[1]
    summary['growthGoal'] = .2
    # Approximates every month as 30 days — TODO confirm this is intended.
    summary['prcntThruMonth'] = dt.day / 30
    summary['growthGoalNow'] = summary['growthGoal'] * summary['prcntThruMonth']

    # mrr
    summary['currentMrr'] = curr['mrr']
    summary['previousMrr'] = prev['mrr']
    summary['mrrGrowth'] = summary['currentMrr'] - summary['previousMrr']
    # Bug fix: a brand-new account has previousMrr == 0, which used to raise
    # ZeroDivisionError; treat growth-from-zero as 0% so the message still sends.
    if summary['previousMrr']:
        summary['mrrGrowthPrcnt'] = summary['mrrGrowth'] / summary['previousMrr']
    else:
        summary['mrrGrowthPrcnt'] = 0.0
    summary['mrrArrow'] = 'arrow_up'
    if summary['mrrGrowth'] < 0:
        summary['mrrArrow'] = 'arrow_down'
    summary['growthGoalProgress'] = "You're ahead of your goal!"
    if summary['mrrGrowthPrcnt'] < summary['growthGoalNow']:
        summary['growthGoalProgress'] = "You're behind your goal, but lets catch up!"
    summary['currentMrrK'] = round(summary['currentMrr'] / 1000, 1)
    summary['mrrGrowthK'] = round(summary['mrrGrowth'] / 1000, 1)
    summary['mrrGrowthPrcntRounded'] = round(summary['mrrGrowthPrcnt'] * 100, 1)

    summary['mrrMsg'] = f"{summary['growthGoalProgress']} MRR is currently "
    summary['mrrMsg'] += f"*${summary['currentMrrK']}k* :tada: \n:{summary['mrrArrow']}: "
    summary['mrrMsg'] += f"{summary['mrrGrowthPrcntRounded']}% (${summary['mrrGrowthK']}k) MTD."

    # customers
    summary['currentCustomers'] = curr['active']
    summary['previousCustomers'] = prev['active']
    summary['customerGrowth'] = summary['currentCustomers'] - summary['previousCustomers']
    # Same divide-by-zero guard for a month with no previous customers.
    if summary['previousCustomers']:
        summary['customerGrowthPrcnt'] = summary['customerGrowth'] / summary['previousCustomers']
    else:
        summary['customerGrowthPrcnt'] = 0.0
    summary['customersArrow'] = 'arrow_up'
    if summary['customerGrowth'] < 0:
        summary['customersArrow'] = 'arrow_down'
    summary['customerGrowthPrcntRounded'] = round(summary['customerGrowthPrcnt'] * 100, 1)
    # \n\n account for xx% of your MRR"

    summary['customerMsg'] = f"You currently have {summary['currentCustomers']} customers :tada: \n"
    summary['customerMsg'] += f":{summary['customersArrow']}: "
    summary['customerMsg'] += f"{summary['customerGrowthPrcntRounded']}% ({summary['customerGrowth']}) MTD."
    return summary
153 |
def getSlackMsg(engine, teamId):
    """Assemble the weekly Slack digest: chart image URLs plus summary stats.

    Reads the team's mrr_facts model, renders the MRR and customer charts,
    uploads them via pints.cabinet, and computes growth stats from the last
    three pivot months. 'summary' is False when stats can't be computed
    (e.g. fewer than 3 months of data).
    """
    sql = '''
    select
        *,
        1 as active
    from
        "team_{teamId}_stripe".mrr_facts as mrr
    order by mrr.mrr_dt asc
    '''.format(teamId=teamId)
    df = pd.read_sql(sql, engine)
    # Monthly sums used for the trailing-3-month summary.
    piv = df.pivot_table(index='mrr_month_dt', values=['mrr', 'active', 'churned_mrr'], aggfunc='sum')
    # Raw records feed both charts (each chart aggregates in Vega-Lite).
    df = json.loads(df.to_json(orient='records'))
    summary = piv.tail(3).to_dict(orient='records')
    logger.info(f"piv summary: {summary}")
    toSlack = {}
    chart = getMrrChart(df)
    # cabinet.file uploads the PNG and returns a public URL — TODO confirm.
    toSlack['mrrChartUrl'] = pints.cabinet.file(chart['filename'])
    chart = getCustomerChart(df)
    toSlack['customerChartUrl'] = pints.cabinet.file(chart['filename'])
    try:
        # getSummary indexes summary[2]/summary[1]; raises with < 3 months.
        toSlack['summary'] = getSummary(summary)
    except Exception as e:
        logger.error(f"error getting summary: {str(e)}")
        toSlack['summary'] = False
    logger.info(f"piv summary: {toSlack}")
    return toSlack
--------------------------------------------------------------------------------
/backend/pints/sheets.py:
--------------------------------------------------------------------------------
1 | import os
2 | import json
3 | import string
4 | import datetime
5 | import pandas as pd
6 | import time
7 | # from googleapiclient import discovery
8 | # from googleapiclient.errors import HttpError
9 | from googleapiclient.discovery import build
10 |
11 | from google.oauth2 import service_account
12 | from logger import logger
13 |
# Google service-account credentials come in as a JSON blob in an env var.
# NOTE(review): if the env var is unset, `credentials` is never defined and
# push() will raise NameError — presumably intentional for non-sheets deploys.
j = os.environ.get('PAPER_GOOGLE_SERVICE_ACCOUNT_CREDENTIALS')
if j:
    service_account_info = json.loads(j)
    # Env vars flatten real newlines in the PEM key to literal '\n'; restore them.
    service_account_info['private_key'] = service_account_info['private_key'].replace('\\n', '\n')
    # service_account_info = j
    credentials = service_account.Credentials.from_service_account_info(
        service_account_info)

# Substring used to detect Google's 5M-cell quota error, and the full message
# surfaced to callers when it is hit.
LIMIT_ERROR = 'action would increase the number of cells in the workbook above the limit'
FULL_LIMIT_ERROR = 'This action would increase the number of cells in the workbook above the limit of 5000000 cells.'
24 |
def hasNumbers(inputString):
    """Return True if inputString contains at least one decimal digit."""
    for ch in inputString:
        if ch.isdigit():
            return True
    return False
27 |
# Lookup tables between 1-based spreadsheet column numbers and column letters:
# 1 -> 'a' ... 26 -> 'z', 27 -> 'aa' ... 702 -> 'zz', plus the reverse map.
alpha1 = dict(enumerate(string.ascii_lowercase, 1))
alpha2 = {
    27 + idx: first + second
    for idx, (first, second) in enumerate(
        (a, b) for a in string.ascii_lowercase for b in string.ascii_lowercase
    )
}
alphabet = {**alpha1, **alpha2}
alphabetRev = {letters: number for number, letters in alphabet.items()}
40 |
def test(spreadsheetId):
    """Smoke-test push() by writing a tiny 2x2 fixture to sheet 'test' at B2."""
    fixture = pd.DataFrame(data={'col1': [1, 2], 'col2': [3, 4]})
    push({
        'df': fixture,
        'spreadsheetId': spreadsheetId,
        'startCell': 'B2',
        'sheet': 'test',
    })
    return True
52 |
def pushAll(data, ssId):
    """Push each dataframe in *data* to its sheet in spreadsheet *ssId*.

    data: iterable of dicts each carrying 'df' (pandas DataFrame) and 'sheet'
          (target sheet name); an item may override the spreadsheet with its
          own 'spreadsheetId'.
    ssId: default spreadsheet id for every item.
    Returns True.
    """
    # Bug fix: the original read data['spreadsheetId'] — indexing the *list*
    # with a string key (TypeError) — and never used the ssId parameter.
    # Also dropped the dead locals (sheetNames, fixture df) and the loop
    # variable shadowing.
    for item in data:
        d2 = {
            'df': item['df'],
            'spreadsheetId': item.get('spreadsheetId', ssId),
            'startCell': 'A1',
            'sheet': item['sheet'],
        }
        push(d2)
    return True
66 |
def push(d):
    """Write a pandas DataFrame into a Google Sheet, creating the tab if needed.

    d: dict with
        'df'            — DataFrame to write (headers from df.columns),
        'spreadsheetId' — target spreadsheet,
        'sheet'         — tab name,
        'startCell'     — optional A1-style anchor (default 'A1').
    Returns {'ok': True} on success, or {'ok': False, 'reason': ..., 'error': ...}
    on quota/permission/data errors. Retries the write up to 6 times.
    """
    logger.info(f"sheets push...")
    df = d['df']
    totalRowsReturned = len(df)
    logger.info(f"sheets push rows: {totalRowsReturned}")
    # NOTE(review): df_dtypes is computed but never used below.
    df_dtypes = df.dtypes.to_dict()
    discoveryUrl = 'https://sheets.googleapis.com/$discovery/rest?version=v4'
    service = build('sheets', 'v4', credentials=credentials, discoveryServiceUrl=discoveryUrl)
    # k = 'AIzaSyCvXb3IvQ4hk1CT-i447TA8NqzHtfM3W-Q'
    # service = build('sheets', 'v4', developerKey=k, discoveryServiceUrl=discoveryUrl)
    sheetName = d['sheet']
    spreadsheetId = d['spreadsheetId']
    startCell = d.get('startCell', 'A1')
    # Split the anchor (e.g. 'B2' or 'AA10') into column letters and row digits.
    # Column prefixes of 1 or 2 letters are supported.
    if len(startCell) < 2:
        startCell = 'A1'
        startCol = 'A'
        startRow = '1'
    elif hasNumbers(startCell[:2]):
        startCol = startCell[:1]
        startRow = startCell[1:]
    else:
        startCol = startCell[:2]
        startRow = startCell[2:]
    # NOTE(review): startColStr is never used after this point.
    startColStr = startCol.upper()
    # Convert column letters to a 1-based number via the module-level table.
    startCol = alphabetRev.get(startCol.lower(), 1)
    try:
        startRowNum = int(float(startRow))
    except:
        startRowNum = 1
    try:
        startColNum = int(float(startCol))
    except:
        startColNum = 1
    # Compute the bottom-right corner of the target range from the frame shape.
    endRow = len(df.values) + startRowNum
    endCol = len(df.columns.tolist())+(startCol-1)
    if endCol == 0:
        return {'ok': False, 'reason': 'No columns', 'error': 'No columns'}
    endCol = alphabet[endCol].upper()
    endCell = endCol + str(endRow)
    rangeName = sheetName + '!' + startCell + ':' + endCell
    print('writing DF to sheets', rangeName, startCol, endCol, flush = True)
    # Clear range is column-only (no end row) so stale rows below are wiped too.
    clearRangeName = sheetName + '!' + startCell + ':' + endCol
    # NOTE(review): clearSheetRangeName is never used.
    clearSheetRangeName = sheetName
    colNames = df.columns.tolist()
    # Serialize cell values through JSON (ISO dates, str() fallback for
    # anything exotic); `df` is a list-of-lists from here on.
    df = df.to_json(date_format = 'iso', orient='values',
        default_handler=str)
    df = json.loads(df, strict=False)
    body = {
        'values': [colNames] + df
    }
    try: #try to clear the sheet, this also checks it exists
        result = service.spreadsheets().values().clear(
            spreadsheetId=spreadsheetId, range=clearRangeName, body={}).execute()
        print('cleared cols...', flush=True)
    except Exception as e: #if couldnt clear, check if it's bc the sheet doesnt exist and add it
        print('googleapiclient.errors ', e, flush=True)
        e = str(e)
        if 'Unable to parse range' in e:
            # Sheet tab doesn't exist yet — create it with a fixed grid size.
            newSheet = {
                "requests": {
                    "addSheet": {
                        "properties": {
                            "title": sheetName,
                            "gridProperties": {
                                "rowCount": 1000,
                                "columnCount": 100
                            },
                            "tabColor": {
                                "red": 0.0,
                                "green": 0.0,
                                "blue": 0.0
                            }
                        }
                    }
                }
            }
            try:
                result = service.spreadsheets().batchUpdate(spreadsheetId=spreadsheetId, body=newSheet).execute()
                print('add sheet result ', result, flush=True)
            except Exception as e: #if couldnt add sheet, check if it's bc 5m cell limit
                if LIMIT_ERROR in str(e):
                    return {'ok': False, 'reason': FULL_LIMIT_ERROR, 'error': str(e)}
                else:
                    return {'ok': False, 'reason': 'unknown', 'error': str(e)}
    # Retry loop for the actual write; known-fatal errors return immediately,
    # transient ones back off 2s between attempts.
    writeResultsTry = 0
    result = False
    writeResultsToTry = 6
    while writeResultsTry <= writeResultsToTry:
        try:
            # result = service.spreadsheets().values().get(spreadsheetId=spreadsheetId, range=rangeName).execute()
            result = service.spreadsheets().values().update(
                spreadsheetId=spreadsheetId, range=rangeName,
                valueInputOption='USER_ENTERED', body=body).execute()
            print('write successful...', flush=True)
            # Sentinel value > writeResultsToTry exits the loop.
            writeResultsTry = 100
        except Exception as e:
            print('writeResultsTry #', writeResultsTry, e, flush = True)
            writeResultsTry+=1
            if LIMIT_ERROR in str(e):
                return {'ok': False, 'reason': FULL_LIMIT_ERROR, 'error': str(e)}
            if 'Invalid values' in str(e) and 'struct_value' in str(e):
                return {'ok': False, 'reason': 'JSON data in results', 'error': str(e)}
            if 'Invalid values' in str(e) and 'list_value' in str(e):
                return {'ok': False, 'reason': 'List / array in results', 'error': str(e)}
            if 'caller does not have permission' in str(e):
                return {'ok': False, 'reason': 'The caller does not have permission', 'error': str(e)}
            if writeResultsTry == writeResultsToTry:
                return {'ok': False, 'reason': 'maxAttempts', 'error': str(e)}
            # raise
            time.sleep(2)
    # Annotate the anchor cell with a "last updated" note; best-effort only.
    lastRunDt = datetime.datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S')
    note = f'Pulse updated {len(df)} rows at {lastRunDt} UTC\n\nMore info at https://pulse.trypaper.io/?ref=sheets'
    sheet_metadata = service.spreadsheets().get(spreadsheetId=spreadsheetId).execute()
    sheetId = [sheet['properties']['sheetId'] for sheet in sheet_metadata['sheets'] if sheet['properties']['title'] == sheetName]
    if len(sheetId) == 0:
        return {'ok': True}
    body = {
        "requests": [
            {
                "repeatCell": {
                    "range": {
                        "sheetId": sheetId[0],
                        "startRowIndex": startRowNum-1,
                        "endRowIndex": startRowNum,
                        "startColumnIndex": startColNum-1,
                        "endColumnIndex": startColNum,
                    },
                    "cell": {"note": note},
                    "fields": "note",
                }
            }
        ]
    }
    try:
        result = service.spreadsheets().batchUpdate(
            spreadsheetId=spreadsheetId, body=body).execute()
        print('wrote note to sheet...', flush=True)
    except Exception as e:
        print('error writing note ', e, flush=True)
    return {'ok': True}
--------------------------------------------------------------------------------
/frontend/public/sign-in-with-google.svg:
--------------------------------------------------------------------------------
1 |
37 |
--------------------------------------------------------------------------------
/backend/pints/scheduler.py:
--------------------------------------------------------------------------------
1 | import pints
2 | from logger import logger
3 | import datetime
4 | import random
5 | import sqlalchemy
6 | import json
7 | import pytz
8 | import uuid
9 | import os
10 | import time
11 | import pandas as pd
12 | import traceback
13 | from metrics import metrics
14 | from app import app, db
15 | import concurrent.futures
16 |
# Executor used to run the scheduler loop off the request thread.
longrun = concurrent.futures.ThreadPoolExecutor(max_workers=None)

# Env kill-switch: set to a positive int to disable the scheduler (e.g. dev).
PAPER_DO_NOT_START_SCHEDULER = int(os.environ.get('PAPER_DO_NOT_START_SCHEDULER', 0)) > 0
20 |
21 |
def startScheduler():
    """Start the background scheduler loop unless disabled via env flag.

    Clears stale incomplete jobs first, then submits runScheduler to the
    module-level executor. Returns False when the scheduler is disabled.
    """
    enabled = not PAPER_DO_NOT_START_SCHEDULER
    logger.info(f'startScheduler? {enabled}')
    if not enabled:
        # runWeekly()
        logger.info(f'not starting scheduler...')
        return False
    logger.info(f'starting scheduler...')
    pints.postgres.deleteIncompleteJobs(db.engine)
    longrun.submit(runScheduler)
31 |
def runScheduler():
    """Scheduler main loop: poll scheduled jobs and the message queue forever.

    Each pass runs checkForScheduledJobs then checkQueue, logging (but never
    propagating) any exception, then sleeps 60 seconds.
    """
    while True:
        tasks = (
            (checkForScheduledJobs, 'checkForScheduledJobs'),
            (checkQueue, 'checkQueue'),
        )
        for task, label in tasks:
            try:
                task()
            except Exception as e:
                logger.error(f'error in {label}: {str(e)}')
                logger.error(f'error in {label} traceback: {traceback.format_exc()}')

        time.sleep(60)
48 |
def runHourlyTeam(team):
    """Hourly pipeline for one team: full Stripe refresh, then queue follow-ups.

    After kicking off the refresh, enqueues a 'sendNotifications' job (if Slack
    alerts are configured) and an 'updateSheets' job (if a spreadsheet is
    configured), each depending on the refresh job uuids. Returns False when
    the refresh produced no jobs, True otherwise.
    """
    logger.info(f'runHourlyTeam...')
    with app.app_context():
        logger.info(f"team {team['id']}...")
        settings = pints.postgres.getSettings(db.engine, team['id'])
        jobUuids = fullRefresh(db.engine, team['id'])
        if not jobUuids:
            return False
        slackOn = settings['notifications'].get('alerts', {}).get('slack', False)
        slackChannel = settings['notifications'].get('slackChannel', False)
        if slackOn and slackChannel:
            dt = datetime.datetime.utcnow().isoformat().replace('T', ' ')
            # Follow-up job: send Slack alerts once the refresh jobs finish.
            details = {
                'status': 'pending',
                'dependencies': jobUuids,
                'type': 'sendNotifications',
                'maxCreatedOn': dt,
                'maxCanceledOn': dt,
            }
            jobUuid = uuid.uuid4().hex
            targetId = pints.postgres.addJob(db.engine, team['id'], details, jobUuid)
            messageId = pints.postgres.addMessage(db.engine, team['id'], targetId, details, jobUuid)
            logger.info(f'runHourly slack messageId... {messageId}')
        if settings.get('sheets', {}).get('spreadsheetId', False):
            spreadsheetId = settings.get('sheets', {}).get('spreadsheetId', False)
            dt = datetime.datetime.utcnow().isoformat().replace('T', ' ')
            # Follow-up job: push refreshed metrics to the team's spreadsheet.
            details = {
                'status': 'pending',
                'dependencies': jobUuids,
                'type': 'updateSheets',
                'maxCreatedOn': dt,
                'maxCanceledOn': dt,
            }
            logger.info(f'runHourly sheet... {spreadsheetId}')
            jobUuid = uuid.uuid4().hex
            targetId = pints.postgres.addJob(db.engine, team['id'], details, jobUuid)
            messageId = pints.postgres.addMessage(db.engine, team['id'], targetId, details, jobUuid)
            logger.info(f'runHourly sheet messageId... {messageId}')
        return True
88 |
def runWeekly():
    """Run the weekly job for every team."""
    for team in pints.postgres.getTeams(db.engine):
        runWeeklyTeam(db.engine, team['id'])
93 |
def runWeeklyTeam(engine, teamId):
    """Post the weekly Slack digest for one team, if enabled in its settings."""
    logger.info(f'runWeeklyTeam... {teamId}')
    settings = pints.postgres.getSettings(engine, teamId)
    logger.info(f'runWeeklyTeam settings... {settings}')
    weeklySlackOn = settings['notifications'].get('weekly', {}).get('slack', False)
    if weeklySlackOn:
        digest = metrics.getSlackMsg(engine, teamId)
        slackInfo = pints.postgres.getSlackInfo(engine, teamId)
        pints.slack.weekly(digest, slackInfo['bot_token'])
    return True
103 |
def runMonthly(engine):
    """Monthly job placeholder — currently a no-op that reports success."""
    return True
106 |
def fullRefresh(engine, teamId):
    """Trigger a full Stripe re-sync for the team; returns the job uuids."""
    logger.info(f'fullRefresh...')
    uuids = pints.stripe.getAll(engine, teamId)
    logger.info(f'fullRefresh jobUuids: {uuids}')
    return uuids
112 |
def do_some_work(jobRow):
    """Demo queue task: randomly succeed or raise, to exercise error paths."""
    logger.info(f'do_some_work... {jobRow}')
    shouldFail = random.choice([True, False])
    if not shouldFail:
        logger.info(f'do_some_work SUCCESS... {jobRow}')
        return
    logger.info(f'do_some_work failed... {jobRow}')
    raise Exception
120 |
def testSched():
    """Smoke-test hook for the scheduler; logs and reports success."""
    logger.info(f'testSched...')
    return True
124 |
def checkForScheduledJobs():
    """Scan every team and fire the hourly/weekly jobs whose interval elapsed.

    A team with a null last_hourly/last_weekly timestamp is treated as due.
    Each run is recorded as a 'complete' job row (which advances the
    timestamp) before the actual work is executed. Returns True.
    """
    logger.info(f'checkForScheduledJobs...')
    teams = pints.postgres.getTeams(db.engine)
    totalRan = 0
    for team in teams:
        # Bug fix: `dt` was previously only assigned inside the non-null
        # branches, so a brand-new team (both timestamps null) hit a
        # NameError at the log line below. Bind it once per team instead.
        dt = datetime.datetime.utcnow()
        runHourlyNow = False
        runWeeklyNow = False
        if pd.isnull(team['last_hourly']):
            runHourlyNow = True
        else:
            minutes = (dt - team['last_hourly']).total_seconds() / 60
            logger.info(f'checkForScheduledJobs minutes: {minutes}')
            if minutes > 60:
                runHourlyNow = True
        if pd.isnull(team['last_weekly']):
            runWeeklyNow = True
        else:
            minutes = (dt - team['last_weekly']).total_seconds() / 60
            minutesInWeek = 7*24*60
            if minutes > minutesInWeek:
                runWeeklyNow = True
        logger.info(f"checkForScheduledJobs (now is {dt}): {team['id']}, {team['last_hourly']}, {runHourlyNow}")
        if runHourlyNow:
            totalRan+=1
            logger.info(f'runHourlyNow...')
            # Record the run first so the next scan sees a fresh last_hourly.
            details = {
                'type': 'hourly',
                'status': 'complete',
            }
            jobUuid = uuid.uuid4().hex
            pints.postgres.addJob(db.engine, team['id'], details, jobUuid)
            runHourlyTeam(team)
        if runWeeklyNow:
            totalRan+=1
            logger.info(f'runWeeklyNow...')
            details = {
                'type': 'weekly',
                'status': 'complete',
            }
            jobUuid = uuid.uuid4().hex
            pints.postgres.addJob(db.engine, team['id'], details, jobUuid)
            runWeeklyTeam(db.engine, team['id'])
    logger.info(f'done checkForScheduledJobs, totalRan: {totalRan}')
    return True
175 |
def checkQueue():
    """Drain the message queue, dispatching each job by its 'type'.

    Pulls (job, queue) row pairs until none remain. Handles two job types:
    'sendNotifications' (Slack churn/new-customer alerts since the last such
    job) and 'updateSheets' (push mrr facts to the team's spreadsheet). On any
    handler error the job is marked 'error' and the exception re-raised to the
    caller (runScheduler logs it).
    """
    logger.info(f'checkQueue...')
    gettingJobs = True
    while gettingJobs:
        jobRow, queueRow = pints.postgres.getMessages(db.engine)
        if jobRow:
            logger.info(f"checkQueue jobRow: {jobRow}")
            try:
                if jobRow['details']['type'] == 'sendNotifications':
                    logger.info(f'sendNotifications...')
                    # The last completed sendNotifications job marks the
                    # cutoff (maxCreatedOn/maxCanceledOn) for new alerts.
                    lastJob = pints.postgres.getLastJob(db.engine, jobRow['team_id'], 'sendNotifications')
                    if not lastJob:
                        # First run for this team: seed a cutoff of "now" so
                        # we don't alert on the whole history.
                        logger.info(f'adding first lastJob...')
                        dt = datetime.datetime.utcnow().isoformat().replace('T', ' ')
                        details = {
                            'maxCreatedOn': dt,
                            'maxCanceledOn': dt,
                            'type': 'sendNotifications',
                            'status': 'complete',
                        }
                        jobUuid = uuid.uuid4().hex
                        pints.postgres.addJob(db.engine, jobRow['team_id'], details, jobUuid)
                        lastJob = pints.postgres.getLastJob(db.engine, jobRow['team_id'], 'sendNotifications')
                    logger.info(f'sendNotifications lastJob: {lastJob}')
                    alerts = pints.postgres.getAlerts(db.engine, jobRow['team_id'], lastJob)
                    logger.info(f'sendNotifications alerts: {alerts}')
                    for alert in alerts:
                        logger.info(f'sendNotifications alert: {alert}')
                        # TODO send alert
                        # pints.postgres.updateMessage(db.engine, alert['message_id'], {'status': 'sent'})
                        settings = pints.postgres.getSettings(db.engine, jobRow['team_id'])
                        d = {
                            'email': alert['email'],
                            'mrr': alert['mrr'],
                            'prev_mrr': alert['prev_mrr'],
                            'msg': f"Originally signed up on {alert['customer_created_on2']} ({alert['created_days_ago']} days to convert)",
                            'slackChannel': settings['notifications']['slackChannel']
                        }
                        slackInfo = pints.postgres.getSlackInfo(db.engine, jobRow['team_id'])
                        if alert['alert_type'] == 'canceled':
                            pints.slack.churnAlert(d, slackInfo['bot_token'])
                        else:
                            pints.slack.customerAlert(d, slackInfo['bot_token'])
                    # Mark the job complete and advance the cutoff to "now".
                    dt = datetime.datetime.utcnow().isoformat().replace('T', ' ')
                    details = {
                        'maxCreatedOn': dt,
                        'maxCanceledOn': dt,
                    }
                    pints.postgres.updateJobStatus(db.engine, queueRow['target_id'], 'complete', None, details)
                if jobRow['details']['type'] == 'updateSheets':
                    logger.info(f'updateSheets...')
                    settings = pints.postgres.getSettings(db.engine, jobRow['team_id'])
                    spreadsheetId = settings.get('sheets', {}).get('spreadsheetId', False)
                    if not spreadsheetId:
                        # NOTE(review): returns (not continue) — abandons the
                        # rest of the queue for this pass; confirm intended.
                        logger.info(f'updateSheets no spreadsheetId...')
                        pints.postgres.updateJobStatus(db.engine, queueRow['target_id'], 'error', 'No Spreadsheet ID', None)
                        return False
                    mrrFacts = metrics.getMrrFacts(db.engine, jobRow['team_id'])
                    d = {
                        'df': mrrFacts['df'],
                        'spreadsheetId': spreadsheetId,
                        'startCell': 'A1',
                        'sheet': 'pulse_mrr_facts'
                    }
                    pints.sheets.push(d)
            except Exception as e:
                logger.error(f'checkQueue error: {str(e)}')
                pints.postgres.updateJobStatus(db.engine, queueRow['target_id'], 'error', str(e), None)
                raise e
            # if we want the job to run again, insert a new item to the message queue with this job id
            # con.execute(sql, (queue_item['target_id'],))
        else:
            logger.info(f'no jobs to run...')
            gettingJobs = False
    return True
--------------------------------------------------------------------------------
/backend/pints/slack.py:
--------------------------------------------------------------------------------
1 | import os
2 | from slack_sdk import WebClient
3 | from slack_sdk.errors import SlackApiError
4 | import pints
5 | from logger import logger
6 |
7 | token = os.environ.get('PAPER_SLACK_TOKEN')
8 | CLIENT_ID = os.environ.get('PAPER_SLACK_CLIENT_ID')
9 | CLIENT_SECRET = os.environ.get('PAPER_SLACK_SECRET')
10 |
11 | # client = WebClient(token=token)
12 |
def testPush(d, token):
    """Post a simple plain-text test message to #demo with the given bot token."""
    print('testPush...')
    client = WebClient(token=token)
    section = {
        "type": "section",
        "text": {
            "type": "plain_text",
            "text": d.get('msg', 'test msg...'),
        },
    }
    response = client.chat_postMessage(channel='#demo', blocks=[section])
    print('testPush result...', response["message"])
29 |
def getToken(code):
    """Exchange a Slack OAuth v2 code for tokens and return installation data.

    code: the temporary authorization code from Slack's OAuth redirect.
    Returns a flat dict of installation fields (bot token, team/enterprise
    ids, webhook info, scopes) suitable for persisting.
    """
    client = WebClient()
    oauth_response = client.oauth_v2_access(
        client_id=CLIENT_ID,
        client_secret=CLIENT_SECRET,
        redirect_uri='https://pulse.trypaper.io/slack2',
        code=code
    )
    installed_enterprise = {}
    is_enterprise_install = oauth_response.get("is_enterprise_install")
    if is_enterprise_install:
        installed_enterprise = oauth_response.get("enterprise", {})
    installed_team = oauth_response.get("team", {})
    installer = oauth_response.get("authed_user", {})
    incoming_webhook = oauth_response.get("incoming_webhook", {})

    bot_token = oauth_response.get("access_token")
    # NOTE: oauth.v2.access doesn't include bot_id in response
    bot_id = None
    enterprise_url = None
    if bot_token is not None:
        # auth.test resolves the bot id (and org URL for enterprise installs).
        auth_test = client.auth_test(token=bot_token)
        bot_id = auth_test["bot_id"]
        if is_enterprise_install is True:
            enterprise_url = auth_test.get("url")

    installation = {
        'app_id': oauth_response.get("app_id"),
        'enterprise_id': installed_enterprise.get("id"),
        'enterprise_name': installed_enterprise.get("name"),
        'enterprise_url': enterprise_url,
        'team_id': installed_team.get("id"),
        'team_name': installed_team.get("name"),
        'bot_token': bot_token,
        'bot_id': bot_id,
        'bot_user_id': oauth_response.get("bot_user_id"),
        'bot_scopes': oauth_response.get("scope"),  # comma-separated string
        'user_id': installer.get("id"),
        # 'user_token': installer.get("access_token"),
        'user_scopes': installer.get("scope"),  # comma-separated string
        'incoming_webhook_url': incoming_webhook.get("url"),
        'incoming_webhook_channel': incoming_webhook.get("channel"),
        'incoming_webhook_channel_id': incoming_webhook.get("channel_id"),
        'incoming_webhook_configuration_url': incoming_webhook.get("configuration_url"),
        'is_enterprise_install': is_enterprise_install,
        'token_type': oauth_response.get("token_type"),
    }
    # NOTE(review): logs the bot token — consider redacting in production.
    logger.info(f'getToken... {installation}')
    return installation
79 |
def weekly(d, token):
    """Post the weekly MRR + customers digest to the #demo Slack channel.

    d: dict from metrics.getSlackMsg with 'summary' (stats + message strings),
       'mrrChartUrl' and 'customerChartUrl'.
    token: Slack bot token for the team's workspace.
    Slack API errors are logged, not raised.
    """
    logger.info(f'push...')
    client = WebClient(token=token)
    try:
        # response = client.chat_postMessage(channel='#random', text="Hello world!")
        # Block Kit layout: MRR header + section w/ chart, divider,
        # customers header + section w/ chart, divider, action buttons.
        response = client.chat_postMessage(
            channel='#demo',
            text="Paper Alert",
            blocks = [
                {
                    "type": "header",
                    "text": {
                        "type": "plain_text",
                        "text": f":moneybag: MRR: ${d['summary']['currentMrrK']}k",
                        "emoji": True
                    }
                },
                {
                    "type": "section",
                    "text": {
                        "type": "mrkdwn",
                        "text": d['summary']['mrrMsg']
                    },
                    "accessory": {
                        "type": "image",
                        "image_url": d['mrrChartUrl'],
                        "alt_text": "MRR"
                    }
                },
                {
                    "type": "divider"
                },
                {
                    "type": "header",
                    "text": {
                        "type": "plain_text",
                        "text": f":smiley: Customers: {d['summary']['currentCustomers']}",
                        "emoji": True
                    }
                },
                {
                    "type": "section",
                    "text": {
                        "type": "mrkdwn",
                        "text": d['summary']['customerMsg']
                    },
                    "accessory": {
                        "type": "image",
                        "image_url": d['customerChartUrl'],
                        "alt_text": "MRR"
                    }
                },
                {
                    "type": "divider"
                },
                {
                    "type": "actions",
                    "elements": [
                        {
                            "type": "button",
                            "text": {
                                "type": "plain_text",
                                "text": "Open Pulse",
                                "emoji": True
                            },
                            "value": "open_paper",
                            "url": "https://pulse.trypaper.io?ref=slack_alert",
                            "action_id": "open_paper"
                        },
                        {
                            "type": "button",
                            "text": {
                                "type": "plain_text",
                                "text": "Update Goals",
                                "emoji": True
                            },
                            "value": "set_goals",
                            "url": "https://pulse.trypaper.io?ref=set_goals",
                            "action_id": "set_goals"
                        }
                    ]
                },
            ]
        )
        print('push...', response)
    except SlackApiError as e:
        # You will get a SlackApiError if "ok" is False
        assert e.response["ok"] is False
        assert e.response["error"]  # str like 'invalid_auth', 'channel_not_found'
        print(f"slack pints error: {e.response['error']}")
170 |
def customerAlert(d, token):
    """Post a new-customer/new-MRR alert to the team's configured channel.

    d: dict with 'mrr', 'email', 'msg' and 'slackChannel' (channel name
       without the leading '#').
    token: Slack bot token. Slack API errors are logged, not raised.
    """
    logger.info(f'customerAlert...')
    client = WebClient(token=token)
    try:
        # response = client.chat_postMessage(channel='#random', text="Hello world!")
        response = client.chat_postMessage(
            channel=f"#{d['slackChannel']}",
            text="Paper Alert",
            blocks = [
                {
                    "type": "header",
                    "text": {
                        "type": "plain_text",
                        "text": f":moneybag: New MRR: ${d['mrr']} from {d['email']}",
                        "emoji": True
                    }
                },
                {
                    "type": "section",
                    "text": {
                        "type": "mrkdwn",
                        "text": d['msg']
                    },
                },
                {
                    "type": "divider"
                },
                {
                    "type": "actions",
                    "elements": [
                        {
                            "type": "button",
                            "text": {
                                "type": "plain_text",
                                "text": "Open Pulse",
                                "emoji": True
                            },
                            "value": "open_paper",
                            "url": "https://pulse.trypaper.io?ref=slack_alert",
                            "action_id": "open_paper"
                        },
                        {
                            "type": "button",
                            "text": {
                                "type": "plain_text",
                                "text": "Update Goals",
                                "emoji": True
                            },
                            "value": "set_goals",
                            "url": "https://pulse.trypaper.io?ref=set_goals",
                            "action_id": "set_goals"
                        }
                    ]
                },
            ]
        )
        print('push...', response)
    except SlackApiError as e:
        # You will get a SlackApiError if "ok" is False
        assert e.response["ok"] is False
        assert e.response["error"]  # str like 'invalid_auth', 'channel_not_found'
        print(f"slack pints error: {e.response['error']}")
233 |
def churnAlert(d, token):
    """Post a churned-customer alert to the team's configured channel.

    d: dict with 'prev_mrr', 'email', 'msg' and 'slackChannel' (channel name
       without the leading '#').
    token: Slack bot token. Slack API errors are logged, not raised.
    """
    logger.info(f'churnAlert...')
    client = WebClient(token=token)
    try:
        # response = client.chat_postMessage(channel='#random', text="Hello world!")
        response = client.chat_postMessage(
            channel=f"#{d['slackChannel']}",
            text="Pulse Alert",
            blocks = [
                {
                    "type": "header",
                    "text": {
                        "type": "plain_text",
                        "text": f"Churned ${d['prev_mrr']} from {d['email']}",
                        "emoji": True
                    }
                },
                {
                    "type": "section",
                    "text": {
                        "type": "mrkdwn",
                        "text": d['msg']
                    },
                },
                {
                    "type": "divider"
                },
                {
                    "type": "actions",
                    "elements": [
                        {
                            "type": "button",
                            "text": {
                                "type": "plain_text",
                                "text": "Open Pulse",
                                "emoji": True
                            },
                            "value": "open_paper",
                            "url": "https://pulse.trypaper.io?ref=slack_alert",
                            "action_id": "open_paper"
                        },
                        {
                            "type": "button",
                            "text": {
                                "type": "plain_text",
                                "text": "Update Goals",
                                "emoji": True
                            },
                            "value": "set_goals",
                            "url": "https://pulse.trypaper.io?ref=set_goals",
                            "action_id": "set_goals"
                        }
                    ]
                },
            ]
        )
        print('push...', response)
    except SlackApiError as e:
        # You will get a SlackApiError if "ok" is False
        assert e.response["ok"] is False
        assert e.response["error"]  # str like 'invalid_auth', 'channel_not_found'
        print(f"slack pints error: {e.response['error']}")
--------------------------------------------------------------------------------
/frontend/src/views/Login.vue:
--------------------------------------------------------------------------------
1 |
2 |
18 |
19 |
25 |
26 |
27 |

28 |
31 |
37 |
38 |
39 |
40 |
41 |
42 |
43 |
44 |
45 |
49 |
50 |
51 |
52 |
53 |
54 |
57 |
58 |
59 |
64 |
65 |
66 |
67 |
76 |
77 |
78 |
84 |
85 |
86 |
87 |
88 |
89 |
354 |
355 |
--------------------------------------------------------------------------------
/backend/app.py:
--------------------------------------------------------------------------------
1 | from flask import Flask
2 | import flask
3 |
4 | import json
5 | import os
6 | import sys
7 | import uuid
8 | from flask_cors import CORS
9 | from flask_sqlalchemy import SQLAlchemy
10 | # from sqlalchemy.sql import text
11 | import sqlalchemy
12 | import pandas as pd
13 | import yaml
14 | import subprocess
15 |
16 | from magic_admin import Magic
17 | from magic_admin.error import DIDTokenError
18 | from magic_admin.error import RequestError
19 |
20 | from logger import logger
21 |
22 |
# --- Application configuration and import-time wiring -----------------------
# Config values are pulled from the environment and then loaded into Flask via
# app.config.from_object(__name__), which picks up the UPPERCASE module names.
SQLALCHEMY_DATABASE_URI = os.environ.get('PAPER_SQLALCHEMY_DATABASE_URI')
PAPER_MAGIC_SECRET_KEY = os.environ.get('PAPER_MAGIC_SECRET_KEY')
PAPER_API_KEY = os.environ.get('PAPER_API_KEY', False)
# SQLALCHEMY_ECHO = True
SQLALCHEMY_TRACK_MODIFICATIONS = False

app = Flask(__name__)
app.config.from_object(__name__)

CORS(app)

# autocommit sessions: each statement is committed immediately.
session_options = {
    'autocommit': True
}
db = SQLAlchemy(app, session_options=session_options)

# Imported here (not at top of file) because these modules use `db`/`app`
# from this module at import time — reordering would break the wiring.
import pints
from metrics import metrics

# dbUser = pints.postgres.createReadOnlyUser(db.engine, 10)
# NOTE(review): hard-coded team id 23 triggers a full Stripe sync on every
# process start — looks like leftover debug code; confirm before removing.
pints.stripe.getAll(db.engine, 23)

# Start the background job scheduler as a module-level side effect.
pints.scheduler.startScheduler()
48 |
def getTeamDomain(email):
    """Return the domain part of an email address (segment after the first '@')."""
    parts = email.split('@')
    return parts[1]
51 |
def checkForTeam(engine, email, userId):
    """Ensure a team exists for the user's email domain; create and join it if not.

    Args:
        engine: SQLAlchemy engine used for all reads and writes.
        email: the new user's email; its domain keys the team.
        userId: id of the user to enroll when a team is created.

    Returns:
        True (unconditionally; side effects are team/membership/db-user creation).
    """
    domain = getTeamDomain(email)
    sql = '''
        SELECT
            u.id as user_id, t.id as team_id
        FROM
            public.users as u inner join
            public.team_membership as tm on u.id = tm.user_id inner join
            public.teams as t on tm.team_id = t.id
        WHERE
            t.domain = :domain
        order by u.created_on desc
    '''
    # Fixes: bind `domain` as a parameter instead of str.format (it derives
    # from user-supplied email → SQL injection risk), and use the `engine`
    # argument rather than the module-global db.engine.
    df = pd.read_sql(sqlalchemy.sql.text(sql), engine, params={'domain': domain})
    if len(df) > 0:
        logger.info(f'already team for {domain}')
    else:
        logger.info(f'no team for {domain}, creating...')
        teamId = pints.postgres.insertTeam(engine, domain)
        logger.info(f'adding user to team {teamId}...')
        teamMembershipId = pints.postgres.insertTeamMember(engine, teamId, userId)
        logger.info(f'teamMembershipId: {teamMembershipId}.')
        dbUser = pints.postgres.createReadOnlyUser(engine, teamId)
        logger.info(f'dbUser user {dbUser}...')
    return True
77 |
@app.route('/ping', methods=["GET"])
def ping():
    """Health-check endpoint: always answers {"ok": true}."""
    body = json.dumps({'ok': True})
    return body, 200, {'ContentType': 'application/json'}
82 |
@app.route('/get_stripe', methods=["GET", "POST"])
def get_stripe():
    """Kick off a full Stripe sync for the caller's team; returns the job ids."""
    payload = flask.request.get_json()
    logger.info(f'get_stripe: {payload}')
    user = getUser(payload)
    jobIds = pints.stripe.getAll(db.engine, user['team_id'])
    body = json.dumps({'ok' : True, 'jobIds': jobIds})
    return body, 200, {'ContentType':'application/json'}
90 |
@app.route('/get_dbt', methods=["GET", "POST"])
def get_dbt():
    """Return the dbt project description for the caller."""
    payload = flask.request.get_json()
    logger.info(f'get_dbt: {payload}')
    user = getUser(payload)
    d = pints.modeling.getDbt()
    body = json.dumps({'ok' : True, 'data': d})
    return body, 200, {'ContentType':'application/json'}
98 |
@app.route('/get_raw_counts', methods=["GET", "POST"])
def get_raw_counts():
    """Return row counts of the team's raw Stripe tables."""
    payload = flask.request.get_json()
    logger.info(f'get_raw_counts: {payload}')
    user = getUser(payload)
    counts = pints.postgres.getRawTableCounts(db.engine, user['team_id'])
    body = json.dumps({'ok' : True, 'counts': counts})
    return body, 200, {'ContentType':'application/json'}
106 |
@app.route('/run_dbt', methods=["GET", "POST"])
def run_dbt():
    """Run the dbt models for the caller's team; returns logs and errors."""
    payload = flask.request.get_json()
    logger.info(f'run_dbt: {payload}')
    user = getUser(payload)
    dbtLogs, dbtErrors = pints.modeling.runDbt(user['team_id'])
    result = {
        'ok' : True,
        'dbtLogs': dbtLogs,
        'dbtErrors': dbtErrors
    }
    return json.dumps(result), 200, {'ContentType':'application/json'}
118 |
@app.route('/run_analysis', methods=["GET", "POST"])
def run_analysis():
    """Execute an analysis for the caller: either a generated dbt query
    ('search' mode, run with the app's engine) or raw user SQL (run with
    the team's read-only database user).

    Returns a JSON payload with the executed sql, rows, and column specs;
    on query failure returns a dict {'ok': False, 'error', 'sql'}.
    NOTE(review): the error path returns a plain dict while the success path
    returns a (json, status, headers) tuple — presumably Flask's dict
    auto-jsonify covers it; confirm the deployed Flask version supports that.
    """
    data = flask.request.get_json()
    user = getUser(data)
    logger.info(f'run_analysis...')
    if data['analysis']['mode'] == 'search':
        # Search mode: compile the client-side dbt model spec to SQL and run
        # it with the application's own (privileged) engine.
        dbtModel = pints.yaml2sql.dbt2Sql(data['dbt'], f"team_{user['team_id']}_stripe")
        sql = dbtModel['sql']
        userEngine = db.engine
    else:
        # Raw-SQL mode: run the user's own SQL with the team's read-only DB
        # user, search_path pinned to the team's schema.
        sql = data['analysis']['code']
        userPass = os.environ.get('PAPER_READONLY_PASSWORD')
        db_url = f"postgresql://team_{user['team_id']}_readonly:{userPass}@oregon-postgres.render.com/paperdb"
        # logger.info(f'db_url: {db_url}')
        userEngine = sqlalchemy.create_engine(db_url, connect_args={'options': f"-csearch_path=team_{user['team_id']}_stripe"})
        dbtModel = {'sql': sql}
    logger.info(f'run_analysis sql: {sql}')

    try:
        df = pd.read_sql(sql, userEngine)
    except Exception as e:
        logger.error(f'run_analysis error: {e}')
        return {
            'ok': False,
            'error': str(e),
            'sql': dbtModel,
        }
    logger.info(f'run_analysis df: {df.head()}')
    cols = df.columns.tolist()

    # Column metadata: search mode gets formats from the model; raw mode
    # falls back to unformatted columns.
    if data['analysis']['mode'] == 'search':
        cols2 = pints.modeling.getCols(dbtModel, cols)
    else:
        cols2 = [{'name': col, 'format': ''} for col in cols]
    # Serialize through pandas first so dates/odd types become strings.
    df = df.to_json(date_format = 'iso', orient='values',
        default_handler=str)
    return json.dumps(
        {
            'ok' : True,
            'sql': dbtModel,
            'rows': json.loads(df, strict=False),
            'cols': cols2,
        }), 200, {'ContentType':'application/json'}
162 |
def getUser(data):
    """Resolve the requesting user (and most recent team) from the payload.

    Args:
        data: request payload; data['user']['publicAddress'] identifies the user.

    Returns:
        dict with user_id/email/details/team_id on success, or
        {'ok': False, 'reason': ...} when the address is missing or unknown.
    """
    logger.debug(f'getUser: {data}')
    publicAddress = data['user'].get('publicAddress', False)
    if not publicAddress:
        logger.error(f'no publicAddress: {data}')
        d = {
            'ok': False,
            'reason': 'noPublicAddress'
        }
        return d
    sql = '''
        SELECT
            u.id as user_id, u.email, u.details, tm.team_id as team_id
        FROM
            public.users as u left join
            public.team_membership as tm on u.id = tm.user_id
        WHERE
            u.details ->> 'publicAddress' = :publicAddress
        order by tm.created_on desc
        limit 1
    '''
    # Fix: bind publicAddress as a parameter instead of str.format — the
    # value comes straight from the request body (SQL injection risk).
    df = pd.read_sql(sqlalchemy.sql.text(sql), db.engine,
        params={'publicAddress': publicAddress})
    if len(df) == 0:
        d = {
            'ok': False,
            'reason': 'noUser'
        }
        return d
    else:
        return json.loads(df.to_json(orient='records'))[0]
193 |
194 |
@app.route('/update_settings', methods=["GET", "POST"])
def update_settings():
    """Persist the caller's team settings blob."""
    payload = flask.request.get_json()
    logger.info(f"update_settings: {payload}")
    user = getUser(payload)
    pints.postgres.updateSettings(db.engine, user['team_id'], payload['user']['settings'])
    return json.dumps({'ok': True}), 200, {'ContentType':'application/json'}
203 |
@app.route('/update_secret', methods=["GET", "POST"])
def update_secret():
    """Store a third-party secret (Stripe key, Google Sheets secret, or Slack
    OAuth token) for the caller's team, encrypted via pints.utils.encrypt.

    The 'stripe' branch validates the key and returns its own payload
    (including kicked-off sync job ids); 'sheets' and 'slack' fall through
    to the generic {'ok': True} response at the bottom.
    """
    data = flask.request.get_json()
    logger.info(f"update_secret: {data}")
    if data['type'] == 'stripe':
        user = getUser(data)
        # Validate the key against Stripe before persisting it.
        keyTest = pints.stripe.testKey(data['stripeApiKey'])
        if keyTest['ok']:
            secrets = pints.postgres.getSecrets(db.engine, user['team_id'])
            secrets['stripeApiKey'] = pints.utils.encrypt(data['stripeApiKey'])
            pints.postgres.updateSecrets(db.engine, user['team_id'], secrets)
            # A valid key immediately triggers a full sync.
            jobIds = pints.stripe.getAll(db.engine, user['team_id'])
            keyTest['jobIds'] = jobIds
        # Returns keyTest as-is (ok False when validation failed).
        return json.dumps(keyTest), 200, {'ContentType':'application/json'}
    elif data['type'] == 'sheets':
        logger.info(f"update_secret sheets...")
        user = getUser(data)
        logger.info(f"update_secret sheets...")
        secrets = pints.postgres.getSecrets(db.engine, user['team_id'])
        sheets = pints.utils.encrypt(data['secret'])
        secrets['sheets'] = sheets
        pints.postgres.updateSecrets(db.engine, user['team_id'], secrets)
        # NOTE(review): log message says "slackAuth" — looks copy/pasted from
        # the slack branch below.
        logger.info(f"update_secret slackAuth success...")
    elif data['type'] == 'slack':
        logger.info(f"update_secret slack...")
        user = getUser(data)
        # Exchange the OAuth code for a bot token.
        slackAuth = pints.slack.getToken(data['code'])
        logger.info(f"update_secret slackAuth...")
        if slackAuth.get('bot_token', False):
            logger.info(f"update_secret slackAuth has token...")
            secrets = pints.postgres.getSecrets(db.engine, user['team_id'])
            slackAuth['bot_token'] = pints.utils.encrypt(slackAuth['bot_token'])
            secrets['slack'] = slackAuth
            pints.postgres.updateSecrets(db.engine, user['team_id'], secrets)
            logger.info(f"update_secret slackAuth success...")
    # Generic success for the non-stripe branches (and for unknown types —
    # presumably never sent by the frontend; verify).
    return json.dumps({'ok': True}), 200, {'ContentType':'application/json'}
240 |
@app.route('/get_recent_jobs', methods=["GET", "POST"])
def get_recent_jobs():
    """Return the team's most recent completed job per Stripe object,
    each flattened to its details dict plus a live raw-table row count."""
    data = flask.request.get_json()
    user = getUser(data)
    jobs = pints.postgres.getRecentJobs(db.engine, user['team_id'])
    # Replaces values in-place while iterating; safe because the key set
    # does not change.
    for key, job in jobs.items():
        logger.info(f'get_recent_jobs job: {job}')
        logger.info(f'get_recent_jobs job: {key}')
        if 'job' in job and 'obj' in job['job']:
            job['jobId'] = str(key)
            logger.info(f'job: {job}')
            # Attach the current row count for the synced Stripe object.
            job['job']['count'] = pints.postgres.getRawTableCount(db.engine,
                user['team_id'], job['job']['obj'])
            # Flatten: the response carries the inner details dict directly.
            jobs[key] = job['job']
    return json.dumps({'ok': True, 'jobs': jobs}), 200, {'ContentType':'application/json'}
256 |
@app.route('/get_job', methods=["GET", "POST"])
def get_job():
    """Fetch one job by id; completed Stripe jobs also get a raw row count."""
    payload = flask.request.get_json()
    job = pints.postgres.getJob(db.engine, payload['jobId'])
    isCompleteStripe = (
        job['ok']
        and job['job']['status'] == 'complete'
        and job['job']['type'] == 'stripe'
    )
    if isCompleteStripe:
        user = getUser(payload)
        job['job']['count'] = pints.postgres.getRawTableCount(
            db.engine, user['team_id'], job['job']['obj'])
    return json.dumps(job), 200, {'ContentType':'application/json'}
266 |
@app.route('/login', methods=["GET", "POST"])
def login():
    """Authenticate a caller via a Magic DID token (or the server API key),
    returning the stored user profile, or creating user+team on first login.

    Response shape: {'ok', 'new', 'user'?/'error'?} as JSON.
    """
    logger.info(f'login...')
    data = flask.request.get_json()
    email = data.get('email', None)
    apiKey = data.get('apiKey', None)
    did_token = data.get('idToken', None)
    # Must present either a Magic DID token or the server-side API key.
    if not did_token and not apiKey:
        d = {
            'ok': False,
            'new': False,
            'error': 'noAuth'
        }
        return json.dumps(d), 200, {'ContentType':'application/json'}
    magic = Magic(api_secret_key=PAPER_MAGIC_SECRET_KEY)
    if PAPER_API_KEY and apiKey == PAPER_API_KEY:
        # Server-to-server path: the API key bypasses Magic validation and
        # uses a sentinel issuer.
        logger.info(f'using apiKey...')
        publicAddress = data['publicAddress']
        issuer = 'api'
    else:
        try:
            magic.Token.validate(did_token)
            issuer = magic.Token.get_issuer(did_token)
            publicAddress = data['publicAddress']
        except DIDTokenError as e:
            logger.error(f'DID Token is invalid: {e}')
            d = {
                'ok': False,
                'new': False,
                'error': 'DID Token is invalid'
            }
            return json.dumps(d), 200, {'ContentType':'application/json'}
        except RequestError as e:
            logger.error(f'RequestError: {e}')
            d = {
                'ok': False,
                'new': False,
                'error': 'noAuth'
            }
            return json.dumps(d), 200, {'ContentType':'application/json'}
    # Look up the user plus team settings, whether a Stripe key is stored,
    # and whether the team's dbt mrr_facts table exists yet.
    # NOTE(review): publicAddress is interpolated via str.format — it comes
    # from the request body, so this is an injection risk; should be a bound
    # parameter (see getUser for the pattern).
    sql = '''
        SELECT
            u.id as user_id,
            u.email,
            u.details,
            t.details as settings,
            case when s.details ->> 'stripeApiKey' is not null then 1 else 0 end as has_stripe,
            (SELECT EXISTS (
                SELECT FROM information_schema.tables
                WHERE table_schema = 'team_'|| tm.team_id || '_stripe'
                AND table_name = 'mrr_facts'
            )) as has_mrr_facts
        FROM
            public.users as u left join
            public.team_membership as tm on u.id = tm.user_id left join
            public.teams as t on tm.team_id = t.id left join
            public.secrets as s on tm.team_id = s.team_id
        WHERE
            u.details ->> 'publicAddress' = '{publicAddress}'
        order by u.created_on desc
        limit 1
    '''.format(publicAddress = publicAddress)
    df = pd.read_sql(sql, db.engine)
    if len(df) > 0:
        user = df.details[0]
        email = df.email[0]
        # The issuer stored at signup must match — unless this is the API path.
        validIssuer = user['issuer'] == issuer
        if not validIssuer and not issuer == 'api':
            logger.info(f'Invalid issuer: {email} {issuer}')
            d = {
                'ok': False,
                'new': False,
                'user': False,
                'error': 'invalid_issuer'
            }
            return json.dumps(d), 200, {'ContentType':'application/json'}
        logger.info(f'issuer {issuer} is valid? {validIssuer}')
        user['settings'] = df.settings[0]
        user['hasStripe'] = bool(df.has_stripe[0] > 0)
        user['hasMrrFacts'] = bool(df.has_mrr_facts[0])
        d = {
            'ok': True,
            'new': False,
            'user': user
        }
    else:
        # First login: create the user, then the team (or join an existing
        # one for the same email domain).
        userId = pints.postgres.insertUser(db.engine, email, data)
        checkForTeam(db.engine, email, userId)
        d = {
            'ok': True,
            'new': True
        }
    return json.dumps(d), 200, {'ContentType':'application/json'}
360 |
@app.route('/get_funders', methods=["GET", "POST"])
def get_funders():
    """List active funders (excluding 'Shared Earnings'), joined to Clearbit
    enrichment data, ranked by paper_rank descending."""
    data = flask.request.get_json()
    # Fix: the trailing `.format(active = 1)` was a no-op — the SQL contains
    # no placeholders ('1' is hard-coded in the WHERE clause) — so it has
    # been removed.
    sql = '''
        SELECT
            f.public_id,
            f.name,
            f.domain,
            f.max_loan_amount,
            f.min_loan_amount,
            f.min_annual_revenue,
            f.paper_rank,
            f.focus,
            f.max_mrr_multiple,
            f.loan_type,
            f.payment_details,
            f.saas_focus,
            f.warrants,
            f.region,
            f.personal_guarantor,
            f.days_to_close,
            cb."data"
        FROM
            public.funders as f left join
            public.clearbit as cb on f."domain" = cb."domain"
        WHERE f.active = '1'
        and f.loan_type != 'Shared Earnings'
        order by f.paper_rank desc
        ;
    '''
    df = pd.read_sql(sql, db.engine)
    cols = json.loads(df.dtypes.to_json())
    d = {
        'ok': True,
        'data': json.loads(df.to_json(orient='records')),
        'columns': cols
    }
    return json.dumps(d), 200, {'ContentType':'application/json'}
399 |
@app.route('/update_user_data', methods=["GET", "POST"])
def update_user_data():
    """Upsert the free-form user_data details blob for the requesting user."""
    data = flask.request.get_json()
    logger.info(f'update_user_data: {data}')
    publicAddress = data['user'].get('publicAddress', False)
    if not publicAddress:
        d = {
            'ok': False,
            'reason': 'noUser'
        }
        return json.dumps(d), 200, {'ContentType':'application/json'}
    sql = '''
        SELECT
            u.id as user_id, u.email, u.details, ud.details as user_data
        FROM
            public.users as u left join
            public.user_data as ud on u.id = ud.user_id
        WHERE
            u.details ->> 'publicAddress' = :publicAddress
        order by u.created_on desc
    '''
    # Fix: bind publicAddress instead of interpolating it with str.format —
    # the value comes from the request body (SQL injection risk).
    df = pd.read_sql(sqlalchemy.sql.text(sql), db.engine,
        params={'publicAddress': publicAddress})
    if len(df) == 0:
        d = {
            'ok': False,
            'reason': 'noUser'
        }
        return json.dumps(d), 200, {'ContentType':'application/json'}
    with db.engine.connect() as con:
        d = { "user_id": int(df.user_id[0]), "details": json.dumps(data['userData']) }
        sql = '''
            INSERT INTO public.user_data(user_id, details)
            VALUES(:user_id, :details)
            ON CONFLICT (user_id) DO UPDATE
            SET details = :details
        '''
        statement = sqlalchemy.sql.text(sql)
        con.execute(statement, **d)
    return json.dumps(d), 200, {'ContentType':'application/json'}
439 |
440 | # CREATE TABLE public.applications (
441 | # id SERIAL PRIMARY KEY,
442 | # public_uuid uuid DEFAULT uuid_generate_v1(),
443 | # user_id integer,
444 | # created_on timestamp DEFAULT current_timestamp,
445 | # updated_on timestamp DEFAULT current_timestamp,
446 | # details JSONB
447 | # );
448 |
@app.route('/get_events', methods=["GET", "POST"])
def get_events():
    """Return the 100 most recent modeled Stripe events for the caller's team."""
    data = flask.request.get_json()
    logger.info(f'get_events: {data}')
    user = getUser(data)
    logger.info(f'get_events user: {user}')
    sql = '''
        select
            *
        from
            "team_{teamId}_stripe".events as e
        order by e.created_on desc
        limit 100
    '''.format(teamId=user['team_id'])
    try:
        df = pd.read_sql(sql, db.engine)
    except Exception as e:
        # Typically: the team schema/events table does not exist yet.
        logger.error(f'get_events error: {e}')
        ret = {
            'ok': False,
            'error': 'noData',
        }
        return json.dumps(ret), 200, {'ContentType':'application/json'}
    # Fix: the query previously ran a second time here, unguarded; reuse the
    # frame from the guarded read instead of hitting the database twice.
    rows = json.loads(df.to_json(orient='records'))
    ret = {
        'ok': True,
        'data': rows,
    }
    return json.dumps(ret), 200, {'ContentType':'application/json'}
479 |
@app.route('/get_metrics', methods=["GET", "POST"])
def get_metrics():
    """Return the team's MRR metrics: per-customer mrr_facts rows, an LTV
    summary, monthly vintage retention, and a 3-month pivot summary.

    All queries read the team's dbt-built mrr_facts table; if it does not
    exist yet the first query fails and {'ok': False, 'error': 'noData'}
    is returned.
    """
    data = flask.request.get_json()
    logger.info(f'get_metrics: {data}')
    user = getUser(data)
    logger.info(f'get_metrics user: {user}')
    sql = '''
        select
            *,
            1 as active
        from
            "team_{teamId}_stripe".mrr_facts as mrr
        order by mrr.mrr_dt asc
    '''.format(teamId=user['team_id'])
    try:
        df = pd.read_sql(sql, db.engine)
        # Shift month timestamps by 12h — presumably to center the point on
        # the day for frontend charting; confirm with the chart code.
        df['mrr_month_dt'] = df.mrr_month_dt + pd.Timedelta(hours=12)
    except Exception as e:
        logger.error(f'get_metrics error: {e}')
        ret = {
            'ok': False,
            'error': 'noData',
        }
        return json.dumps(ret), 200, {'ContentType':'application/json'}
    # Monthly totals of mrr, churned mrr, and active-customer counts.
    piv = df.pivot_table(index='mrr_month_dt', values=['mrr', 'active', 'churned_mrr'], aggfunc='sum')
    df = json.loads(df.to_json(orient='records'))
    # LTV summary over the last ~3 complete months: churn rate, ARPU,
    # implied lifetime in months, and CLV.
    sql = '''
        with c1 as (
            select
                sum(mrr.churned_mrr) / (sum(mrr.mrr) / 3) as churn_rate,
                sum(mrr.churned_mrr) as churned_mrr,
                sum(mrr.mrr) as mrr,
                avg(mrr.mrr) as avg_mrr,
                sum(mrr.mrr) / count(1) as arpu

            from
                "team_{teamId}_stripe".mrr_facts as mrr
            where mrr.current_month = 0
            and mrr.mrr_month_dt > current_timestamp - interval '4 months'
        )
        select
            *,
            100 / (churn_rate*100) as lifetime_months,
            100 / (churn_rate*100) * avg_mrr as clv
        from c1;
    '''.format(teamId=user['team_id'])
    ltv = pd.read_sql(sql, db.engine)
    ltv = json.loads(ltv.to_json(orient='records'))[0]
    # Vintage retention: customers grouped by signup month (last 13 months),
    # tracking revenue and customer retention relative to each vintage's
    # starting month.
    sql = '''
        with mrr as (
            select
                mrr.mrr_month_dt,
                mrr.customer_created_on,
                date_trunc('month', mrr.customer_created_on)::date::text as vintage,
                vintage_age,
                mrr.mrr
            from
                "team_{teamId}_stripe".mrr_facts as mrr
            where customer_created_on > (current_timestamp - interval '13 months')
        ), vintage_start as (
            select
                vintage,
                sum(mrr.mrr) as starting_mrr,
                count(1) as starting_customers
            from
                mrr as mrr
            where mrr.vintage_age = 0
            group by 1
        ), vintage_perf as (
            select
                vintage,
                vintage_age,
                sum(mrr.mrr) as mrr,
                count(1) as customers
            from
                mrr as mrr
            group by 1, 2
        ), res as (
            select
                vp.vintage,
                vp.vintage_age,
                vp.customers,
                vp.mrr,
                (vp.mrr * 100.0 / vs.starting_mrr) / 100 as revenue_retention,
                (vp.customers * 100.0 / vs.starting_customers) / 100 as customer_retention
            from vintage_perf vp join
                vintage_start as vs on vp.vintage = vs.vintage
            order by vp.vintage, vp.vintage_age
        )
        select *,
            round((revenue_retention*100)::decimal, 0)::text as revenue_retention_text,
            round((customer_retention*100)::decimal, 0)::text as customer_retention_text
        from res
    '''.format(teamId=user['team_id'])
    retention = pd.read_sql(sql, db.engine)
    retention = json.loads(retention.to_json(orient='records'))

    ret = {
        'ok': True,
        'data': df,
        'ltv': ltv,
        'retention': retention,
        # Last 3 months of the monthly pivot as the headline summary.
        'summary': piv.tail(3).to_dict(orient='records')
    }
    return json.dumps(ret), 200, {'ContentType':'application/json'}
585 |
@app.route('/to_slack', methods=["GET", "POST"])
def to_slack():
    """Placeholder endpoint: logs the payload and acknowledges.

    Fix: removed the unused `publicId = uuid.uuid4().hex` local — it was
    generated and then discarded.
    """
    data = flask.request.get_json()
    logger.info(f'to_slack: {data}')
    return json.dumps({'ok' : True}), 200, {'ContentType':'application/json'}
--------------------------------------------------------------------------------
/backend/pints/postgres.py:
--------------------------------------------------------------------------------
1 | import pandas as pd
2 | import sqlalchemy
3 | from sqlalchemy import create_engine, MetaData, Table, Column, Integer, DateTime, String, inspect
4 | from sqlalchemy.dialects.postgresql import JSONB
5 | from sqlalchemy.sql.elements import quoted_name
6 | import json
7 | import os
8 | import io
9 | import uuid
10 | from logger import logger
11 | import pints
12 |
# Default per-team settings JSON written when a team is first created
# (serialized into teams.details by insertTeam below).
teamSettings = {
    'notifications': {
        # Realtime alerts: Slack on, email off by default.
        'alerts': {
            'slack': True,
            'email': False,
        },
        # Weekly digest: both channels on by default.
        'weekly': {
            'slack': True,
            'email': True,
        },
        # Monthly digest: both channels on by default.
        'monthly': {
            'slack': True,
            'email': True,
        },
    },
    # Google Sheets integration settings; empty until configured.
    'sheets': {}
}
30 |
def createReadOnlyUser(engine, teamId):
    """Create the team's schema plus a read-only DB user scoped to it.

    Args:
        engine: SQLAlchemy engine with DDL/CREATE ROLE privileges.
        teamId: team id; derives both the schema and user names.

    Returns:
        {'ok': True, 'userName': ...} on success,
        {'ok': False, 'error': ...} when any statement fails.
    """
    userName = f"team_{teamId}_readonly"
    logger.info(f'createReadOnlyUser, creating {userName}...')
    readonlyPassword = os.environ.get('PAPER_READONLY_PASSWORD')
    with engine.connect() as con:
        try:
            trans = con.begin()
            schemaName = f"team_{teamId}_stripe"
            sql = f"CREATE SCHEMA IF NOT EXISTS {schemaName}"
            con.execute(sql)
            # NOTE(review): the password is interpolated into the DDL string;
            # CREATE USER cannot take bind params, so ensure it never
            # contains a single quote.
            sql = f"create user {userName} PASSWORD '{readonlyPassword}';"
            con.execute(sql)
            sql = f"grant usage on schema {schemaName} to {userName};"
            con.execute(sql)
            sql = f"grant select on all tables in schema {schemaName} to {userName};"
            con.execute(sql)
            sql = f"alter user {userName} set search_path = {schemaName};"
            con.execute(sql)
            # Future tables created in the schema are readable too.
            sql = f'''
                ALTER DEFAULT PRIVILEGES IN SCHEMA {schemaName}
                GRANT SELECT ON TABLES TO {userName};
            '''
            con.execute(sql)
            trans.commit()
        except Exception as e:
            # Fix: the error dict was previously built but never returned,
            # so every failure was silently reported as success.
            return {'ok': False, 'error': str(e)}
    return {'ok': True, 'userName': userName}
58 |
def insertRows(engine, table, rows, teamId):
    """Insert rows one at a time as (team_id, details-JSON) pairs, logging
    progress every 100 rows."""
    sql = sqlalchemy.sql.text(f'''
        INSERT INTO {table} (team_id, details)
        VALUES(:team_id, :details)
    ''')
    totalRows = len(rows)
    with engine.connect() as con:
        for idx, row in enumerate(rows):
            con.execute(sql, team_id=teamId, details=json.dumps(row))
            if idx % 100 == 0:
                logger.info(f'inserted row {idx} of {totalRows}.')
75 |
def insertManyRows(engine, table, rows, teamId):
    """Bulk-load rows into `table` as (team_id, details-JSON) via COPY.

    Args:
        engine: SQLAlchemy engine (a raw DBAPI connection is borrowed for COPY).
        table: destination table name.
        rows: list of JSON-serializable dicts.
        teamId: team id stamped on every row.

    Returns:
        True when there is nothing to insert; otherwise None.
    """
    totalRows = len(rows)
    if totalRows == 0:
        logger.info(f'insertManyRows, no rows...')
        return True
    logger.info(f'insertManyRows, inserting {totalRows} into {table}...')
    cols = ['team_id', 'details']
    data = io.StringIO()
    for row in rows:
        # NOTE(review): a '|', newline, or backslash inside the JSON would
        # corrupt the COPY stream — assumes payloads never contain them;
        # confirm or switch to csv-format COPY with proper quoting.
        data.write('|'.join(
            [
                str(teamId),
                json.dumps(row),
            ]
        ) + '\n')
    data.seek(0)
    # Fix: the raw connection and cursor were never closed, leaking a pooled
    # connection per call.
    raw = engine.raw_connection()
    try:
        curs = raw.cursor()
        try:
            curs.copy_from(data, table, columns=cols, sep='|')
            curs.connection.commit()
        finally:
            curs.close()
    finally:
        raw.close()
96 |
def getMaxRecord(engine, table, teamId):
    """Return the team's max 'created' value in the given raw table (or None)."""
    sql = f'''
        select max(t.details ->> 'created')
        from "public".{table} as t
        where t.team_id = {teamId}
    '''
    with engine.connect() as con:
        row = con.execute(sqlalchemy.sql.text(sql)).fetchone()
    return row[0]
107 |
def getMaxJobRun(engine, teamId, schedule):
    """Return created_on of the team's newest job of the given type, or False."""
    sql = f'''
        select j.created_on
        from "public".jobs as j
        where j.team_id = {teamId}
        and j.details ->> 'type' = '{schedule}'
        order by j.id desc
    '''
    with engine.connect() as con:
        row = con.execute(sqlalchemy.sql.text(sql)).fetchone()
    return row[0] if row else False
123 |
def deleteRows(engine, table, teamId):
    """Delete every one of the team's rows in the given public table."""
    sql = f'''
        delete from "public".{table} as t
        where t.team_id = {teamId}
    '''
    with engine.connect() as con:
        return con.execute(sqlalchemy.sql.text(sql))
133 |
134 |
def insertUser(engine, email, data):
    """Insert a user row (details = signup payload JSON); return the new id."""
    statement = sqlalchemy.sql.text('''
        INSERT INTO users(email, details)
        VALUES(:email, :details)
        RETURNING id;
    ''')
    with engine.connect() as con:
        result = con.execute(statement, email=email, details=json.dumps(data))
        return result.fetchone()[0]
147 |
def insertTeam(engine, domain):
    """Create a team keyed by email domain with default settings; return its id."""
    params = {
        "public_uuid": uuid.uuid4().hex,
        "domain": domain,
        "details": json.dumps(teamSettings),
    }
    statement = sqlalchemy.sql.text('''
        INSERT INTO teams(public_uuid, domain, details)
        VALUES(:public_uuid, :domain, :details)
        RETURNING id;
    ''')
    with engine.connect() as con:
        return con.execute(statement, **params).fetchone()[0]
161 |
def insertTeamMember(engine, teamId, userId):
    """Enroll a user in a team with the 'admin' role; return the membership id."""
    params = {
        "team_id": teamId,
        "user_id": userId,
        "public_uuid": uuid.uuid4().hex,
        "role": "admin",
        "details": json.dumps({}),
    }
    statement = sqlalchemy.sql.text('''
        INSERT INTO team_membership(team_id, user_id, public_uuid, role, details)
        VALUES(:team_id, :user_id, :public_uuid, :role, :details)
        RETURNING id;
    ''')
    with engine.connect() as con:
        return con.execute(statement, **params).fetchone()[0]
181 |
def updateSettings(engine, teamId, details):
    """Overwrite the team's settings JSON; always returns True."""
    statement = sqlalchemy.sql.text('''
        UPDATE public.teams
        SET details = :details
        WHERE id = :team_id
    ''')
    with engine.connect() as con:
        con.execute(statement, details=json.dumps(details), team_id=teamId)
    return True
196 |
def getSettings(engine, teamId):
    """Return the team's settings JSON, or False when the team is missing."""
    sql = f'''
        select t.details
        from "public".teams as t
        where t.id = {teamId}
        limit 1
    '''
    with engine.connect() as con:
        row = con.execute(sqlalchemy.sql.text(sql)).fetchone()
    return row[0] if row else False
211 |
def getTeams(engine):
    """List all teams with the timestamps of their latest completed jobs
    (overall, hourly, weekly, and notification sends)."""
    sql = '''
        with last_job as (
            select
                j.team_id,
                max(j.created_on) as created_on,
                max(case when j.details ->> 'type' = 'hourly' then j.created_on end) as last_hourly,
                max(case when j.details ->> 'type' = 'weekly' then j.created_on end) as last_weekly,
                max(case when j.details ->> 'type' = 'sendNotifications' then j.created_on end) as last_notifications
            from "public".jobs as j
            where 1=1
            and j.status = 'complete'
            group by 1
        )

        select
            t.id,
            t.domain,
            t.details as settings,
            lj.last_hourly,
            lj.last_weekly,
            lj.last_notifications
        from
            "public".teams as t left join
            last_job as lj on t.id = lj.team_id
    '''
    frame = pd.read_sql(sql, engine)
    return frame.to_dict(orient='records')
240 |
def createSecrets(engine, teamId):
    """Insert an empty secrets row for the team; return the new row id."""
    statement = sqlalchemy.sql.text('''
        INSERT INTO public.secrets(team_id, details)
        VALUES(:team_id, :details)
        RETURNING id;
    ''')
    with engine.connect() as con:
        result = con.execute(statement, team_id=teamId, details=json.dumps({}))
        return result.fetchone()[0]
256 |
def getSecrets(engine, teamId):
    """Return the team's newest secrets dict, creating an empty row on first use."""
    sql = f'''
        select details
        from "public".secrets as s
        where s.team_id = {teamId}
        order by id desc
        limit 1
    '''
    with engine.connect() as con:
        row = con.execute(sqlalchemy.sql.text(sql)).fetchone()
    if row:
        return row[0]
    logger.info(f'no secrets, creating...')
    createSecrets(engine, teamId)
    return {}
273 |
def updateSecrets(engine, teamId, details):
    """Overwrite the team's secrets JSON; always returns True."""
    statement = sqlalchemy.sql.text('''
        UPDATE public.secrets
        SET details = :details
        WHERE team_id = :team_id
    ''')
    with engine.connect() as con:
        con.execute(statement, details=json.dumps(details), team_id=teamId)
    return True
288 |
def getStripeApiKey(engine, teamId):
    """Return the team's decrypted Stripe API key, or False when absent."""
    sql = f'''
        select details
        from "public".secrets as s
        where s.team_id = {teamId}
        order by id desc
        limit 1
    '''
    with engine.connect() as con:
        row = con.execute(sqlalchemy.sql.text(sql)).fetchone()
    if not row:
        return False
    details = row[0]
    encrypted = details.get('stripeApiKey', False)
    if not encrypted:
        return False
    return pints.utils.decrypt(encrypted)
306 |
def getSlackInfo(engine, teamId):
    """Return the team's Slack auth blob with the bot token decrypted,
    or False when no secrets/slack entry/bot token exists."""
    sql = f'''
        select details
        from "public".secrets as s
        where s.team_id = {teamId}
        order by id desc
        limit 1
    '''
    with engine.connect() as con:
        row = con.execute(sqlalchemy.sql.text(sql)).fetchone()
    if not row:
        return False
    slack = row[0].get('slack', False)
    if not slack:
        return False
    if not slack.get('bot_token', False):
        return False
    slack['bot_token'] = pints.utils.decrypt(slack['bot_token'])
    return slack
328 |
def getRawTableCount(engine, teamId, table):
    """Count the team's rows in raw public.stripe_<table>; {'ok': False} if the
    query yields no row."""
    schema = 'public'
    sql = f'''
        select
            count(1) as ct
        from "{schema}"."stripe_{table}"
        where team_id = {teamId}
    '''
    with engine.connect() as con:
        row = con.execute(sqlalchemy.sql.text(sql)).fetchone()
    return row[0] if row else {'ok': False}
343 |
def getRawTableCounts(engine, teamId):
    """Count the team's rows in each raw Stripe table via one UNION ALL query.

    Returns:
        list-of-lists JSON: [table, 'raw', count] per table.
    """
    schema = 'public'
    tables = ['customers', 'coupons']
    # Fix: `sql` was never initialized, so the `sql +=` below raised
    # NameError on every call (compare getDbtTableCounts, which has sql = '').
    sql = ''
    for index, table in enumerate(tables):
        sql += f'''
        select
            '{table}' as "table",
            'raw' as "type",
            count(1) as ct
        from "{schema}"."stripe_{table}"
        where team_id = {teamId}
        '''
        if index < len(tables)-1:
            sql += 'union all\n'
    logger.info(f'getTableCounts {sql}...')
    df = pd.read_sql(sql, engine)
    df = df.to_json(date_format = 'iso', orient='values', default_handler=str)
    return json.loads(df)
362 |
def getDbtTableCounts(engine, teamId):
    """Count rows in the team's dbt-modeled tables via one UNION ALL query;
    returns list-of-lists JSON: [table, 'modeled', count] per table."""
    tables = ['customers', 'subscriptions', 'mrr_facts']
    schema = f'team_{teamId}_stripe'
    selects = [
        f'''
        select
            '{table}' as "table",
            'modeled' as "type",
            count(1) as ct
        from "{schema}"."{table}"
        '''
        for table in tables
    ]
    sql = 'union all\n'.join(selects)
    frame = pd.read_sql(sql, engine)
    serialized = frame.to_json(date_format = 'iso', orient='values', default_handler=str)
    return json.loads(serialized)
380 |
def getJobSummary(engine, teamId):
    """Return a DataFrame summarizing the team's jobs grouped by
    object, status, and type (with count and max id per group)."""
    sql = f'''
        select
            team_id,
            details ->> 'obj' as obj,
            details ->> 'status' as "status",
            details ->> 'type' as "type",
            count(1) as ct,
            max(j.id) as id
        from "public".jobs as j
        where 1=1
        and team_id = {teamId}
        group by
            1, 2, 3, 4
    '''
    return pd.read_sql(sql, engine)
398 |
def getRecentJobs(engine, teamId):
    """Map public_uuid -> details of the team's latest completed job per obj.

    The jobs2 CTE keeps, for each details->>'obj', the max id among the
    team's completed jobs; the outer select then pulls that job's uuid and
    raw details payload. Returns a dict keyed by public_uuid.
    """
    sql = f'''
    with jobs2 as (
        select
            details ->> 'obj' as obj,
            count(1) as ct,
            max(j.id) as id
        from "public".jobs as j
        where details ->> 'status' = 'complete'
        and team_id = {teamId}
        group by
            1
    )

    select
        j.public_uuid::text as public_uuid,
        details as job
    from jobs2 as j2 inner join
        "public".jobs as j on j.id = j2.id
    '''
    frame = pd.read_sql(sql, engine)
    return frame.set_index('public_uuid').to_dict(orient='index')
423 |
def getSchedulerRow(engine):
    """Report whether the APScheduler bookkeeping table has any row.

    Returns True when public.aps_scheduler contains at least one row, False
    when it is empty or the query fails (e.g. the table does not exist yet).
    """
    sql = '''
    SELECT *
    FROM
        public.aps_scheduler
    '''
    try:
        df = pd.read_sql(sql, engine)
        return len(df) > 0
    except Exception:
        # BUGFIX: narrowed from a bare `except:` so KeyboardInterrupt /
        # SystemExit are no longer swallowed; a missing table still just
        # means "no row".
        return False
435 |
def truncateTable(engine, table):
    """Empty a table in the public schema.

    Returns the execute() result on success, False on failure. The table
    name is interpolated (identifiers cannot be bound parameters) — only
    call with trusted, internal table names.
    """
    with engine.connect() as con:
        sql = f'''
        TRUNCATE "public".{table}
        '''
        statement = sqlalchemy.sql.text(sql)
        try:
            res = con.execute(statement)
            return res
        except Exception as e:
            # Best-effort: callers only check truthiness, but leave a trace
            # instead of failing completely silently (was `as e` unused).
            logger.info(f'truncateTable failed for {table}: {e}...')
            return False
447 |
def updateJob(engine, teamId, jobId, jobUuid, details):
    """Upsert a job row.

    With a jobId, refreshes that row's details/status and returns jobId;
    without one, inserts a new row and returns the generated id.
    """
    logger.info(f'updateJob {jobId} {jobUuid}...')
    with engine.connect() as con:
        params = {
            "details": json.dumps(details),
            "team_id": teamId,
            "job_id": jobId,
            "public_uuid": jobUuid,
            "status": details['status']
        }

        if not jobId:
            # No id yet: create the row and hand back its new primary key.
            insert_sql = '''
            INSERT INTO public.jobs(team_id, public_uuid, details)
            VALUES(:team_id, :public_uuid, :details)
            RETURNING id;
            '''
            row = con.execute(sqlalchemy.sql.text(insert_sql), **params).fetchone()
            logger.info(f'updateJob INSERT {row}...')
            return row[0]

        # Existing job: refresh its payload and status in place.
        update_sql = '''
            UPDATE public.jobs
            SET details = :details,
                status = :status
            WHERE id = :job_id
            '''
        con.execute(sqlalchemy.sql.text(update_sql), **params)
        return jobId
479 |
def updateJobStatus(engine, jobId, status, error=None, details=None):
    """Update a job's status column and mirror it into the details jsonb.

    Optionally records an error message and extra detail key/values inside
    the details payload. Always returns True.
    """
    with engine.connect() as con:
        d = {
            "jobId": jobId,
            "status": status,
            "error": error,
        }
        sql = '''
        UPDATE jobs
        SET status = :status,
            details = jsonb_set(details, '{status}', to_jsonb((:status)::text))
        where id = :jobId;
        '''
        statement = sqlalchemy.sql.text(sql)
        res = con.execute(statement, **d)
        if error:
            sql = '''
            UPDATE jobs
            SET details = jsonb_set(details, '{error}', to_jsonb((:error)::text))
            where id = :jobId;
            '''
            statement = sqlalchemy.sql.text(sql)
            res = con.execute(statement, **d)
        if details:
            for k, v in details.items():
                logger.info(f'details.items {k} {v}...')
                # BUGFIX: v and jobId used to be formatted directly into the
                # SQL string (d2 was built but never used), which broke on
                # values containing quotes and was an injection vector. They
                # are bound parameters now. The key k still lands in the
                # jsonb path literal: it comes from internal code, not user
                # input.
                d2 = {
                    'v': v,
                    'jobId': jobId,
                }
                sql = f'''
                UPDATE jobs
                SET details = jsonb_set(details, '{{{k}}}', to_jsonb((:v)::text))
                where id = :jobId;
                '''
                statement = sqlalchemy.sql.text(sql)
                res = con.execute(statement, **d2)
        logger.info(f'updated job id {jobId}...')
        return True
519 |
def getJob(engine, jobUuid):
    """Fetch the most recent job row for a public uuid.

    Returns {'ok': True, 'job': details} with 'updated_on' injected as a
    unix timestamp, or {'ok': False} when no row matches.
    """
    with engine.connect() as con:
        # BUGFIX: jobUuid was interpolated into the SQL string; it arrives
        # from outside (public uuid), so it is now a bound parameter.
        sql = '''
        select details, updated_on
        from public.jobs as j
        where public_uuid = :jobUuid
        order by id desc
        limit 1
        '''
        statement = sqlalchemy.sql.text(sql)
        res = con.execute(statement, jobUuid=jobUuid).fetchone()
        if not res:
            return {'ok': False}
        ret = res[0]  # details jsonb comes back as a dict
        ret['updated_on'] = res[1].timestamp()
        return {'ok': True, 'job': ret}
536 |
def getLastJob(engine, teamId, jobType):
    """Return the newest completed job row of a given type for a team.

    Returns the row, or None when the team has no completed job of that
    type.
    """
    with engine.connect() as con:
        # Bound parameters instead of f-string interpolation: jobType and
        # teamId are values, not identifiers.
        sql = '''
        select *
        from "public".jobs as j
        where j.details ->> 'type' = :jobType
        and j.team_id = :teamId
        and j.status = 'complete'
        order by id desc
        limit 1
        '''
        statement = sqlalchemy.sql.text(sql)
        res = con.execute(statement, jobType=jobType, teamId=teamId).fetchone()
        return res
551 |
def addJob(engine, teamId, details, jobUuid):
    """Insert a new job row and return its generated id."""
    with engine.connect() as con:
        insert_sql = '''
        INSERT INTO public.jobs(team_id, public_uuid, status, details)
        VALUES(:team_id, :public_uuid, :status, :details)
        RETURNING id;
        '''
        payload = {
            "details": json.dumps(details),
            "team_id": teamId,
            "status": details['status'],
            "public_uuid": jobUuid
        }
        row = con.execute(sqlalchemy.sql.text(insert_sql), **payload).fetchone()
        logger.info(f'addJob res {row}...')
        return row[0]
569 |
def addMessage(engine, teamId, targetId, message, jobUuid):
    """Queue a message row pointing at a target job; return the new row id."""
    with engine.connect() as con:
        insert_sql = '''
        INSERT INTO public.message_queue(target_id, team_id, public_uuid, details)
        VALUES(:target_id, :team_id, :public_uuid, :details)
        RETURNING id;
        '''
        payload = {
            "target_id": targetId,
            "team_id": teamId,
            "details": json.dumps(message),
            "public_uuid": jobUuid
        }
        inserted = con.execute(sqlalchemy.sql.text(insert_sql), **payload).fetchone()
        logger.info(f'addMessage res {inserted}...')
        return inserted[0]
587 |
def deleteIncompleteJobs(engine):
    """Remove every job row whose status never reached 'complete'."""
    delete_sql = '''
        DELETE FROM public.jobs
        WHERE "status" != 'complete'
        '''
    with engine.connect() as con:
        return con.execute(sqlalchemy.sql.text(delete_sql))
597 |
def getMessages(engine):
    """Atomically claim the next runnable message from the queue.

    A message is runnable once every job uuid listed in its
    details -> 'dependencies' array has status 'complete'. The CTE pipeline:
    q expands each message's dependency uuids, c lists job uuids/statuses,
    q2 joins them and flags completed dependencies, and q3 keeps messages
    whose dependency count equals their completed count. The final
    DELETE ... FOR UPDATE SKIP LOCKED pops exactly one such message so
    concurrent workers do not claim the same row.

    Returns (jobRow, queueRow) when the claimed message points at a pending
    job, otherwise (False, False).
    """
    with engine.connect() as con:
        # Explicit transaction: the DELETE is committed before we look at
        # the target job, so the queue row stays consumed even if the job
        # lookup below finds nothing.
        trans = con.begin()
        sql = '''
        with q as (
            select
                jsonb_array_elements_text(mq.details -> 'dependencies')::text as dependency,
                mq.target_id,
                mq.id
            from public.message_queue as mq
            where 1=1
        ), c as (
            select j.public_uuid::text, j.status
            from public.jobs as j
        ), q2 as (
            select
                q.dependency,
                q.target_id,
                q.id,
                case when c.status = 'complete' then 1 else 0 end as complete
            from
                q inner join
                c as c on q.dependency = c.public_uuid
        ), q3 as (
            select
                id,
                count(1) as jobs,
                sum(complete) as complete
            from q2
            group by 1
            having
                count(1) = sum(complete)
        )

        DELETE FROM public.message_queue
        WHERE id = (
            SELECT mq.id
            FROM public.message_queue as mq inner join
                q3 on mq.id = q3.id
            ORDER BY mq.id ASC
            FOR UPDATE SKIP LOCKED
            LIMIT 1
        )
        RETURNING *;
        '''
        queueRow = con.execute(sql).fetchone()
        trans.commit()
        if queueRow:
            logger.info(f"message_queue process queue id: {queueRow['id']} and target_id: {queueRow['target_id']}...")
            # NOTE: driver-level %s placeholder (raw DBAPI paramstyle) here,
            # unlike the sqlalchemy.text() named binds used elsewhere in
            # this module.
            sql = '''
            SELECT *
            FROM public.jobs as j
            WHERE j.id = %s
            AND j.status = 'pending'
            FOR UPDATE;
            '''
            jobRow = con.execute(sql, (queueRow['target_id'],)).fetchone()
            if jobRow:
                logger.info(f"jobRow id: {jobRow['id']}...")
                return jobRow, queueRow
        # Nothing runnable, or the claimed target was not in 'pending'.
        return False, False
659 |
def getAlerts(engine, teamId, lastJob):
    """Build MRR alert rows (new vs canceled customers) since the last run.

    lastJob['details'] supplies the high-water marks (maxCreatedOn /
    maxCanceledOn) from the previous alert job; any customer created or
    canceled after them becomes an alert. Returns a list of per-customer
    dicts.
    """
    # NOTE(review): the watermark values are interpolated into the SQL —
    # they originate from our own jobs table, but verify they stay trusted.
    sql = f'''
    select
        mrr.email,
        mrr.customer_created_on,
        to_char(mrr.customer_created_on::date, 'Mon dd, yyyy') as customer_created_on2,
        current_date - mrr.customer_created_on::date as created_days_ago,
        mrr.mrr,
        mrr.prev_mrr,
        mrr.mrr_status,
        mrr.mrr_rank,
        mrr.percent_off_precise,
        case
            when mrr.canceled_dt > '{lastJob['details']['maxCanceledOn']}' then 'canceled'
            else 'new'
        end as alert_type
    from team_{teamId}_stripe.mrr_facts as mrr
    where
        mrr.current_month = 1
        and (
            mrr.customer_created_on > '{lastJob['details']['maxCreatedOn']}'
            or mrr.canceled_dt > '{lastJob['details']['maxCanceledOn']}'
        )
    order by mrr.created_on desc
    '''
    return pd.read_sql(sql, engine).to_dict(orient='records')
--------------------------------------------------------------------------------
/frontend/src/App.vue:
--------------------------------------------------------------------------------
1 |
2 |
3 |
254 |
255 |
256 |
257 |
258 |
259 |
260 |
261 |
262 |
263 |
264 |
265 |
266 |
267 |
268 |
269 |
270 |
271 | {{storeState.msg.primary}}
272 |
273 |
274 |
275 |
276 |
277 |
281 |
282 |
283 |
284 |
285 |
286 |
287 |
288 |
289 |
290 |
291 |
292 |
561 |
562 |
565 |
--------------------------------------------------------------------------------