├── .dockerignore
├── .github
├── ISSUE_TEMPLATE
│ ├── bug_report.md
│ └── feature_request.md
├── PULL_REQUEST_TEMPLATE.md
└── workflows
│ ├── deploy.yml
│ └── test-deploy.yml
├── .gitignore
├── CODE_OF_CONDUCT.md
├── CONTRIBUTING.md
├── LICENSE
├── MANIFEST.in
├── Makefile
├── README.md
├── deployment
├── docker
│ ├── Dockerfile.api
│ ├── Dockerfile.client
│ ├── Dockerfile.simple
│ └── docker-compose.yml
├── nginx
│ └── nginx.default.conf
└── terraform
│ ├── .terraform.lock.hcl
│ ├── main.tf
│ ├── outputs.tf
│ ├── terraform.tfvars
│ ├── user_data.sh
│ ├── variables.tf
│ └── versions.tf
├── documentation
├── .eslintrc.js
├── .github
│ ├── CODEOWNERS
│ ├── pull_request_template.md
│ └── workflows
│ │ └── autoformat.yml
├── .gitignore
├── .gitmodules
├── .nvmrc
├── .prettierignore
├── .prettierrc.json
├── LICENSE
├── README.md
├── babel.config.js
├── changelog
│ ├── 2022-04-01-v010-release.md
│ └── 2022-04-13-v011-release.md
├── docs
│ ├── components
│ │ ├── CenteredImage.js
│ │ ├── CustomWarning.js
│ │ ├── DetermineHeader.js
│ │ ├── RelatedReadList.js
│ │ └── RowOfImages.js
│ ├── contributing
│ │ ├── local-development.md
│ │ └── overview.md
│ ├── guides
│ │ └── quick-install.md
│ ├── integrations
│ │ ├── bigquery.md
│ │ ├── email.md
│ │ ├── postgresql.md
│ │ ├── redshift.md
│ │ ├── slack.md
│ │ ├── snowflake.md
│ │ └── webhooks.md
│ ├── msi
│ │ ├── bootstrap
│ │ │ └── index.md
│ │ ├── collectors-yml.md
│ │ ├── configuration.md
│ │ ├── destinations.md
│ │ ├── environment-variables.md
│ │ ├── functions.md
│ │ ├── how-to-install-msi.md
│ │ ├── how-to-use-msi.md
│ │ ├── index.md
│ │ ├── init
│ │ │ └── index.md
│ │ ├── inputs-yml.md
│ │ ├── msi-project-yml.md
│ │ ├── normalization.md
│ │ ├── outputs-yml.md
│ │ ├── routes-yml.md
│ │ ├── run
│ │ │ └── index.md
│ │ ├── sources.md
│ │ ├── state.md
│ │ └── test-connection
│ │ │ └── index.md
│ ├── supportfaq
│ │ └── usage-data-preferences.md
│ └── user-guide
│ │ ├── aws-ec2.md
│ │ ├── custom-sql.md
│ │ ├── distribution.md
│ │ ├── freshness.md
│ │ ├── getting-started.md
│ │ ├── hybrid-deployment.md
│ │ ├── introduction.md
│ │ ├── kubernetes.md
│ │ ├── local-deployment.md
│ │ ├── schema-changes.md
│ │ ├── table-health.md
│ │ └── terraform-deployment.md
├── docusaurus.config.js
├── netlify.toml
├── package.json
├── plugins
│ └── docusaurus-tailwindcss-loader
│ │ └── index.js
├── postcss.config.js
├── sidebars.js
├── src
│ ├── components
│ │ ├── Community.js
│ │ ├── Integrations.js
│ │ ├── Intro.js
│ │ ├── MonosiCloud.js
│ │ ├── Newsletter.js
│ │ ├── Resources.js
│ │ ├── index.js
│ │ ├── responsive-player
│ │ │ └── ResponsivePlayer.js
│ │ └── shared
│ │ │ └── Button.js
│ ├── css
│ │ └── custom.css
│ ├── fonts
│ │ ├── Aeonik-Bold.woff
│ │ ├── Aeonik-Light.woff
│ │ ├── Aeonik-Regular.woff
│ │ ├── AeonikTRIAL-Bold.ttf
│ │ ├── AeonikTRIAL-Light.ttf
│ │ └── AeonikTRIAL-Regular.ttf
│ ├── pages
│ │ ├── index.js
│ │ ├── integrations.js
│ │ └── styles.module.css
│ └── theme
│ │ ├── BlogLayout
│ │ └── index.js
│ │ ├── BlogListPage
│ │ └── index.js
│ │ ├── BlogPostItem
│ │ └── index.js
│ │ ├── BlogPostPage
│ │ └── index.js
│ │ ├── BlogSidebar
│ │ └── index.js
│ │ ├── BlogTagsListPage
│ │ └── index.js
│ │ ├── BlogTagsPostsPage
│ │ └── index.js
│ │ ├── DocItem
│ │ ├── index.js
│ │ └── styles.module.css
│ │ ├── DocPage
│ │ ├── index.js
│ │ └── styles.module.css
│ │ ├── Footer
│ │ └── index.js
│ │ ├── MDXComponents
│ │ └── index.js
│ │ ├── Tag
│ │ └── index.js
│ │ └── TagsListByLetter
│ │ └── index.js
├── static
│ ├── CNAME
│ ├── _redirects
│ ├── img
│ │ ├── Slack_Mark.svg
│ │ ├── airflow.png
│ │ ├── alerts
│ │ │ ├── alerts.png
│ │ │ └── create.png
│ │ ├── bigquery.svg
│ │ ├── changelog
│ │ │ └── v011
│ │ │ │ ├── issues_page.png
│ │ │ │ └── onboarding_form.png
│ │ ├── cloud.png
│ │ ├── cube.svg
│ │ ├── datasource
│ │ │ ├── bigquery_connection.png
│ │ │ ├── connect.png
│ │ │ ├── create-1.png
│ │ │ └── create-2.png
│ │ ├── dbt.png
│ │ ├── doc.svg
│ │ ├── dotnet.svg
│ │ ├── email.png
│ │ ├── example.gif
│ │ ├── favicon.ico
│ │ ├── favicon.png
│ │ ├── finish.svg
│ │ ├── foreign.svg
│ │ ├── integrations
│ │ │ ├── create.png
│ │ │ ├── overview.png
│ │ │ └── webhook_alert.png
│ │ ├── layers.png
│ │ ├── looker.png
│ │ ├── metabase.svg
│ │ ├── mode.svg
│ │ ├── monitors
│ │ │ ├── monitors.png
│ │ │ ├── monitors_index.png
│ │ │ └── table_health.png
│ │ ├── moon.svg
│ │ ├── mysql.png
│ │ ├── pagerduty.png
│ │ ├── postgresql.svg
│ │ ├── powerbi.png
│ │ ├── redshift.svg
│ │ ├── ruby.svg
│ │ ├── server.svg
│ │ ├── slack_alert.svg
│ │ ├── snowflake.svg
│ │ ├── sun.svg
│ │ ├── tableau.png
│ │ ├── tool.svg
│ │ ├── undraw_docusaurus_mountain.svg
│ │ ├── undraw_docusaurus_react.svg
│ │ ├── undraw_docusaurus_tree.svg
│ │ └── webhooks.svg
│ └── scripts
│ │ ├── feedback.js
│ │ └── fullstory.js
├── tailwind.config.js
├── versions.json
└── yarn.lock
├── pyproject.toml
├── requirements.api.txt
├── requirements.pkg.txt
├── requirements.test.txt
├── requirements.txt
├── setup.py
├── src
├── ingestion
│ ├── __init__.py
│ ├── collector.py
│ ├── destinations
│ │ ├── __init__.py
│ │ ├── base.py
│ │ ├── kafka.py
│ │ ├── monosi.py
│ │ └── s3.py
│ ├── pipeline.py
│ ├── sources
│ │ ├── __init__.py
│ │ ├── base.py
│ │ ├── bigquery.py
│ │ ├── kafka.py
│ │ ├── monosi.py
│ │ ├── postgresql.py
│ │ ├── redshift.py
│ │ └── snowflake.py
│ ├── task.py
│ └── transformers
│ │ ├── __init__.py
│ │ ├── base.py
│ │ └── monosi
│ │ ├── __init__.py
│ │ ├── anomalies.py
│ │ ├── issues.py
│ │ ├── metrics.py
│ │ ├── monitors.py
│ │ └── zscores.py
├── scheduler
│ ├── __init__.py
│ ├── api.py
│ ├── base.py
│ ├── constants.py
│ ├── db.py
│ ├── handlers
│ │ ├── __init__.py
│ │ ├── base.py
│ │ ├── executions.py
│ │ └── jobs.py
│ ├── job.py
│ ├── manager.py
│ └── models
│ │ ├── __init__.py
│ │ └── execution.py
├── server
│ ├── __init__.py
│ ├── config.py
│ ├── handlers
│ │ ├── __init__.py
│ │ ├── base.py
│ │ ├── datasources.py
│ │ ├── integrations.py
│ │ ├── issues.py
│ │ ├── metrics.py
│ │ ├── monitors.py
│ │ └── users.py
│ ├── integrations
│ │ ├── __init__.py
│ │ ├── slack.py
│ │ └── webhook.py
│ ├── jobs
│ │ ├── analysis.py
│ │ ├── base.py
│ │ ├── schema.py
│ │ └── table_health.py
│ ├── middleware
│ │ ├── __init__.py
│ │ ├── api.py
│ │ ├── db.py
│ │ ├── scheduler.py
│ │ └── ui.py
│ ├── models.py
│ ├── pipeline.py
│ └── wsgi.py
├── telemetry
│ ├── __init__.py
│ └── events.py
└── ui
│ ├── package.json
│ ├── public
│ ├── favicon.ico
│ ├── index.html
│ ├── manifest.json
│ └── robots.txt
│ ├── src
│ ├── App.css
│ ├── App.tsx
│ ├── components
│ │ ├── Flyout
│ │ │ ├── flyout.css
│ │ │ └── index.tsx
│ │ ├── Navigation
│ │ │ └── index.tsx
│ │ ├── Page
│ │ │ ├── bootstrap_page.css
│ │ │ └── index.tsx
│ │ └── forms
│ │ │ ├── DatasourceForm
│ │ │ └── index.tsx
│ │ │ ├── IntegrationForm
│ │ │ └── index.tsx
│ │ │ └── ProfileForm
│ │ │ └── index.tsx
│ ├── images
│ │ ├── BigQueryLogo.tsx
│ │ ├── PagerDutyLogo.tsx
│ │ ├── WebhookLogo.tsx
│ │ └── index.ts
│ ├── index.tsx
│ ├── pages
│ │ ├── app
│ │ │ ├── dashboard
│ │ │ │ └── Index
│ │ │ │ │ ├── dashboard.css
│ │ │ │ │ └── index.tsx
│ │ │ ├── executions
│ │ │ │ └── Index
│ │ │ │ │ ├── components
│ │ │ │ │ ├── ExecutionsTable.tsx
│ │ │ │ │ └── JobsTable.tsx
│ │ │ │ │ └── index.tsx
│ │ │ ├── issues
│ │ │ │ └── Index
│ │ │ │ │ ├── components
│ │ │ │ │ └── IssuesTable.tsx
│ │ │ │ │ └── index.tsx
│ │ │ ├── metrics
│ │ │ │ └── Detail
│ │ │ │ │ └── index.tsx
│ │ │ ├── monitors
│ │ │ │ ├── Detail
│ │ │ │ │ └── index.tsx
│ │ │ │ └── Index
│ │ │ │ │ ├── components
│ │ │ │ │ ├── MonitorForm
│ │ │ │ │ │ └── index.tsx
│ │ │ │ │ └── MonitorsTable
│ │ │ │ │ │ ├── index.tsx
│ │ │ │ │ │ └── table.css
│ │ │ │ │ └── index.tsx
│ │ │ └── onboarding
│ │ │ │ ├── GettingStarted
│ │ │ │ └── index.tsx
│ │ │ │ └── Walkthrough
│ │ │ │ └── index.tsx
│ │ └── settings
│ │ │ ├── Integrations
│ │ │ ├── components
│ │ │ │ └── IntegrationsTable.tsx
│ │ │ └── index.tsx
│ │ │ ├── Profile
│ │ │ └── index.tsx
│ │ │ └── Sources
│ │ │ ├── components
│ │ │ └── SourcesTable.tsx
│ │ │ └── index.tsx
│ ├── react-app-env.d.ts
│ ├── reportWebVitals.ts
│ ├── services
│ │ ├── common
│ │ │ ├── base.tsx
│ │ │ ├── constants.tsx
│ │ │ └── http.tsx
│ │ ├── datasources.tsx
│ │ ├── executions.tsx
│ │ ├── integrations.tsx
│ │ ├── issues.tsx
│ │ ├── jobs.tsx
│ │ ├── monitors.tsx
│ │ └── users.tsx
│ └── utils
│ │ └── timestampFormatting.ts
│ ├── tsconfig.json
│ └── yarn.lock
└── tests
├── __init__.py
├── context.py
├── ingestion
├── __init__.py
├── sources
│ ├── __init__.py
│ └── test_base.py
└── transformers
│ ├── monosi
│ ├── test_anomalies.py
│ ├── test_metrics.py
│ ├── test_monitors.py
│ └── test_zscores.py
│ └── test_base.py
├── scheduler
├── __init__.py
└── test_base.py
├── server
├── __init__.py
├── handlers
│ ├── __init__.py
│ ├── test_base.py
│ ├── test_datasources.py
│ ├── test_integrations.py
│ ├── test_metrics.py
│ └── test_monitors.py
├── integrations
│ └── test_slack.py
└── middleware
│ ├── __init__.py
│ ├── test_api.py
│ ├── test_config.py
│ ├── test_db.py
│ ├── test_scheduler.py
│ └── test_ui.py
└── test_basic.py
/.dockerignore:
--------------------------------------------------------------------------------
1 | .dockerignore
2 | .git
3 | .idea
4 | **/build
5 | **/node_modules
6 | Dockerfile.*
7 | docker-compose*.yml
8 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/bug_report.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Bug Report
3 | about: 🐛 Found a bug? Let us know!
4 | ---
5 |
6 | ## Description
7 |
8 | _Please provide a high-level description of what you were trying to accomplish and what went wrong._
9 |
10 | ## Expected behavior
11 |
12 | _Please provide a clear and concise description of what you expected to happen._
13 |
14 | ## Steps to reproduce
15 |
16 | 1. Go to '...'
17 | 2. Click on '....'
18 | 3. See error
19 |
20 | ## Additional context
21 |
22 | _Please provide any additional context or screenshots of the issue._
23 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/feature_request.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Feature Request
3 | about: ✨ Suggest new functionality to the project.
4 | ---
5 |
6 | ## Problem
7 | _A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]_
8 |
9 | ## Solution
10 | _A clear and concise description of what you want to happen._
11 |
12 | ## Requirements
13 | _Any requirements that will be necessary for the feature to work._
14 |
15 | ## Additional Context
16 | _Add any other context, screenshots, or related issues about the feature request here._
17 |
18 | ## Questions, or need help getting started?
19 | Feel free to ask below, or [ping us on Slack](https://monosi.dev/slack)
--------------------------------------------------------------------------------
/.github/PULL_REQUEST_TEMPLATE.md:
--------------------------------------------------------------------------------
1 | ## Pull request type
2 |
3 |
4 |
5 | Please check the type of change your PR introduces:
6 |
7 | - [ ] Bugfix
8 | - [ ] Feature
9 | - [ ] Other (please describe):
10 |
11 | ## Issue
12 |
13 | _Link to the issue (if available)_
14 |
15 | _Please describe the current behavior._
16 |
17 | ## Description of change & new behavior
18 |
19 | _Please describe the changes you made and their new behavior._
20 |
21 | ## Screenshots
22 |
23 | _For frontend updates, please include a screenshot._
24 |
25 | ## Technical Spec/Implementation Notes
26 |
27 | Please check if your PR fulfills the following requirements:
28 |
29 | - [ ] If it's a backend change, tests for the changes have been added and run successfully.
30 | - [ ] If it's a frontend change, Prettier has been run & all tests pass
--------------------------------------------------------------------------------
/.github/workflows/deploy.yml:
--------------------------------------------------------------------------------
1 | name: Deploy to GitHub Pages
2 |
3 | on:
4 | push:
5 | branches:
6 | - master
7 | # Review gh actions docs if you want to further define triggers, paths, etc
8 | # https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#on
9 |
10 | jobs:
11 | deploy:
12 | name: Deploy to GitHub Pages
13 | runs-on: ubuntu-latest
14 | defaults:
15 | run:
16 | working-directory: ./documentation
17 | steps:
18 | - uses: actions/checkout@v2
19 | - uses: actions/setup-node@v3
20 | with:
21 | node-version: 16.x
22 | cache-dependency-path: ./documentation/yarn.lock
23 | cache: yarn
24 |
25 | - name: Install dependencies
26 | run: yarn install --frozen-lockfile
27 | - name: Build website
28 | run: yarn build
29 |
30 | # Popular action to deploy to GitHub Pages:
31 | # Docs: https://github.com/peaceiris/actions-gh-pages#%EF%B8%8F-docusaurus
32 | - name: Deploy to GitHub Pages
33 | uses: peaceiris/actions-gh-pages@v3
34 | with:
35 | github_token: ${{ secrets.GITHUB_TOKEN }}
36 | # Build output to publish to the `gh-pages` branch:
37 | publish_dir: ./documentation/build
38 | # The following lines assign commit authorship to the official
39 | # GH-Actions bot for deploys to `gh-pages` branch:
40 | # https://github.com/actions/checkout/issues/13#issuecomment-724415212
41 | # The GH actions bot is used by default if you didn't specify the two fields.
42 | # You can swap them out with your own user credentials.
43 | user_name: github-actions[bot]
44 | user_email: 41898282+github-actions[bot]@users.noreply.github.com
45 |
--------------------------------------------------------------------------------
/.github/workflows/test-deploy.yml:
--------------------------------------------------------------------------------
1 | name: Test deployment
2 |
3 | on:
4 | pull_request:
5 | branches:
6 | - master
7 | # Review gh actions docs if you want to further define triggers, paths, etc
8 | # https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#on
9 |
10 | jobs:
11 | test-deploy:
12 | name: Test deployment
13 | runs-on: ubuntu-latest
14 | defaults:
15 | run:
16 | working-directory: ./documentation
17 | steps:
18 | - uses: actions/checkout@v2
19 | - uses: actions/setup-node@v3
20 | with:
21 | node-version: 16.x
22 | cache-dependency-path: ./documentation/yarn.lock
23 | cache: yarn
24 |
25 | - name: Install dependencies
26 | run: yarn install --frozen-lockfile
27 | - name: Test build website
28 | run: yarn build
29 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Distribution / packaging
10 | .Python
11 | env/
12 | build/
13 | develop-eggs/
14 | dist/
15 | downloads/
16 | eggs/
17 | .eggs/
18 | lib/
19 | lib64/
20 | parts/
21 | sdist/
22 | var/
23 | *.egg-info/
24 | .installed.cfg
25 | *.egg
26 |
27 | # PyInstaller
28 | # Usually these files are written by a python script from a template
29 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
30 | *.manifest
31 | *.spec
32 |
33 | # Installer logs
34 | pip-log.txt
35 | pip-delete-this-directory.txt
36 |
37 | # Unit test / coverage reports
38 | htmlcov/
39 | .tox/
40 | .coverage
41 | .coverage.*
42 | .cache
43 | nosetests.xml
44 | coverage.xml
45 | *,cover
46 | .hypothesis/
47 |
48 | # Translations
49 | *.mo
50 | *.pot
51 |
52 | # Django stuff:
53 | *.log
54 | local_settings.py
55 |
56 | # Flask stuff:
57 | instance/
58 | .webassets-cache
59 |
60 | # Scrapy stuff:
61 | .scrapy
62 |
63 | # Sphinx documentation
64 | docs/_build/
65 |
66 | # PyBuilder
67 | target/
68 |
69 | # IPython Notebook
70 | .ipynb_checkpoints
71 |
72 | # pyenv
73 | .python-version
74 |
75 | # celery beat schedule file
76 | celerybeat-schedule
77 |
78 | # dotenv
79 | .env
80 |
81 | # virtualenv
82 | .venv/
83 | venv/
84 | ENV/
85 |
86 | # Spyder project settings
87 | .spyderproject
88 |
89 | # Rope project settings
90 | .ropeproject
91 | tags
92 | .DS_Store
93 |
94 | # Node
95 | node_modules/
96 |
97 | # Terraform - Local .terraform directories
98 | **/.terraform/*
99 |
100 | # Terraform - .tfstate files
101 | *.tfstate
102 | *.tfstate.*
103 |
104 |
--------------------------------------------------------------------------------
/MANIFEST.in:
--------------------------------------------------------------------------------
1 | include LICENSE
2 | include requirements.txt
3 |
--------------------------------------------------------------------------------
/Makefile:
--------------------------------------------------------------------------------
1 | compose-build:
2 | cd ./deployment/docker; docker compose build
3 |
4 | compose-up:
5 | cd ./deployment/docker; docker compose up -d
6 |
7 | compose-pull:
8 | cd ./deployment/docker; docker compose pull
9 |
10 | compose:
11 | make compose-pull
12 | make compose-up
13 |
14 | compose-down:
15 | cd ./deployment/docker; docker compose down
16 |
17 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 | Open Source Data Observability Platform
8 |
9 |
10 | Newsletter
11 | |
12 | Docs
13 | |
14 | Website
15 | |
16 | Contact us
17 |
18 |
19 |
20 | Join the Data Reliability Engineering Community
21 |
22 |
23 |
24 | Monosi offers data quality monitoring as a service to root cause data issues with end-to-end observability.
25 |
26 | 🏆 Ensure data quality
27 |
28 | 🚨 Monitor your data & alert on anomalies
29 |
30 | 🎛 Analyze root cause of data issues
31 |
32 | This project is an OSS alternative to proprietary data quality and observability systems. Get started monitoring your data in [less than 10 minutes](https://docs.monosi.dev/docs/user-guide/getting-started).
33 |
34 |
35 | ## Installation
36 |
37 | _Note: Monosi works through Docker, ensure Docker Compose v2 is installed._
38 |
39 | Run the following commands:
40 |
41 | ```
42 | git clone https://github.com/monosidev/monosi.git
43 | cd monosi
44 | make compose
45 | ```
46 |
47 | Navigate to http://localhost:3000 to access the web application once it has started.
48 |
49 | For more instructions on getting started, check out our [documentation](https://docs.monosi.dev/docs/user-guide/getting-started).
50 |
51 | ## Community
52 |
53 | * [Join us on Slack](https://monosi.dev/slack)
54 | * [Newsletter](https://www.monosi.dev/community.html)
55 | * [Contact the development team](mailto:support@monosi.dev)
56 |
57 | ## Overview
58 |
59 | Start the UI through Docker and quickly connect your data sources and alert integrations
60 |
61 |
62 |
63 | Get alerts in slack when Monosi detects anomalies in defined monitors.
64 |
65 |
66 |
67 | ### Own your stack
68 | Avoid integration pitfalls with fragmented, legacy tools by using open source & prevent vendor lock-in by owning your (meta)data.
69 |
70 | ## Contributing
71 |
72 | To start contributing, check out our [Contributing Guide](CONTRIBUTING.md) and [join the Slack](https://monosi.dev/slack).
73 |
74 |
75 |
76 |
--------------------------------------------------------------------------------
/deployment/docker/Dockerfile.api:
--------------------------------------------------------------------------------
1 | FROM python:3.9
2 | WORKDIR /app
3 |
4 | COPY ./requirements.txt ./
5 | COPY ./requirements.api.txt ./
6 | COPY ./src .
7 | RUN rm -rf ./src/ui
8 | RUN pip install -r ./requirements.txt
9 | RUN pip install -r ./requirements.api.txt
10 |
11 | EXPOSE 5000
12 | CMD ["gunicorn", "--bind", "0.0.0.0:5000", "server.wsgi:app"]
13 |
--------------------------------------------------------------------------------
/deployment/docker/Dockerfile.client:
--------------------------------------------------------------------------------
1 | FROM node:16-alpine as build-client
2 |
3 | ENV REACT_APP_API_URL=/v1/api/
4 | ENV GENERATE_SOURCEMAP=false
5 |
6 | WORKDIR /app
7 | COPY ./src/ui .
8 | RUN yarn
9 | RUN yarn build
10 |
11 | FROM nginx:stable-alpine
12 | COPY --from=build-client /app/build /usr/share/nginx/html
13 | COPY deployment/nginx/nginx.default.conf /etc/nginx/conf.d/default.conf
14 |
--------------------------------------------------------------------------------
/deployment/docker/Dockerfile.simple:
--------------------------------------------------------------------------------
1 | FROM node:16-alpine as build-step
2 |
3 | WORKDIR /app
4 | ENV PATH /app/node_modules/.bin:$PATH
5 | COPY ./src/ui/tsconfig.json ./src/ui/yarn.lock ./src/ui/package.json ./
6 | COPY ./src/ui/src ./src
7 | COPY ./src/ui/public ./public
8 | RUN yarn
9 | RUN yarn build
10 |
11 | # Build step #2: build the API with the client as static files
12 | FROM --platform=linux/x86-64 python:3.9
13 | WORKDIR /app
14 |
15 | COPY ./requirements.api.txt ./requirements.txt
16 | COPY ./src ./
17 | RUN pip install -r ./requirements.txt
18 | ENV FLASK_ENV production
19 | ENV SERVE_UI 1
20 |
21 | RUN rm -rf ./ui
22 | RUN mkdir ./ui
23 | COPY --from=build-step /app/build ./ui/build
24 |
25 | EXPOSE 3000
26 | WORKDIR /app
27 | CMD ["gunicorn", "-b", ":3000", "server.wsgi:app"]
28 |
--------------------------------------------------------------------------------
/deployment/docker/docker-compose.yml:
--------------------------------------------------------------------------------
1 | volumes:
2 | data:
3 | name: data
4 | services:
5 | db:
6 | image: postgres:14.1-alpine
7 | restart: always
8 | environment:
9 | - POSTGRES_USER=postgres
10 | - POSTGRES_PASSWORD=postgres
11 | - POSTGRES_DB=postgres
12 | ports:
13 | - '5432:5432'
14 | volumes:
15 | - data:/var/lib/postgresql/data/
16 | healthcheck:
17 | test: ["CMD-SHELL", "pg_isready -U postgres"]
18 | interval: 5s
19 | timeout: 5s
20 | retries: 5
21 | monosi-server:
22 | build:
23 | context: ../../
24 | dockerfile: deployment/docker/Dockerfile.api
25 | image: monosi/monosi-server:latest
26 | depends_on:
27 | db:
28 | condition: service_healthy
29 | environment:
30 | DB_USER: postgres
31 | DB_PASSWORD: postgres
32 | DB_HOST: db
33 | DB_PORT: 5432
34 | DB_DATABASE: postgres
35 | DB_SCHEMA: public
36 | links:
37 | - db
38 | monosi-client:
39 | build:
40 | context: ../../
41 | dockerfile: deployment/docker/Dockerfile.client
42 | image: monosi/monosi-client:latest
43 | depends_on:
44 | - monosi-server
45 | environment:
46 | - REACT_APP_API_URL=/v1/api/
47 | ports:
48 | - "3000:80"
49 |
--------------------------------------------------------------------------------
/deployment/nginx/nginx.default.conf:
--------------------------------------------------------------------------------
1 | server {
2 | listen 80;
3 | root /usr/share/nginx/html;
4 | index index.html;
5 |
6 | location / {
7 | try_files $uri $uri/ /index.html =404;
8 | add_header Cache-Control "no-cache";
9 | }
10 |
11 | location /v1/api {
12 | proxy_pass http://monosi-server:5000;
13 | }
14 | }
15 |
--------------------------------------------------------------------------------
/deployment/terraform/.terraform.lock.hcl:
--------------------------------------------------------------------------------
1 | # This file is maintained automatically by "terraform init".
2 | # Manual edits may be lost in future updates.
3 |
4 | provider "registry.terraform.io/hashicorp/aws" {
5 | version = "4.6.0"
6 | constraints = ">= 3.25.0"
7 | hashes = [
8 | "h1:OoqtFHduPuYD6R4cAvxJJ6cLQi0iRrmxB/C5XXXujS8=",
9 | "h1:mO4WiTtBisbUQqiR4V6c4h5moZdki1vyySOMVsRFrWY=",
10 | "zh:43d00e886384dc48ca3e2949327af0dba3eb3052104367456b47882a8456ba91",
11 | "zh:7d586c26021fd3ea9d377f8024a5bb3f8d877a84792d39f2e2e96a3ec0848480",
12 | "zh:84a01d4060916daf2973eaaabab0cadbb97fa850b74458b0bce98565268e37c1",
13 | "zh:8a65dbf2ec7c433bf1c751a4f0ec332fd1bddd14e8aab64de4ee01890223f3a0",
14 | "zh:92582a5d81f2cfecb2832895091f58eec5a978cdf4982ef8d7b9d88e74b265fe",
15 | "zh:98c61fc2bf6a3af8b6ac8233860fefe8620e382a5fd25040f58297485ea0422a",
16 | "zh:9b12af85486a96aedd8d7984b0ff811a4b42e3d88dad1a3fb4c0b580d04fa425",
17 | "zh:a32ae8bb7397b4fd4eb4f5b21a119460bc74ec4be2baf8e0cc543c6945a74415",
18 | "zh:ae38e3d167cf69c5b8734eb581044c8621d70ed0df8b0433d5dadb6b81741483",
19 | "zh:d4686842c9cb4a73167c73b4aa6145729237c14cb520c3eb89b22c0317923525",
20 | "zh:dad0005f2f451512098fd1bdb838934a05267f6f170c1b652e7f12f02b595476",
21 | "zh:f64b0387a75838776f6edbc00ad01cda323c200bd6eaafa15acc92b9cdbd9e3a",
22 | ]
23 | }
24 |
--------------------------------------------------------------------------------
/deployment/terraform/outputs.tf:
--------------------------------------------------------------------------------
1 | output "instance" {
2 | value = aws_instance.monosi.*.private_ip
3 | description = "PrivateIP address details"
4 | }
--------------------------------------------------------------------------------
/deployment/terraform/terraform.tfvars:
--------------------------------------------------------------------------------
1 | # Use an SSH Key that has been created and you have access to.
2 | ssh_key_name=""
3 |
4 | # Use a VPC ID that has been created and you would like to deploy into.
5 | vpc_id=""
6 |
7 | # Use a subnet ID in the above VPC that has been created and you would like to deploy into.
8 | subnet_id=""
9 |
--------------------------------------------------------------------------------
/deployment/terraform/user_data.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | sudo yum update -y
4 | sudo yum install -y docker
5 | sudo systemctl start docker
6 | sudo usermod -a -G docker $USER
7 |
8 | mkdir -p ~/.docker/cli-plugins/
9 | curl -SL https://github.com/docker/compose/releases/download/v2.2.3/docker-compose-linux-x86_64 -o ~/.docker/cli-plugins/docker-compose
10 | chmod +x ~/.docker/cli-plugins/docker-compose
11 | sudo mkdir -p /usr/local/lib/docker/cli-plugins
12 | sudo mv ~/.docker/cli-plugins/docker-compose /usr/local/lib/docker/cli-plugins/docker-compose
13 |
14 | sudo yum install -y git
15 | git clone https://github.com/monosidev/monosi
16 |
17 | cd monosi
18 | sudo make compose
19 |
20 |
--------------------------------------------------------------------------------
/deployment/terraform/variables.tf:
--------------------------------------------------------------------------------
1 | variable "prefix" {
2 | description = "A name which will be pre-pended to the resources created"
3 | type = string
4 | default = "mSi-app"
5 | }
6 |
7 | variable "vpc_id" {
8 | description = "The VPC to deploy the collector within"
9 | type = string
10 | }
11 |
12 | variable "subnet_id" {
13 | description = "The subnet to deploy the collector within"
14 | type = string
15 | }
16 |
17 | variable "instance_type" {
18 | description = "The instance type to use"
19 | type = string
20 | default = "t3.medium"
21 | }
22 |
23 | variable "ssh_key_name" {
24 | description = "The name of the SSH key-pair to attach to all EC2 nodes deployed"
25 | type = string
26 | }
27 |
28 | variable "ssh_ip_allowlist" {
29 | description = "The list of CIDR ranges to allow SSH traffic from"
30 | type = list(any)
31 | default = ["0.0.0.0/0"]
32 | }
33 |
--------------------------------------------------------------------------------
/deployment/terraform/versions.tf:
--------------------------------------------------------------------------------
1 | terraform {
2 | required_version = ">= 0.15"
3 |
4 | required_providers {
5 | aws = {
6 | source = "hashicorp/aws"
7 | version = ">= 3.25.0"
8 | }
9 | }
10 | }
--------------------------------------------------------------------------------
/documentation/.eslintrc.js:
--------------------------------------------------------------------------------
1 | module.exports = {
2 | env: {
3 | browser: true,
4 | es2021: true,
5 | node: true,
6 | },
7 | extends: ["eslint:recommended", "plugin:react/recommended"],
8 | parserOptions: {
9 | ecmaFeatures: {
10 | jsx: true,
11 | },
12 | ecmaVersion: 12,
13 | sourceType: "module",
14 | },
15 | plugins: ["react"],
16 | rules: {
17 | "react/prop-types": [0, {}],
18 | "react/no-unescaped-entities": [0, {}],
19 | "no-unused-vars": [1, {}],
20 | // "semi": ["error", "always"],
21 | // "quotes": ["error", "double"]
22 | },
23 | };
24 |
--------------------------------------------------------------------------------
/documentation/.github/CODEOWNERS:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/monosidev/monosi/a88b689fc74010b10dbabb32f4b2bdeae865f4d5/documentation/.github/CODEOWNERS
--------------------------------------------------------------------------------
/documentation/.github/pull_request_template.md:
--------------------------------------------------------------------------------
1 | ## What does this PR do?
2 |
3 | ## Notes to reviewers
4 |
5 |
6 |
--------------------------------------------------------------------------------
/documentation/.github/workflows/autoformat.yml:
--------------------------------------------------------------------------------
1 | name: Continuous Integration
2 |
3 | # This action works with pull requests and pushes
4 | on:
5 | pull_request:
6 | push:
7 | branches:
8 | - master
9 |
10 | jobs:
11 | prettier:
12 | runs-on: ubuntu-latest
13 |
14 | steps:
15 | - name: Checkout on push
16 | # github object: https://docs.github.com/en/actions/learn-github-actions/contexts#github-context
17 | # event names: https://docs.github.com/en/actions/learn-github-actions/events-that-trigger-workflows
18 | if: "github.event_name == 'push'"
19 | uses: actions/checkout@v2
20 |
21 | - name: Checkout on PR
22 | if: "github.event_name == 'pull_request'"
23 | uses: actions/checkout@v2
24 | with:
25 | # Make sure the actual branch is checked out when running on pull requests
26 | ref: ${{ github.head_ref }}
27 |
28 | - name: Prettify code
29 | uses: creyD/prettier_action@v3.3
30 | with:
31 | # This part is also where you can pass other options, for example:
32 | prettier_options: --write **/*.{js,md}
33 | env:
34 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
35 |
--------------------------------------------------------------------------------
/documentation/.gitignore:
--------------------------------------------------------------------------------
1 | # Dependencies
2 | /node_modules
3 |
4 | # Production
5 | /build
6 |
7 | # Generated files
8 | .docusaurus
9 | .cache-loader
10 |
11 | # Misc
12 | .DS_Store
13 | .idea
14 | .env.local
15 | .env.development.local
16 | .env.test.local
17 | .env.production.local
18 |
19 | npm-debug.log*
20 | yarn-debug.log*
21 | yarn-error.log*
22 |
23 | # Local Netlify folder
24 | .netlify
25 |
26 | package-lock.json
--------------------------------------------------------------------------------
/documentation/.gitmodules:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/monosidev/monosi/a88b689fc74010b10dbabb32f4b2bdeae865f4d5/documentation/.gitmodules
--------------------------------------------------------------------------------
/documentation/.nvmrc:
--------------------------------------------------------------------------------
1 | 16
2 |
--------------------------------------------------------------------------------
/documentation/.prettierignore:
--------------------------------------------------------------------------------
1 | # https://prettier.io/docs/en/ignore.html#ignoring-files-prettierignore
2 | # Ignore artifacts:
3 | blog
4 | .docusaurus
5 | .github
6 |
--------------------------------------------------------------------------------
/documentation/.prettierrc.json:
--------------------------------------------------------------------------------
1 | {
2 | "arrowParens": "always",
3 | "bracketSpacing": false,
4 | "embeddedLanguageFormatting": "auto",
5 | "htmlWhitespaceSensitivity": "css",
6 | "insertPragma": false,
7 | "jsxBracketSameLine": false,
8 | "jsxSingleQuote": false,
9 | "printWidth": 80,
10 | "proseWrap": "preserve",
11 | "quoteProps": "as-needed",
12 | "requirePragma": false,
13 | "semi": true,
14 | "singleQuote": false,
15 | "tabWidth": 2,
16 | "trailingComma": "es5",
17 | "useTabs": false,
18 | "overrides": [
19 | {
20 | "files": ["docs/typescript/**/*.md"],
21 | "options": {
22 | "bracketSpacing": true,
23 | "singleQuote": true
24 | }
25 | }
26 | ]
27 | }
28 |
--------------------------------------------------------------------------------
/documentation/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2022 Vocable Inc. dba Monosi
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
23 |
24 | The original theme and structure is attributed to temporal.io as stated below.
25 |
26 | MIT License
27 |
28 | Copyright (c) 2021 temporal.io
29 |
30 | Permission is hereby granted, free of charge, to any person obtaining a copy
31 | of this software and associated documentation files (the "Software"), to deal
32 | in the Software without restriction, including without limitation the rights
33 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
34 | copies of the Software, and to permit persons to whom the Software is
35 | furnished to do so, subject to the following conditions:
36 |
37 | The above copyright notice and this permission notice shall be included in all
38 | copies or substantial portions of the Software.
39 |
40 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
41 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
42 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
43 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
44 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
45 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
46 | SOFTWARE.
47 |
--------------------------------------------------------------------------------
/documentation/README.md:
--------------------------------------------------------------------------------
1 | # Monosi documentation
2 |
3 | The documentation in this repository is published to https://docs.monosi.dev.
4 |
5 | The source of this documentation is available to the public for individual and commercial use.
6 |
7 | Please read the [CONTRIBUTING](/CONTRIBUTING.md) guide before you submit any pull requests.
8 |
9 | Maintainers and contributors to this project are expected to conduct themselves in a respectful way.
10 | See the [CNCF Community Code of Conduct](https://github.com/cncf/foundation/blob/master/code-of-conduct.md) as a reference.
11 |
--------------------------------------------------------------------------------
/documentation/babel.config.js:
--------------------------------------------------------------------------------
1 | module.exports = {
2 | presets: [require.resolve("@docusaurus/core/lib/babel/preset")],
3 | };
4 |
--------------------------------------------------------------------------------
/documentation/changelog/2022-04-01-v010-release.md:
--------------------------------------------------------------------------------
1 | ---
2 | tags:
3 | - releases
4 | posted_on_: 2022-04-01T00:00:00Z
5 | slug: v010-release
6 | title: "v0.1.0 Release"
7 | author: Kevin Unkrich
8 | author_title: Co-founder
9 | author_image_url: https://avatars.githubusercontent.com/u/15347345?v=4
10 | release_version: V0.1.0
11 | ---
12 |
13 | Monosi v0.1.0 Release
14 |
15 |
16 |
17 | ---
18 |
19 | Monosi's v0.1.0 release brings a new UI, ease of use & deployment, and much more.
20 |
21 | ### Web Application & UI
22 |
23 | In order to use the Monosi application, a web frontend and server are provided to add data sources, receive alerts, and view data quality analysis of the monitors that are running.
24 |
25 |
26 |
27 | ## Automatic Scheduler
28 |
29 | Built-in scheduling is provided out of the box with Monosi so that you can set it and forget it - until you receive an alert - or choose to view regularly up-to-date data at any time.
30 |
31 | ## Profiler
32 |
33 | Built-in profiling allows for the automatic creation of data monitors and quality checks when a data source is added.
34 |
35 | ### Deployment
36 |
37 | Deploying has been simplified with Docker and Docker Compose. All the necessary dependencies for Monosi to run are managed through Docker Compose, making it easy to start with just 3 commands:
38 |
39 | ```
40 | git clone https://github.com/monosidev/monosi.git
41 | cd monosi
42 | make compose
43 | ```
44 |
45 | ## Integration Support
46 |
47 | Support for Snowflake, PostgreSQL, and Redshift has been added.
48 |
49 | You can also receive alerts via webhooks and Slack.
50 |
51 | ### Documentation
52 |
53 | Documentation has been added to the project and is available at https://docs.monosi.dev
54 |
55 |
56 | ### Testing & Bug fixes
57 |
58 | More tests have been added to ensure the reliability of the code. Furthermore, bug fixes and code changes have been made to make Monosi increasingly more stable.
59 |
--------------------------------------------------------------------------------
/documentation/docs/components/CenteredImage.js:
--------------------------------------------------------------------------------
1 | import React from "react";
2 | import {v4 as uuidv4} from "uuid";
3 |
4 | export default function CenteredImage({imagePath, imageSize, title, legend}) {
5 | const legendList = [];
6 | if (legend != undefined) {
7 | for (const item of legend) {
8 | legendList.push({
9 | id: uuidv4(),
10 | symbol: item[0],
11 | description: item[1],
12 | });
13 | }
14 | }
15 | let imageClass = "";
16 | switch (imageSize) {
17 | case "25":
18 | imageClass = "docs-centered-image-size-25";
19 | break;
20 | case "50":
21 | imageClass = "docs-centered-image-size-50";
22 | break;
23 | case "75":
24 | imageClass = "docs-centered-image-size-75";
25 | break;
26 | default:
27 | imageClass = "docs-centered-image-size-100";
28 | }
29 | return (
30 |
31 |
34 |
35 |
36 |
37 | {legendList.length > 0 && (
38 |
39 |
40 | {legendList.map(({id, symbol, description}) => (
41 |
42 | {symbol} ={" "}
43 | {description}
44 |
45 | ))}
46 |
47 |
48 | )}
49 |
50 | );
51 | }
52 |
--------------------------------------------------------------------------------
/documentation/docs/components/DetermineHeader.js:
--------------------------------------------------------------------------------
1 | import React from "react";
2 | import clsx from "clsx";
3 | import Link from "@docusaurus/Link";
4 |
5 | export default function DetermineHeader({hLevel, hText}) {
6 | switch (hLevel) {
7 | case "##":
8 | return {hText} ;
9 | break;
10 | case "###":
11 | return {hText} ;
12 | break;
13 | case "####":
14 | return {hText} ;
15 | break;
16 | case "#####":
17 | return {hText} ;
18 | break;
19 | default:
20 | return null;
21 | }
22 | }
23 |
--------------------------------------------------------------------------------
/documentation/docs/components/RowOfImages.js:
--------------------------------------------------------------------------------
1 | import React from "react";
2 |
3 | export default function RowOfImages({imagePath1, imagePath2}) {
4 | return (
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 | );
16 | }
17 |
--------------------------------------------------------------------------------
/documentation/docs/contributing/local-development.md:
--------------------------------------------------------------------------------
1 | ---
2 | id: local-development
3 | title: Local Development Setup
4 | sidebar_label: Local Development
5 | ---
6 |
7 | The directions below outline the process for setting up Monosi for local development. To get started:
8 |
9 | 1. Create a fork of the Monosi repository to your personal GitHub account
10 | 2. Clone the fork URL that you have created to your local machine (`git clone `)
11 |
12 | Monosi can be installed either through docker or through a local dependency setup.
13 | ## Running with Docker
14 |
15 | 1. Ensure that you have installed docker and it is running on your machine
16 | 2. In a terminal, navigate to the cloned repository
17 | 3. Run `make compose-build` from the base of the directory to build a local instance of Monosi
18 | 4. Run `make compose-up` from the base of the directory to run the local build
19 | 5. Navigate to `http://localhost:3000` and you will see the Monosi UI
20 |
21 | For any changes you make to the code locally, you can test them by re-running steps 3 & 4.
22 | ## Running without Docker
23 |
24 | 1. Ensure that you have Python3, Node, and Yarn on your machine
25 | 2. In a terminal, navigate to the cloned repository
26 |
27 | For the server:
28 | 1. Create a Python virtualenv by running `virtualenv .venv`
29 |
30 | 2. Activate the virtualenv by running `source .venv/bin/activate`
31 | 3. Install the Monosi dependencies by running `python3 setup.py install`
32 | 4. Navigate to the server directory `cd src/server`
33 | 5. Run `flask run`
34 | 6. You should see the server startup and become accessible at `http://localhost:5000`
35 |
36 | For the client:
37 | 1. Navigate to the ui directory `cd src/ui`
38 | 2. Run `yarn && yarn start`
39 | 3. The React application should start on `localhost:3000`
40 |
41 |
42 | If there are any problems with the setup, please send us a message in the [Slack](https://monosi.dev/slack) or over [email](mailto:support@monosi.dev).
43 |
--------------------------------------------------------------------------------
/documentation/docs/guides/quick-install.md:
--------------------------------------------------------------------------------
1 | ---
2 | id: quick-install
3 | title: Quick Install
4 | sidebar_label: Quick Install
5 | ---
6 |
7 |
8 | ## Overview
9 |
10 | There are three ways to quickly install and run the Monosi application:
11 |
12 | - [Docker](#docker): Using `docker-compose` makes it easy to develop Workflows locally.
13 | - (Coming Soon) [Helm Charts](#helm-charts): Deploying the Server to [Kubernetes](https://kubernetes.io/) is an easy way to test the system and develop Workflows.
14 |
15 | _Note: These methods are not ready for deployment in a full production environment._
16 |
17 | ## Docker
18 |
19 | ### Prerequisites
20 |
21 | 1. [Install Docker](https://docs.docker.com/engine/install)
22 | 2. [Install docker-compose](https://docs.docker.com/compose/install)
23 |
24 | ### Run Monosi
25 |
26 | The following steps will run a local instance of Monosi using the default configuration:
27 |
28 | 1. Clone the [monosidev/monosi](https://github.com/monosidev/monosi) repository.
29 | 2. Change directory into the root of the project.
30 | 3. Run the `make compose` command.
31 |
32 | ```bash
33 | git clone https://github.com/monosidev/monosi.git
34 | cd monosi
35 | make compose
36 | ```
37 |
38 | After the Monosi application has started you can view the Monosi Web interface in your browser: [localhost:3000](http://localhost:3000/)
39 |
--------------------------------------------------------------------------------
/documentation/docs/integrations/bigquery.md:
--------------------------------------------------------------------------------
1 | ---
2 | id: bigquery
3 | title: BigQuery Integration
4 | sidebar_label: BigQuery
5 | ---
6 |
7 | Monosi supports BigQuery data warehouse connections. Currently, only authentication through a `service_account.json` file is supported. If you do not have a service account, please follow the instructions to create one [here](https://cloud.google.com/docs/authentication/getting-started).
8 |
9 | If you require other forms of authentication, please [open an issue](https://github.com/monosidev/monosi/issues/new?assignees=&labels=&template=feature_request.md).
10 |
11 |
12 |
13 | ## Configuration Details
14 |
15 | | Key | Description | Required |
16 | | --------------- | -------------------------------------------------------- | -------- |
17 | | project | The name of the BigQuery project you want to connect | Yes |
18 | | dataset | The name of the dataset you want to connect | Yes |
19 | | service_account | The `service_account.json` file associated with your IAM | Yes |
20 |
21 |
--------------------------------------------------------------------------------
/documentation/docs/integrations/email.md:
--------------------------------------------------------------------------------
1 | ---
2 | id: email
3 | title: Email Integration
4 | sidebar_label: Email
5 | ---
6 |
7 | More information on how to integrate with email is coming soon. If you’re interested in this option, please comment on or open a [GitHub Issue](https://github.com/monosidev/monosi/issues).
8 |
--------------------------------------------------------------------------------
/documentation/docs/integrations/postgresql.md:
--------------------------------------------------------------------------------
1 | ---
2 | id: postgresql
3 | title: PostgreSQL Integration
4 | sidebar_label: PostgreSQL
5 | ---
6 |
7 | ## Configuration Details
8 |
9 | | Key | Description | Required |
10 | |-----------|---------------------------------------------------------|----------|
11 | | user | The username with which to connect monosi to PostgreSQL | Yes |
12 | | password | The password with which to connect monosi to PostgreSQL | Yes |
13 | | host | The URL of the database host | Yes |
14 | | database | The name of the database to connect to in PostgreSQL | Yes |
15 | | schema | The name of the schema to connect to in PostgreSQL | Yes |
16 |
17 |
--------------------------------------------------------------------------------
/documentation/docs/integrations/redshift.md:
--------------------------------------------------------------------------------
1 | ---
2 | id: redshift
3 | title: Redshift Integration
4 | sidebar_label: Redshift
5 | ---
6 |
7 | ## Configuration Details
8 |
9 | The configuration details are the same as the PostgreSQL source
10 |
11 | | Key | Description | Required |
12 | |-----------|---------------------------------------------------------|----------|
13 | | user | The username with which to connect monosi to Redshift | Yes |
14 | | password | The password with which to connect monosi to Redshift | Yes |
15 | | host | The URL of the database host | Yes |
16 | | database | The name of the database to connect to in Redshift | Yes |
17 | | schema | The name of the schema to connect to in Redshift | Yes |
18 |
--------------------------------------------------------------------------------
/documentation/docs/integrations/slack.md:
--------------------------------------------------------------------------------
1 | ---
2 | id: slack
3 | title: Slack Integration
4 | sidebar_label: Slack
5 | ---
6 |
7 | Monosi integrates directly with Slack to alert you in real time to anomalies. It does so through the Incoming Webhooks feature of Slack.
8 |
9 | 1. Follow the steps over at Slack for creating an Incoming Webhook. Note that you may need permissions for your workspace in Slack to be able to create a webhook.
10 | 2. Add the webhook as a Slack integration in Monosi
11 |
12 | Navigate to the Integrations page in Monosi, click the “Create Integration” button.
13 |
14 |
15 |
16 | Provide a name for this integration as well as the incoming webhook URL and hit submit.
17 |
18 |
19 |
20 | Once saved, anomalies will be sent to this Slack integration moving forward until deleted.
21 |
--------------------------------------------------------------------------------
/documentation/docs/integrations/webhooks.md:
--------------------------------------------------------------------------------
1 | ---
2 | id: webhooks
3 | title: Webhooks Integration
4 | sidebar_label: Webhooks
5 | ---
6 |
7 | Monosi supports sending alerts to custom endpoints through webhooks.
8 |
9 | 1. Navigate to the Integrations subpage in Settings
10 | 2. Click on the Create Integration button and select the Webhook integration in the drawer
11 | 3. Provide a name for this integration as well as the incoming webhook URL and hit submit.
12 |
13 |
14 |
15 | Once saved, anomalies will be sent to this Webhook integration moving forward until deleted.
16 |
17 | The payload format of the Webhook is detailed below:
18 |
19 | ```
20 | {
21 | alerts: [
22 | {
23 | message: 'Monosi - Anomaly Detected',
24 | type: 'table_health',
25 | info: {
26 | table_name: ""
27 | schema: ""
28 | database: ""
29 | column_name: ""
30 | metric: ""
31 | value: ""
32 | time_window_start: ""
33 | time_window_end: ""
34 | interval_length_sec: ""
35 | id: ""
36 | created_at: ""
37 | }
38 | }
39 | ...
40 | ]
41 | }
42 | ```
43 |
--------------------------------------------------------------------------------
/documentation/docs/msi/bootstrap/index.md:
--------------------------------------------------------------------------------
1 | ---
2 | id: index
3 | title: bootstrap
4 | tags:
5 | - msi
6 | ---
7 |
--------------------------------------------------------------------------------
/documentation/docs/msi/collectors-yml.md:
--------------------------------------------------------------------------------
1 | ---
2 | id: collectorsyml
3 | title: collectors.yml
4 | sidebar_label: collectors.yml
5 | tags:
6 | - msi
7 | ---
8 |
9 |
10 |
--------------------------------------------------------------------------------
/documentation/docs/msi/configuration.md:
--------------------------------------------------------------------------------
1 | ---
2 | id: configuration
3 | title: Configuring msi
4 | sidebar_label: Configuration
5 | tags:
6 | - msi
7 | ---
8 |
--------------------------------------------------------------------------------
/documentation/docs/msi/destinations.md:
--------------------------------------------------------------------------------
1 | ---
2 | id: destinations
3 | title: Destinations
4 | sidebar_label: Destinations
5 | tags:
6 | - msi
7 | ---
8 |
9 | ## Data Lake
10 |
11 |
12 | ## Warehouse
13 |
14 |
15 | ## Slack
16 |
17 |
18 | ## Webhook
19 |
20 |
21 |
--------------------------------------------------------------------------------
/documentation/docs/msi/environment-variables.md:
--------------------------------------------------------------------------------
1 | ---
2 | id: environment-variables
3 | title: Environment variables for msi
4 | sidebar_label: Environment variables
5 | tags:
6 | - msi
7 | ---
8 |
9 | Setting environment variables for repeated parameters can shorten msi commands.
10 |
11 | ### MONOSI_WORKSPACE_PATH
12 |
13 | Specify a path for the Monosi workspace to use
14 |
--------------------------------------------------------------------------------
/documentation/docs/msi/functions.md:
--------------------------------------------------------------------------------
1 | ---
2 | id: functions
3 | title: Functions
4 | sidebar_label: Functions
5 | tags:
6 | - msi
7 | ---
8 |
9 | ## Anomaly Detection
10 |
11 | ## Schema Changes
12 |
13 | ## Lineage
14 |
15 |
16 |
--------------------------------------------------------------------------------
/documentation/docs/msi/how-to-install-msi.md:
--------------------------------------------------------------------------------
1 | ---
2 | id: how-to-install-msi
3 | title: How to install msi
4 | sidebar_label: Install
5 | description: You can install msi in three ways, described in this topic.
6 | tags:
7 | - msi
8 | ---
9 |
10 | You can install [msi](/docs/msi) in three ways.
11 |
12 | - Install locally by using [Homebrew](https://brew.sh/): `brew install msi`
13 | - Install locally by using [Python's Package Manager (pip)](https://pypi.org/): `pip install msi`
14 | - Build it locally:
15 | 1. Clone the [Monosi repo](https://github.com/monosidev/monosi).
16 | 1. Run `make msi`.
17 | 1. Copy the `msi` executable to any directory that appears in the `PATH` environment variable; for example, `/usr/bin/`.
18 |
--------------------------------------------------------------------------------
/documentation/docs/msi/how-to-use-msi.md:
--------------------------------------------------------------------------------
1 | ---
2 | id: how-to-use-msi
3 | title: How to use msi
4 | sidebar_label: How to use msi
5 | ---
6 |
7 | :::note
8 |
9 | This page is temporary. We plan to move the information to other pages.
10 |
11 | :::
12 |
13 |
--------------------------------------------------------------------------------
/documentation/docs/msi/index.md:
--------------------------------------------------------------------------------
1 | ---
2 | id: index
3 | title: What is msi?
4 | description: msi is a command-line tool that you can use to pipe, manipulate, and analyze metadata
5 | tags:
6 | - operation-guide
7 | - msi
8 | ---
9 |
10 | The Monosi CLI (msi) is a command-line tool that you can use to pipe, manipulate, and analyze metadata.
11 | It allows users to define the metadata they would like to monitor as code, create a git-based project for data quality checks, and load that data into your chosen data source for any purpose - analysis, lineage, observability, and more.
12 |
13 | - [How to install msi](/docs/msi/how-to-install-msi)
14 | - [Your first msi project](/docs/msi/project)
15 | - [How to use msi](/docs/msi/how-to-use-msi)
16 |
17 | ## msi commands
18 |
19 | - [`msi init`](/docs/msi/init)
20 | - [`msi bootstrap`](/docs/msi/bootstrap)
21 | - [`msi test-connection`](/docs/msi/test-connection)
22 | - [`msi run`](/docs/msi/run)
23 |
24 |
25 | ## Global modifiers
26 |
27 | You can supply the values for many of these modifiers by setting [environment variables](/docs/msi/environment-variables) instead of including the modifiers in a msi command.
28 |
29 | ### `--project-path`
30 |
31 | Specify the path to the project file and the monitors defined as code.
32 |
33 | ### `--workspace-path`
34 |
35 | Specify the path to the `workspaces.yml` file.
36 |
37 | ### `--source-name`
38 |
39 | Specify the name of the source for the workspace used.
40 |
41 | ### `--workspace-name`
42 |
43 | Specify the name of the workspace for the file in the workspace path.
44 |
45 | ### `--help`
46 |
47 | Display help for msi in the CLI.
48 |
49 | ### `--version`
50 |
51 | Display the version of msi in the CLI.
52 |
--------------------------------------------------------------------------------
/documentation/docs/msi/init/index.md:
--------------------------------------------------------------------------------
1 | ---
2 | id: index
3 | title: init
4 | tags:
5 | - msi
6 | ---
7 |
--------------------------------------------------------------------------------
/documentation/docs/msi/inputs-yml.md:
--------------------------------------------------------------------------------
1 | ---
2 | id: inputsyml
3 | title: inputs.yml
4 | sidebar_label: inputs.yml
5 | tags:
6 | - msi
7 | ---
8 |
9 | https://docs.cribl.io/logstream/2.4/inputsyml
10 |
--------------------------------------------------------------------------------
/documentation/docs/msi/msi-project-yml.md:
--------------------------------------------------------------------------------
1 | ---
2 | id: msiprojectyml
3 | title: msi.yml
4 | sidebar_label: msi.yml
5 | tags:
6 | - msi
7 | ---
8 |
9 |
--------------------------------------------------------------------------------
/documentation/docs/msi/normalization.md:
--------------------------------------------------------------------------------
1 | ---
2 | id: normalization
3 | title: Normalization
4 | sidebar_label: Normalization
5 | tags:
6 | - msi
7 | ---
8 |
--------------------------------------------------------------------------------
/documentation/docs/msi/outputs-yml.md:
--------------------------------------------------------------------------------
1 | ---
2 | id: outputsyml
3 | title: outputs.yml
4 | sidebar_label: outputs.yml
5 | tags:
6 | - msi
7 | ---
8 |
9 |
--------------------------------------------------------------------------------
/documentation/docs/msi/routes-yml.md:
--------------------------------------------------------------------------------
1 | ---
2 | id: routesyml
3 | title: routes.yml
4 | sidebar_label: routes.yml
5 | tags:
6 | - msi
7 | ---
8 |
9 |
--------------------------------------------------------------------------------
/documentation/docs/msi/run/index.md:
--------------------------------------------------------------------------------
1 | ---
2 | id: index
3 | title: run
4 | tags:
5 | - msi
6 | ---
7 |
--------------------------------------------------------------------------------
/documentation/docs/msi/sources.md:
--------------------------------------------------------------------------------
1 | ---
2 | id: sources
3 | title: Sources
4 | sidebar_label: Sources
5 | tags:
6 | - msi
7 | ---
8 |
9 | # Sources
10 |
11 |
12 | ## Metrics
13 |
14 | | Field | Type | Required |
15 | |---------------------|------|----------|
16 | | id | | |
17 | | table_name | | |
18 | | schema | | |
19 | | database | | |
20 | | column_name | | |
21 | | metric | | |
22 | | value | | |
23 | | time_window_start | | |
24 | | time_window_end | | |
25 | | interval_length_sec | | |
26 |
27 | ## Schema
28 |
29 | ### Table
30 | ```
31 | SELECT
32 | TABLE_CATALOG,
33 | TABLE_SCHEMA,
34 | TABLE_NAME,
35 | TABLE_OWNER,
36 | TABLE_TYPE,
37 | IS_TRANSIENT,
38 | RETENTION_TIME,
39 | AUTO_CLUSTERING_ON,
40 | COMMENT
41 | FROM "ANALYTICS".information_schema.tables
42 | WHERE
43 | table_schema NOT IN ('INFORMATION_SCHEMA')
44 | AND TABLE_TYPE NOT IN ('VIEW', 'EXTERNAL TABLE')
45 | ORDER BY TABLE_CATALOG, TABLE_SCHEMA, TABLE_NAME;
46 | ```
47 |
48 | ### Column
49 | ```
50 | SELECT
51 | '"' || TABLE_CATALOG || '"."' || TABLE_SCHEMA || '"."' || TABLE_NAME || '"' AS FULL_NAME,
52 | COLUMN_NAME,
53 | DATA_TYPE,
54 | COLUMN_DEFAULT,
55 | IS_NULLABLE,
56 | COMMENT,
57 | CHARACTER_MAXIMUM_LENGTH,
58 | NUMERIC_PRECISION,
59 | NUMERIC_SCALE,
60 | DATETIME_PRECISION
61 | FROM "ANALYTICS".information_schema.columns;
62 | ```
63 |
64 | ## Logs
65 |
66 | ### Query History
67 |
68 | ### Copy History
69 |
70 |
71 |
--------------------------------------------------------------------------------
/documentation/docs/msi/state.md:
--------------------------------------------------------------------------------
1 | ---
2 | id: state
3 | title: Centralized State
4 | sidebar_label: Centralized State
5 | tags:
6 | - msi
7 | ---
8 |
9 |
--------------------------------------------------------------------------------
/documentation/docs/msi/test-connection/index.md:
--------------------------------------------------------------------------------
1 | ---
2 | id: index
3 | title: test-connection
4 | tags:
5 | - msi
6 | ---
7 |
--------------------------------------------------------------------------------
/documentation/docs/supportfaq/usage-data-preferences.md:
--------------------------------------------------------------------------------
1 | ---
2 | id: usage-data-preferences
3 | title: Usage Data Preferences
4 | sidebar_label: Usage Data Preferences
5 | ---
6 |
7 | To help the team understand how people are interacting with Monosi, we collect a few usage metrics.
8 |
9 | `database_connection_success` - helps us determine if a database connection was established successfully
10 |
11 | `database_connection_fail` - helps us determine if there was an error with a database connection
12 |
13 | `run_start` - helps us determine if a monitor run was started
14 |
15 | `run_finish` - helps us determine if a monitor ran successfully
16 |
17 | `scheduling_monitors` - helps us determine if a monitor was scheduled
18 |
19 | You can opt out of this data collection at any point in time by setting the environment variable `SEND_ANONYMOUS_STATS` to false.
--------------------------------------------------------------------------------
/documentation/docs/user-guide/aws-ec2.md:
--------------------------------------------------------------------------------
1 | ---
2 | id: aws-ec2
3 | title: AWS EC2 Deployment
4 | sidebar_label: AWS EC2
5 | ---
6 |
7 |
8 | ## Manual
9 |
10 | Information regarding setting up Monosi manually will be available shortly.
11 |
12 |
13 |
--------------------------------------------------------------------------------
/documentation/docs/user-guide/custom-sql.md:
--------------------------------------------------------------------------------
1 | ---
2 | id: custom-sql
3 | title: Custom SQL Monitor
4 | sidebar_label: Custom SQL
5 | ---
6 |
7 | The Custom SQL monitor allows the user to write a custom SQL query which evaluates to one column and define custom thresholds on which to alert. Its primary purpose is to cover long-tail use cases that the built-in metrics and monitors do not already cover.
8 |
9 | | Name | Description | Identifier |
10 | |-----------------------|------------------------------------------------------------------|------------|
11 | | Equals | Is equal in value | eq |
12 | | Not Equals | Is not equal in value | ne |
13 | | Greater Than | Is greater than in value | gt |
14 | | Greater Than or Equal | Is greater than or equal in value | ge |
15 | | Less Than             | Is less than in value                                            | lt         |
16 | | Less Than or Equal | Is less than or equal in value | le |
17 | | Absolute Increase | Has increased from the beginning to end by a certain percentage | abs_inc |
18 | | Absolute Decrease     | Has decreased from the beginning to end by a certain percentage  | abs_dec    |
19 | | Relative Increase | Has increased from one value to the next by a certain percentage | rel_inc |
20 | | Relative Decrease | Has decreased from one value to the next by a certain percentage | rel_dec |
21 |
--------------------------------------------------------------------------------
/documentation/docs/user-guide/distribution.md:
--------------------------------------------------------------------------------
1 | ---
2 | id: distribution
3 | title: Distribution Monitor
4 | sidebar_label: Distribution
5 | ---
6 |
7 |
--------------------------------------------------------------------------------
/documentation/docs/user-guide/freshness.md:
--------------------------------------------------------------------------------
1 | ---
2 | id: freshness
3 | title: Freshness Monitor
4 | sidebar_label: Freshness
5 | ---
6 |
7 |
--------------------------------------------------------------------------------
/documentation/docs/user-guide/hybrid-deployment.md:
--------------------------------------------------------------------------------
1 | ---
2 | id: hybrid-deployment
3 | title: Hybrid Deployment
4 | sidebar_label: Hybrid Deployment (Coming soon)
5 | ---
6 |
7 | More information on how to deploy in a hybrid fashion is coming soon. If you’re interested in this option, please comment on or open a [GitHub Issue](https://github.com/monosidev/monosi/issues).
8 |
9 |
--------------------------------------------------------------------------------
/documentation/docs/user-guide/introduction.md:
--------------------------------------------------------------------------------
1 | ---
2 | id: introduction
3 | title: What is Monosi?
4 | sidebar_label: What is Monosi?
5 | ---
6 |
7 |
8 | Monosi is a stand-alone platform for data observability and monitoring. Monosi ensures data quality by automatically monitoring your data stack for anomalies, alerting you of issues, and analyzing the root cause.
9 |
10 |
11 |
12 | Monosi is on a mission to operationalize metadata and use it to improve existing data systems.
13 |
14 | - **Profile & setup basic data quality monitors in minutes.** Built-in logic to detect common data quality issues and the ability to extend checks.
15 | - **No-code, stand-alone full web application.** Fully supported user interface to improve user experience and allow for anyone to view the quality of the data they are using.
16 | - **Automated, built-in scheduler & jobs.** Connect your data sources and start monitoring immediately. No complicated processes or setup.
17 | - **Integrate & monitor your entire data stack.** Contribute and add to our growing number of integrations to collect & monitor your stack or use the existing ones.
18 | - **Own your (meta)data.** Keep control of your data (no security compliance process as Monosi is self-hosted), own and use the metadata collected by Monosi for any custom processes or analyses.
19 |
--------------------------------------------------------------------------------
/documentation/docs/user-guide/kubernetes.md:
--------------------------------------------------------------------------------
1 | ---
2 | id: kubernetes
3 | title: Kubernetes (Coming soon)
4 | sidebar_label: Kubernetes (Coming soon)
5 | ---
6 |
7 | More information on how to deploy with Kubernetes is coming soon. If you’re interested in this option, please see or open a [GitHub Issue](https://github.com/monosidev/monosi/issues).
8 |
9 |
--------------------------------------------------------------------------------
/documentation/docs/user-guide/local-deployment.md:
--------------------------------------------------------------------------------
1 | ---
2 | id: local-deployment
3 | title: Local Deployment (Docker)
4 | sidebar_label: Local Deployment (Docker)
5 | ---
6 |
7 | 1. Install Docker on your computer and ensure that Docker Compose v2 is installed.
8 | 2. Run the following commands to start using Monosi
9 | ```
10 | git clone https://github.com/monosidev/monosi
11 | cd monosi
12 | make compose
13 | ```
14 | 3. Navigate to http://localhost:3000 via your browser
15 | 4. Start monitoring your data
16 |
--------------------------------------------------------------------------------
/documentation/docs/user-guide/schema-changes.md:
--------------------------------------------------------------------------------
1 | ---
2 | id: schema-changes
3 | title: Schema Changes Monitor
4 | sidebar_label: Schema Changes
5 | ---
6 |
7 | The Schema monitor checks for changes in your table's schema. Its primary purpose is to alert you if the schema of a table changes to prevent data issues.
8 |
--------------------------------------------------------------------------------
/documentation/docs/user-guide/terraform-deployment.md:
--------------------------------------------------------------------------------
1 | ---
2 | id: terraform-deployment
3 | title: Deploying MonoSi with Terraform
4 | sidebar_label: Terraform
5 | ---
6 |
7 | MonoSi provides a starting point for deploying the application via Terraform. You can find the Terraform files in the `deployment/terraform` subdirectory.
8 |
9 | Note: MonoSi Terraform files have currently only been tested and used with AWS. If you run into issues, reach out on Slack.
10 |
11 | ## Prerequisites
12 |
13 | MonoSi does not currently support creating a VPC or SSH key for you in these files. You will need to create/obtain:
14 | 1. SSH Key
15 | 2. VPC (and note its ID)
16 | 3. Subnet (and note its ID)
17 |
18 | ## Steps
19 |
20 | 1. Install Terraform on your computer, [find instructions here](https://learn.hashicorp.com/tutorials/terraform/install-cli)
21 | 2. Clone the MonoSi repository
22 | ```
23 | git clone https://github.com/monosidev/monosi && cd monosi
24 | ```
25 | 3. Change directory to terraform files.
26 | ```
27 | cd deployment/terraform
28 | ```
29 | 4. Update terraform.tfvars with your SSH key name, VPC ID, and subnet ID.
30 | 5. Ensure that you are authenticated locally with terraform to AWS. See [terraform docs](https://registry.terraform.io/providers/hashicorp/aws/latest/docs#authentication-and-configuration) for more details.
31 | 6. Run `terraform init`
32 | 7. Run `terraform plan` to preview changes.
33 | 8. Run `terraform apply` to start the deployment and wait for it to complete (this may take some time).
34 | 9. Terraform should provide the IP of the EC2 instance it is being deployed on. You can visit MonoSi at that address on port 3000.
35 |
36 |
--------------------------------------------------------------------------------
/documentation/netlify.toml:
--------------------------------------------------------------------------------
1 | [[plugins]]
2 | package = "netlify-plugin-checklinks"
3 |
4 | [plugins.inputs]
5 |
6 | skipPatterns = ["#", "www.google-analytics.com", "www.googletagmanager.com", "Asset is used as both Html and Image", "/docs/typescript/client"]
7 | pretty = true
8 |
9 | ## Note: if you are looking for Redirects
10 | # they have been moved to /static/_redirects to make it more manageable - swyx
11 |
--------------------------------------------------------------------------------
/documentation/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "monosi-documentation",
3 | "version": "0.0.1",
4 | "private": true,
5 | "scripts": {
6 | "docusaurus": "docusaurus",
7 | "start": "docusaurus start",
8 | "build": "docusaurus build",
9 | "swizzle": "docusaurus swizzle",
10 | "deploy": "docusaurus deploy",
11 | "clear": "docusaurus clear",
12 | "serve": "docusaurus serve",
13 | "write-translations": "docusaurus write-translations",
14 | "write-heading-ids": "docusaurus write-heading-ids",
15 | "format": "prettier --write ."
16 | },
17 | "lint-staged": {
18 | "**/*": "prettier --write --ignore-unknown"
19 | },
20 | "dependencies": {
21 | "@docusaurus/core": "^2.0.0-beta.14",
22 | "@docusaurus/preset-classic": "^2.0.0-beta.14",
23 | "@mdx-js/react": "^1.5.8",
24 | "clsx": "^1.1.1",
25 | "react": "^17.0.2",
26 | "react-dom": "^17.0.2",
27 | "react-player": "^2.6.0",
28 | "remark-typescript-tools": "1.0.9",
29 | "snipsync": "1.5.0",
30 | "typescript": "4.x",
31 | "uuid": "^8.3.2",
32 | "webpack-font-preload-plugin": "^1.5.0"
33 | },
34 | "browserslist": {
35 | "production": [
36 | ">0.2%",
37 | "not dead",
38 | "not op_mini all"
39 | ],
40 | "development": [
41 | "last 1 chrome version",
42 | "last 1 firefox version",
43 | "last 1 safari version"
44 | ]
45 | },
46 | "devDependencies": {
47 | "@tailwindcss/typography": "^0.5.0",
48 | "autoprefixer": "^10.4.0",
49 | "docusaurus-tailwindcss-loader": "file:plugins/docusaurus-tailwindcss-loader",
50 | "eslint": "^7.32.0",
51 | "eslint-plugin-react": "^7.23.2",
52 | "lint-staged": "^11.1.2",
53 | "postcss": "^8.4.4",
54 | "postcss-import": "^14.0.2",
55 | "postcss-preset-env": "^6.7.0",
56 | "prettier": "^2.5.1",
57 | "prettier-plugin-tailwindcss": "^0.1.4",
58 | "tailwindcss": "^3.0.1"
59 | }
60 | }
61 |
--------------------------------------------------------------------------------
/documentation/plugins/docusaurus-tailwindcss-loader/index.js:
--------------------------------------------------------------------------------
 1 | module.exports = function (context, options) { // Docusaurus plugin factory: wires Tailwind CSS into the site's PostCSS pipeline
 2 |   return {
 3 |     name: "postcss-tailwindcss-loader",
 4 |     configurePostCss(postcssOptions) { // invoked by Docusaurus with its current PostCSS options
 5 |       // Extend (not replace) the plugin chain: resolve @import first, then Tailwind, then postcss-preset-env.
 6 |       postcssOptions.plugins.push(
 7 |         require("postcss-import"),
 8 |         require("tailwindcss"),
 9 |         require("postcss-preset-env")({
10 |           autoprefixer: {
11 |             flexbox: "no-2009", // skip prefixes for the legacy 2009 flexbox spec
12 |           },
13 |           stage: 4, // only transpile features that are already stable in the CSS spec
14 |         })
15 |       );
16 |       return postcssOptions; // hand the mutated options back to Docusaurus
17 |     },
18 |   };
19 | };
20 |
--------------------------------------------------------------------------------
/documentation/postcss.config.js:
--------------------------------------------------------------------------------
 1 | module.exports = { // PostCSS configuration: run Tailwind first, then Autoprefixer
 2 |   plugins: {
 3 |     tailwindcss: {}, // empty options — presumably picks up the project's tailwind.config defaults
 4 |     autoprefixer: {},
 5 |   },
 6 | };
7 |
--------------------------------------------------------------------------------
/documentation/src/components/MonosiCloud.js:
--------------------------------------------------------------------------------
1 | import React from "react";
2 |
3 | export const MonosiCloud = () => {
4 | return (
5 |
9 |
13 |
14 | Monosi Cloud is a fully managed cloud offering of the open-source
15 | suite of tools. We are currently accepting private Design Partners.{" "}
16 |
20 | Apply here!
21 |
22 |
23 |
24 | );
25 | };
26 |
--------------------------------------------------------------------------------
/documentation/src/components/Resources.js:
--------------------------------------------------------------------------------
1 | import React from "react";
2 | import useBaseUrl from "@docusaurus/useBaseUrl";
3 | import Link from "@docusaurus/Link";
4 |
5 | const links = [
6 | {
7 | type: "article",
8 | title: "Why Monosi?",
9 | length: "3-20 min reads",
10 | url: "#",
11 | },
12 | {
13 | type: "article",
14 | title: "Monosi Core Concepts",
15 | length: "20 min read",
16 | url: "#",
17 | },
18 | // {
19 | // type: "video",
20 | // title: "Monosi architecture deep dive",
21 | // length: "20 min read/watch",
22 | // url: "#",
23 | // },
24 | ];
25 |
26 | export const Resources = () => {
27 | return (
28 |
29 | Resources and guides
30 |
31 | {links.map((link, i) => (
32 |
33 | {link.type === "article" ? (
34 |
40 |
45 |
46 | ) : (
47 |
53 |
58 |
59 | )}
60 |
61 |
62 | {link.title}{" "}
63 |
64 | {link.length}
65 |
66 |
67 |
68 | ))}
69 |
70 |
71 | );
72 | };
73 |
--------------------------------------------------------------------------------
/documentation/src/components/index.js:
--------------------------------------------------------------------------------
 1 | export {default as ResponsivePlayer} from "./responsive-player/ResponsivePlayer"; // Barrel module: single import point for the site's shared React components.
 2 | export {Button} from "./shared/Button";
 3 | export {Integrations} from "./Integrations";
 4 | export {MonosiCloud} from "./MonosiCloud";
 5 | export {Community} from "./Community";
 6 | export {Intro} from "./Intro";
 7 | export {Resources} from "./Resources";
 8 | export {Newsletter} from "./Newsletter";
9 |
--------------------------------------------------------------------------------
/documentation/src/components/responsive-player/ResponsivePlayer.js:
--------------------------------------------------------------------------------
1 | import React from "react";
2 | import ReactPlayer from "react-player";
3 |
4 | function ResponsivePlayer({url, loop, playing}) {
5 | return (
6 |
10 | {/* /* Player ratio: 100 / (1280 / 720) */}
11 |
20 |
21 | );
22 | }
23 |
24 | export default ResponsivePlayer;
25 |
--------------------------------------------------------------------------------
/documentation/src/components/shared/Button.js:
--------------------------------------------------------------------------------
1 | import React from "react";
2 |
3 | export const Button = ({children, type, name, className}) => {
4 | return (
5 |
11 | {children}
12 |
13 | );
14 | };
15 |
--------------------------------------------------------------------------------
/documentation/src/fonts/Aeonik-Bold.woff:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/monosidev/monosi/a88b689fc74010b10dbabb32f4b2bdeae865f4d5/documentation/src/fonts/Aeonik-Bold.woff
--------------------------------------------------------------------------------
/documentation/src/fonts/Aeonik-Light.woff:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/monosidev/monosi/a88b689fc74010b10dbabb32f4b2bdeae865f4d5/documentation/src/fonts/Aeonik-Light.woff
--------------------------------------------------------------------------------
/documentation/src/fonts/Aeonik-Regular.woff:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/monosidev/monosi/a88b689fc74010b10dbabb32f4b2bdeae865f4d5/documentation/src/fonts/Aeonik-Regular.woff
--------------------------------------------------------------------------------
/documentation/src/fonts/AeonikTRIAL-Bold.ttf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/monosidev/monosi/a88b689fc74010b10dbabb32f4b2bdeae865f4d5/documentation/src/fonts/AeonikTRIAL-Bold.ttf
--------------------------------------------------------------------------------
/documentation/src/fonts/AeonikTRIAL-Light.ttf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/monosidev/monosi/a88b689fc74010b10dbabb32f4b2bdeae865f4d5/documentation/src/fonts/AeonikTRIAL-Light.ttf
--------------------------------------------------------------------------------
/documentation/src/fonts/AeonikTRIAL-Regular.ttf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/monosidev/monosi/a88b689fc74010b10dbabb32f4b2bdeae865f4d5/documentation/src/fonts/AeonikTRIAL-Regular.ttf
--------------------------------------------------------------------------------
/documentation/src/pages/index.js:
--------------------------------------------------------------------------------
1 | import React from "react";
2 | import Layout from "@theme/Layout";
3 | import useDocusaurusContext from "@docusaurus/useDocusaurusContext";
4 | import {
5 | Community,
6 | MonosiCloud,
7 | Intro,
8 | Resources,
9 | Newsletter,
10 | } from "../components";
11 |
12 | export default function Home() {
13 | const context = useDocusaurusContext();
14 | const {siteConfig = {}} = context;
15 | return (
16 |
17 |
22 |
23 |
24 | {/* */}
25 | {/* */}
26 |
27 |
28 |
29 |
30 |
31 | );
32 | }
33 |
--------------------------------------------------------------------------------
/documentation/src/pages/integrations.js:
--------------------------------------------------------------------------------
1 | import React from "react";
2 | import Layout from "@theme/Layout";
3 | import useDocusaurusContext from "@docusaurus/useDocusaurusContext";
4 | import {Integrations} from "../components/Integrations";
5 |
6 | export default function MonosiIntegrations() {
7 | const context = useDocusaurusContext();
8 | const {siteConfig = {}} = context;
9 | return (
10 |
14 |
15 |
16 |
17 |
18 | );
19 | }
20 |
--------------------------------------------------------------------------------
/documentation/src/theme/BlogLayout/index.js:
--------------------------------------------------------------------------------
1 | import React from "react";
2 | import Layout from "@theme/Layout";
3 | import BlogSidebar from "@theme/BlogSidebar";
4 |
5 | function BlogLayout(props) {
6 | const {sidebar, children, ...layoutProps} = props;
7 | const hasSidebar = sidebar && sidebar.items.length > 0;
8 |
9 | return (
10 |
11 |
12 |
13 | {children}
14 |
15 | {hasSidebar && (
16 |
19 | )}
20 |
21 |
22 | );
23 | }
24 |
25 | export default BlogLayout;
26 |
--------------------------------------------------------------------------------
/documentation/src/theme/BlogSidebar/index.js:
--------------------------------------------------------------------------------
1 | import React from "react";
2 | import Link from "@docusaurus/Link";
3 | export default function BlogSidebar({sidebar, row}) {
4 |
5 | const tags = []
6 | // const tags = [
7 | // {
8 | // title: "community",
9 | // url: "/blog/tags/community",
10 | // },
11 | // {
12 | // title: "announcement",
13 | // url: "/blog/tags/announcement",
14 | // },
15 | // {
16 | // title: "releases",
17 | // url: "/blog/tags/release",
18 | // },
19 | // ];
20 |
21 | if (sidebar.items.length === 0 || tags.length === 0) {
22 | return null;
23 | }
24 |
25 | return (
26 |
27 |
28 |
Tags
29 |
30 | {
31 |
32 | {tags.map(({title, url}) => (
33 |
38 | {title}
39 |
40 | ))}
41 |
42 | }
43 |
44 |
45 |
46 | );
47 | }
48 |
--------------------------------------------------------------------------------
/documentation/src/theme/BlogTagsListPage/index.js:
--------------------------------------------------------------------------------
1 | /**
2 | * Copyright (c) Facebook, Inc. and its affiliates.
3 | *
4 | * This source code is licensed under the MIT license found in the
5 | * LICENSE file in the root directory of this source tree.
6 | */
7 | import React from "react";
8 | import BlogLayout from "@theme/BlogLayout";
9 | import TagsListByLetter from "@theme/TagsListByLetter";
10 | import {
11 | ThemeClassNames,
12 | translateTagsPageTitle,
13 | } from "@docusaurus/theme-common";
14 |
15 | function BlogTagsListPage(props) {
16 | const {tags, sidebar} = props;
17 | const title = translateTagsPageTitle();
18 | return (
19 |
29 | {title}
30 |
31 |
32 | );
33 | }
34 |
35 | export default BlogTagsListPage;
36 |
--------------------------------------------------------------------------------
/documentation/src/theme/DocItem/styles.module.css:
--------------------------------------------------------------------------------
1 | /**
2 | * Copyright (c) Facebook, Inc. and its affiliates.
3 | *
4 | * This source code is licensed under the MIT license found in the
5 | * LICENSE file in the root directory of this source tree.
6 | */
7 |
8 | .docTitle {
9 | font-size: 3rem;
10 | margin-bottom: calc(var(--ifm-leading-desktop) * var(--ifm-leading));
11 | }
12 |
13 | .docItemContainer {
14 | margin: 0 auto;
15 | padding: 0 0.5rem;
16 | }
17 |
18 | @media only screen and (min-width: 997px) {
19 | .docItemCol {
20 | max-width: 75% !important;
21 | }
22 |
23 | /* Prevent hydration FOUC, as the mobile TOC needs to be server-rendered */
24 | .tocMobile {
25 | display: none;
26 | }
27 | }
28 |
29 | @media only screen and (max-width: 996px) {
30 | .docItemContainer {
31 | padding: 0 0.3rem;
32 | }
33 | }
34 |
35 | .EditThisPage {
36 | margin-top: 2rem;
37 | }
38 |
39 | @media screen and (min-width: 998px) {
40 | .EditThisPage {
41 | backdrop-filter: invert(0.2);
42 | position: fixed;
43 | bottom: 1rem;
44 | right: 1rem;
45 | padding: 1rem;
46 | z-index: 99;
47 | }
48 | .EditThisPage:hover {
49 | background-color: var(--ifm-color-content);
50 | --ifm-link-color: var(--ifm-color-content-inverse);
51 | }
52 | }
53 |
--------------------------------------------------------------------------------
/documentation/src/theme/DocPage/styles.module.css:
--------------------------------------------------------------------------------
1 | /**
2 | * Copyright (c) Facebook, Inc. and its affiliates.
3 | *
4 | * This source code is licensed under the MIT license found in the
5 | * LICENSE file in the root directory of this source tree.
6 | */
7 |
8 | :root {
9 | --doc-sidebar-width: 300px;
10 | --doc-sidebar-hidden-width: 30px;
11 | }
12 |
13 | :global(.docs-wrapper) {
14 | display: flex;
15 | }
16 |
17 | .docPage,
18 | .docMainContainer {
19 | display: flex;
20 | width: 100%;
21 | }
22 |
23 | .docSidebarContainer {
24 | display: none;
25 | }
26 |
27 | @media (min-width: 997px) {
28 | .docMainContainer {
29 | flex-grow: 1;
30 | max-width: calc(100% - var(--doc-sidebar-width));
31 | }
32 |
33 | .docMainContainerEnhanced {
34 | max-width: calc(100% - var(--doc-sidebar-hidden-width));
35 | }
36 |
37 | .docSidebarContainer {
38 | display: block;
39 | width: var(--doc-sidebar-width);
40 | margin-top: calc(-1 * var(--ifm-navbar-height));
41 | border-right: 1px solid var(--ifm-toc-border-color);
42 | will-change: width;
43 | transition: width var(--ifm-transition-fast) ease;
44 | clip-path: inset(0);
45 | }
46 |
47 | .docSidebarContainerHidden {
48 | width: var(--doc-sidebar-hidden-width);
49 | cursor: pointer;
50 | }
51 |
52 | .collapsedDocSidebar {
53 | position: sticky;
54 | top: 0;
55 | height: 100%;
56 | max-height: 100vh;
57 | display: flex;
58 | align-items: center;
59 | justify-content: center;
60 | transition: background-color var(--ifm-transition-fast) ease;
61 | }
62 |
63 | .collapsedDocSidebar:hover,
64 | .collapsedDocSidebar:focus {
65 | background-color: var(--ifm-color-emphasis-200);
66 | }
67 |
68 | .expandSidebarButtonIcon {
69 | transform: rotate(0);
70 | }
71 | html[dir="rtl"] .expandSidebarButtonIcon {
72 | transform: rotate(180deg);
73 | }
74 |
75 | html[data-theme="dark"] .collapsedDocSidebar:hover,
76 | html[data-theme="dark"] .collapsedDocSidebar:focus {
77 | background-color: var(--collapse-button-bg-color-dark);
78 | }
79 |
80 | .docItemWrapperEnhanced {
81 | max-width: calc(
82 | var(--ifm-container-width) + var(--doc-sidebar-width)
83 | ) !important;
84 | }
85 | }
86 |
--------------------------------------------------------------------------------
/documentation/src/theme/Tag/index.js:
--------------------------------------------------------------------------------
1 | /**
2 | * Copyright (c) Facebook, Inc. and its affiliates.
3 | *
4 | * This source code is licensed under the MIT license found in the
5 | * LICENSE file in the root directory of this source tree.
6 | */
7 | import React from "react";
8 | import Link from "@docusaurus/Link";
9 |
10 | function Tag(props) {
11 | const {permalink, name, count} = props;
12 | return (
13 |
19 | {name}
20 | {count && (
21 |
22 | {count}
23 |
24 | )}
25 |
26 | );
27 | }
28 |
29 | export default Tag;
30 |
--------------------------------------------------------------------------------
/documentation/src/theme/TagsListByLetter/index.js:
--------------------------------------------------------------------------------
1 | /**
2 | * Copyright (c) Facebook, Inc. and its affiliates.
3 | *
4 | * This source code is licensed under the MIT license found in the
5 | * LICENSE file in the root directory of this source tree.
6 | */
7 | import React from "react";
8 | import Tag from "@theme/Tag";
9 | import {listTagsByLetters} from "@docusaurus/theme-common";
10 |
11 | function TagLetterEntryItem({letterEntry}) {
12 | return (
13 |
14 | {letterEntry.letter}
15 |
16 | {letterEntry.tags.map((tag) => (
17 |
18 |
19 |
20 | ))}
21 |
22 |
23 |
24 | );
25 | }
26 |
27 | function TagsListByLetter({tags}) {
28 | const letterList = listTagsByLetters(tags);
29 | return (
30 |
31 | {letterList.map((letterEntry) => (
32 |
36 | ))}
37 |
38 | );
39 | }
40 |
41 | export default TagsListByLetter;
42 |
--------------------------------------------------------------------------------
/documentation/static/CNAME:
--------------------------------------------------------------------------------
1 | docs.monosi.dev
--------------------------------------------------------------------------------
/documentation/static/_redirects:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/monosidev/monosi/a88b689fc74010b10dbabb32f4b2bdeae865f4d5/documentation/static/_redirects
--------------------------------------------------------------------------------
/documentation/static/img/Slack_Mark.svg:
--------------------------------------------------------------------------------
1 |
2 |
3 |
5 |
11 |
12 |
13 |
14 |
16 |
17 |
18 |
19 |
21 |
22 |
23 |
24 |
26 |
27 |
28 |
29 |
31 |
32 |
33 |
34 |
--------------------------------------------------------------------------------
/documentation/static/img/airflow.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/monosidev/monosi/a88b689fc74010b10dbabb32f4b2bdeae865f4d5/documentation/static/img/airflow.png
--------------------------------------------------------------------------------
/documentation/static/img/alerts/alerts.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/monosidev/monosi/a88b689fc74010b10dbabb32f4b2bdeae865f4d5/documentation/static/img/alerts/alerts.png
--------------------------------------------------------------------------------
/documentation/static/img/alerts/create.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/monosidev/monosi/a88b689fc74010b10dbabb32f4b2bdeae865f4d5/documentation/static/img/alerts/create.png
--------------------------------------------------------------------------------
/documentation/static/img/bigquery.svg:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/documentation/static/img/changelog/v011/issues_page.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/monosidev/monosi/a88b689fc74010b10dbabb32f4b2bdeae865f4d5/documentation/static/img/changelog/v011/issues_page.png
--------------------------------------------------------------------------------
/documentation/static/img/changelog/v011/onboarding_form.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/monosidev/monosi/a88b689fc74010b10dbabb32f4b2bdeae865f4d5/documentation/static/img/changelog/v011/onboarding_form.png
--------------------------------------------------------------------------------
/documentation/static/img/cloud.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/monosidev/monosi/a88b689fc74010b10dbabb32f4b2bdeae865f4d5/documentation/static/img/cloud.png
--------------------------------------------------------------------------------
/documentation/static/img/cube.svg:
--------------------------------------------------------------------------------
1 |
3 |
4 |
5 |
6 |
7 |
--------------------------------------------------------------------------------
/documentation/static/img/datasource/bigquery_connection.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/monosidev/monosi/a88b689fc74010b10dbabb32f4b2bdeae865f4d5/documentation/static/img/datasource/bigquery_connection.png
--------------------------------------------------------------------------------
/documentation/static/img/datasource/connect.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/monosidev/monosi/a88b689fc74010b10dbabb32f4b2bdeae865f4d5/documentation/static/img/datasource/connect.png
--------------------------------------------------------------------------------
/documentation/static/img/datasource/create-1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/monosidev/monosi/a88b689fc74010b10dbabb32f4b2bdeae865f4d5/documentation/static/img/datasource/create-1.png
--------------------------------------------------------------------------------
/documentation/static/img/datasource/create-2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/monosidev/monosi/a88b689fc74010b10dbabb32f4b2bdeae865f4d5/documentation/static/img/datasource/create-2.png
--------------------------------------------------------------------------------
/documentation/static/img/dbt.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/monosidev/monosi/a88b689fc74010b10dbabb32f4b2bdeae865f4d5/documentation/static/img/dbt.png
--------------------------------------------------------------------------------
/documentation/static/img/doc.svg:
--------------------------------------------------------------------------------
1 |
3 |
4 |
--------------------------------------------------------------------------------
/documentation/static/img/dotnet.svg:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/documentation/static/img/email.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/monosidev/monosi/a88b689fc74010b10dbabb32f4b2bdeae865f4d5/documentation/static/img/email.png
--------------------------------------------------------------------------------
/documentation/static/img/example.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/monosidev/monosi/a88b689fc74010b10dbabb32f4b2bdeae865f4d5/documentation/static/img/example.gif
--------------------------------------------------------------------------------
/documentation/static/img/favicon.ico:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/monosidev/monosi/a88b689fc74010b10dbabb32f4b2bdeae865f4d5/documentation/static/img/favicon.ico
--------------------------------------------------------------------------------
/documentation/static/img/favicon.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/monosidev/monosi/a88b689fc74010b10dbabb32f4b2bdeae865f4d5/documentation/static/img/favicon.png
--------------------------------------------------------------------------------
/documentation/static/img/finish.svg:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/documentation/static/img/foreign.svg:
--------------------------------------------------------------------------------
1 |
2 |
3 |
5 |
8 |
12 |
14 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
38 |
39 |
40 |
41 |
42 |
43 |
44 |
45 |
46 |
47 |
48 |
--------------------------------------------------------------------------------
/documentation/static/img/integrations/create.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/monosidev/monosi/a88b689fc74010b10dbabb32f4b2bdeae865f4d5/documentation/static/img/integrations/create.png
--------------------------------------------------------------------------------
/documentation/static/img/integrations/overview.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/monosidev/monosi/a88b689fc74010b10dbabb32f4b2bdeae865f4d5/documentation/static/img/integrations/overview.png
--------------------------------------------------------------------------------
/documentation/static/img/integrations/webhook_alert.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/monosidev/monosi/a88b689fc74010b10dbabb32f4b2bdeae865f4d5/documentation/static/img/integrations/webhook_alert.png
--------------------------------------------------------------------------------
/documentation/static/img/layers.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/monosidev/monosi/a88b689fc74010b10dbabb32f4b2bdeae865f4d5/documentation/static/img/layers.png
--------------------------------------------------------------------------------
/documentation/static/img/looker.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/monosidev/monosi/a88b689fc74010b10dbabb32f4b2bdeae865f4d5/documentation/static/img/looker.png
--------------------------------------------------------------------------------
/documentation/static/img/monitors/monitors.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/monosidev/monosi/a88b689fc74010b10dbabb32f4b2bdeae865f4d5/documentation/static/img/monitors/monitors.png
--------------------------------------------------------------------------------
/documentation/static/img/monitors/monitors_index.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/monosidev/monosi/a88b689fc74010b10dbabb32f4b2bdeae865f4d5/documentation/static/img/monitors/monitors_index.png
--------------------------------------------------------------------------------
/documentation/static/img/monitors/table_health.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/monosidev/monosi/a88b689fc74010b10dbabb32f4b2bdeae865f4d5/documentation/static/img/monitors/table_health.png
--------------------------------------------------------------------------------
/documentation/static/img/moon.svg:
--------------------------------------------------------------------------------
1 |
3 |
4 |
--------------------------------------------------------------------------------
/documentation/static/img/mysql.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/monosidev/monosi/a88b689fc74010b10dbabb32f4b2bdeae865f4d5/documentation/static/img/mysql.png
--------------------------------------------------------------------------------
/documentation/static/img/pagerduty.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/monosidev/monosi/a88b689fc74010b10dbabb32f4b2bdeae865f4d5/documentation/static/img/pagerduty.png
--------------------------------------------------------------------------------
/documentation/static/img/powerbi.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/monosidev/monosi/a88b689fc74010b10dbabb32f4b2bdeae865f4d5/documentation/static/img/powerbi.png
--------------------------------------------------------------------------------
/documentation/static/img/redshift.svg:
--------------------------------------------------------------------------------
1 |
2 |
--------------------------------------------------------------------------------
/documentation/static/img/ruby.svg:
--------------------------------------------------------------------------------
1 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
--------------------------------------------------------------------------------
/documentation/static/img/server.svg:
--------------------------------------------------------------------------------
1 |
3 |
4 |
5 |
6 |
7 |
--------------------------------------------------------------------------------
/documentation/static/img/snowflake.svg:
--------------------------------------------------------------------------------
1 |
2 |
--------------------------------------------------------------------------------
/documentation/static/img/sun.svg:
--------------------------------------------------------------------------------
1 |
3 |
4 |
--------------------------------------------------------------------------------
/documentation/static/img/tableau.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/monosidev/monosi/a88b689fc74010b10dbabb32f4b2bdeae865f4d5/documentation/static/img/tableau.png
--------------------------------------------------------------------------------
/documentation/static/img/tool.svg:
--------------------------------------------------------------------------------
1 |
3 |
4 |
5 |
6 |
--------------------------------------------------------------------------------
/documentation/static/img/webhooks.svg:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/documentation/static/scripts/feedback.js:
--------------------------------------------------------------------------------
1 | // TODO: INSERT UPDATE
2 | // (function (h, o, t, j, a, r) {
3 | // h.hj =
4 | // h.hj ||
5 | // function () {
6 | // (h.hj.q = h.hj.q || []).push(arguments);
7 | // };
8 | // h._hjSettings = {hjid: 1917477, hjsv: 6};
9 | // a = o.getElementsByTagName("head")[0];
10 | // r = o.createElement("script");
11 | // r.async = 1;
12 | // r.src = t + h._hjSettings.hjid + j + h._hjSettings.hjsv;
13 | // a.appendChild(r);
14 | // })(window, document, "https://static.hotjar.com/c/hotjar-", ".js?sv=");
15 |
--------------------------------------------------------------------------------
/documentation/static/scripts/fullstory.js:
--------------------------------------------------------------------------------
// TODO: INSERT UPDATE
// Standard FullStory browser-recording bootstrap snippet (vendor-provided).
// It installs a queueing stub under window[_fs_namespace], injects the real
// fs.js script asynchronously, and wraps window.fetch so early requests are
// still observed. NOTE(review): this looks like the stock snippet from
// FullStory's setup docs -- prefer replacing it wholesale from the FullStory
// dashboard rather than editing individual lines.
window["_fs_debug"] = false;
window["_fs_host"] = "fullstory.com";
window["_fs_script"] = "edge.fullstory.com/s/fs.js";
window["_fs_org"] = "16T4RJ"; // FullStory organization id
window["_fs_namespace"] = "FS"; // global name under which the API stub lives
(function (m, n, e, t, l, o, g, y) {
  // Abort if another script already claimed the namespace.
  if (e in m) {
    if (m.console && m.console.log) {
      m.console.log(
        'FullStory namespace conflict. Please set window["_fs_namespace"].'
      );
    }
    return;
  }
  // `g` is the API stub: queue calls in g.q until the real library replaces it.
  g = m[e] = function (a, b, s) {
    g.q ? g.q.push([a, b, s]) : g._api(a, b, s);
  };
  g.q = [];
  // Inject the fs.js <script> tag before the first existing script element.
  o = n.createElement(t);
  o.async = 1;
  o.crossOrigin = "anonymous";
  o.src = "https://" + _fs_script;
  y = n.getElementsByTagName(t)[0];
  y.parentNode.insertBefore(o, y);
  g.identify = function (i, v, s) {
    g(l, {uid: i}, s);
    if (v) g(l, v, s);
  };
  g.setUserVars = function (v, s) {
    g(l, v, s);
  };
  g.event = function (i, v, s) {
    g("event", {n: i, p: v}, s);
  };
  g.anonymize = function () {
    g.identify(!!0);
  };
  g.shutdown = function () {
    g("rec", !1);
  };
  g.restart = function () {
    g("rec", !0);
  };
  g.log = function (a, b) {
    g("log", [a, b]);
  };
  g.consent = function (a) {
    g("consent", !arguments.length || a);
  };
  g.identifyAccount = function (i, v) {
    o = "account";
    v = v || {};
    v.acctId = i;
    g(o, v);
  };
  g.clearUserCookie = function () {};
  g.setVars = function (n, p) {
    g("setVars", [n, p]);
  };
  // Keep references to native XHR/fetch, then wrap fetch through the stub.
  g._w = {};
  y = "XMLHttpRequest";
  g._w[y] = m[y];
  y = "fetch";
  g._w[y] = m[y];
  if (m[y])
    m[y] = function () {
      return g._w[y].apply(this, arguments);
    };
  g._v = "1.3.0";
})(window, document, window["_fs_namespace"], "script", "user");
72 |
--------------------------------------------------------------------------------
/documentation/versions.json:
--------------------------------------------------------------------------------
1 | []
2 |
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [build-system]
2 | requires = [
3 | "setuptools>=42",
4 | "wheel"
5 | ]
6 | build-backend = "setuptools.build_meta"
7 |
--------------------------------------------------------------------------------
/requirements.api.txt:
--------------------------------------------------------------------------------
1 | gunicorn==20.1.0
2 |
--------------------------------------------------------------------------------
/requirements.pkg.txt:
--------------------------------------------------------------------------------
1 | build==0.7.0
2 | twine==3.7.1
3 |
--------------------------------------------------------------------------------
/requirements.test.txt:
--------------------------------------------------------------------------------
1 | pytest==7.0.1
2 |
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | Flask==2.0.2
2 | Flask-APScheduler==1.12.2
3 | Flask-Cors==3.0.10
4 | Flask-RESTful==0.3.9
5 | Flask-SQLAlchemy==2.5.1
6 | jsonschema==4.4.0
7 | kafka-python==2.0.2
8 | mashumaro==3.0
9 | psycopg2==2.9.3
10 | pyjq==2.5.2
11 | PyYAML==6.0
12 | snowflake-sqlalchemy==1.3.3
13 | snowplow-tracker==0.10.0
14 | sqlalchemy-bigquery==1.4.3
15 | sqlalchemy-redshift==0.8.9
16 | sqlalchemy-utils==0.38.2
17 |
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
# -*- coding: utf-8 -*-

import os
import sys
import pkg_resources
from setuptools import setup, find_packages


# Make the src/ layout importable while setup.py runs.
path = os.path.abspath('./src')
sys.path.append(path)

VERSION = '0.1.0'

with open('README.md') as f:
    readme = f.read()

with open('LICENSE') as f:
    # NOTE(review): this is the full LICENSE text, passed below as the
    # `license` metadata field -- usually that field holds a short identifier.
    license = f.read()

# Mirror requirements.txt into install_requires so the two never drift apart.
install_requires = []
with open(os.path.abspath("requirements.txt"), "r") as f:
    requirements_txt = f.readlines()
    install_requires = [
        str(requirement)
        for requirement
        in pkg_resources.parse_requirements(requirements_txt)
    ]

setup(
    name='monosi',
    version=VERSION,
    description='Monosi - Data observability & monitoring toolkit',
    # long_description=readme,
    author='Vocable Inc.',
    author_email='support@monosi.dev',
    url='https://github.com/monosidev/monosi',
    license=license,
    install_requires=install_requires,
    packages=find_packages(
        where="src",
        include=[
            "ingestion*",
            # BUG FIX: a missing comma previously fused this entry with the
            # next ("pipeline*scheduler*"), so neither pattern matched and
            # the scheduler package was silently excluded from the wheel.
            "pipeline*",
            "scheduler*",
            "server*",
            "telemetry*",
        ],
        exclude=["tests"],
    ),
    package_dir={"": "src"},
    entry_points = {
        'console_scripts': [
            'monosi=cli.__main__:main',
        ],
    },
)
57 |
--------------------------------------------------------------------------------
/src/ingestion/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/monosidev/monosi/a88b689fc74010b10dbabb32f4b2bdeae865f4d5/src/ingestion/__init__.py
--------------------------------------------------------------------------------
/src/ingestion/destinations/__init__.py:
--------------------------------------------------------------------------------
1 | from typing import Any, Dict, Type
2 | import logging
3 | import json
4 |
5 | from .base import (
6 | Destination,
7 | DestinationConfiguration
8 | )
9 | from .monosi import MonosiDestination, MonosiDestinationConfiguration
10 |
11 |
class DestinationFactory:
    """Builds `Destination` instances from a plain configuration dict."""

    @classmethod
    def _configuration_cls(cls, config_type: str) -> Type[DestinationConfiguration]:
        """Map a destination type name (case-insensitive) to its configuration class."""
        config_type = config_type.lower()
        if config_type == "monosi":
            return MonosiDestinationConfiguration
        else:
            raise Exception("Error: Unknown destination type.")

    @classmethod
    def _destination_cls(cls, config_type: str) -> Type[Destination]:
        """Map a destination type name (case-insensitive) to its destination class."""
        # BUG FIX: was `config_type == config_type.lower()` -- a no-op
        # comparison instead of an assignment, so any mixed-case type name
        # (e.g. "Monosi") raised "Unknown destination type" here even though
        # _configuration_cls had already accepted it.
        config_type = config_type.lower()
        if config_type == "monosi":
            return MonosiDestination
        else:
            raise Exception("Error: Unknown destination type.")

    @classmethod
    def create(cls, configuration: Dict[str, Any]) -> Destination:
        """Instantiate the destination described by `configuration`.

        Raises Exception when `configuration` lacks a 'type' key or names an
        unknown destination type.
        """
        config_type = configuration.get('type')
        if config_type is None:
            raise Exception("Error: No destination type set.")

        configuration_cls = cls._configuration_cls(config_type)
        destination_cls = cls._destination_cls(config_type)

        # The full raw configuration is stored JSON-encoded on the object.
        configuration_obj = configuration_cls(
            configuration=json.dumps(configuration)
        )
        destination = destination_cls(configuration_obj)

        return destination
44 |
45 |
--------------------------------------------------------------------------------
/src/ingestion/destinations/base.py:
--------------------------------------------------------------------------------
1 | from dataclasses import dataclass
2 | import abc
3 | import json
4 | from typing import Optional
5 |
6 |
class Publisher(object):
    """Abstract interface for objects that deliver one item downstream."""

    def run(self, item):
        """Deliver `item`; concrete publishers must override this."""
        raise NotImplementedError
10 |
@dataclass
class DestinationConfiguration:
    """Base configuration for a destination.

    `configuration` holds the raw JSON-encoded settings. `name` and
    `enabled` fall back to their class-level defaults (None / True) because
    the hand-written __init__ below shadows the dataclass-generated one and
    only assigns `configuration`.
    """
    configuration: str
    name: Optional[str] = None
    enabled: bool = True

    def __init__(self, configuration: str):
        # NOTE(review): overriding the dataclass __init__ means name/enabled
        # cannot be passed at construction; kept for caller compatibility.
        self.configuration = configuration

    @classmethod
    def validate(cls, configuration):
        """Validate a raw configuration dict; subclasses must implement."""
        raise NotImplementedError

    def connection_string(self):
        """Return the destination's connection string; subclasses must implement."""
        raise NotImplementedError

    # Modernized from the long-deprecated abc.abstractproperty to the
    # equivalent @property + @abc.abstractmethod stacking.
    @property
    @abc.abstractmethod
    def type(self):
        """Short type identifier (e.g. "monosi"); subclasses must implement."""
        raise NotImplementedError

    def to_dict(self):
        """Serialize to a plain dict, decoding the stored JSON configuration."""
        return {
            "name": self.name or '',
            "configuration": json.loads(self.configuration),
            "enabled": self.enabled,
            "type": self.type,
        }
38 |
class Destination(object):
    """Template-method base: `push` wraps `_push` with before/after hooks."""

    def __init__(self, configuration):
        self.configuration = configuration

    def _before_push(self):
        """Hook invoked just before `_push`; subclasses may override."""
        pass

    def _after_push(self):
        """Hook invoked just after `_push`; subclasses may override."""
        pass

    @abc.abstractmethod
    def _push(self, data):
        """Deliver `data`; concrete destinations must implement this."""
        raise NotImplementedError

    def push(self, data):
        """Run the before-hook, deliver `data` via `_push`, run the
        after-hook, and return whatever `_push` produced."""
        self._before_push()
        outcome = self._push(data)
        self._after_push()
        return outcome
59 |
--------------------------------------------------------------------------------
/src/ingestion/destinations/kafka.py:
--------------------------------------------------------------------------------
1 | from kafka import KafkaProducer, KafkaAdminClient
2 | from kafka.admin import NewTopic
3 | import json
4 |
5 | from .base import Destination, DestinationConfiguration, Publisher
6 |
7 |
class KafkaDestinationConfiguration(DestinationConfiguration):
    # Configuration for publishing to a Kafka broker; only host/port are used.

    @classmethod
    def validate(cls, configuration):
        # Not implemented yet -- callers cannot pre-validate Kafka settings.
        raise NotImplementedError

    @classmethod
    def configuration_schema(cls):
        # JSON-schema fragment describing the expected configuration keys.
        # "secret" lists keys that should be masked; Kafka has none.
        return {
            "type": "object",
            "properties": {
                "host": { "type": "string" },
                "port": { "type": "string" },
            },
            "secret": [ ],
        }


    def connection_string(self):
        # Build the "host:port" bootstrap-server string from the stored JSON.
        configuration = json.loads(self.configuration)

        return "{host}:{port}".format(
            host=configuration.get('host'),
            port=configuration.get('port'),
        )

    @property
    def type(self):
        # Type discriminator used by the destination factory.
        return "kafka"
36 |
class KafkaDestinationPublisher(Publisher):
    """Publishes items to the "msi_kafka" topic on a Kafka cluster."""

    def __init__(self, configuration: KafkaDestinationConfiguration):
        self.configuration = configuration
        # Lazily-created KafkaProducer; populated by _initialize().
        self.connection = None

    def _initialize(self):
        """Create the topic (1 partition, RF 1) and connect a JSON-serializing producer."""
        connection_string = self.configuration.connection_string()

        admin = KafkaAdminClient(bootstrap_servers=connection_string)
        topic = NewTopic(name="msi_kafka", num_partitions=1, replication_factor=1)
        # NOTE(review): create_topics raises if the topic already exists --
        # confirm whether this is only ever called once per cluster.
        admin.create_topics(new_topics=[topic], validate_only=False)

        self.connection = KafkaProducer(bootstrap_servers=connection_string,
            value_serializer=lambda x: json.dumps(x).encode("utf-8"))

    def run(self, item):
        # NOTE(review): assumes _initialize() was called first; otherwise
        # self.connection is None and this raises AttributeError.
        self.connection.send("msi_kafka", item)
54 |
55 |
56 |
class KafkaDestination(Destination):
    """Kafka-backed destination; delivery is not implemented yet."""

    def _push(self, data):
        # BUG FIX: the signature previously omitted `data`, so the base
        # class's push() -> self._push(data) call raised TypeError instead
        # of the intended NotImplementedError.
        raise NotImplementedError
60 |
--------------------------------------------------------------------------------
/src/ingestion/destinations/monosi.py:
--------------------------------------------------------------------------------
1 | import logging
2 | from sqlalchemy import create_engine
3 | from sqlalchemy.orm import sessionmaker, Session
4 | from typing import Any, List
5 |
6 | from ingestion.sources.postgresql import PostgreSQLSourceConfiguration
7 |
8 | from .base import Destination, Publisher
9 |
10 |
class SQLAlchemyPublisher(Publisher):
    """Writes a batch of row dicts into the Monosi application database
    via SQLAlchemy bulk inserts."""

    def __init__(self, configuration):
        self.configuration = configuration
        # Both created lazily in _initialize() and torn down in _terminate().
        self.engine = None
        self.connection = None

    def _create_engine(self):
        # The try/re-raise adds nothing today; kept as a hook point for
        # future error handling around engine creation.
        try:
            return create_engine(self.configuration.connection_string())
        except Exception as e:
            raise e

    def _initialize(self):
        # Idempotent: reuse an existing engine/connection pair if present.
        if self.engine and self.connection:
            return

        self.engine = self._create_engine()
        self.connection = self.engine.connect()

    def _execute(self, data: List[Any]):
        """Bulk-insert `data` (a list of mapping dicts) as Monitor rows.

        Errors are logged and swallowed; always returns None.
        """
        if self.engine is None:
            raise Exception("Initialize publisher before execution.")

        try:
            # Imported here, presumably to avoid an import cycle with the
            # server package -- confirm before moving to module level.
            from server.models import Monitor
            model = Monitor # TODO

            Session = sessionmaker(bind=self.engine)
            with Session() as session:
                session.bulk_insert_mappings(model, data)
                session.commit()
                # Redundant: the `with` block also closes the session on exit.
                session.close()
        except Exception as e:
            logging.error(e)

    def _terminate(self):
        # Close and drop the connection and engine so run() leaves no open handles.
        if self.connection is not None:
            self.connection.close()
            self.connection = None

        if self.engine is not None:
            self.engine.dispose()
            self.engine = None


    def run(self, data: List[Any]):
        """Initialize, insert `data`, and tear down.

        Returns _execute's result (currently always None).
        """
        self._initialize()
        result = self._execute(data)
        self._terminate()

        return result
62 |
63 |
class MonosiDestinationConfiguration(PostgreSQLSourceConfiguration):
    # Reuses the PostgreSQL connection settings; only the type tag differs.
    @property
    def type(self):
        return "monosi"
68 |
class MonosiPublisher(SQLAlchemyPublisher):
    # Alias kept for naming symmetry with other destinations; no overrides.
    pass
71 |
class MonosiDestination(Destination):
    """Destination that persists pushed data into the Monosi database."""

    def _push(self, data):
        publisher = MonosiPublisher(self.configuration)
        # BUG FIX: the publisher's result was previously discarded, so
        # Destination.push() always returned None; propagate it instead.
        return publisher.run(data)
76 |
77 |
78 |
--------------------------------------------------------------------------------
/src/ingestion/pipeline.py:
--------------------------------------------------------------------------------
1 | from dataclasses import dataclass, field
2 | from typing import Any, Dict, List, Type
3 |
4 | from .destinations import Destination, DestinationFactory
5 | from .transformers import Transformer
6 |
7 |
@dataclass
class Pipeline(Destination):
    """A destination that transforms incoming records and fans them out to
    a list of downstream destinations."""
    transformers: List[Type[Transformer]]
    destinations: List[Destination]

    @classmethod
    def from_configuration(cls, transformers: List[Type[Transformer]] = None, destinations: List[Dict[str, Any]] = None):
        """Build a Pipeline from transformer classes and destination config dicts.

        BUG FIX: the defaults were mutable lists (`= []`) -- the classic
        shared-default pitfall; `None` sentinels preserve the call contract.
        """
        if transformers is None:
            transformers = []
        if destinations is None:
            destinations = []
        destination_objs = [DestinationFactory.create(configuration) for configuration in destinations]

        # `cls(...)` instead of a hard-coded Pipeline(...) keeps subclasses working.
        return cls(transformers=transformers, destinations=destination_objs)

    def _transform(self, input_normalized_json):
        # First transformer whose input schema matches wins; no match -> [].
        for transformer in self.transformers:
            if transformer.match(input_normalized_json, transformer._original_schema()):
                return transformer.transform(input_normalized_json)

        return []

    def publish(self, data):
        """Push `data` to every configured destination, returning their results."""
        return [destination.push(data) for destination in self.destinations]

    def _push(self, input_dict):
        """Destination hook: transform the input, then publish the result."""
        transformed_dict = self._transform(input_dict)
        # output_normalized_json = transformed_dict
        self.publish(transformed_dict)
33 |
34 |
35 |
--------------------------------------------------------------------------------
/src/ingestion/sources/__init__.py:
--------------------------------------------------------------------------------
1 | from typing import Any, Dict, Type
2 | import json
3 |
4 | from .base import (
5 | Extractor,
6 | Source,
7 | SourceConfiguration,
8 | SQLAlchemyExtractor,
9 | )
10 |
11 | from .postgresql import PostgreSQLSource, PostgreSQLSourceConfiguration
12 | from .snowflake import SnowflakeSource, SnowflakeSourceConfiguration
13 | from .redshift import RedshiftSource, RedshiftSourceConfiguration
14 | from .bigquery import BigQuerySource, BigQuerySourceConfiguration
15 |
class SourceFactory:
    """Builds `Source` instances from a plain configuration dict."""

    @classmethod
    def _configuration_cls(cls, config_type: str) -> Type[SourceConfiguration]:
        """Map a source type name (case-insensitive) to its configuration class."""
        config_type = config_type.lower()
        if config_type == 'postgresql':
            return PostgreSQLSourceConfiguration
        elif config_type == 'snowflake':
            return SnowflakeSourceConfiguration
        elif config_type == 'redshift':
            return RedshiftSourceConfiguration
        elif config_type == 'bigquery':
            return BigQuerySourceConfiguration
        else:
            raise Exception("Error: Unknown source type.")

    @classmethod
    def _source_cls(cls, config_type: str) -> Type[Source]:
        """Map a source type name (case-insensitive) to its source class."""
        config_type = config_type.lower()
        if config_type == 'postgresql':
            return PostgreSQLSource
        elif config_type == 'snowflake':
            return SnowflakeSource
        elif config_type == 'redshift':
            return RedshiftSource
        elif config_type == 'bigquery':
            return BigQuerySource
        else:
            raise Exception("Error: Unknown source type.")

    @classmethod
    def create(cls, configuration: Dict[str, Any]) -> Source:
        """Instantiate the source described by `configuration`.

        Raises Exception when `configuration` lacks a 'type' key or names an
        unknown source type.
        """
        config_type = configuration.get('type')
        # Idiom fix: `is None` rather than `== None` for the missing-key check.
        if config_type is None:
            raise Exception("Error: No source type set.")

        configuration_cls = cls._configuration_cls(config_type)
        source_cls = cls._source_cls(config_type)

        configuration_obj = configuration_cls(name=None, configuration=json.dumps(configuration))
        # Renamed from the copy-pasted `destination`: this factory builds sources.
        source = source_cls(configuration_obj)

        return source
58 |
59 |
--------------------------------------------------------------------------------
/src/ingestion/sources/kafka.py:
--------------------------------------------------------------------------------
1 | import json
2 |
3 | from .base import Source, SourceConfiguration
4 |
class KafkaSourceConfiguration(SourceConfiguration):
    # Placeholder configuration for a Kafka source; all hooks are stubs.

    @classmethod
    def validate(cls, configuration):
        # Not implemented yet.
        raise NotImplementedError

    @classmethod
    def configuration_schema(cls):
        # Not implemented yet.
        raise NotImplementedError

    def connection_string(self) -> str:
        # Not implemented yet.
        raise NotImplementedError

    @property
    def type(self):
        # Type discriminator used by the source factory.
        return "kafka"
20 |
21 |
class KafkaSource(Source):
    # Placeholder: inherits all behavior from Source; not yet implemented.
    pass
24 |
--------------------------------------------------------------------------------
/src/ingestion/sources/monosi.py:
--------------------------------------------------------------------------------
1 | import logging
2 | from sqlalchemy.orm import sessionmaker, Session
3 | from typing import List
4 | from ingestion.sources.postgresql import PostgreSQLSourceConfiguration
5 |
6 | from ingestion.sources import PostgreSQLSource, SQLAlchemyExtractor
7 | from ingestion.task import TaskUnit
8 |
class MonosiSourceExtractor(SQLAlchemyExtractor):
    """Extractor that reads ORM model rows out of the Monosi database."""

    def __init__(self, configuration):
        self.configuration = configuration
        # Created lazily in _initialize(), released in _terminate().
        self.engine = None
        self.connection = None

    def _initialize(self):
        # Idempotent: keep an existing engine/connection pair.
        if self.engine and self.connection:
            return

        self.engine = self._create_engine()
        self.connection = self.engine.connect()

    def _execute(self, model):
        """Query every row of `model`; returns [] and logs on failure."""
        objs = []
        try:
            Session = sessionmaker(self.engine)
            with Session() as session:
                objs = session.query(model).all()
        except Exception as e:
            # BUG FIX: logging uses %-style placeholders, not str.format;
            # the original "{}" template never interpolated the exception
            # (and passing an arg with no %s makes formatting itself fail).
            logging.error("Couldn't retrieve metrics: %s", e)

        return objs

    def _terminate(self):
        # Drop the connection and engine so run() leaves no open handles.
        if self.connection is not None:
            self.connection.close()
            self.connection = None

        if self.engine is not None:
            self.engine.dispose()
            self.engine = None

    def run(self, unit: TaskUnit):
        """Execute one TaskUnit: its request() must return the model to query."""
        self._initialize()

        model = unit.request()
        results = self._execute(model)

        self._terminate()

        return results
51 |
class MonosiSourceConfiguration(PostgreSQLSourceConfiguration):
    # Reuses the PostgreSQL connection settings; only the type tag differs.
    @property
    def type(self):
        return "monosi"
56 |
class MonosiSource(PostgreSQLSource):
    """Source backed by the Monosi application database itself."""

    def __init__(self, configuration: MonosiSourceConfiguration):
        super().__init__(configuration)

    def task_units(self) -> List[TaskUnit]:
        # No scheduled extraction work is defined for this source yet.
        return []

    def extractor(self):
        # Use the ORM-based extractor instead of the generic SQL one.
        return MonosiSourceExtractor(self.configuration)
66 |
67 |
--------------------------------------------------------------------------------
/src/ingestion/sources/redshift.py:
--------------------------------------------------------------------------------
1 | import json
2 |
3 | from ingestion.sources.base import SQLAlchemyExtractor
4 |
5 | from .postgresql import PostgreSQLSource, PostgreSQLSourceConfiguration, PostgreSQLSourceDialect
6 |
class RedshiftSourceConfiguration(PostgreSQLSourceConfiguration):
    # Same settings as PostgreSQL; only the SQLAlchemy driver prefix differs.
    def _connection_string_prefix(self) -> str:
        return "redshift+psycopg2"

    @property
    def type(self):
        return "redshift"
14 |
class RedshiftSourceDialect(PostgreSQLSourceDialect):
    @classmethod
    def _freshness(cls):
        # Redshift flavor of the freshness metric SQL fragment: minutes since
        # the most recent value of the given timestamp column.
        return "DATEDIFF(MINUTE, MAX(CAST({} AS TIMESTAMP)), GETDATE())"
19 |
20 |
class RedshiftExtractor(SQLAlchemyExtractor):
    """Extractor with Redshift-specific session setup."""

    def _initialize(self):
        super()._initialize()
        # Required so quoted mixed-case identifiers resolve correctly.
        self._custom_execute("SET enable_case_sensitive_identifier TO true;")

    # TODO: This is a hacky solution, need to update _execute method in base to support non SELECT statements
    def _custom_execute(self, sql: str):
        # NOTE(review): the message is misleading when the connection was
        # simply never opened (not only when it was closed).
        if not self.connection:
            raise Exception("Connection has already been closed. Could not execute.")

        self.connection.execute(sql)
32 |
class RedshiftSource(PostgreSQLSource):
    def __init__(self, configuration: RedshiftSourceConfiguration):
        # NOTE(review): does not call super().__init__; assigns the attributes
        # directly and swaps in the Redshift dialect -- confirm this stays in
        # sync with PostgreSQLSource.__init__.
        self.configuration = configuration
        self.dialect = RedshiftSourceDialect

    def extractor(self):
        return RedshiftExtractor(self.configuration)
40 |
41 |
--------------------------------------------------------------------------------
/src/ingestion/task.py:
--------------------------------------------------------------------------------
1 | from dataclasses import dataclass
2 | from typing import Any, List
3 |
4 |
@dataclass
class TaskUnit:
    """A single unit of extraction work; `request` describes what to fetch."""
    request: Any

    def run(self, extractor):
        """Lazily yield the extractor's single result for this unit."""
        outcome = extractor.run(self)
        yield outcome
11 |
@dataclass
class MultiTaskUnit(TaskUnit):
    """A unit whose extractor call fans out over multiple requests."""
    request: Any

    def run(self, extractor):
        """Delegate to the extractor's multi-request entry point."""
        multi_result = extractor.run_multiple(self)
        return multi_result
18 |
@dataclass
class Task:
    """A batch of task units executed against a shared extractor."""
    units: List[TaskUnit]
    extractor: Any

    def _run_unit(self, unit: TaskUnit):
        # Every unit runs against the same shared extractor.
        return unit.run(self.extractor)

    def run(self):
        """Lazily yield each unit's result as it is executed."""
        yield from map(self._run_unit, self.units)
30 |
--------------------------------------------------------------------------------
/src/ingestion/transformers/__init__.py:
--------------------------------------------------------------------------------
1 | from .base import (
2 | Transformer
3 | )
4 |
5 | from .monosi import (
6 | AnomalyTransformer,
7 | IssueTransformer,
8 | MetricTransformer,
9 | MonitorTransformer,
10 | ZScoreTransformer,
11 | )
12 |
13 |
--------------------------------------------------------------------------------
/src/ingestion/transformers/base.py:
--------------------------------------------------------------------------------
1 | import logging
2 | from jsonschema import validate
3 | import abc
4 | import json
5 | import pyjq
6 |
class Transformer:
    """Base class for schema-validated JSON-to-JSON transformations."""

    @classmethod
    def match(cls, input, schema):
        """Return True when `input` validates against `schema` (jsonschema);
        validation errors are logged at INFO and reported as False."""
        try:
            validate(instance=input, schema=schema)
            return True
        except Exception as e:
            logging.info(e)
            return False

    # Modernized from the deprecated abc.abstractclassmethod to the
    # equivalent @classmethod + @abc.abstractmethod stacking.
    @classmethod
    @abc.abstractmethod
    def _original_schema(cls):
        # Schema to match against on input
        raise NotImplementedError("Transformer: Original Schema")

    @classmethod
    @abc.abstractmethod
    def _normalized_schema(cls):
        # Schema to match against on output
        # BUG FIX: corrected the "Nomralized" typo in the error message.
        raise NotImplementedError("Transformer: Normalized Schema")

    @classmethod
    @abc.abstractmethod
    def _transform(cls, input):
        # Actual transform logic
        raise NotImplementedError

    @classmethod
    def _after_transform(cls, input):
        """Optional post-processing hook; identity by default."""
        return input

    @classmethod
    def transform(cls, incoming_json):
        """Validate `incoming_json`, transform it, and post-process the result.

        Raises Exception when the input does not match _original_schema().
        """
        if not cls.match(incoming_json, cls._original_schema()):
            # BUG FIX: the message previously passed the schema and input as
            # extra positional Exception args, so "{}" was never interpolated.
            raise Exception(
                "Error: Can't transform, incoming JSON doesn't match schema.\n\nSchema: {}\n\nInput: {}".format(
                    cls._original_schema(), incoming_json
                )
            )

        outgoing_json = cls._transform(incoming_json)
        outgoing_json = cls._after_transform(outgoing_json)

        # TODO: Check transform worked properly
        # if cls.match(outgoing_json, cls._normalized_schema()) == False:
        #     raise Exception("Error: Can't transform, outgoing JSON doesn't match schema.\n\nSchema: {}\n\nOutgoing: {}", cls._normalized_schema(), outgoing_json)

        return outgoing_json
49 |
class JSONTransformer(Transformer):
    """Transformer whose mapping is expressed as a jq program (via pyjq)."""

    @abc.abstractclassmethod
    def _mapped_schema(cls):
        # jq program applied to the input document; subclasses must provide.
        raise NotImplementedError

    @classmethod
    def _transform(cls, input):
        # Round-trip through json to coerce non-JSON-native values (dates,
        # Decimals, ...) to strings (default=str) before handing to jq.
        json_input = json.loads(json.dumps(input, indent=4, sort_keys=True, default=str))
        transformed_results = pyjq.all(
            cls._mapped_schema(),
            json_input,
        )
        return transformed_results
63 |
64 |
--------------------------------------------------------------------------------
/src/ingestion/transformers/monosi/__init__.py:
--------------------------------------------------------------------------------
1 | from .anomalies import AnomalyTransformer
2 | from .issues import IssueTransformer
3 | from .metrics import MetricTransformer
4 | from .monitors import MonitorTransformer
5 | from .zscores import ZScoreTransformer
6 |
7 |
--------------------------------------------------------------------------------
/src/ingestion/transformers/monosi/anomalies.py:
--------------------------------------------------------------------------------
1 | from ingestion.transformers.base import Transformer
2 |
class AnomalyTransformer(Transformer):
    """Keeps only the zscore records that are flagged as errors."""

    @classmethod
    def _transform(cls, zscores):
        # A record is an anomaly exactly when its 'error' flag equals True.
        return [record for record in zscores if record['error'] == True]

    @classmethod
    def _original_schema(cls):
        # Input: a non-empty array of objects that carry an 'error' key.
        return {
            "type": "array",
            "items": {
                "type": "object",
                "required": ["error"]
            },
            "minItems": 1
        }

    @classmethod
    def _normalized_schema(cls):
        # Output: a non-empty array of objects whose 'error' is constant True.
        return {
            "type": "array",
            "items": {
                "type": "object",
                "properties": {
                    "error": {
                        "type": "boolean",
                        "const": True
                    },
                },
                "required": ["error"]
            },
            "minItems": 1
        }
35 |
--------------------------------------------------------------------------------
/src/ingestion/transformers/monosi/issues.py:
--------------------------------------------------------------------------------
1 | from ingestion.transformers.base import Transformer
2 |
class IssueTransformer(Transformer):
    """Converts anomaly records into human-readable issue dicts."""

    @classmethod
    def message_formatter(cls, anomaly):
        """Render the alert sentence for a single anomaly record."""
        return "Column {column_name} is alerting with a value of {value} on the metric {metric}.".format(
            column_name=anomaly['column_name'],
            value=anomaly['value'],
            metric=anomaly['metric'],
        )

    @classmethod
    def _transform(cls, anomalies):
        issues = []
        for anomaly in anomalies:
            # Fully-qualified entity path: database.schema.table.column
            entity = "{}.{}.{}.{}".format(anomaly['database'], anomaly['schema'], anomaly['table_name'], anomaly['column_name'])
            issues.append({
                'type': 'metric',
                'entity': entity,
                'message': cls.message_formatter(anomaly),
                'value': anomaly['value'],
                'created_at': anomaly['time_window_end'],
            })
        return issues

    @classmethod
    def _original_schema(cls):
        # Input: a non-empty array of (anomaly) objects.
        return {
            "type": "array",
            "items": {
                "type": "object",
            },
            "minItems": 1
        }

    @classmethod
    def _normalized_schema(cls):
        # Output: a non-empty array of (issue) objects.
        return {
            "type": "array",
            "items": {
                "type": "object",
            },
            "minItems": 1
        }
41 |
42 |
--------------------------------------------------------------------------------
/src/ingestion/transformers/monosi/metrics.py:
--------------------------------------------------------------------------------
1 | from uuid import uuid4
2 | from ingestion.transformers.base import JSONTransformer
3 |
class MetricTransformer(JSONTransformer):
    """Flattens a tabular query result ({rows, columns}) into one flat
    record per column/metric pair via a jq mapping."""

    @classmethod
    def _mapped_schema(cls): # TODO: Add table, schema, database
        # jq program: row keys shaped "column___metric" are exploded into
        # {metric, column_name, value} records, each stamped with the row's
        # WINDOW_START/WINDOW_END and table/database/schema metadata.
        return '.rows | .[] | { "metric": (to_entries | .[] | select((.key | split("___") | .[1]) != null) | { "name": .key | split("___") | .[1], "column": .key | split("___") | .[0], "value": .value }), "time_window_start": .WINDOW_START, "time_window_end": .WINDOW_END, "table_name": .TABLE_NAME, "database": .DATABASE_NAME, "schema": .SCHEMA_NAME } | { "metric": .metric.name, "column_name": .metric.column, "value": .metric.value, "time_window_start": .time_window_start, "time_window_end": .time_window_end, "table_name": .table_name, "database": .database, "schema": .schema }'

    @classmethod
    def _original_schema(cls):
        # Input: object with a non-empty "rows" array of objects and a
        # "columns" array of strings.
        return {
            "$schema":"http://json-schema.org/draft-04/schema#",
            "type":"object",
            "properties":{
                "rows":{
                    "type":"array",
                    "items": {
                        "type": "object"
                    },
                    "minItems": 1
                },
                "columns":{
                    "type":"array",
                    "items":{
                        "type":"string"
                    }
                }
            },
            "required":[
                "rows",
                "columns"
            ]
        }

    @classmethod
    def _normalized_schema(cls):
        # Output: non-empty array of flat metric records, each carrying an "id".
        return {
            "type": "array",
            "items": {
                "type": "object",
                "properties": {
                    "id": {"type": "string"},
                    "table_name": {"type": "string"},
                    "schema": {"type": "string"},
                    "database": {"type": "string"},
                    "column_name": {"type": "string"},
                    "metric": {"type": "string"},
                    "value": {"type": "string"},
                    "time_window_start": {"type": "string"},
                    "time_window_end": {"type": "string"}
                },
                "required": ["id"]
            },
            "minItems": 1
        }


    @classmethod
    def _after_transform(cls, input):
        # Stamp every record with a fresh hex UUID (the required "id" field).
        for metric in input:
            metric['id'] = uuid4().hex
        return input
63 |
64 |
--------------------------------------------------------------------------------
/src/scheduler/__init__.py:
--------------------------------------------------------------------------------
1 | import os
2 | import sys
3 |
# Make sibling packages under src/ importable when this package is loaded directly.
curr_path = os.path.dirname(os.path.abspath(__file__))
src_path = os.path.abspath(os.path.join(curr_path, os.pardir))
sys.path.append(src_path)
--------------------------------------------------------------------------------
/src/scheduler/api.py:
--------------------------------------------------------------------------------
1 | from flask_restful import Api
2 |
3 | import scheduler.handlers as handlers
4 |
class MsiSchedulerApi(Api):
    """Flask-RESTful API for the scheduler, with a versioned route prefix."""

    VERSION = 'v1'
    PREFIX = '{}/api'.format(VERSION)

    def __init__(self, app):
        # Register all scheduler resource handlers at construction time.
        super(MsiSchedulerApi, self).__init__(app)
        handlers.init_api(self)
12 |
def init_api(app):
    """Create and return the scheduler API bound to the given Flask app."""
    return MsiSchedulerApi(app)
15 |
--------------------------------------------------------------------------------
/src/scheduler/constants.py:
--------------------------------------------------------------------------------
# Numeric status codes for job executions, persisted in the database.
STATUS_SCHEDULED = 0
STATUS_RUNNING = 1
STATUS_STOPPING = 2
STATUS_STOPPED = 3
STATUS_FAILED = 4
STATUS_SUCCEEDED = 5
STATUS_TIMEOUT = 6
STATUS_SCHEDULED_ERROR = 7

# Human-readable names for the codes above, used in API responses.
STATUS_DICT = {
    STATUS_SCHEDULED: 'scheduled',
    STATUS_RUNNING: 'running',
    STATUS_STOPPING: 'stopping',
    STATUS_STOPPED: 'stopped',
    STATUS_FAILED: 'failed',
    STATUS_SUCCEEDED: 'succeeded',
    STATUS_TIMEOUT: 'timeout',
    STATUS_SCHEDULED_ERROR: 'scheduled error'
}
20 |
--------------------------------------------------------------------------------
/src/scheduler/db.py:
--------------------------------------------------------------------------------
1 | from sqlalchemy import create_engine
2 | from sqlalchemy.orm import sessionmaker
3 |
4 | from .models import mapper_registry
5 |
class SchedulerDatabase:
    """Thin wrapper around a SQLAlchemy engine for scheduler persistence."""

    def __init__(self, url):
        """Create the engine for `url` and ensure all mapped tables exist."""
        self.engine = self._create_engine(url)

    def _create_engine(self, url):
        """Build the engine and create any missing tables.

        Errors propagate to the caller; the previous try/except only
        re-raised the exception, so removing it changes no behavior.
        """
        engine = create_engine(url)
        mapper_registry.metadata.create_all(engine)
        return engine

    def add(self, obj):
        """Persist a single mapped object in its own short-lived session."""
        Session = sessionmaker(self.engine)
        with Session() as session:
            session.add(obj)
            session.commit()
24 |
25 |
--------------------------------------------------------------------------------
/src/scheduler/handlers/__init__.py:
--------------------------------------------------------------------------------
1 | from .executions import ExecutionsListResource, ExecutionsResource
2 | # from .jobs import JobListResource, JobResource
3 | # from .logs import LogListResources
4 |
5 |
def init_api(api):
    """Register the scheduler's resources under the API's version prefix."""
    api.add_resource(ExecutionsListResource, '/{}/executions'.format(api.PREFIX))
    # Fix: the single-resource route needs a URL parameter — ExecutionsResource.get
    # takes `obj_id`, but the route ended in a bare trailing slash, so the
    # argument was never supplied by Flask's router.
    api.add_resource(ExecutionsResource, '/{}/executions/<obj_id>'.format(api.PREFIX))

    # api.add_resource(JobListResource, '/{}/jobs'.format(api.PREFIX))
    # api.add_resource(JobResource, '/{}/jobs/'.format(api.PREFIX))

    # api.add_resource(LogListResource, '/{}/logs'.format(api.PREFIX))
14 |
--------------------------------------------------------------------------------
/src/scheduler/handlers/base.py:
--------------------------------------------------------------------------------
1 | from flask_restful import Resource
2 |
class BaseResource(Resource):
    """Common base for scheduler API resources."""

    def app_db(self):
        """Return the engine backing the shared scheduler job store."""
        # Imported lazily to avoid a circular import with scheduler.manager.
        from scheduler.manager import JobManager
        store = JobManager.jobstore()
        return store.engine
7 |
--------------------------------------------------------------------------------
/src/scheduler/handlers/executions.py:
--------------------------------------------------------------------------------
1 | from flask_restful import abort
2 | from sqlalchemy.orm import sessionmaker
3 |
4 | from scheduler import constants
5 | from scheduler.models.execution import Execution
6 |
7 | from .base import BaseResource
8 |
def _state_to_str(execution_dict):
    """Replace the numeric 'state' code with its readable name (mutates in place)."""
    code = execution_dict['state']
    execution_dict['state'] = constants.STATUS_DICT[code]
    return execution_dict
13 |
class ExecutionsListResource(BaseResource):
    """Read-only collection endpoint for all recorded executions."""

    def get(self):
        """Return every execution, with states rendered as readable names."""
        Session = sessionmaker(self.app_db())
        with Session() as session:
            rows = session.query(Execution).all()
            payload = [_state_to_str(row.to_dict()) for row in rows]

        return {"executions": payload}
22 |
23 |
24 | # TODO: Execute on demand
25 |
class ExecutionsResource(BaseResource):
    """Read-only endpoint for the executions belonging to one datasource."""

    def _retrieve_by_id(self, execution_id):
        """Fetch executions whose datasource_id matches `execution_id`.

        Aborts with 404 on any query failure. Narrowed from a bare
        `except:` so SystemExit/KeyboardInterrupt are no longer swallowed.
        """
        try:
            Session = sessionmaker(self.app_db())
            with Session() as session:
                executions = session.query(Execution).filter(Execution.datasource_id == execution_id).all()
        except Exception:
            abort(404)
        return executions

    def get(self, obj_id):
        """Return all executions for the given datasource id."""
        executions = self._retrieve_by_id(obj_id) # This is actually datasource_id

        return {"executions": [_state_to_str(execution.to_dict()) for execution in executions]}
40 |
--------------------------------------------------------------------------------
/src/scheduler/handlers/jobs.py:
--------------------------------------------------------------------------------
1 | from .base import BaseResource
2 |
3 | # TODO
# TODO
# NOTE(review): this handler references `utils` (get_job_name, get_job_args,
# get_cron_strings), which is never imported in this module, and `self.manager`,
# which BaseResource does not define — calling these methods raises NameError/
# AttributeError. The resource is also commented out of handlers/__init__.py,
# so it is currently dead code pending completion.
class JobListResource(BaseResource):
    def _get_jobs(self):
        """Returns a dictionary for all jobs info.
        It's a blocking operation.
        """
        jobs = self.manager.get_jobs()
        return_json = []
        for job in jobs:
            return_json.append(self._build_job_dict(job))
        return {'jobs': return_json}

    def _build_job_dict(self, job):
        """Transforms apscheduler's job structure to a python dictionary.
        :param Job job: An apscheduler.job.Job instance.
        :return: dictionary for job info
        :rtype: dict
        """
        # Paused jobs have no next_run_time; render them as an empty string.
        if job.next_run_time:
            next_run_time = job.next_run_time.isoformat()
        else:
            next_run_time = ''
        return_dict = {
            'job_id': job.id,
            'name': job.name,
            'next_run_time': next_run_time,
            'job_class_string': utils.get_job_name(job),
            'pub_args': utils.get_job_args(job)}

        return_dict.update(utils.get_cron_strings(job))
        return return_dict

    def get(self):
        return self._get_jobs()
37 |
--------------------------------------------------------------------------------
/src/scheduler/manager.py:
--------------------------------------------------------------------------------
1 | import logging
2 |
3 | from .api import init_api
4 | from .base import MsiScheduler
5 |
class JobManager:
    """Owns the scheduler instance and exposes job CRUD operations.

    The most recently constructed manager is kept in `JobManager.singleton`
    so other modules (e.g. API handlers) can reach the shared job store.
    """
    singleton = None

    def __init__(self, app=None, db_url=None):
        self.app = app
        self.scheduler = MsiScheduler(db_url=db_url)

        if app is not None:
            self.init_app(app)

        JobManager.singleton = self

    def init_app(self, app):
        """Bind the scheduler to the Flask app, register the API, and start it."""
        # Initialize scheduler
        self.scheduler.init_app(app)
        self.scheduler.app = app

        # Initialize API
        init_api(app)

        try:
            self.start()
            logging.info("The scheduler started successfully.")
        except Exception as e:
            # Fix: logging.warn is a deprecated alias for logging.warning.
            logging.warning("The scheduler failed to start.")
            raise e

    @classmethod
    def jobstore(cls):
        """Return the active manager's job store, or None before construction."""
        if cls.singleton is None:
            return

        return cls.singleton.scheduler.jobstore()

    def start(self):
        return self.scheduler.start()

    def stop(self):
        return self.scheduler.shutdown()

    def add_job(self, job_class_string, name=None, args=None, trigger='interval', minutes=60, job_id=None, **kwargs):
        """Schedule a job; defaults to an hourly interval trigger."""
        return self.scheduler.add_scheduler_job(job_class_string, job_id=job_id, name=name, job_args=args, trigger=trigger, minutes=minutes, **kwargs)

    def pause_job(self, job_id):
        return self.scheduler.pause_job(job_id)

    def get_job(self, job_id):
        return self.scheduler.get_job(job_id)

    def get_jobs(self):
        return self.scheduler.get_jobs()

    def remove_job(self, job_id):
        return self.scheduler.remove_job(job_id)

    def resume_job(self, job_id):
        return self.scheduler.resume_job(job_id)

    def add_listener(self, listener, events):
        return self.scheduler.add_listener(listener, events)

    def remove_listener(self, listener):
        return self.scheduler.remove_listener(listener)
70 |
--------------------------------------------------------------------------------
/src/scheduler/models/__init__.py:
--------------------------------------------------------------------------------
1 | from sqlalchemy.orm import registry
2 |
# Shared SQLAlchemy registry; models register their tables against its metadata.
mapper_registry = registry()
--------------------------------------------------------------------------------
/src/scheduler/models/execution.py:
--------------------------------------------------------------------------------
1 | from dataclasses import dataclass, field
2 | from datetime import datetime
3 | from mashumaro import DataClassDictMixin
4 | from sqlalchemy import Column, Integer, Sequence, Text, DateTime
5 | from typing import Optional
6 |
7 | from sqlalchemy.sql.functions import func
8 | from scheduler import constants
9 |
10 | from . import mapper_registry
11 |
@mapper_registry.mapped
@dataclass
class Execution(DataClassDictMixin):
    """One scheduler job execution row (table: msi_executions)."""

    job_id: str = field(metadata={"sa": Column(Text)})  # scheduler job identifier
    state: int = field(metadata={"sa": Column(Integer)})  # one of constants.STATUS_*
    result: Optional[str] = field(metadata={"sa": Column(Text, nullable=True)})

    # Surrogate key backed by the ds_id_seq sequence.
    id: int = field(default=None, metadata={"sa": Column(Integer, Sequence('ds_id_seq'), primary_key=True, autoincrement=True)})
    datasource_id: int = field(default=None, metadata={"sa": Column(Integer, nullable=True)})
    created_at: datetime = field(default=None, metadata={"sa": Column(DateTime(timezone=True), nullable=False, server_default=func.now())})
    # NOTE(review): updated_at only has a server_default, no onupdate — it is
    # never refreshed automatically on row updates; confirm that is intended.
    updated_at: datetime = field(default=None, metadata={"sa": Column(DateTime(timezone=True), nullable=False, server_default=func.now())})

    __tablename__ = "msi_executions"
    __sa_dataclass_metadata_key__ = "sa"
26 |
--------------------------------------------------------------------------------
/src/server/__init__.py:
--------------------------------------------------------------------------------
1 | from flask import Flask
2 | from flask_cors import CORS
3 | import os
4 | import sys
5 |
# Make sibling packages under src/ importable before the server imports them.
curr_path = os.path.dirname(os.path.abspath(__file__))
src_path = os.path.abspath(os.path.join(curr_path, "../"))
sys.path.append(src_path)
9 |
10 | from server.middleware import middleware
11 | from telemetry.events import set_user_id, track_event
12 |
13 |
def create_app():
    """Build and configure the Flask application.

    Applies CORS to the API routes, loads config from APP_SETTINGS (falling
    back to server.config.Config), runs all middleware initializers, and
    records a server_start telemetry event.
    """
    # Initialize Server
    app = Flask(__name__)
    CORS(app, resources={r"/v1/api/*": {"origins": "*"}})
    app_settings = os.getenv(
        'APP_SETTINGS',
        'server.config.Config'
    )
    app.config.from_object(app_settings)

    # Middleware: run each initializer. (Idiom fix: was a list comprehension
    # used only for its side effects.)
    for init_func in middleware:
        init_func(app)

    # Deferred import: server.models relies on the path/config set up above.
    from server.models import User
    user = User.create_or_load()
    set_user_id(user.id, user.email)
    track_event(action="server_start")

    return app
33 |
--------------------------------------------------------------------------------
/src/server/config.py:
--------------------------------------------------------------------------------
1 | import os
2 | from apscheduler.jobstores.sqlalchemy import SQLAlchemyJobStore
3 |
4 |
def monosi_home_dir():
    """Return the path to the user's ~/.monosi directory, creating it if missing."""
    home = os.path.expanduser("~/.monosi")
    if not os.path.exists(home):
        os.makedirs(home)
    return home
11 |
# Metadata-store connection settings, read from the environment.
# NOTE(review): unset variables yield None, producing a URI like
# "postgresql://None:None@None:None/None" — confirm deployment always sets these.
db_config = {
    "type": "postgresql",
    "user": os.getenv('DB_USER'),
    "password": os.getenv('DB_PASSWORD'),
    "host": os.getenv('DB_HOST'),
    "port": os.getenv('DB_PORT'),
    "database": os.getenv('DB_DATABASE'),
    "schema": os.getenv('DB_SCHEMA'),
}
21 |
class BaseConfig:
    """Base configuration."""
    SQLALCHEMY_TRACK_MODIFICATIONS = False
    SECRET_KEY = os.getenv('SECRET_KEY', 'my_precious')  # NOTE(review): weak default; override in production
    DEBUG = False
    # Bug fix: bool() of any non-empty env string is True, so SERVE_UI=false
    # still enabled the UI. Parse explicit truthy strings instead; the unset
    # default (boolean False -> "false") stays disabled as before.
    SERVE_UI = str(os.getenv('SERVE_UI', False)).lower() in ('1', 'true', 'yes')
    SQLALCHEMY_DATABASE_URI = "{type}://{user}:{password}@{host}:{port}/{database}".format(
        type=db_config['type'],
        user=db_config['user'],
        password=db_config['password'],
        host=db_config['host'],
        port=db_config['port'],
        database=db_config['database'],
    )
    # Persistent APScheduler job store sharing the server's database.
    SCHEDULER_JOBSTORES = {"default": SQLAlchemyJobStore(url=SQLALCHEMY_DATABASE_URI, tablename="msi_jobs")}
    SCHEDULER_API_ENABLED = True
38 |
class DevelopmentConfig(BaseConfig):
    """Development configuration."""
    DEBUG = True

class TestingConfig(BaseConfig):
    """Testing configuration."""
    DEBUG = True
    TESTING = True

class ProductionConfig(BaseConfig):
    """Production configuration."""
    SECRET_KEY = 'my_precious'  # NOTE(review): hard-coded secret in production; should come from the environment
    DEBUG = False

# Default to development unless FLASK_ENV explicitly selects production.
Config = DevelopmentConfig
if os.getenv('FLASK_ENV') == "production":
    Config = ProductionConfig
56 |
--------------------------------------------------------------------------------
/src/server/handlers/__init__.py:
--------------------------------------------------------------------------------
1 | from .datasources import (
2 | DataSourceListResource,
3 | DataSourceTestResource,
4 | DataSourceResource,
5 | )
6 | from .integrations import (
7 | IntegrationListResource,
8 | IntegrationResource,
9 | )
10 | from .issues import (
11 | IssueListResource,
12 | )
13 | from .metrics import (
14 | MetricListResource,
15 | )
16 | from .monitors import (
17 | MonitorListResource,
18 | MonitorResource,
19 | )
20 | from .users import UserResource
21 |
def init_api(api):
    """Register all server REST resources under the API's version prefix.

    NOTE(review): several single-resource routes below end in a bare trailing
    slash or contain a double slash ("//") where a URL parameter such as
    "<obj_id>" would be expected — the parameter looks lost (e.g. stripped
    markup); confirm against the CrudResource handler signatures and restore.
    """
    api.add_resource(IntegrationListResource, '/{}/integrations'.format(api.PREFIX))
    api.add_resource(IntegrationResource, '/{}/integrations/'.format(api.PREFIX))

    api.add_resource(IssueListResource, '/{}/issues'.format(api.PREFIX))

    api.add_resource(DataSourceListResource, '/{}/datasources'.format(api.PREFIX))
    api.add_resource(DataSourceResource, '/{}/datasources/'.format(api.PREFIX))
    api.add_resource(DataSourceTestResource, '/{}/datasources//test'.format(api.PREFIX))

    api.add_resource(MonitorListResource, '/{}/monitors'.format(api.PREFIX))
    api.add_resource(MonitorResource, '/{}/monitors/'.format(api.PREFIX))

    api.add_resource(MetricListResource, '/{}/monitors//metrics'.format(api.PREFIX))

    api.add_resource(UserResource, '/{}/users'.format(api.PREFIX))
38 |
39 |
--------------------------------------------------------------------------------
/src/server/handlers/integrations.py:
--------------------------------------------------------------------------------
1 | from server.models import Integration
2 |
3 | from .base import CrudResource, ListResource
4 |
class IntegrationResource(CrudResource):
    """CRUD endpoint for a single integration record."""

    @property
    def resource(self):
        # Model class this resource operates on.
        return Integration

    @property
    def key(self):
        # JSON envelope key for single-object responses.
        return "integration"
13 |
class IntegrationListResource(ListResource):
    """Collection endpoint for integrations; validates creation payloads."""

    @property
    def resource(self):
        return Integration

    @property
    def key(self):
        return "integrations"

    def _validate(self, req):
        """Return True when `req` deserializes into an Integration, else False."""
        try:
            Integration.from_dict(req)
        except Exception:
            return False
        return True
29 |
30 |
31 |
--------------------------------------------------------------------------------
/src/server/handlers/issues.py:
--------------------------------------------------------------------------------
1 | from server.handlers.base import ListResource
2 | from server.models import Issue
3 |
4 |
class IssueListResource(ListResource):
    """Read-only collection endpoint for detected issues."""

    @property
    def resource(self):
        # Model class this resource operates on.
        return Issue

    @property
    def key(self):
        # JSON envelope key for list responses.
        return "issues"
13 |
14 |
--------------------------------------------------------------------------------
/src/server/handlers/monitors.py:
--------------------------------------------------------------------------------
1 | from flask_restful import abort
2 | from sqlalchemy import func
3 |
4 | from server.models import Metric, Monitor
5 | from server.middleware.db import db
6 |
7 | from .base import CrudResource, ListResource
8 |
class MonitorListResource(ListResource):
    """Collection endpoint joining monitors with their distinct metric counts."""

    @property
    def resource(self):
        # Bug fix: `raise NotImplemented` raised a TypeError at call time
        # (NotImplemented is a constant, not an exception class); _all()
        # below bypasses the default resource-based query anyway.
        raise NotImplementedError

    @property
    def key(self):
        return "monitors"

    @staticmethod
    def _transform(obj):
        """Convert a (metric-count, *monitor-columns) row tuple into a response dict."""
        metrics = obj[0] # hack
        if obj[0] == 1:
            metrics = 0

        return {
            'metrics': metrics,
            'id': obj[1],
            'table_name': obj[2],
            'database': obj[3],
            'schema': obj[4],
            'type': obj[5],
            'source': obj[6],
            'workspace': obj[7],
            'created_at': obj[8].strftime("%b %d, %Y %H:%M:%S"),
            'timestamp_field': obj[9],
        }

    def _all(self):
        """Query monitors left-joined to their metrics, grouped per monitor."""
        try:
            objs = db.session.query(
                func.count(func.concat(Metric.metric, Metric.column_name).distinct()),
                Monitor.id,
                Monitor.table_name,
                Monitor.database,
                Monitor.schema,
                Monitor.type,
                Monitor.source,
                Monitor.workspace,
                Monitor.created_at,
                Monitor.timestamp_field,
            ).outerjoin(
                Metric,
                (Monitor.table_name==Metric.table_name) &
                (Monitor.database==Metric.database) &
                (Monitor.schema==Metric.schema)
            ).group_by(
                Monitor.id,
                Monitor.table_name,
                Monitor.database,
                Monitor.schema,
                Monitor.type,
                Monitor.source,
                Monitor.workspace,
                Monitor.created_at,
                Monitor.timestamp_field,
            ).all()
        except Exception:
            # Narrowed from a bare except so interrupts are not swallowed.
            abort(500)
        return [self._transform(obj) for obj in objs]

    def post(self): # Disable creation
        """Monitors cannot be created through this API."""
        abort(500)
72 |
73 |
class MonitorResource(CrudResource):
    """CRUD endpoint for a single monitor."""

    @property
    def resource(self):
        # Model class this resource operates on.
        return Monitor

    @property
    def key(self):
        # JSON envelope key for single-object responses.
        return "monitor"
82 |
--------------------------------------------------------------------------------
/src/server/handlers/users.py:
--------------------------------------------------------------------------------
1 | import logging
2 | from flask_restful import abort, request
3 | from sqlalchemy.exc import IntegrityError
4 |
5 | from server.models import User
6 |
7 | from .base import BaseResource
8 |
9 |
class UserResource(BaseResource):
    """Singleton user endpoint: fetch or update the local user record."""

    @property
    def resource(self):
        return User

    @property
    def key(self):
        return "user"

    def get(self):
        """Return the user, creating the record on first access."""
        user = User.create_or_load()

        return {self.key: user.to_dict()}

    def post(self):
        """Update the user from the request's JSON body."""
        payload = request.get_json(force=True)

        try:
            user = User.update(payload)
        except IntegrityError as e:
            # Constraint violation in the update -> unprocessable entity.
            logging.error(e)
            abort(422)
        except Exception as e:
            logging.error(e)
            abort(500)

        return {self.key: user.to_dict()}, 200
37 |
--------------------------------------------------------------------------------
/src/server/integrations/__init__.py:
--------------------------------------------------------------------------------
1 | from .slack import SlackIntegration
2 | from .webhook import WebhookIntegration
3 |
4 |
--------------------------------------------------------------------------------
/src/server/jobs/analysis.py:
--------------------------------------------------------------------------------
1 | from dataclasses import dataclass, field
2 | import json
3 | import logging
4 | from typing import List
5 | from ingestion.destinations import Destination
6 | from server.models import Metric
7 | from server.middleware.db import db
8 |
class ZScoreShim(Destination):
    """Destination shim: whenever new metrics arrive, re-runs the z-score
    pipeline over all stored metrics for the same database/schema."""

    def _retrieve_metrics_by_attrs(self, database, schema):
        # Load every stored metric for this database/schema pair.
        metrics = db.session.query(Metric).filter( # TODO: Optimize
            Metric.database == database,
            Metric.schema == schema,
        ).all()

        metrics_dict = [metric.to_dict() for metric in metrics]
        # Round-trips through JSON — presumably to coerce values into
        # JSON-native types; TODO confirm (would fail on non-serializable fields).
        return json.loads(json.dumps(metrics_dict))

    def push(self, data):
        # Nothing to analyze for an empty batch.
        if len(data) == 0:
            return

        # Assumes all rows in one push share the first row's database/schema.
        database = data[0]['database']
        schema = data[0]['schema']

        metrics = self._retrieve_metrics_by_attrs(database, schema)
        # Deferred import avoids a circular dependency with server.pipeline.
        from server.pipeline import zscores_pipeline
        zscores_pipeline.push(metrics)

# Module-level instance used as the analysis activator; '{}' is presumably an
# empty JSON configuration — confirm against Destination.__init__.
activator = ZScoreShim('{}')
31 |
32 |
--------------------------------------------------------------------------------
/src/server/jobs/base.py:
--------------------------------------------------------------------------------
1 | from abc import abstractmethod
2 | from ingestion.collector import Collector
3 |
4 | from scheduler import job
5 | from telemetry.events import track_event
6 |
7 | from server.models import DataSource
8 | from server.middleware.db import db
9 |
10 |
class CollectorJob(job.JobBase):
    """Scheduler job that runs an ingestion collector against a datasource."""

    MAX_RETRIES = 3
    TIMEOUT = 10

    def _retrieve_source_configuration(self, source_id):
        """Load the datasource row and build its collector configuration dict."""
        source = db.session.query(DataSource).filter(DataSource.id == source_id).one()
        configuration = source.config

        configuration['type'] = source.type
        # Incremental collection: start from the last run's timestamp, if any.
        configuration['start_date'] = str(self.last_run) if self.last_run else None

        return configuration

    def _create_collector(self, source, pipelines, configuration):
        """Assemble a Collector from the source dict, pipelines, and config."""
        return Collector.from_configuration(
            source_dict=source,
            pipelines=pipelines,
            configuration=configuration,
        )

    @classmethod
    def meta_info(cls):
        """Describe this job class for the scheduler's job registry."""
        return {
            'job_class_string': '%s.%s' % (cls.__module__, cls.__name__),
            'notes': ('This ingests metadata from the source specified.'),
            'arguments': [
                {'type': 'integer', 'description': 'The ID of the datasource from which to ingest metadata.'},
            ],
        }

    @abstractmethod
    def pipelines(self):
        """Subclasses return the list of pipelines to feed."""
        raise NotImplementedError

    @abstractmethod
    def configuration(self):
        """Subclasses return the collector configuration dict."""
        raise NotImplementedError

    def run(self, datasource_id, *args, **kwargs):
        """Execute the collection for a single datasource."""
        track_event(action="metadata_ingestion_start", label="server")

        source = self._retrieve_source_configuration(datasource_id)

        collector = self._create_collector(source, self.pipelines(), self.configuration())
        collector.run()
57 |
58 |
--------------------------------------------------------------------------------
/src/server/jobs/schema.py:
--------------------------------------------------------------------------------
1 | from server.pipeline import monitors_pipeline
2 |
3 | from .base import CollectorJob
4 |
5 |
class SchemaCollectorJob(CollectorJob):
    """Collector job that ingests schema-type monitors."""

    def pipelines(self):
        # Results feed only the monitors pipeline.
        return [monitors_pipeline]

    def configuration(self):
        # A single schema monitor is always run.
        return {'monitors': [{'type': 'schema'}]}
12 |
13 |
--------------------------------------------------------------------------------
/src/server/jobs/table_health.py:
--------------------------------------------------------------------------------
1 | import logging
2 | from server.models import Monitor
3 | from server.pipeline import metrics_pipeline
4 | from server.middleware.db import db
5 |
6 | from .base import CollectorJob
7 |
8 |
class TableHealthCollectorJob(CollectorJob):
    """Collector job running table-health checks for every stored monitor."""

    def pipelines(self):
        # Results feed only the metrics pipeline.
        return [metrics_pipeline]

    def configuration(self):
        """Build the collector config from all monitors in the database.

        Falls back to an empty monitor list when the query fails, so the
        job degrades gracefully rather than crashing.
        """
        try:
            monitors = db.session.query(Monitor).all()
            monitors = [{'type': 'table_health', 'definition': monitor.to_dict()} for monitor in monitors]
        except Exception as e:
            # Fix: logging.warn is a deprecated alias; also surface the cause.
            logging.warning("Could not load the monitors.")
            logging.warning(e)
            monitors = []

        return { 'monitors': monitors }
22 |
23 |
24 |
--------------------------------------------------------------------------------
/src/server/middleware/__init__.py:
--------------------------------------------------------------------------------
1 | from .api import init_api
2 | from .db import init_db
3 | from .scheduler import init_scheduler
4 | from .ui import init_ui
5 |
# Initializers applied to the Flask app, in this order, by server.create_app.
middleware = [
    init_api,
    init_db,
    init_scheduler,
    init_ui,
]
--------------------------------------------------------------------------------
/src/server/middleware/api.py:
--------------------------------------------------------------------------------
1 | from flask_restful import Api
2 |
3 | import server.handlers as handlers
4 |
class MsiApi(Api):
    """Flask-RESTful API for the server, with a versioned route prefix."""

    VERSION = 'v1'
    PREFIX = '{}/api'.format(VERSION)

    def __init__(self, app):
        # Register all server resource handlers at construction time.
        super(MsiApi, self).__init__(app)
        handlers.init_api(self)
12 |
def init_api(app):
    """Create and return the server API bound to the given Flask app."""
    return MsiApi(app)
15 |
--------------------------------------------------------------------------------
/src/server/middleware/db.py:
--------------------------------------------------------------------------------
1 | from flask_sqlalchemy import SQLAlchemy
2 | from sqlalchemy.orm import declarative_base
3 |
4 | from server.models import mapper_registry
5 |
# Declarative base bound to the shared mapper registry, so Flask-SQLAlchemy
# models and the plain-SQLAlchemy models use the same metadata.
Base = declarative_base(metadata=mapper_registry.metadata)
db = SQLAlchemy(model_class=Base)
8 |
def init_db(app):
    """Bind the shared SQLAlchemy instance to `app` and create missing tables."""
    db.init_app(app)
    # NOTE(review): assigning db.app is a legacy Flask-SQLAlchemy pattern —
    # confirm it is still required by the installed version.
    db.app = app

    with app.app_context():
        db.create_all()

    return db
17 |
--------------------------------------------------------------------------------
/src/server/middleware/scheduler.py:
--------------------------------------------------------------------------------
1 | import logging
2 | from scheduler.manager import JobManager
3 |
4 | from server.config import Config
5 |
6 |
# Initialize scheduler process
# Module-level singleton: constructed at import time so a single scheduler
# exists for the process before any Flask app binds to it.
manager = JobManager(db_url=Config.SQLALCHEMY_DATABASE_URI)
9 |
def init_scheduler(app):
    """Attach the module-level scheduler manager to `app`.

    Tolerates repeated initialization (e.g. Flask reloader) by logging
    instead of raising.
    """
    try:
        manager.init_app(app)
    except Exception:
        # Narrowed from a bare except; logging.warn is a deprecated alias.
        logging.warning("Scheduler already started.")
    return manager
16 |
17 |
--------------------------------------------------------------------------------
/src/server/middleware/ui.py:
--------------------------------------------------------------------------------
1 | import os
2 | import sys
3 |
4 | from flask import send_from_directory
5 |
# Absolute path of src/, used below to locate the built UI assets.
curr_path = os.path.dirname(os.path.abspath(__file__))
src_path = os.path.abspath(os.path.join(curr_path, "../"))
8 |
def _build_path():
    """Return the client build directory, raising when the UI was never built."""
    build_dir = os.path.join(src_path, 'ui/build/')
    if os.path.exists(build_dir):
        return build_dir

    raise Exception("Client UI was not built before attempting to serve via Flask.")
15 |
def _serve_ui(path=''):
    """Serve a file from the UI build, falling back to index.html (SPA routing)."""
    build_path = _build_path()
    requested = os.path.join(build_path, path)

    # Unknown or root paths fall through to the SPA entry point.
    if requested == build_path or not os.path.exists(requested):
        path = "index.html"

    return send_from_directory(build_path, path)
24 |
def init_ui(app):
    """Serve the built client UI from Flask when SERVE_UI is enabled."""
    if not app.config['SERVE_UI']:
        return

    app.static_folder = os.path.join(_build_path(), "static")
    app.add_url_rule("/", view_func=_serve_ui)
    # Bug fix: this second rule duplicated "/" — it must be the SPA catch-all
    # so client-side routes fall through to _serve_ui (and thus index.html).
    app.add_url_rule("/<path:path>", view_func=_serve_ui)
32 |
--------------------------------------------------------------------------------
/src/server/wsgi.py:
--------------------------------------------------------------------------------
import os
import sys

# Make src/ importable so `server` resolves when launched by a WSGI server
# (gunicorn/uwsgi) from an arbitrary working directory.
curr_path = os.path.dirname(os.path.abspath(__file__))
src_path = os.path.abspath(os.path.join(curr_path, "../"))
sys.path.append(src_path)

# WSGI entry point: the application object the WSGI server serves.
from server import create_app
app = create_app()
10 |
--------------------------------------------------------------------------------
/src/telemetry/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/monosidev/monosi/a88b689fc74010b10dbabb32f4b2bdeae865f4d5/src/telemetry/__init__.py
--------------------------------------------------------------------------------
/src/telemetry/events.py:
--------------------------------------------------------------------------------
1 | import os
2 | from snowplow_tracker import (
3 | AsyncEmitter,
4 | Emitter,
5 | Subject,
6 | Tracker,
7 | logger as snowplow_logger,
8 | )
9 | import logging
10 |
# Silence snowplow's internal logging entirely (level 100 is above CRITICAL).
snowplow_logger.setLevel(100)
SNOWPLOW_URL = "monosi-spipeline-collector-lb-1716143593.us-west-2.elb.amazonaws.com"
# Stats are enabled unless the env var is exactly the string "false"
# (any other value — including unset, which yields the boolean True — enables them).
send_anonymous_stats = os.getenv('SEND_ANONYMOUS_STATS', True) != "false"

# Asynchronous emitter so event sends never block the caller.
e = AsyncEmitter(
    SNOWPLOW_URL,
    protocol="http",
)
tracker = Tracker(
    e,
    namespace="cf",
    app_id="monosi",
)
24 |
def set_user_id(user_id, email=None):
    """Attach the user id (and email, as the domain user id) to all future events."""
    subject = Subject()
    subject.set_user_id(user_id)
    subject.set_domain_user_id(email)
    tracker.set_subject(subject)
30 |
def track_event(*args, **kwargs):
    """Send an anonymous usage event; no-op when stats are disabled.

    Never raises: telemetry failures are logged and swallowed so they
    cannot break the caller.
    """
    # Idiom fix: test truthiness rather than comparing `== False`.
    if not send_anonymous_stats:
        return

    try:
        tracker.track_struct_event(
            action=kwargs.get('action') or '',
            label=kwargs.get('label') or '',
            category='monosi',
        )
    except Exception as e:
        logging.error("Failed to send anonymous usage stats.")
        logging.error(e)
44 |
45 |
--------------------------------------------------------------------------------
/src/ui/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "msi-ui",
3 | "version": "0.0.3.post2",
4 | "private": true,
5 | "dependencies": {
6 | "@elastic/charts": "^31.1.0",
7 | "@elastic/datemath": "^5.0.3",
8 | "@elastic/eui": "^34.6.0",
9 | "@testing-library/jest-dom": "^5.11.4",
10 | "@testing-library/react": "^11.1.0",
11 | "@testing-library/user-event": "^12.1.10",
12 | "@types/date-fns": "^2.6.0",
13 | "@types/jest": "^26.0.15",
14 | "@types/js-yaml": "^4.0.4",
15 | "@types/node": "^12.0.0",
16 | "@types/react": "^17.0.0",
17 | "@types/react-bootstrap-table-next": "^4.0.17",
18 | "@types/react-bootstrap-table2-paginator": "^2.1.2",
19 | "@types/react-bootstrap-table2-toolkit": "^2.1.6",
20 | "@types/react-dom": "^17.0.0",
21 | "@types/react-plotly.js": "^2.5.0",
22 | "@types/react-router-dom": "^5.3.1",
23 | "bootstrap": "5.1.3",
24 | "date-fns": "^2.28.0",
25 | "js-yaml": "^4.1.0",
26 | "moment": "^2.29.1",
27 | "moment-timezone": "^0.5.33",
28 | "plotly.js": "^2.9.0",
29 | "prop-types": "^15.7.2",
30 | "react": "^17.0.2",
31 | "react-bootstrap": "^2.1.2",
32 | "react-bootstrap-icons": "^1.7.2",
33 | "react-bootstrap-table-next": "^4.0.3",
34 | "react-bootstrap-table2-paginator": "^2.1.2",
35 | "react-bootstrap-table2-toolkit": "^2.1.3",
36 | "react-dom": "^17.0.2",
37 | "react-plotly.js": "^2.5.1",
38 | "react-router": "^5.2.1",
39 | "react-router-dom": "^5.3.0",
40 | "react-scripts": "4.0.3",
41 | "react-spring": "^9.4.2",
42 | "typescript": "^4.1.2",
43 | "web-vitals": "^1.0.1"
44 | },
45 | "scripts": {
46 | "start": "react-scripts start",
47 | "build": "react-scripts build",
48 | "test": "react-scripts test",
49 | "eject": "react-scripts eject"
50 | },
51 | "eslintConfig": {
52 | "extends": [
53 | "react-app",
54 | "react-app/jest"
55 | ]
56 | },
57 | "browserslist": {
58 | "production": [
59 | ">0.2%",
60 | "not dead",
61 | "not op_mini all"
62 | ],
63 | "development": [
64 | "last 1 chrome version",
65 | "last 1 firefox version",
66 | "last 1 safari version"
67 | ]
68 | }
69 | }
70 |
--------------------------------------------------------------------------------
/src/ui/public/favicon.ico:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/monosidev/monosi/a88b689fc74010b10dbabb32f4b2bdeae865f4d5/src/ui/public/favicon.ico
--------------------------------------------------------------------------------
/src/ui/public/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
12 |
13 |
17 |
18 |
27 | Monosi
28 |
29 |
30 | You need to enable JavaScript to run this app.
31 |
32 |
42 |
43 |
44 |
--------------------------------------------------------------------------------
/src/ui/public/manifest.json:
--------------------------------------------------------------------------------
1 | {
2 | "short_name": "Monosi",
3 | "name": "Monosi - Open Source Data Observability Platform",
4 | "icons": [],
21 | "start_url": ".",
22 | "display": "standalone",
23 | "theme_color": "#000000",
24 | "background_color": "#ffffff"
25 | }
26 |
--------------------------------------------------------------------------------
/src/ui/public/robots.txt:
--------------------------------------------------------------------------------
1 | # https://www.robotstxt.org/robotstxt.html
2 | User-agent: *
3 | Disallow:
4 |
--------------------------------------------------------------------------------
/src/ui/src/App.css:
--------------------------------------------------------------------------------
/* Lay out the EUI page-header content of the setup page horizontally. */
.setupheader > .euiPageHeaderContent {
  display: flex;
}
/* Hide the EUI search-bar input for tables that opt out of searching. */
.searchDisplayNone .euiSearchBar__searchHolder {
  display: none;
}
--------------------------------------------------------------------------------
/src/ui/src/components/Flyout/flyout.css:
--------------------------------------------------------------------------------
/* Fix the width of the right-hand offcanvas flyout (react-bootstrap). */
.offcanvas-end {
  width: 500px;
}
--------------------------------------------------------------------------------
/src/ui/src/components/Flyout/index.tsx:
--------------------------------------------------------------------------------
1 | import React, { useState } from 'react';
2 | import {
3 | Button,
4 | Offcanvas,
5 | } from 'react-bootstrap';
6 |
7 | import './flyout.css';
8 |
9 | const Flyout = ({name, form}: any) => {
10 | const [isFlyoutVisible, setIsFlyoutVisible] = useState(false);
11 |
12 | const handleClose = () => setIsFlyoutVisible(false);
13 | const handleShow = () => setIsFlyoutVisible(true);
14 |
15 | return (
16 | <>
17 |
18 | Create {name}
19 |
20 |
21 |
25 |
26 | Create {name}
27 |
28 |
29 | {form}
30 |
31 |
32 | >
33 | );
34 | }
35 |
36 | export default Flyout;
37 |
--------------------------------------------------------------------------------
/src/ui/src/components/Page/bootstrap_page.css:
--------------------------------------------------------------------------------
.faux-body {
  min-height: 100vh;
}

/* Feather icon sizing (defined once; an identical duplicate was removed). */
.feather {
  width: 16px;
  height: 16px;
  vertical-align: text-bottom;
}

/*
 * Sidebar
 */

.sidebar {
  position: fixed;
  top: 0;
  /* rtl:raw:
  right: 0;
  */
  bottom: 0;
  /* rtl:remove */
  left: 0;
  z-index: 100; /* Behind the navbar */
  padding-left: 0 !important;
}

#sidebarMenu {
  width: 96px;
}


.b-example-divider {
  flex-shrink: 0;
  width: 1.5rem;
  height: 100vh;
  background-color: rgba(0, 0, 0, .1);
  border: solid rgba(0, 0, 0, .15);
  border-width: 1px 0;
  box-shadow: inset 0 .5em 1.5em rgba(0, 0, 0, .1), inset 0 .125em .5em rgba(0, 0, 0, .15);
}

@media (max-width: 767.98px) {
  .sidebar {
    top: 5rem;
  }
}

.bi {
  vertical-align: -.125em;
  pointer-events: none;
  fill: currentColor;
}

/* Square off nav links. The file previously also declared this selector
   without !important; that copy could never take effect and was removed. */
.nav-flush .nav-link {
  border-radius: 0 !important;
}

.sidebar-sticky {
  position: relative;
  top: 0;
  height: calc(100vh - 48px);
  padding-top: .5rem;
  overflow-x: hidden;
  overflow-y: auto; /* Scrollable contents if viewport is shorter than content. */
}

/* Brand palette: purple links/buttons with a darker hover/focus state. */
a, .nav-link, a.page-link {
  color: #7344d2;
}

a:hover, a:focus, a.page-link:hover, a.page-link:focus, .nav-link:focus, .nav-link:hover {
  color: #270d5b;
}

.btn-primary:hover, .btn-primary:focus {
  background-color: #270d5b
}

.btn-primary, .nav-pills .nav-link.active, .nav-pills .show>.nav-link {
  background-color: #7344d2;
}

.list-group-item.active {
  background-color: #7344d2;
  border-color: #7344d2;
}

/* Sort caret for table headers; the `solid \9` lines are legacy IE9 hacks
   kept byte-for-byte. */
thead .caret {
  display: inline-block;
  width: 0;
  height: 0;
  margin-left: 2px;
  vertical-align: middle;
  border-top: 4px dashed;
  border-top: 4px solid \9;
  border-right: 4px solid transparent;
  border-left: 4px solid transparent;
}

thead .dropup .caret, .navbar-fixed-bottom .dropdown .caret {
  border-top: 0;
  border-bottom: 4px dashed;
  border-bottom: 4px solid \9;
  content: "";
}
--------------------------------------------------------------------------------
/src/ui/src/components/Page/index.tsx:
--------------------------------------------------------------------------------
1 | import React, { ReactNode } from 'react';
2 |
3 | import Navigation from 'components/Navigation';
4 |
5 | import 'bootstrap/dist/css/bootstrap.min.css';
6 | import './bootstrap_page.css';
7 |
8 | interface PageProps {
9 | children?: ReactNode;
10 | selectedTab: string
11 | }
12 |
13 | const Page: React.FC = ({ children, selectedTab }) => {
14 | return (
15 |
16 |
17 |
18 |
19 | {children}
20 |
21 |
22 |
23 | );
24 | };
25 |
26 | export default Page;
27 |
--------------------------------------------------------------------------------
/src/ui/src/components/forms/ProfileForm/index.tsx:
--------------------------------------------------------------------------------
1 | import React, { useState, useEffect } from 'react';
2 |
3 | import {
4 | EuiFlexGroup,
5 | EuiFlexItem,
6 | EuiFormRow,
7 | EuiFieldText,
8 | } from '@elastic/eui';
9 |
10 |
11 | const ProfileForm: React.FC = () => {
12 | const [email, _] = useState('');
13 |
14 | return (
15 |
16 |
17 |
18 |
19 |
24 |
25 |
26 |
27 |
28 | );
29 | };
30 |
31 | export default ProfileForm;
32 |
33 |
--------------------------------------------------------------------------------
/src/ui/src/images/BigQueryLogo.tsx:
--------------------------------------------------------------------------------
1 | import * as React from 'react';
2 |
3 | const BigQueryLogo = ({ height = 400, width = 400, ...props }) => (
4 |
11 |
19 |
20 |
21 |
22 |
26 |
31 |
32 |
33 |
34 |
35 |
36 | );
37 |
38 | export default BigQueryLogo;
39 |
--------------------------------------------------------------------------------
/src/ui/src/images/PagerDutyLogo.tsx:
--------------------------------------------------------------------------------
1 | import * as React from 'react';
2 |
3 | const PagerDutyLogo = (props: any) => (
4 |
11 |
12 |
13 |
14 | {'PagerDuty Logo'}
15 |
21 |
25 |
26 | );
27 |
28 | export default PagerDutyLogo;
29 |
--------------------------------------------------------------------------------
/src/ui/src/images/WebhookLogo.tsx:
--------------------------------------------------------------------------------
1 | import * as React from 'react';
2 |
3 | const WebhookLogo = (props: any) => (
4 |
17 |
18 |
19 | );
20 |
21 | export default WebhookLogo;
22 |
--------------------------------------------------------------------------------
/src/ui/src/images/index.ts:
--------------------------------------------------------------------------------
1 | import PagerDutyLogo from './PagerDutyLogo';
2 | import WebhookLogo from './WebhookLogo';
3 | import BigQueryLogo from './BigQueryLogo';
4 |
5 | export { PagerDutyLogo, WebhookLogo, BigQueryLogo };
6 |
--------------------------------------------------------------------------------
/src/ui/src/index.tsx:
--------------------------------------------------------------------------------
1 | import React from 'react';
2 | import ReactDOM from 'react-dom';
3 | import App from './App';
4 | import reportWebVitals from './reportWebVitals';
5 |
6 | ReactDOM.render(
7 |
8 |
9 | ,
10 | document.getElementById('root')
11 | );
12 |
13 | // If you want to start measuring performance in your app, pass a function
14 | // to log results (for example: reportWebVitals(console.log))
15 | // or send to an analytics endpoint. Learn more: https://bit.ly/CRA-vitals
16 | reportWebVitals();
17 |
--------------------------------------------------------------------------------
/src/ui/src/pages/app/dashboard/Index/dashboard.css:
--------------------------------------------------------------------------------
/* Fixed-size, centered square container for dashboard icons. */
.icon-square {
  align-self: center;
  display: inline-flex;
  align-items: center;
  justify-content: center;
  width: 2.5rem;
  height: 2.5rem;
  font-size: 1.5rem;
  border-radius: 0.5rem;
  margin: 0.5rem;
}
--------------------------------------------------------------------------------
/src/ui/src/pages/app/executions/Index/components/JobsTable.tsx:
--------------------------------------------------------------------------------
1 | import React, { useState, useEffect } from 'react';
2 | import JobService from 'services/jobs';
3 |
4 | import BootstrapTable from "react-bootstrap-table-next";
5 | import paginationFactory from "react-bootstrap-table2-paginator";
6 |
7 | import ToolkitProvider from 'react-bootstrap-table2-toolkit';
8 | import { formatTimestamp } from 'utils/timestampFormatting';
9 |
10 | const JobsTable: React.FC = () => {
11 | const [jobs, setJobs] = useState([]);
12 |
13 | useEffect(() => {
14 | async function loadJobs() {
15 | let res = await JobService.getAll();
16 | if (res !== null && res) {
17 | setJobs(res);
18 | }
19 | }
20 |
21 | loadJobs();
22 | }, []);
23 |
24 | const columns = [
25 | {
26 | dataField: "name",
27 | text: "Name",
28 | },
29 | {
30 | dataField: "trigger",
31 | text: "Type",
32 | },
33 | {
34 | dataField: "hours",
35 | text: "Interval (hrs)",
36 | },
37 | {
38 | dataField: "start_date",
39 | text: "Start Date",
40 | formatter: (cell: any, row: any) => {
41 | return formatTimestamp(row.start_date);
42 | },
43 | },
44 | {
45 | dataField: "next_run_time",
46 | text: "Next Run Time",
47 | formatter: (cell: any, row: any) => {
48 | return formatTimestamp(row.next_run_time);
49 | },
50 | },
51 | ];
52 |
53 | const emptyState = () => {
54 | return (
55 |
56 |
57 |
No jobs yet!
58 |
You need to create a data source in order to start tracking the status of ingestion jobs
59 |
If you haven't created a data source yet, start there
60 |
61 |
62 | )
63 | }
64 | if (jobs.length == 0) {
65 | return emptyState();
66 | }
67 |
68 | return(
69 |
74 | {
75 | props => (
76 |
77 |
82 |
83 | )
84 | }
85 |
86 | );
87 | };
88 |
89 | export default JobsTable;
90 |
91 |
92 |
--------------------------------------------------------------------------------
/src/ui/src/pages/app/executions/Index/index.tsx:
--------------------------------------------------------------------------------
1 | import React from 'react';
2 |
3 | import Page from 'components/Page';
4 | import ExecutionsTable from './components/ExecutionsTable';
5 | import JobsTable from './components/JobsTable';
6 | import { Tab, Tabs } from 'react-bootstrap';
7 |
8 | const ExecutionsIndex: React.FC = () => {
9 | return (
10 |
11 |
12 |
13 |
14 |
15 |
Executions
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 | );
31 | };
32 |
33 | export default ExecutionsIndex;
34 |
--------------------------------------------------------------------------------
/src/ui/src/pages/app/issues/Index/index.tsx:
--------------------------------------------------------------------------------
1 | import React from 'react';
2 |
3 | import Page from 'components/Page';
4 | import IssuesTable from './components/IssuesTable';
5 |
6 | const IssuesIndex: React.FC = () => {
7 | return (
8 |
9 |
10 |
11 |
12 |
13 |
Issues
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 | );
22 | };
23 |
24 | export default IssuesIndex;
25 |
26 |
--------------------------------------------------------------------------------
/src/ui/src/pages/app/monitors/Index/index.tsx:
--------------------------------------------------------------------------------
1 | import React, { useState, useEffect } from 'react';
2 |
3 | import MonitorService from 'services/monitors';
4 | import Page from 'components/Page';
5 | import MonitorsTable from './components/MonitorsTable';
6 |
7 |
8 | const MonitorsIndex: React.FC = () => {
9 | const [monitors, setMonitors] = useState([]);
10 | const [isLoading, setIsLoading] = useState(false);
11 |
12 | const loadMonitors = async () => {
13 | setIsLoading(true);
14 | let res = await MonitorService.getAll();
15 | if (res && res.monitors) {
16 | setMonitors(res.monitors);
17 | }
18 | setIsLoading(false);
19 | };
20 |
21 | useEffect(() => {
22 | loadMonitors();
23 | }, []);
24 |
25 | return (
26 |
27 |
28 |
29 |
30 |
31 |
Monitors
32 |
33 |
34 |
38 |
39 |
40 |
41 |
42 | );
43 | };
44 |
45 | export default MonitorsIndex;
46 |
--------------------------------------------------------------------------------
/src/ui/src/pages/app/onboarding/GettingStarted/index.tsx:
--------------------------------------------------------------------------------
1 | import React, { useEffect, useState } from 'react';
2 | import { Button, Form } from 'react-bootstrap';
3 |
4 | import UserService from 'services/users';
5 |
6 | const OnboardingGettingStarted: React.FC = () => {
7 | const [email, setEmail] = useState('');
8 | const [anonymizeUsageData, setAnonymizeUsageData] = useState(false);
9 | const [receiveUpdates, setReceiveUpdates] = useState(true);
10 |
11 | const handleSubmit = async () => {
12 | const body = {
13 | email: email,
14 | anonymize_usage_data: anonymizeUsageData,
15 | receive_updates: receiveUpdates,
16 | };
17 |
18 | const resp = await UserService.create(body);
19 |
20 | window.location.reload(); // TODO: Fix - dirty
21 | };
22 |
23 | return (
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
Welcome to MonoSi
32 |
OSS Data Observability
33 |
34 |
36 | Email address
37 | setEmail(e.target.value)} />
38 |
39 |
40 |
41 | setAnonymizeUsageData(e.target.checked)} />
42 |
43 |
44 | setReceiveUpdates(e.target.checked)} />
45 |
46 |
47 | Continue
48 |
49 |
50 |
51 |
52 |
53 |
54 |
55 | );
56 | };
57 |
58 | export default OnboardingGettingStarted;
59 |
60 |
--------------------------------------------------------------------------------
/src/ui/src/pages/app/onboarding/Walkthrough/index.tsx:
--------------------------------------------------------------------------------
1 | import React from 'react';
2 | import Page from 'components/Page';
3 | import { Button, Col, Row } from 'react-bootstrap';
4 |
5 | const DashboardIndex: React.FC = () => {
6 | return (
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
MonoSi Walkthrough
18 |
Start monitoring your data quality in minutes.
19 |
20 |
21 |
22 |
2-min Demo Video
23 | Watch
24 |
25 |
26 |
27 |
28 |
Explore Demo Data
29 | Browse
30 |
31 |
32 |
33 |
Set up a data source
34 |
35 |
Skip Onboarding
36 |
37 |
38 |
39 |
40 |
41 |
42 |
43 | );
44 | };
45 |
46 | export default DashboardIndex;
47 |
48 |
49 |
--------------------------------------------------------------------------------
/src/ui/src/react-app-env.d.ts:
--------------------------------------------------------------------------------
1 | ///
2 |
--------------------------------------------------------------------------------
/src/ui/src/reportWebVitals.ts:
--------------------------------------------------------------------------------
1 | import { ReportHandler } from 'web-vitals';
2 |
3 | const reportWebVitals = (onPerfEntry?: ReportHandler) => {
4 | if (onPerfEntry && onPerfEntry instanceof Function) {
5 | import('web-vitals').then(({ getCLS, getFID, getFCP, getLCP, getTTFB }) => {
6 | getCLS(onPerfEntry);
7 | getFID(onPerfEntry);
8 | getFCP(onPerfEntry);
9 | getLCP(onPerfEntry);
10 | getTTFB(onPerfEntry);
11 | });
12 | }
13 | };
14 |
15 | export default reportWebVitals;
16 |
--------------------------------------------------------------------------------
/src/ui/src/services/common/base.tsx:
--------------------------------------------------------------------------------
1 | import { HttpService } from './http';
2 |
3 | export class BaseService {
4 | http;
5 |
6 | constructor(url_prefix = '') {
7 | this.http = new HttpService(url_prefix);
8 | }
9 |
10 | async getAll() {
11 | return await this.http.get(``);
12 | }
13 |
14 | async get(id: string) {
15 | return await this.http.get(`/${id}`);
16 | }
17 |
18 | async create(body: any) {
19 | return await this.http.post(``, body);
20 | }
21 |
22 | async update(id: string, body: any) {
23 | return await this.http.put(`/${id}`, body);
24 | }
25 |
26 | async delete(id: string) {
27 | return await this.http.remove(`/${id}`);
28 | }
29 | }
30 |
--------------------------------------------------------------------------------
/src/ui/src/services/common/constants.tsx:
--------------------------------------------------------------------------------
// Storage key under which the current Monosi user session is kept.
export const SESSION_KEY = 'monosi_user';
--------------------------------------------------------------------------------
/src/ui/src/services/datasources.tsx:
--------------------------------------------------------------------------------
1 | import { BaseService } from 'services/common/base';
2 |
3 | class DatasourceService extends BaseService {
4 | constructor() {
5 | super('datasources');
6 | }
7 |
8 | async test(id: string) {
9 | return await this.http.get(`/${id}/test`);
10 | }
11 | }
12 |
13 | export default new DatasourceService();
14 |
--------------------------------------------------------------------------------
/src/ui/src/services/executions.tsx:
--------------------------------------------------------------------------------
import { BaseService } from 'services/common/base';

// HTTP client for the `executions` resource; inherits CRUD from BaseService.
class ExecutionService extends BaseService {
  constructor() {
    // All requests are issued under the 'executions' URL prefix.
    super('executions');
  }
}

// Shared singleton instance.
export default new ExecutionService();
--------------------------------------------------------------------------------
/src/ui/src/services/integrations.tsx:
--------------------------------------------------------------------------------
import { BaseService } from 'services/common/base';

// HTTP client for the `integrations` resource; inherits CRUD from BaseService.
class IntegrationService extends BaseService {
  constructor() {
    // All requests are issued under the 'integrations' URL prefix.
    super('integrations');
  }
}

// Shared singleton instance.
export default new IntegrationService();
--------------------------------------------------------------------------------
/src/ui/src/services/issues.tsx:
--------------------------------------------------------------------------------
import { BaseService } from 'services/common/base';

// HTTP client for the `issues` resource; inherits CRUD from BaseService.
class IssueService extends BaseService {
  constructor() {
    // All requests are issued under the 'issues' URL prefix.
    super('issues');
  }
}

// Shared singleton instance.
export default new IssueService();
--------------------------------------------------------------------------------
/src/ui/src/services/jobs.tsx:
--------------------------------------------------------------------------------
import { BaseService } from 'services/common/base';

// HTTP client for the `jobs` resource; inherits CRUD from BaseService.
class JobService extends BaseService {
  constructor() {
    // All requests are issued under the 'jobs' URL prefix.
    super('jobs');
  }
}

// Shared singleton instance.
export default new JobService();
--------------------------------------------------------------------------------
/src/ui/src/services/monitors.tsx:
--------------------------------------------------------------------------------
1 | import { BaseService } from 'services/common/base';
2 |
3 | class MonitorService extends BaseService {
4 | constructor() {
5 | super('monitors');
6 | }
7 |
8 | async getMetrics(id: string, ) {
9 | return await this.http.get(`/${id}/metrics`);
10 | }
11 |
12 | async getMetricData(id: string, column_name: string, metric: string) {
13 | return await this.http.get(`/${id}/metrics?column_name=${column_name}&metric=${metric}`);
14 | }
15 | }
16 |
17 | export default new MonitorService();
18 |
--------------------------------------------------------------------------------
/src/ui/src/services/users.tsx:
--------------------------------------------------------------------------------
import { BaseService } from 'services/common/base';

// HTTP client for the `users` resource; inherits CRUD from BaseService.
class UserService extends BaseService {
  constructor() {
    // All requests are issued under the 'users' URL prefix.
    super('users');
  }
}

// Shared singleton instance.
export default new UserService();
--------------------------------------------------------------------------------
/src/ui/src/utils/timestampFormatting.ts:
--------------------------------------------------------------------------------
1 | import { format } from 'date-fns';
2 |
3 | const DEFAULT_TIMESTAMP_FORMAT: string = 'eeee, MMM do (HH:mm:ss)';
4 |
5 | const formatTimestamp = (
6 | timestamp: string,
7 | tFormat: string = DEFAULT_TIMESTAMP_FORMAT
8 | ) => {
9 | try {
10 | const formattedDate = format(new Date(timestamp), tFormat);
11 | return formattedDate;
12 | } catch (e) {
13 | return timestamp;
14 | }
15 | };
16 |
17 | export { DEFAULT_TIMESTAMP_FORMAT, formatTimestamp };
18 |
--------------------------------------------------------------------------------
/src/ui/tsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 | "compilerOptions": {
3 | "target": "es5",
4 | "lib": [
5 | "dom",
6 | "dom.iterable",
7 | "esnext"
8 | ],
9 | "allowJs": true,
10 | "skipLibCheck": true,
11 | "esModuleInterop": true,
12 | "allowSyntheticDefaultImports": true,
13 | "strict": true,
14 | "forceConsistentCasingInFileNames": true,
15 | "noFallthroughCasesInSwitch": true,
16 | "module": "esnext",
17 | "moduleResolution": "node",
18 | "resolveJsonModule": true,
19 | "isolatedModules": true,
20 | "noEmit": true,
21 | "jsx": "react-jsx",
22 | "baseUrl": "src",
23 | },
24 | "include": [
25 | "src"
26 | ]
27 | }
28 |
--------------------------------------------------------------------------------
/tests/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/monosidev/monosi/a88b689fc74010b10dbabb32f4b2bdeae865f4d5/tests/__init__.py
--------------------------------------------------------------------------------
/tests/context.py:
--------------------------------------------------------------------------------
# Make the application packages under ../src importable from the tests,
# regardless of the directory pytest is invoked from.
import sys
import os
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '../src')))

# Imported here so test modules can pull them from tests.context; these
# imports also fail fast if the src path above is wrong.
import ingestion
import pipeline
import scheduler
import server
--------------------------------------------------------------------------------
/tests/ingestion/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/monosidev/monosi/a88b689fc74010b10dbabb32f4b2bdeae865f4d5/tests/ingestion/__init__.py
--------------------------------------------------------------------------------
/tests/ingestion/sources/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/monosidev/monosi/a88b689fc74010b10dbabb32f4b2bdeae865f4d5/tests/ingestion/sources/__init__.py
--------------------------------------------------------------------------------
/tests/ingestion/transformers/test_base.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/monosidev/monosi/a88b689fc74010b10dbabb32f4b2bdeae865f4d5/tests/ingestion/transformers/test_base.py
--------------------------------------------------------------------------------
/tests/scheduler/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/monosidev/monosi/a88b689fc74010b10dbabb32f4b2bdeae865f4d5/tests/scheduler/__init__.py
--------------------------------------------------------------------------------
/tests/scheduler/test_base.py:
--------------------------------------------------------------------------------
import os
import pytest
# NOTE(review): create_mock_engine is unused in this module — confirm
# before removing the import.
from sqlalchemy.engine.mock import create_mock_engine

import scheduler.base as base

## JobStore Tests
@pytest.fixture
def connection_string():
    """Build a PostgreSQL connection string from DB_* environment variables."""
    db_config = {
        "type": "postgresql",
        "user": os.getenv('DB_USER'),
        "password": os.getenv('DB_PASSWORD'),
        "host": os.getenv('DB_HOST'),
        "port": os.getenv('DB_PORT'),
        "database": os.getenv('DB_DATABASE'),
        "schema": os.getenv('DB_SCHEMA'),
    }
    # Imported lazily so collecting the tests does not require the
    # ingestion package's dependencies.
    from ingestion.sources import SourceFactory
    source = SourceFactory.create(db_config)
    return source.configuration.connection_string()

@pytest.fixture
def datasource_id():
    """Id of a datasource expected to have at least one recorded execution."""
    return 130

@pytest.fixture
def datasource_id_not_executed():
    """Id of a datasource expected to have no recorded executions."""
    return 131

@pytest.fixture
def jobstore(connection_string):
    # The fixture previously also depended on `datasource_id` without using
    # it; that unused dependency has been dropped.
    return base.MsiJobStore(url=connection_string)

def test_jobstore_get_no_execution(jobstore, datasource_id_not_executed):
    """A datasource that never ran yields an empty execution dict."""
    execution = jobstore.get(datasource_id_not_executed)
    assert execution == {}

def test_jobstore_get_execution_exists(jobstore, datasource_id):
    """A datasource that has run yields a non-empty execution dict."""
    execution = jobstore.get(datasource_id)
    assert execution != {}

def test_jobstore_get_execution_exists_created_at(jobstore, datasource_id):
    """Recorded executions carry a created_at value."""
    execution = jobstore.get(datasource_id)
    created_at = execution.get('created_at')
    assert created_at is not None
    # assert parse is date
--------------------------------------------------------------------------------
/tests/server/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/monosidev/monosi/a88b689fc74010b10dbabb32f4b2bdeae865f4d5/tests/server/__init__.py
--------------------------------------------------------------------------------
/tests/server/handlers/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/monosidev/monosi/a88b689fc74010b10dbabb32f4b2bdeae865f4d5/tests/server/handlers/__init__.py
--------------------------------------------------------------------------------
/tests/server/handlers/test_base.py:
--------------------------------------------------------------------------------
import pytest

import server.handlers.base as base


# Placeholder suite for server.handlers.base. Every case below is a TODO
# stub awaiting implementation of the corresponding handler behavior.

def test_list_resource_get():
    # TODO: exercise ListResource GET
    pass

def test_list_resource_post():
    # TODO: exercise ListResource POST
    pass

def test_list_resource__create():
    # TODO: exercise the internal create helper
    pass

def test_list_resource__all():
    # TODO: exercise the internal all helper
    pass


def test_crud_resource__retrieve_by_id():
    # TODO: exercise the internal retrieve-by-id helper
    pass

def test_crud_resource_get():
    # TODO: exercise CrudResource GET
    pass

def test_crud_resource_put():
    # TODO: exercise CrudResource PUT
    pass

def test_crud_resource_delete():
    # TODO: exercise CrudResource DELETE
    pass

def test_crud_resource__update():
    # TODO: exercise the internal update helper
    pass

def test_crud_resource__delete():
    # TODO: exercise the internal delete helper
    pass
--------------------------------------------------------------------------------
/tests/server/handlers/test_datasources.py:
--------------------------------------------------------------------------------
import uuid
import pytest

from server import create_app

DATASOURCES_ENDPOINT = "/v1/api/datasources"
# Expected record count; depends on pre-existing state in the test database.
NUM_DATASOURCES_IN_DB = 1

@pytest.fixture
def client(tmpdir):
    """Flask test client against the default app configuration."""
    # temp_db_file = f"sqlite:///{tmpdir.dirpath()}/"
    app = create_app()
    app.config["TESTING"] = True

    with app.test_client() as client:
        yield client


def test_datasources_get_all(client):
    """Listing datasources returns the expected number of records."""
    response = client.get(f"{DATASOURCES_ENDPOINT}")

    assert response.status_code == 200
    assert len(response.json) == NUM_DATASOURCES_IN_DB

def test_datasources_post(client):
    """Creating a snowflake datasource with a unique name echoes it back."""
    datasource_name = "datasource " + uuid.uuid4().hex
    new_datasource_json = {"name": datasource_name, "type": "snowflake", "config": {}}
    response = client.post(f"{DATASOURCES_ENDPOINT}", json=new_datasource_json)

    assert response.status_code == 200
    assert response.json["datasources"]["name"] == datasource_name

def test_datasources_post_error(client):
    """A payload missing type/config is rejected with a server error."""
    missing_datasource_info_json = {"name": "D.K. Metcalf"}
    response = client.post(f"{DATASOURCES_ENDPOINT}", json=missing_datasource_info_json)

    assert response.status_code == 500

def test_datasources_single(client):
    """Fetching a known datasource id returns the record."""
    response = client.get(f"{DATASOURCES_ENDPOINT}/1")

    assert response.status_code == 200
    assert response.json["datasource"] is not None

def test_datasource_not_found(client):
    """Fetching a missing datasource id yields 404."""
    response = client.get(f"{DATASOURCES_ENDPOINT}/7")

    assert response.status_code == 404

# def test_datasource_test_connection_success(client):
#     response = client.get(f"{DATASOURCES_ENDPOINT}/1/test")

#     assert response.json['connection'] == "true"

# def test_datasource_test_connection_failure(client):
#     response = client.get(f"{DATASOURCES_ENDPOINT}/4/test")

#     assert response.json['connection'] == "false"
--------------------------------------------------------------------------------
/tests/server/handlers/test_integrations.py:
--------------------------------------------------------------------------------
import uuid
import pytest

from server import create_app

INTEGRATIONS_ENDPOINT = "/v1/api/integrations"
# Expected record count; depends on pre-existing state in the test database.
NUM_INTEGRATIONS_IN_DB = 1

@pytest.fixture
def client(tmpdir):
    """Flask test client against the default app configuration."""
    app = create_app()
    app.config["TESTING"] = True

    with app.test_client() as client:
        yield client

def test_integrations_get_all(client):
    """Listing integrations returns the expected number of records."""
    response = client.get(f"{INTEGRATIONS_ENDPOINT}")

    assert response.status_code == 200
    assert len(response.json) == NUM_INTEGRATIONS_IN_DB

def test_integrations_post(client):
    """Creating a slack integration with a unique name echoes it back."""
    integration_name = "integration " + uuid.uuid4().hex
    new_integration_json = {"name": integration_name, "type": "slack", "config": {}}
    response = client.post(f"{INTEGRATIONS_ENDPOINT}", json=new_integration_json)

    assert response.status_code == 200
    assert response.json["integrations"]["name"] == integration_name

def test_integrations_post_error(client):
    """An empty payload is rejected with a server error."""
    missing_integration_info_json = {}
    response = client.post(f"{INTEGRATIONS_ENDPOINT}", json=missing_integration_info_json)

    assert response.status_code == 500

def test_integrations_single(client):
    """Fetching a known integration id returns the record."""
    response = client.get(f"{INTEGRATIONS_ENDPOINT}/99")

    assert response.status_code == 200
    assert response.json["integration"] is not None
    # assert response.json["name"] == "Example Integration"

def test_integration_not_found(client):
    """Fetching a missing integration id yields 404."""
    response = client.get(f"{INTEGRATIONS_ENDPOINT}/7")

    assert response.status_code == 404
--------------------------------------------------------------------------------
/tests/server/handlers/test_metrics.py:
--------------------------------------------------------------------------------
1 | import pytest
2 |
3 | from server import create_app
4 |
5 |
6 | METRICS_ENDPOINT = "/v1/api/monitors/{monitor_id}/metrics?metric={metric}&column_name={column_name}"
7 | NUM_METRICS_FOR_MONITOR = 4
8 |
@pytest.fixture
def client(tmpdir):
    """Yield a Flask test client backed by a freshly created app in TESTING mode.

    Fix: the previous body built a ``temp_db_file`` sqlite URL from
    ``tmpdir.dirpath()`` but never used it (and the URL pointed at a
    directory, not a database file). The dead local was removed; if DB
    isolation is wanted, wire a temp database through app config instead.
    """
    app = create_app()
    app.config["TESTING"] = True

    with app.test_client() as client:
        yield client
17 |
def test_metrics_get_individ(client):
    """GET the metrics for one monitor/metric/column combination."""
    # Bug fix: METRICS_ENDPOINT's template expects `monitor_id`, but the call
    # previously passed `datasource_id`, so str.format raised
    # KeyError('monitor_id') before any request was made.
    response = client.get(METRICS_ENDPOINT.format(
        monitor_id=1,
        metric="approx_distinct_count",
        column_name="user_id",
    ))

    assert response.status_code == 200
    assert len(response.json) == NUM_METRICS_FOR_MONITOR
27 |
28 |
def test_metrics_get_individ_failure(client):
    """An unknown metric/column combination surfaces a server error."""
    url = METRICS_ENDPOINT.format(
        monitor_id=1,
        metric="metric_doesnt_exist",
        column_name="missing_column_name",
    )
    response = client.get(url)

    assert response.status_code == 500
37 |
38 |
39 |
--------------------------------------------------------------------------------
/tests/server/handlers/test_monitors.py:
--------------------------------------------------------------------------------
1 | import uuid
2 | import pytest
3 |
4 | from server import create_app
5 |
6 | MONITORS_ENDPOINT = "/v1/api/monitors"
7 | NUM_MONITORS_ENDPOINT = 1
8 |
@pytest.fixture
def client(tmpdir):
    """Provide a Flask test client for a freshly created app in TESTING mode."""
    app = create_app()
    app.config["TESTING"] = True

    with app.test_client() as test_client:
        yield test_client
16 |
def test_monitors_get_all(client):
    """GET /v1/api/monitors returns the seeded monitors."""
    # Idiom fix: dropped the redundant f-string around the plain constant.
    response = client.get(MONITORS_ENDPOINT)

    assert response.status_code == 200
    assert len(response.json) == NUM_MONITORS_ENDPOINT
22 |
def test_monitors_post(client):
    """POST a new monitor definition and expect it to be accepted."""
    # Readability fix: the payload was a single 200+ char dict literal;
    # one key per line makes the required fields obvious at a glance.
    new_monitor_json = {
        # Unique table name per run so reruns don't collide in the DB.
        "table_name": uuid.uuid4().hex + "table_name",
        "database": "database",
        "schema": "schema",
        "timestamp_field": "timestamp",
        "workspace": "workspace",
        "source": "source",
        "type": "table_health",
    }
    # Idiom fix: dropped the redundant f-string around the plain constant.
    response = client.post(MONITORS_ENDPOINT, json=new_monitor_json)

    assert response.status_code == 200
28 |
def test_monitors_post_job(client):
    """TODO: verify that creating a monitor also creates its job."""
    pass
    # creates job

def test_monitors_destroy_job(client):
    """TODO: verify that deleting a monitor deletes its job."""
    pass
    # deletes job

def test_monitors_destroy_associated(client):
    """TODO: verify that deleting a monitor deletes its metrics and executions."""
    pass
    # deletes metrics and executions
40 |
def test_monitors_post_error(client):
    """POSTing a payload with no monitor details is rejected."""
    missing_monitor_information_json = {}
    # Idiom fix: dropped the redundant f-string around the plain constant.
    # NOTE(review): the handler currently reports bad input as a 500; a 4xx
    # would be more conventional — confirm against the handler before changing.
    response = client.post(MONITORS_ENDPOINT, json=missing_monitor_information_json)

    assert response.status_code == 500
46 |
def test_monitor_single(client):
    """GET a single monitor by id and check a record is returned."""
    monitor_id = 126
    response = client.get(f"{MONITORS_ENDPOINT}/{monitor_id}")

    assert response.status_code == 200
    assert response.json["monitor"] is not None
52 |
def test_monitor_not_found(client):
    """Requesting a monitor id with no record yields a 404."""
    missing_id = 7
    response = client.get(f"{MONITORS_ENDPOINT}/{missing_id}")

    assert response.status_code == 404
57 |
58 |
59 |
60 |
--------------------------------------------------------------------------------
/tests/server/middleware/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/monosidev/monosi/a88b689fc74010b10dbabb32f4b2bdeae865f4d5/tests/server/middleware/__init__.py
--------------------------------------------------------------------------------
/tests/server/middleware/test_api.py:
--------------------------------------------------------------------------------
1 | import pytest
2 |
3 | import server.middleware.api
4 |
5 |
def test_init_api():
    """TODO: exercise server.middleware.api initialization."""
    pass

def test_api_version_and_prefix():
    """TODO: assert the API's version and URL prefix (presumably /v1/api — confirm)."""
    pass
11 |
--------------------------------------------------------------------------------
/tests/server/middleware/test_config.py:
--------------------------------------------------------------------------------
1 | import pytest
2 |
3 | import server.config
4 |
5 |
def test_monosi_dir():
    """TODO: cover resolution of the monosi directory in server.config."""
    pass

def test_db_config_is_loaded():
    """TODO: confirm the DB config is loaded by server.config."""
    pass

def test_db_config_incorrect_details():
    """TODO: cover behavior when the DB config details are incorrect."""
    pass


def test_config_base():
    """TODO: cover the base configuration."""
    pass

def test_config_env_dev():
    """TODO: cover the dev-environment configuration."""
    pass

def test_config_env_test():
    """TODO: cover the test-environment configuration."""
    pass

def test_config_env_prod():
    """TODO: cover the prod-environment configuration."""
    pass
27 |
--------------------------------------------------------------------------------
/tests/server/middleware/test_db.py:
--------------------------------------------------------------------------------
1 | import pytest
2 |
3 | import server.middleware.db
4 |
5 |
def test_init_db():
    """TODO: exercise server.middleware.db initialization."""
    pass
8 |
--------------------------------------------------------------------------------
/tests/server/middleware/test_scheduler.py:
--------------------------------------------------------------------------------
1 | import pytest
2 |
3 | import server.middleware.scheduler
4 |
5 |
def test_init_scheduler():
    """TODO: exercise server.middleware.scheduler initialization."""
    pass
8 |
--------------------------------------------------------------------------------
/tests/server/middleware/test_ui.py:
--------------------------------------------------------------------------------
1 | import pytest
2 |
3 | import server.middleware.ui
4 |
5 |
def test__build_path():
    """TODO: cover server.middleware.ui._build_path."""
    pass


def test__serve_ui():
    """TODO: cover server.middleware.ui._serve_ui."""
    pass

def test_init_ui_with_serve_ui_config_true():
    """TODO: cover UI initialization when the serve-UI config flag is true."""
    pass

def test_init_ui_with_serve_ui_config_false():
    """TODO: cover UI initialization when the serve-UI config flag is false."""
    pass
18 |
--------------------------------------------------------------------------------
/tests/test_basic.py:
--------------------------------------------------------------------------------
def test_absolute_truth_and_meaning():
    """Smoke test: proves the test harness itself collects and runs."""
    assert 1 == 1
3 |
4 |
--------------------------------------------------------------------------------