├── .github ├── ISSUE_TEMPLATE │ ├── bug-report.yml │ └── config.yml ├── pull_request_template.md ├── release-drafter.yml └── workflows │ ├── pr-labeler.yml │ └── release-drafter.yml ├── .gitignore ├── .pre-commit-config.yaml ├── CODE_OF_CONDUCT.md ├── CONTRIBUTING.md ├── License.md ├── README.md ├── core ├── morph │ ├── __init__.py │ ├── api │ │ ├── __init__.py │ │ ├── app.py │ │ ├── auth.py │ │ ├── cloud │ │ │ ├── __init__.py │ │ │ ├── base.py │ │ │ ├── client.py │ │ │ └── types.py │ │ ├── context.py │ │ ├── custom_types.py │ │ ├── error.py │ │ ├── handler.py │ │ ├── plugin.py │ │ ├── service.py │ │ ├── templates │ │ │ └── index.html │ │ └── utils.py │ ├── cli │ │ ├── README.md │ │ ├── __init__.py │ │ ├── flags.py │ │ ├── main.py │ │ ├── params.py │ │ ├── requires.py │ │ └── types.py │ ├── config │ │ ├── __init__.py │ │ └── project.py │ ├── constants.py │ ├── include │ │ ├── __init__.py │ │ └── starter_template │ │ │ ├── .env │ │ │ ├── .gitignore │ │ │ ├── .mock_user_context.json │ │ │ ├── README.md │ │ │ ├── components.json │ │ │ ├── morph_project.yml │ │ │ ├── package.json │ │ │ ├── src │ │ │ ├── __init__.py │ │ │ └── pages │ │ │ │ ├── 404.tsx │ │ │ │ ├── _app.tsx │ │ │ │ ├── _components │ │ │ │ ├── header.tsx │ │ │ │ └── table-of-contents.tsx │ │ │ │ ├── _lib │ │ │ │ └── utils.ts │ │ │ │ ├── index.css │ │ │ │ └── index.mdx │ │ │ ├── static │ │ │ └── favicon.ico │ │ │ ├── tsconfig.json │ │ │ └── vite.config.ts │ ├── py.typed │ └── task │ │ ├── __init__.py │ │ ├── api.py │ │ ├── base.py │ │ ├── clean.py │ │ ├── compile.py │ │ ├── config.py │ │ ├── context.py │ │ ├── deploy.py │ │ ├── init.py │ │ ├── new.py │ │ ├── plugin.py │ │ ├── resource.py │ │ ├── run.py │ │ ├── server.py │ │ └── utils │ │ ├── __init__.py │ │ ├── connection.py │ │ ├── connections │ │ ├── athena │ │ │ ├── api.py │ │ │ └── usecase.py │ │ ├── bigquery │ │ │ ├── api.py │ │ │ ├── types.py │ │ │ └── usecase.py │ │ ├── connector.py │ │ ├── database │ │ │ ├── mssql.py │ │ │ ├── mysql.py │ │ │ ├── postgres.py │ │ │ ├── redshift.py │ │ │ ├── types.py │ │ │ └── utils.py │ │ ├── snowflake │ │ │ ├── api.py │ │ │ ├── types.py │ │ │ └── usecase.py │ │ └── utils.py │ │ ├── file_upload.py │ │ ├── load_dockerfile.py │ │ ├── logging.py │ │ ├── morph.py │ │ └── run_backend │ │ ├── __init__.py │ │ ├── cache.py │ │ ├── decorators.py │ │ ├── errors.py │ │ ├── execution.py │ │ ├── inspection.py │ │ ├── output.py │ │ ├── state.py │ │ └── types.py └── morph_lib │ ├── __init__.py │ ├── api.py │ ├── database.py │ ├── error.py │ ├── function.py │ ├── py.typed │ ├── stream.py │ ├── types.py │ └── utils │ ├── __init__.py │ ├── db_connector.py │ └── sql.py ├── mypy.ini ├── poetry.lock ├── poetry.toml ├── pyproject.toml └── pytest.ini /.github/ISSUE_TEMPLATE/bug-report.yml: -------------------------------------------------------------------------------- 1 | name: 🐞 Bug Report 2 | description: Report a bug or an issue you've found with Morph 3 | title: "[Bug] " 4 | labels: ["bug"] 5 | body: 6 | - type: markdown 7 | attributes: 8 | value: | 9 | Thanks for taking the time to fill out this bug report! 10 | - type: checkboxes 11 | attributes: 12 | label: Is this a new bug in Morph? 13 | description: > 14 | In other words, is this an error, flaw, failure or fault in our software? 15 | 16 | - If this is a bug experienced while using Morph Cloud, please report to [support](https://www.morph-data.io/form/contact). 
17 | 18 | - If this is a request for help or troubleshooting code in your own Morph project, please open a [Discussion question](https://github.com/morph-data/morph/discussions). 19 | 20 | Please search to see if an issue already exists for the bug you encountered. 21 | options: 22 | - label: I believe this is a new bug in Morph 23 | required: true 24 | - label: I have searched the existing issues, and I could not find an existing issue for this bug 25 | required: true 26 | - type: textarea 27 | attributes: 28 | label: Current Behavior 29 | description: A concise description of what you're experiencing. 30 | validations: 31 | required: true 32 | - type: textarea 33 | attributes: 34 | label: Expected Behavior 35 | description: A concise description of what you expected to happen. 36 | validations: 37 | required: true 38 | - type: textarea 39 | attributes: 40 | label: Steps To Reproduce 41 | description: Steps to reproduce the behavior. 42 | placeholder: | 43 | 1. In this environment... 44 | 2. With this config... 45 | 3. Run '...' 46 | 4. See error... 47 | validations: 48 | required: true 49 | - type: textarea 50 | id: logs 51 | attributes: 52 | label: Relevant log output 53 | description: | 54 | If applicable, log output to help explain your problem. 55 | render: shell 56 | validations: 57 | required: false 58 | - type: textarea 59 | attributes: 60 | label: Environment 61 | description: | 62 | examples: 63 | - **OS**: Ubuntu 20.04 64 | - **Python**: 3.9.12 (`python3 --version`) 65 | - **morph-data**: 0.1.1 (`morph --version`) 66 | value: | 67 | - OS: 68 | - Python: 69 | - Morph: 70 | render: markdown 71 | validations: 72 | required: false 73 | - type: textarea 74 | attributes: 75 | label: Additional Context 76 | description: | 77 | Links? References? Anything that will give us more context about the issue you are encountering! 78 | 79 | Tip: You can attach images or log files by clicking this area to highlight it and then dragging files in. 80 | validations: 81 | required: false 82 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/config.yml: -------------------------------------------------------------------------------- 1 | blank_issues_enabled: false 2 | contact_links: 3 | - name: 📚 Docs Suggestion 4 | url: https://github.com/morph-data/docs/issues/new/choose 5 | about: Let us know how our docs could be better 6 | - name: 🚀 Feature Request 7 | url: https://github.com/morph-data/morph/discussions/new?category=ideas 8 | about: Share ideas for new features 9 | - name: 📖 Morph documentation 10 | url: https://docs.morph-data.io/ 11 | about: Learn more about how to use Morph 12 | -------------------------------------------------------------------------------- /.github/pull_request_template.md: -------------------------------------------------------------------------------- 1 | ## Describe your changes 2 | 3 | ## GitHub Issue Link (if applicable) 4 | 5 | ## How I Tested These Changes 6 | 7 | ## Additional context 8 | 9 | Add any other context or screenshots. 
10 | -------------------------------------------------------------------------------- /.github/release-drafter.yml: -------------------------------------------------------------------------------- 1 | name-template: 'v$RESOLVED_VERSION' 2 | tag-template: 'v$RESOLVED_VERSION' 3 | categories: 4 | - title: '🚀 New Features' 5 | labels: 6 | - feature 7 | - title: '🐛 Bug Fixes' 8 | labels: 9 | - bug 10 | - title: '🔧 Improvements' 11 | labels: 12 | - enhancement 13 | - optimization 14 | - refactor 15 | - title: '🛠 Maintenance' 16 | labels: 17 | - chore 18 | template: | 19 | ## What's Changed 20 | 21 | $CHANGES 22 | 23 | --- 24 | -------------------------------------------------------------------------------- /.github/workflows/pr-labeler.yml: -------------------------------------------------------------------------------- 1 | name: PR Labeler 2 | on: 3 | pull_request: 4 | types: 5 | - opened 6 | - reopened 7 | - synchronize 8 | 9 | jobs: 10 | label-pr: 11 | runs-on: ubuntu-latest 12 | 13 | steps: 14 | - name: Extract branch name 15 | id: extract_branch 16 | run: echo "branch_name=${{ github.head_ref }}" >> $GITHUB_ENV 17 | 18 | - name: Add labels based on branch name 19 | uses: actions-ecosystem/action-add-labels@v1 20 | with: 21 | github_token: ${{ secrets.GITHUB_TOKEN }} 22 | labels: | 23 | ${{ startsWith(env.branch_name, 'feature/') && 'feature' || '' }} 24 | ${{ startsWith(env.branch_name, 'fix/') && 'bug' || '' }} 25 | ${{ startsWith(env.branch_name, 'enhancement/') && 'enhancement' || '' }} 26 | ${{ startsWith(env.branch_name, 'optimization/') && 'optimization' || '' }} 27 | ${{ startsWith(env.branch_name, 'refactor/') && 'refactor' || '' }} 28 | ${{ startsWith(env.branch_name, 'chore/') && 'chore' || '' }} -------------------------------------------------------------------------------- /.github/workflows/release-drafter.yml: -------------------------------------------------------------------------------- 1 | name: Release Drafter 2 | 3 | on: 4 | pull_request: 5 | types: 6 | - closed 7 | branches: 8 | - develop 9 | - release/* 10 | 11 | jobs: 12 | draft_release_notes: 13 | # develop branch or minor release branch 14 | if: > 15 | (github.event.pull_request.base.ref == 'develop' && 16 | !startsWith(github.event.pull_request.head.ref, 'release/') && 17 | github.event.pull_request.head.ref != 'main') || 18 | (startsWith(github.event.pull_request.base.ref, 'release/') && 19 | endsWith(github.event.pull_request.base.ref, '0')) 20 | runs-on: ubuntu-latest 21 | steps: 22 | - name: Checkout repository 23 | uses: actions/checkout@v3 24 | 25 | - name: Get current version from tags 26 | id: get_current_version 27 | run: | 28 | # fetch all tags 29 | git fetch --tags 30 | 31 | # Find the latest tag using semantic versioning 32 | CURRENT_VERSION=$(git tag -l --sort=-v:refname | head -n 1) 33 | if [ -z "$CURRENT_VERSION" ]; then 34 | CURRENT_VERSION="v0.0.0" # Default if no tags exist 35 | fi 36 | echo "CURRENT_VERSION=${CURRENT_VERSION}" >> $GITHUB_ENV 37 | 38 | - name: Determine next version 39 | id: determine_next_version 40 | run: | 41 | if [[ "${{ github.event.pull_request.base.ref }}" == "develop" ]]; then 42 | # Increment the minor version 43 | IFS='.' 
read -r -a parts <<< "${CURRENT_VERSION#v}" 44 | MAJOR=${parts[0]} 45 | MINOR=${parts[1]} 46 | PATCH=${parts[2]} 47 | NEXT_VERSION="v${MAJOR}.$((MINOR + 1)).0" 48 | elif [[ "${{ github.event.pull_request.base.ref }}" == release/* ]]; then 49 | # Use the release branch version 50 | base_ref="${{ github.event.pull_request.base.ref }}" 51 | NEXT_VERSION="${base_ref#release/}" 52 | fi 53 | echo "NEXT_VERSION=${NEXT_VERSION}" >> $GITHUB_ENV 54 | echo "Next version: ${NEXT_VERSION}" 55 | - name: Update release draft 56 | uses: release-drafter/release-drafter@v5 57 | env: 58 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 59 | with: 60 | tag: ${{ env.NEXT_VERSION }} 61 | name: ${{ env.NEXT_VERSION }} 62 | version: ${{ env.NEXT_VERSION }} 63 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | share/python-wheels/ 24 | *.egg-info/ 25 | .installed.cfg 26 | *.egg 27 | MANIFEST 28 | 29 | # PyInstaller 30 | # Usually these files are written by a python script from a template 31 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 32 | *.manifest 33 | *.spec 34 | 35 | # Installer logs 36 | pip-log.txt 37 | pip-delete-this-directory.txt 38 | 39 | # Unit test / coverage reports 40 | htmlcov/ 41 | .tox/ 42 | .nox/ 43 | .coverage 44 | .coverage.* 45 | .cache 46 | nosetests.xml 47 | coverage.xml 48 | *.cover 49 | *.py,cover 50 | .hypothesis/ 51 | .pytest_cache/ 52 | cover/ 53 | 54 | # Translations 55 | *.mo 56 | *.pot 57 | 58 | # Django stuff: 59 | *.log 60 | local_settings.py 61 | db.sqlite3 62 | db.sqlite3-journal 63 | 64 | # Flask stuff: 65 | instance/ 66 | .webassets-cache 67 | 68 | # Scrapy stuff: 69 | .scrapy 70 | 71 | # Sphinx documentation 72 | docs/_build/ 73 | 74 | # PyBuilder 75 | .pybuilder/ 76 | target/ 77 | 78 | # Jupyter Notebook 79 | .ipynb_checkpoints 80 | 81 | # IPython 82 | profile_default/ 83 | ipython_config.py 84 | 85 | # pyenv 86 | # For a library or package, you might want to ignore these files since the code is 87 | # intended to run in multiple environments; otherwise, check them in: 88 | # .python-version 89 | 90 | # pipenv 91 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 92 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 93 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 94 | # install all needed dependencies. 95 | #Pipfile.lock 96 | 97 | # poetry 98 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. 99 | # This is especially recommended for binary packages to ensure reproducibility, and is more 100 | # commonly ignored for libraries. 101 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control 102 | #poetry.lock 103 | 104 | # pdm 105 | # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. 106 | #pdm.lock 107 | # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it 108 | # in version control. 
109 | # https://pdm.fming.dev/#use-with-ide 110 | .pdm.toml 111 | 112 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm 113 | __pypackages__/ 114 | 115 | # Celery stuff 116 | celerybeat-schedule 117 | celerybeat.pid 118 | 119 | # SageMath parsed files 120 | *.sage.py 121 | 122 | # Environments 123 | .venv 124 | env/ 125 | venv/ 126 | ENV/ 127 | env.bak/ 128 | venv.bak/ 129 | 130 | # Spyder project settings 131 | .spyderproject 132 | .spyproject 133 | 134 | # Rope project settings 135 | .ropeproject 136 | 137 | # mkdocs documentation 138 | /site 139 | 140 | # mypy 141 | .mypy_cache/ 142 | .dmypy.json 143 | dmypy.json 144 | 145 | # Pyre type checker 146 | .pyre/ 147 | 148 | # pytype static type analyzer 149 | .pytype/ 150 | 151 | # Cython debug symbols 152 | cython_debug/ 153 | 154 | # PyCharm 155 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can 156 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore 157 | # and can be added to the global gitignore or merged into this file. For a more nuclear 158 | # option (not recommended) you can uncomment the following to ignore the entire idea folder. 159 | .idea/ 160 | 161 | .vscode/ 162 | ### macOS template 163 | # General 164 | .DS_Store 165 | .AppleDouble 166 | .LSOverride 167 | 168 | # Icon must end with two \r 169 | Icon 170 | 171 | # Thumbnails 172 | ._* 173 | 174 | # Files that might appear in the root of a volume 175 | .DocumentRevisions-V100 176 | .fseventsd 177 | .Spotlight-V100 178 | .TemporaryItems 179 | .Trashes 180 | .VolumeIcon.icns 181 | .com.apple.timemachine.donotpresent 182 | 183 | # Directories potentially created on remote AFP share 184 | .AppleDB 185 | .AppleDesktop 186 | Network Trash Folder 187 | Temporary Items 188 | .apdisk 189 | 190 | # Morph core 191 | core/morph/api/templates/development/* 192 | !core/morph/api/templates/development/.gitkeep 193 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | # See https://pre-commit.com for more information 2 | # See https://pre-commit.com/hooks.html for more hooks 3 | 4 | repos: 5 | - repo: https://github.com/pre-commit/pre-commit-hooks 6 | rev: v4.3.0 7 | hooks: 8 | - id: trailing-whitespace 9 | - id: end-of-file-fixer 10 | - id: check-yaml 11 | - id: check-added-large-files 12 | 13 | - repo: https://github.com/psf/black 14 | rev: 22.6.0 15 | hooks: 16 | - id: black 17 | language_version: python3 18 | 19 | - repo: https://github.com/pycqa/isort 20 | rev: 5.12.0 21 | hooks: 22 | - id: isort 23 | exclude: ^.*\b(migrations)\b.*$ 24 | args: [ "--profile", "black" ] 25 | 26 | - repo: https://github.com/pycqa/flake8 27 | rev: 7.0.0 28 | hooks: 29 | - id: flake8 30 | args: 31 | [ 32 | --max-line-length=88, 33 | --ignore, 34 | "E203,E501,W503,W504", 35 | "--exclude", 36 | "*test*", 37 | ] 38 | 39 | - repo: https://github.com/pycqa/autoflake 40 | rev: "v2.2.1" 41 | hooks: 42 | - id: autoflake 43 | args: 44 | [ 45 | "--in-place", 46 | "--remove-all-unused-imports", 47 | "--exclude", 48 | "*test*", 49 | ] 50 | 51 | - repo: https://github.com/pre-commit/mirrors-mypy 52 | rev: v1.6.1 53 | hooks: 54 | - id: mypy 55 | types: [ python ] 56 | exclude: ^.*\b(example|core/morph/include)\b.*$ 57 | additional_dependencies: [ pandas-stubs, types-requests, types-PyYAML, pydantic, types-tabulate ] 58 | 
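The hooks above run automatically once `pre-commit` is set up in a clone. A typical local invocation (standard `pre-commit` CLI usage, not a script shipped in this repository) looks like this:

```bash
# Install the git hooks defined in .pre-commit-config.yaml (once per clone)
pre-commit install

# Run every configured hook against the whole codebase, e.g. before opening a PR
pre-commit run --all-files
```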
-------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | # Contributor Covenant Code of Conduct 2 | 3 | ## Our Pledge 4 | 5 | In the interest of fostering an open and welcoming environment, we as 6 | contributors and maintainers pledge to making participation in our project and 7 | our community a harassment-free experience for everyone, regardless of age, body 8 | size, disability, ethnicity, sex characteristics, gender identity and expression, 9 | level of experience, education, socio-economic status, nationality, personal 10 | appearance, race, religion, or sexual identity and orientation. 11 | 12 | ## Our Standards 13 | 14 | Examples of behavior that contributes to creating a positive environment 15 | include: 16 | 17 | - Using welcoming and inclusive language 18 | - Being respectful of differing viewpoints and experiences 19 | - Gracefully accepting constructive criticism 20 | - Focusing on what is best for the community 21 | - Showing empathy towards other community members 22 | 23 | Examples of unacceptable behavior by participants include: 24 | 25 | - The use of sexualized language or imagery and unwelcome sexual attention or 26 | advances 27 | - Trolling, insulting/derogatory comments, and personal or political attacks 28 | - Public or private harassment 29 | - Publishing others' private information, such as a physical or electronic 30 | address, without explicit permission 31 | - Other conduct which could reasonably be considered inappropriate in a 32 | professional setting 33 | 34 | ## Our Responsibilities 35 | 36 | Project maintainers are responsible for clarifying the standards of acceptable 37 | behavior and are expected to take appropriate and fair corrective action in 38 | response to any instances of unacceptable behavior. 39 | 40 | Project maintainers have the right and responsibility to remove, edit, or 41 | reject comments, commits, code, wiki edits, issues, and other contributions 42 | that are not aligned to this Code of Conduct, or to ban temporarily or 43 | permanently any contributor for other behaviors that they deem inappropriate, 44 | threatening, offensive, or harmful. 45 | 46 | ## Scope 47 | 48 | This Code of Conduct applies both within project spaces and in public spaces 49 | when an individual is representing the project or its community. Examples of 50 | representing a project or community include using an official project e-mail 51 | address, posting via an official social media account, or acting as an appointed 52 | representative at an online or offline event. Representation of a project may be 53 | further defined and clarified by project maintainers. 54 | 55 | ## Enforcement 56 | 57 | Instances of abusive, harassing, or otherwise unacceptable behavior may be 58 | reported by contacting the project team at shibata@morphdb.io. All 59 | complaints will be reviewed and investigated and will result in a response that 60 | is deemed necessary and appropriate to the circumstances. The project team is 61 | obligated to maintain confidentiality with regard to the reporter of an incident. 62 | Further details of specific enforcement policies may be posted separately. 63 | 64 | Project maintainers who do not follow or enforce the Code of Conduct in good 65 | faith may face temporary or permanent repercussions as determined by other 66 | members of the project's leadership. 
67 | 68 | ## Attribution 69 | 70 | This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, 71 | available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html 72 | 73 | [homepage]: https://www.contributor-covenant.org 74 | 75 | For answers to common questions about this code of conduct, see 76 | https://www.contributor-covenant.org/faq -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing to Morph 2 | 3 | Thank you for your interest in helping Morph! ❤️ 4 | 5 | 🔍 If you're looking for Morph's documentation, you can find it [here](https://docs.morph-data.io). 6 | 7 | This guide is for people who want to contribute code to Morph. There are also other ways to contribute, such as [reporting bugs](https://github.com/morph-data/morph/issues/new/choose), creating [feature requests](https://github.com/morph-data/morph/issues/new/choose), helping other users [in our discussion board](https://github.com/morph-data/morph/discussions/new), Stack Overflow, etc., or just being an awesome member of the community! 8 | 9 | ## Contents 10 | 11 | - [Report an Issue](#report-an-issue) 12 | - [Request a Feature](#request-a-feature) 13 | - [Contribute Code](#contribute-code) 14 | - [Branch Types](#branch-types) 15 | 16 | ## Report an Issue 17 | 18 | Open issues for bugs, docs improvements or errors. 19 | 20 | [Create an issue here](https://github.com/morph-data/morph/issues/new/choose) 21 | 22 | ### Private information 23 | 24 | If your problem relates to sensitive or private information, please don't post any of your data in an issue. We suggest creating a small test dataset that can reproduce the problem without revealing any private info, and posting that data in the issue. If that's not possible, please reach out to morph@queue-inc.com. 25 | 26 | ## Request a Feature 27 | 28 | To request a feature, a new data source, or ask for help, create a GitHub discussion. 29 | 30 | [Create a discussion here](https://github.com/morph-data/morph/discussions/new) 31 | 32 | ## Contribute Code 33 | 34 | ### Getting Started 35 | 36 | #### Prerequisites 37 | 38 | - Python v3.9.0+ 39 | - Node.js v18+ 40 | 41 | #### Setting up a python environment 42 | 43 | Morph uses [Poetry](https://python-poetry.org/) for Python dependency management. 44 | In the root of the repository, run the following command to install the dependencies for the Python code. 45 | 46 | ``` 47 | poetry install 48 | ``` 49 | 50 | #### Test the code before committing 51 | 52 | `pre-commit` takes care of running all code-checks for formatting and linting. By the following command, `pre-commit` will be installed and ensure your changes are formatted and linted automatically when you commit your changes. 53 | 54 | ``` 55 | pre-commit install 56 | ``` 57 | 58 | ### Running the code locally 59 | 60 | Run the following command to install the package in editable mode so you can make changes to the code and test them immediately. 61 | 62 | ``` 63 | pip install --editable .'[morph-data]' 64 | ``` 65 | 66 | ### Pull Requests 67 | 68 | Pull requests are welcome! We review pull requests as they are submitted and will reach out to you with any questions or comments. 69 | 70 | Follow these steps to submit a pull request for your changes: 71 | 72 | 1. Create a fork of the morph repo 73 | 2. 
Before committing your changes, please run `pre-commit install` to automatically format your code and help prevent linting errors. 74 | 3. Commit your changes to your fork 75 | 4. Test your changes to make sure all results are as expected 76 | 5. Open a pull request against the develop branch of the morph repo 77 | 78 | ## Branch Types 79 | 80 | This project follows a **GitFlow-inspired** branching model adapted to accommodate **weekly patch releases** and **monthly minor releases**. **All new branches are created by the maintainers.** 81 | 82 | - **Weekly patch releases (patch)**: Small, frequent updates to fix critical bugs or minor issues. 83 | - **Monthly minor releases (minor)**: Bundles of new features and improvements. 84 | 85 | Our adaptations to GitFlow: 86 | 87 | - **`develop`** holds the code for the **next monthly minor release**. 88 | - **Weekly patch** branches (e.g., `release/v0.0.x`) are branched off from **`main`** to quickly address bugs. 89 | - **Release** branches for the monthly minor (e.g., `release/v0.1.0`) are branched off **`develop`** when we finalize which features/fixes go into that release. By the time it merges into `main`, the release branch should be tested and ready to be released. 90 | 91 | ### 1. Main (or Master) 92 | 93 | - **Contains**: The production-ready code of the latest official release. 94 | - **Weekly patches** (urgent bug fixes) branch off here. 95 | - **Merged into**: When a release (patch or minor) is finalized, it merges back into `main` with an associated tag. 96 | 97 | ### 2. Develop 98 | 99 | - **Contains**: The next monthly minor release in active development. 100 | - **Merged into**: Feature branches get merged back into `develop`. 101 | - **Branched from**: A release branch (e.g., `release/v0.1.0`) is created from `develop` when we’re ready for final QA and testing. 102 | 103 | ### 3. Release Branches 104 | 105 | We have two kinds of release branches: 106 | 107 | 1. **Monthly Minor Release** 108 | - Named like `release/v0.1.0`. 109 | - Created from `develop`. 110 | - Final testing/bugfixes happen here. 111 | - Once ready, merged into `main` (release goes live), then back into `develop`. 112 | - If you find any bugs during testing, please create a new PR to merge into the release branch to fix the bug. 113 | 114 | 2. **Weekly Patch Release** 115 | - Named like `release/v0.0.x`. 116 | - Created from `main`. 117 | - Used to quickly fix bugs in the currently released version. 118 | - Once merged into `main`, also merged into `develop` to ensure the patch carries forward. 119 | 120 | ### 4. Working Branches 121 | 122 | These branches are where developers actually write and commit their changes. **All of them branch off from `develop` or `release/<version>`**, and once the work is complete, they are merged back into them via pull requests. Each branch type automatically triggers the creation of release notes from the PR, so **please adhere to these naming conventions**: 123 | 124 | 1. **`feature/<name>`** 125 | - **Purpose**: Introduce completely new functionality or a significant feature. 126 | - **Examples**: `feature/user-auth`, `feature/payment-integration`. 127 | 128 | 2. **`fix/<name>`** 129 | - **Purpose**: Resolve bugs or errors discovered in the codebase. 130 | - **Examples**: `fix/login-crash`, `fix/cart-update-error`. 131 | 132 | 3. **`enhancement/<name>`** 133 | - **Purpose**: Improve or extend existing functionality without introducing entirely new features.
134 | - **Examples**: `enhancement/profile-page-ui`, `enhancement/api-responses`. 135 | 136 | 4. **`optimization/<name>`** 137 | - **Purpose**: Improve performance, memory usage, or efficiency of existing code without changing functionality. 138 | - **Examples**: `optimization/query-speed`, `optimization/asset-loading`. 139 | 140 | 5. **`refactor/<name>`** 141 | - **Purpose**: Restructure or reorganize code (improving readability, maintainability, etc.) without changing external behavior. 142 | - **Examples**: `refactor/order-service`, `refactor/database-layer`. 143 | 144 | 6. **`chore/<name>`** 145 | - **Purpose**: Maintenance tasks, project cleanup, and changes that do not impact the code’s behavior (e.g., updating dependencies, adjusting configurations). 146 | - **Examples**: `chore/dependency-updates`, `chore/ci-setup`. 147 | 148 | ## Join Our Team 149 | 150 | If you're passionate about what we're building at Morph and want to join our team, reach out to us at morph@queue-inc.com. 151 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | ![Header](https://data.morphdb.io/assets/header.png) 2 | 3 | ## Features 4 | 5 | [Morph](https://www.morph-data.io/) is a Python-centric full-stack framework for building and deploying AI apps. 6 | 7 | - **Fast to start** 🚀 - Allows you to get up and running with just three commands. 8 | - **Deploy and operate 🌐** - Easily deploy your AI apps and manage them in production. Managed cloud is available for user authentication and secure data connection. 9 | - **No HTML/CSS knowledge required🔰** - With **Markdown-based syntax** and **pre-made components**, you can create flexible, visually appealing designs without writing a single line of HTML or CSS. 10 | - **Customizable 🛠️** - **Chain Python and SQL** for advanced AI workflows. Custom CSS and custom React components are available for building tailored UI. 11 | 12 | ## Quick start 13 | 14 | 1. Install morph 15 | 16 | ```bash 17 | pip install morph-data 18 | ``` 19 | 20 | 2. Create a new project 21 | 22 | ```bash 23 | morph new 24 | ``` 25 | 26 | 3. Start dev server 27 | 28 | ```bash 29 | morph serve 30 | ``` 31 | 32 | 4. Visit `http://localhost:8080` in your browser. 33 | 34 | ## How it works 35 | 36 | Understanding how an AI app is put together in Morph will give you a flying start. 37 | 38 | 1. Develop the AI workflow in Python and give it an alias. 39 | 2. Create an .mdx file. Each .mdx file becomes a page of your app. 40 | 3. Place the component in the MDX file and specify the alias to connect to. 41 | 42 | ``` 43 | . 44 | ├─ pages 45 | │ └─ index.mdx 46 | └─ python 47 | └─ chat.py 48 | ``` 49 | 50 | ## Building AI Apps 51 | 52 | ### A little example 53 | 54 | 1. Create the following files in the `python` and `pages` directories. 55 | 56 | Python: Using Langchain to create an AI workflow. 57 | 58 | ```python 59 | import morph 60 | from morph import MorphGlobalContext 61 | from morph_lib.stream import stream_chat 62 | from langchain_openai import ChatOpenAI 63 | from langchain_core.messages import HumanMessage 64 | 65 | @morph.func 66 | def langchain_chat(context: MorphGlobalContext): 67 | llm = ChatOpenAI(model="gpt-4o") 68 | messages = [HumanMessage(context.vars["prompt"])] 69 | for token in llm.stream(messages): 70 | yield stream_chat(token.content) 71 | ``` 72 | 73 | MDX: Define the page and connect the data.
74 | 75 | ```typescript 76 | # 🦜🔗 Langchain Chat 77 | 78 | <Chat postData="langchain_chat" height={300} /> 79 | ``` 80 | 81 | 2. Run `morph serve` to open the app! 82 | 83 | ![AI App](https://data.morphdb.io/assets/gif/langchain-demo.gif) 84 | 85 | ## Documentation 86 | 87 | Visit https://docs.morph-data.io for more documentation. 88 | 89 | ## Contributing 90 | 91 | Thanks for your interest in helping improve Morph ❤️ 92 | 93 | - Before contributing, please read the [CONTRIBUTING.md](CONTRIBUTING.md). 94 | - If you find any issues, please let us know and open [an issue](https://github.com/morph-data/morph/issues/new/choose). 95 | 96 | ## License 97 | 98 | Morph is [Apache 2.0](https://www.apache.org/licenses/LICENSE-2.0) licensed. 99 | -------------------------------------------------------------------------------- /core/morph/__init__.py: -------------------------------------------------------------------------------- 1 | # flake8: ignore=F401 2 | 3 | from .api.auth import auth 4 | from .api.plugin import plugin_app 5 | from .task.utils.run_backend.decorators import func, load_data, variables 6 | from .task.utils.run_backend.state import MorphGlobalContext 7 | 8 | __all__ = ["func", "variables", "load_data", "MorphGlobalContext", "plugin_app", "auth"] 9 | -------------------------------------------------------------------------------- /core/morph/api/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/morph-data/morph/876d78e7c1b72d53d66e08197b5c5f02f389d2be/core/morph/api/__init__.py -------------------------------------------------------------------------------- /core/morph/api/auth.py: -------------------------------------------------------------------------------- 1 | import base64 2 | import json 3 | import os 4 | 5 | from fastapi import Header 6 | 7 | from morph.api.cloud.types import UserInfo 8 | from morph.api.context import request_context 9 | from morph.api.error import AuthError, ErrorCode, ErrorMessage 10 | from morph.task.utils.morph import find_project_root_dir 11 | 12 | 13 | async def auth( 14 | authorization: str = Header(default=None), x_api_key: str = Header(default=None) 15 | ) -> None: 16 | if x_api_key is not None: 17 | os.environ["MORPH_API_KEY"] = x_api_key 18 | 19 | if authorization is None or authorization == "Bearer dummy": 20 | # "dummy" is set when running in local 21 | project_root = find_project_root_dir() 22 | mock_json_path = f"{project_root}/.mock_user_context.json" 23 | if not os.path.exists(mock_json_path): 24 | request_context.set( 25 | { 26 | "user": UserInfo( 27 | user_id="cea122ea-b240-49d7-ae7f-8b1e3d40dd8f", 28 | email="mock_user@morph-data.io", 29 | username="mock_user", 30 | first_name="Mock", 31 | last_name="User", 32 | roles=["Admin"], 33 | ).model_dump() 34 | } 35 | ) 36 | return 37 | try: 38 | mock_json = json.load(open(mock_json_path)) 39 | request_context.set({"user": mock_json}) 40 | return 41 | except Exception: 42 | raise AuthError( 43 | ErrorCode.AuthError, ErrorMessage.AuthErrorMessage["mockJsonInvalid"] 44 | ) 45 | 46 | try: 47 | token = authorization.split(" ")[1] 48 | parts = token.split(".") 49 | if len(parts) != 3: 50 | raise AuthError( 51 | ErrorCode.AuthError, ErrorMessage.AuthErrorMessage["tokenInvalid"] 52 | ) 53 | 54 | payload_encoded = parts[1] 55 | payload_json = base64.urlsafe_b64decode( 56 | payload_encoded + "=" * (-len(payload_encoded) % 4) 57 | ) 58 | user_context_json = json.loads(payload_json) 59 | request_context.set({"user":
user_context_json}) 60 | except Exception: 61 | raise AuthError( 62 | ErrorCode.AuthError, ErrorMessage.AuthErrorMessage["tokenInvalid"] 63 | ) 64 | -------------------------------------------------------------------------------- /core/morph/api/cloud/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/morph-data/morph/876d78e7c1b72d53d66e08197b5c5f02f389d2be/core/morph/api/cloud/__init__.py -------------------------------------------------------------------------------- /core/morph/api/cloud/base.py: -------------------------------------------------------------------------------- 1 | import urllib.parse 2 | from abc import ABC, abstractmethod 3 | from typing import Any, Dict, Optional, Type, TypeVar 4 | 5 | import requests 6 | from pydantic import BaseModel 7 | from requests import Response 8 | 9 | T = TypeVar("T", bound=BaseModel) 10 | 11 | 12 | class MorphClientResponse(Response): 13 | def __init__(self, response: Response): 14 | super().__init__() 15 | self.__dict__ = response.__dict__.copy() 16 | 17 | @staticmethod 18 | def is_api_error(response: Dict[str, Any]) -> bool: 19 | return "error" in response and "subCode" in response and "message" in response 20 | 21 | def is_error(self, raise_err: Optional[bool] = False) -> bool: 22 | try: 23 | self.raise_for_status() 24 | if MorphClientResponse.is_api_error(self.json()): 25 | if raise_err: 26 | raise SystemError(self.text) 27 | return True 28 | except Exception as e: 29 | if raise_err: 30 | raise e 31 | return True 32 | return False 33 | 34 | def to_model( 35 | self, model: Type[T], raise_err: Optional[bool] = False 36 | ) -> Optional[T]: 37 | if raise_err: 38 | if self.status_code > 500: 39 | raise SystemError(self.text) 40 | try: 41 | response_json = self.json() 42 | except Exception as e: 43 | raise SystemError(e) 44 | if MorphClientResponse.is_api_error(response_json): 45 | raise SystemError(response_json["message"]) 46 | return model(**response_json) 47 | else: 48 | try: 49 | response_json = self.json() 50 | except Exception: # noqa 51 | return None 52 | if MorphClientResponse.is_api_error(response_json): 53 | return None 54 | return model(**response_json) 55 | 56 | 57 | class MorphApiBaseClient(ABC): 58 | @abstractmethod 59 | def get_headers(self) -> Dict[str, Any]: 60 | pass 61 | 62 | @abstractmethod 63 | def get_base_url(self) -> str: 64 | pass 65 | 66 | def request( 67 | self, 68 | method: str, 69 | path: str, 70 | data: Optional[Dict[str, Any]] = None, 71 | query: Optional[Dict[str, Any]] = None, 72 | is_debug: Optional[bool] = False, 73 | ) -> MorphClientResponse: 74 | headers = self.get_headers() 75 | url = urllib.parse.urljoin(f"{self.get_base_url()}/", path) 76 | 77 | if query: 78 | url_parts = list(urllib.parse.urlparse(url)) 79 | existing_query = dict(urllib.parse.parse_qsl(url_parts[4])) 80 | updated_query = { 81 | k: (str(v).lower() if isinstance(v, bool) else v) 82 | for k, v in query.items() 83 | if v is not None 84 | } 85 | existing_query.update(updated_query) 86 | url_parts[4] = urllib.parse.urlencode(existing_query) 87 | url = urllib.parse.urlunparse(url_parts) 88 | 89 | if is_debug: 90 | print(">> DEBUGGING REQUEST ==============================") 91 | print(f"URL: {url}") 92 | print(f"Headers: {headers}") 93 | print(f"Data: {data}") 94 | print(f"Query: {query}") 95 | 96 | response = requests.request( 97 | method=method, url=url, headers=headers, json=data, verify=True 98 | ) 99 | 100 | if is_debug: 101 | print(">> DEBUGGING RESPONSE 
=============================") 102 | print(f"Status Code: {response.status_code}") 103 | print(f"Response: {response.json()}") 104 | 105 | return MorphClientResponse(response) 106 | -------------------------------------------------------------------------------- /core/morph/api/cloud/client.py: -------------------------------------------------------------------------------- 1 | import configparser 2 | import os 3 | from functools import wraps 4 | from typing import Any, Dict, Generic, List, Optional, Type, TypeVar, cast 5 | 6 | from morph.api.cloud.base import MorphApiBaseClient, MorphClientResponse 7 | from morph.api.cloud.types import EnvVarObject 8 | from morph.constants import MorphConstant 9 | from morph.task.utils.morph import find_project_root_dir 10 | 11 | MORPH_API_BASE_URL = "https://api.squadbase.dev/v0" 12 | 13 | 14 | def validate_project_id(method): 15 | @wraps(method) 16 | def wrapper(self, *args, **kwargs): 17 | if not hasattr(self, "project_id") or not self.project_id: 18 | raise ValueError( 19 | "No project id found. Please fill project_id in morph_project.yml" 20 | ) 21 | return method(self, *args, **kwargs) 22 | 23 | return wrapper 24 | 25 | 26 | class MorphApiKeyClientImpl(MorphApiBaseClient): 27 | def __init__(self): 28 | # Initialize default values 29 | self.project_id = os.environ.get("MORPH_PROJECT_ID", "") 30 | self.api_url = os.environ.get("MORPH_BASE_URL", MORPH_API_BASE_URL) 31 | self.api_key = os.environ.get("MORPH_API_KEY", "") 32 | 33 | from morph.config.project import load_project # avoid circular import 34 | 35 | try: 36 | project_root = find_project_root_dir() 37 | except Exception: # noqa 38 | project_root = None 39 | 40 | if project_root: 41 | project = load_project(project_root) 42 | else: 43 | project = None 44 | 45 | if project: 46 | profile = project.profile or "default" 47 | else: 48 | profile = "default" 49 | 50 | self.project_id = os.environ.get( 51 | "MORPH_PROJECT_ID", "" if not project else project.project_id or "" 52 | ) 53 | 54 | self.api_key = os.environ.get("MORPH_API_KEY", "") 55 | if not self.api_key: 56 | config_path = MorphConstant.MORPH_CRED_PATH 57 | if not os.path.exists(config_path): 58 | raise ValueError( 59 | f"Credential file not found at {config_path}. Please run 'morph init'." 60 | ) 61 | config = configparser.ConfigParser() 62 | config.read(config_path) 63 | if not config.has_section(profile): 64 | raise ValueError( 65 | f"No profile '{profile}' found in the credentials file." 
66 | ) 67 | self.api_key = config.get(profile, "api_key", fallback="") 68 | 69 | if not self.api_key: 70 | raise ValueError(f"No API key found for profile '{profile}'.") 71 | 72 | def get_headers(self) -> Dict[str, Any]: 73 | return { 74 | "Content-Type": "application/json", 75 | "X-Api-Key": self.api_key, 76 | "project-id": self.project_id, 77 | } 78 | 79 | def get_base_url(self) -> str: 80 | return self.api_url 81 | 82 | @validate_project_id 83 | def find_database_connection(self) -> MorphClientResponse: 84 | path = f"project/{self.project_id}/connection" 85 | return self.request(method="GET", path=path) 86 | 87 | @validate_project_id 88 | def find_external_connection(self, connection_slug: str) -> MorphClientResponse: 89 | path = f"external-connection/{connection_slug}" 90 | return self.request(method="GET", path=path) 91 | 92 | @validate_project_id 93 | def list_env_vars(self) -> MorphClientResponse: 94 | path = "env-vars" 95 | return self.request(method="GET", path=path) 96 | 97 | @validate_project_id 98 | def override_env_vars(self, env_vars: List[EnvVarObject]) -> MorphClientResponse: 99 | path = "env-vars/override" 100 | body = {"envVars": [env_var.model_dump() for env_var in env_vars]} 101 | return self.request(method="POST", path=path, data=body) 102 | 103 | @validate_project_id 104 | def list_fields( 105 | self, 106 | table_name: str, 107 | schema_name: Optional[str], 108 | connection: Optional[str], 109 | ) -> MorphClientResponse: 110 | path = f"field/{table_name}" 111 | query = {} 112 | if connection: 113 | path = "external-database-field" 114 | query.update( 115 | { 116 | "connectionSlug": connection, 117 | "tableName": table_name, 118 | "schemaName": schema_name, 119 | } 120 | ) 121 | return self.request(method="GET", path=path, query=query) 122 | 123 | def check_api_secret(self) -> MorphClientResponse: 124 | path = "api-secret/check" 125 | return self.request(method="GET", path=path) 126 | 127 | @validate_project_id 128 | def verify_api_secret(self) -> MorphClientResponse: 129 | path = "api-secret/verify" 130 | body = {"projectId": self.project_id} 131 | return self.request(method="POST", path=path, data=body) 132 | 133 | @validate_project_id 134 | def initiate_deployment( 135 | self, 136 | project_id: str, 137 | image_build_log: str, 138 | image_checksum: str, 139 | config: Optional[dict[str, Any]] = None, 140 | ) -> MorphClientResponse: 141 | path = "deployment" 142 | body: dict[str, Any] = { 143 | "projectId": project_id, 144 | "imageBuildLog": image_build_log, 145 | "imageChecksum": image_checksum, 146 | } 147 | if config: 148 | body["config"] = config 149 | 150 | return self.request(method="POST", path=path, data=body) 151 | 152 | @validate_project_id 153 | def execute_deployment( 154 | self, user_function_deployment_id: str 155 | ) -> MorphClientResponse: 156 | path = f"deployment/{user_function_deployment_id}" 157 | 158 | return self.request( 159 | method="POST", 160 | path=path, 161 | ) 162 | 163 | 164 | T = TypeVar("T", bound=MorphApiBaseClient) 165 | 166 | 167 | class MorphApiClient(Generic[T]): 168 | def __init__(self, client_class: Type[T], token: Optional[str] = None): 169 | self.req: T = self._create_client(client_class, token=token) 170 | 171 | def _create_client(self, client_class: Type[T], token: Optional[str] = None) -> T: 172 | if client_class is MorphApiKeyClientImpl: 173 | return cast(T, MorphApiKeyClientImpl()) 174 | else: 175 | raise ValueError("Invalid client class.") 176 |
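`MorphApiClient` above is a thin generic wrapper that currently only accepts `MorphApiKeyClientImpl`. A minimal usage sketch (not part of the package; the project id and API key below are placeholders, and in practice they come from the `morph init` credentials file, `morph_project.yml`, or the deployment environment) might look like this:

```python
import os

from morph.api.cloud.client import MorphApiClient, MorphApiKeyClientImpl
from morph.api.cloud.types import EnvVarList  # defined in cloud/types.py below

# Placeholder values for illustration only.
os.environ.setdefault("MORPH_PROJECT_ID", "your-project-id")
os.environ.setdefault("MORPH_API_KEY", "your-api-key")

client = MorphApiClient(MorphApiKeyClientImpl)

# Each helper returns a MorphClientResponse; to_model() parses the JSON body
# into a pydantic model and raises on API errors when raise_err=True.
response = client.req.list_env_vars()
env_vars = response.to_model(EnvVarList, raise_err=True)
if env_vars is not None:
    for env_var in env_vars.items:
        print(env_var.key, env_var.value)
```

The same pattern applies to the other request helpers (`find_database_connection`, `list_fields`, `initiate_deployment`, and so on).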
-------------------------------------------------------------------------------- /core/morph/api/cloud/types.py: -------------------------------------------------------------------------------- 1 | from typing import List 2 | 3 | from pydantic import BaseModel, ConfigDict, Field 4 | from pydantic.alias_generators import to_snake 5 | 6 | 7 | # ================================================ 8 | # User 9 | # ================================================ 10 | class UserInfo(BaseModel): 11 | user_id: str = Field(alias="userId") 12 | username: str 13 | email: str 14 | first_name: str = Field(alias="firstName") 15 | last_name: str = Field(alias="lastName") 16 | roles: List[str] 17 | 18 | model_config = ConfigDict( 19 | alias_generator=to_snake, 20 | populate_by_name=True, 21 | from_attributes=True, 22 | ) 23 | 24 | 25 | # ================================================ 26 | # EnvVar 27 | # ================================================ 28 | 29 | 30 | class EnvVarObject(BaseModel): 31 | key: str 32 | value: str 33 | 34 | class Config: 35 | extra = "ignore" 36 | 37 | 38 | class EnvVarList(BaseModel): 39 | items: List[EnvVarObject] 40 | count: int 41 | -------------------------------------------------------------------------------- /core/morph/api/context.py: -------------------------------------------------------------------------------- 1 | import contextvars 2 | 3 | request_context: contextvars.ContextVar = contextvars.ContextVar( 4 | "request_context", default={} 5 | ) 6 | -------------------------------------------------------------------------------- /core/morph/api/custom_types.py: -------------------------------------------------------------------------------- 1 | from typing import Any, Dict, List, Literal, Optional 2 | 3 | from fastapi import File, UploadFile 4 | from pydantic import BaseModel 5 | 6 | # ================================================ 7 | # Success 8 | # ================================================ 9 | 10 | 11 | class SuccessResponse(BaseModel): 12 | message: str 13 | 14 | 15 | # ================================================ 16 | # RunFileWithType 17 | # ================================================ 18 | 19 | 20 | class RunFileWithTypeRequestBody(BaseModel): 21 | variables: Optional[Dict[str, Any]] = None 22 | useCache: Optional[bool] = True 23 | 24 | 25 | class RunFileWithTypeService(BaseModel): 26 | name: str 27 | type: Literal["json", "html", "markdown"] 28 | variables: Optional[Dict[str, Any]] = None 29 | use_cache: Optional[bool] = True 30 | limit: Optional[int] = None 31 | skip: Optional[int] = None 32 | 33 | 34 | class RunFileWithTypeResponse(BaseModel): 35 | type: Literal["json", "html", "image", "markdown"] 36 | data: Any 37 | 38 | 39 | # ================================================ 40 | # RunFile 41 | # ================================================ 42 | 43 | 44 | class RunFileRequestBody(BaseModel): 45 | variables: Optional[Dict[str, Any]] = None 46 | runId: Optional[str] = None 47 | 48 | 49 | class RunFileService(BaseModel): 50 | name: str 51 | variables: Optional[Dict[str, Any]] = None 52 | run_id: Optional[str] = None 53 | 54 | 55 | # ================================================ 56 | # RunFileStream 57 | # ================================================ 58 | 59 | 60 | class RunFileStreamRequestBody(BaseModel): 61 | variables: Optional[Dict[str, Any]] = None 62 | 63 | 64 | class RunFileStreamService(BaseModel): 65 | name: str 66 | variables: Optional[Dict[str, Any]] = None 67 | 68 | 69 | # 
================================================ 70 | # RunResult 71 | # ================================================ 72 | 73 | 74 | class RunResultUnit(BaseModel): 75 | name: str 76 | status: str 77 | startedAt: str 78 | logs: List[str] 79 | outputs: List[str] 80 | endedAt: Optional[str] = None 81 | error: Optional[str] = None 82 | 83 | 84 | class RunResult(BaseModel): 85 | runId: str 86 | cells: List[RunResultUnit] 87 | status: str 88 | startedAt: str 89 | endedAt: Optional[str] = None 90 | error: Optional[str] = None 91 | 92 | 93 | # ================================================ 94 | # Upload File 95 | # ================================================ 96 | 97 | 98 | class UploadFileService(BaseModel): 99 | file: UploadFile = File(...) 100 | -------------------------------------------------------------------------------- /core/morph/api/error.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from typing import List, Optional 4 | 5 | 6 | class ApiBaseError(Exception): 7 | status: int 8 | code: str 9 | message: str 10 | detail: Optional[str] = None 11 | 12 | def __init__( 13 | self, code: dict[str, str], message: str, detail: Optional[str] = None 14 | ): 15 | self.code = code["code"] 16 | self.message = message 17 | self.detail = detail 18 | 19 | 20 | class WarningError(ApiBaseError): 21 | status = 200 22 | 23 | 24 | class RequestError(ApiBaseError): 25 | status = 400 26 | 27 | 28 | class AuthError(ApiBaseError): 29 | status = 401 30 | 31 | 32 | class InternalError(ApiBaseError): 33 | status = 500 34 | 35 | def __init__(self): 36 | self.code = "internal_server_error" 37 | self.message = "Unexpected error occurred while processing the request." 38 | 39 | 40 | class ErrorMessage: 41 | RequestErrorMessage = { 42 | "requestBodyInvalid": "Invalid request body.", 43 | } 44 | AuthErrorMessage = { 45 | "notAuthorized": "Not authorized.", 46 | "mockJsonInvalid": "Invalid mock json.", 47 | "tokenInvalid": "Invalid token.", 48 | } 49 | FileErrorMessage = { 50 | "notFound": "File not found.", 51 | "createFailed": "Failed to create file.", 52 | "formatInvalid": "Invalid file format.", 53 | } 54 | ExecutionErrorMessage = { 55 | "executionFailed": "Execution failed.", 56 | "unexpectedResult": "Unexpected result.", 57 | } 58 | 59 | 60 | class ErrorCode: 61 | RequestError = {"code": "request_error"} 62 | AuthError = {"code": "auth_error"} 63 | FileError = {"code": "file_error"} 64 | ExecutionError = {"code": "execution_error"} 65 | 66 | 67 | def render_error_html(error_messages: List[str]) -> str: 68 | error_traceback = "\n".join( 69 | [ 70 | f"<p>Error {i+1}:</p><pre>{error}</pre>" 71 | for i, error in enumerate(error_messages) 72 | ] 73 | ) 74 | return f"""<!DOCTYPE html> 75 | <html lang="en"> 76 | <head> 77 | <meta charset="UTF-8"> 78 | <meta name="viewport" content="width=device-width, initial-scale=1.0"> 79 | <title>Internal Server Error 80 | 107 | 108 | 109 |
110 |         <div class="error-container">
            <h1>Internal Server Error</h1>
            <p>The server encountered an internal error and was unable to complete your request.</p>
            <p>Traceback:</p>
113 | {error_traceback} 114 |
115 | 116 | """ 117 | -------------------------------------------------------------------------------- /core/morph/api/handler.py: -------------------------------------------------------------------------------- 1 | import json 2 | import logging 3 | from typing import Any, Literal, Optional 4 | 5 | from fastapi import APIRouter, File, Header, Security, UploadFile 6 | from fastapi.responses import StreamingResponse 7 | from pydantic import ValidationError 8 | 9 | from morph.api.auth import auth 10 | from morph.api.custom_types import ( 11 | RunFileRequestBody, 12 | RunFileService, 13 | RunFileStreamRequestBody, 14 | RunFileStreamService, 15 | RunFileWithTypeRequestBody, 16 | RunFileWithTypeResponse, 17 | RunFileWithTypeService, 18 | SuccessResponse, 19 | UploadFileService, 20 | ) 21 | from morph.api.error import ( 22 | ApiBaseError, 23 | AuthError, 24 | ErrorCode, 25 | ErrorMessage, 26 | InternalError, 27 | RequestError, 28 | ) 29 | from morph.api.service import ( 30 | file_upload_service, 31 | list_resource_service, 32 | run_file_service, 33 | run_file_stream_service, 34 | run_file_with_type_service, 35 | ) 36 | 37 | logger = logging.getLogger(__name__) 38 | router = APIRouter() 39 | 40 | 41 | @router.post("/cli/run-stream/{name}") 42 | async def vm_run_file_stream( 43 | name: str, 44 | body: RunFileStreamRequestBody, 45 | authorization: str = Header(None), 46 | x_api_key: str = Header(None), 47 | ) -> StreamingResponse: 48 | try: 49 | await auth(authorization, x_api_key) 50 | input = RunFileStreamService( 51 | name=name, 52 | variables=body.variables, 53 | ) 54 | except ValidationError: # noqa 55 | content = '3:"Invalid request body."\n\n' 56 | return StreamingResponse(content=content, media_type="text/event-stream") 57 | except AuthError: 58 | content = '3:"Not Authorized."\n\n' 59 | return StreamingResponse(content=content, media_type="text/event-stream") 60 | 61 | is_error = False 62 | 63 | async def _wrapped_generator(): 64 | nonlocal is_error 65 | try: 66 | async for chunk in run_file_stream_service(input): 67 | yield chunk 68 | except Exception as e: 69 | is_error = True 70 | raise e 71 | 72 | generator = _wrapped_generator() 73 | 74 | error = None 75 | first_chunk = None 76 | try: 77 | first_chunk = await generator.__anext__() 78 | except Exception as e: 79 | is_error = True 80 | error = e 81 | 82 | if is_error: 83 | if isinstance(error, ApiBaseError): 84 | return StreamingResponse( 85 | content=json.dumps( 86 | { 87 | "error": { 88 | "code": error.code, 89 | "message": error.message, 90 | "detail": error.detail, 91 | } 92 | } 93 | ), 94 | status_code=InternalError().status, 95 | media_type="text/event-stream", 96 | headers={ 97 | "Transfer-Encoding": "chunked", 98 | "Content-Type": "text/event-stream", 99 | }, 100 | ) 101 | return StreamingResponse( 102 | content=json.dumps( 103 | { 104 | "error": { 105 | "code": InternalError().code, 106 | "message": InternalError().message, 107 | "detail": str(error), 108 | } 109 | } 110 | ), 111 | status_code=InternalError().status, 112 | media_type="text/event-stream", 113 | headers={ 114 | "Transfer-Encoding": "chunked", 115 | "Content-Type": "text/event-stream", 116 | }, 117 | ) 118 | 119 | async def _generate_content(): 120 | if first_chunk: 121 | yield first_chunk 122 | async for chunk in generator: 123 | yield chunk 124 | 125 | return StreamingResponse( 126 | content=_generate_content(), 127 | status_code=200, 128 | media_type="text/event-stream", 129 | headers={"Transfer-Encoding": "chunked", "Content-Type": "text/event-stream"}, 130 | 
) 131 | 132 | 133 | @router.post("/cli/run/{name}/{type}") 134 | def run_file_with_type( 135 | name: str, 136 | type: Literal["json", "html", "markdown"], 137 | body: RunFileWithTypeRequestBody, 138 | limit: Optional[int] = None, 139 | skip: Optional[int] = None, 140 | _: str = Security(auth), 141 | ) -> RunFileWithTypeResponse: 142 | try: 143 | input = RunFileWithTypeService( 144 | name=name, 145 | type=type, 146 | variables=body.variables, 147 | use_cache=body.useCache, 148 | limit=limit, 149 | skip=skip, 150 | ) 151 | except ValidationError as e: 152 | error_messages = " ".join([str(err["msg"]) for err in e.errors()]) 153 | raise RequestError( 154 | ErrorCode.RequestError, 155 | ErrorMessage.RequestErrorMessage["requestBodyInvalid"], 156 | error_messages, 157 | ) 158 | return run_file_with_type_service(input) 159 | 160 | 161 | @router.post("/cli/run/{name}") 162 | def run_file( 163 | name: str, 164 | body: RunFileRequestBody, 165 | _: str = Security(auth), 166 | ) -> SuccessResponse: 167 | try: 168 | input = RunFileService( 169 | name=name, 170 | variables=body.variables, 171 | run_id=body.runId, 172 | ) 173 | except ValidationError as e: 174 | error_messages = " ".join([str(err["msg"]) for err in e.errors()]) 175 | raise RequestError( 176 | ErrorCode.RequestError, 177 | ErrorMessage.RequestErrorMessage["requestBodyInvalid"], 178 | error_messages, 179 | ) 180 | return run_file_service(input) 181 | 182 | 183 | @router.get("/cli/resource") 184 | def list_resource( 185 | _: str = Security(auth), 186 | ) -> Any: 187 | return list_resource_service() 188 | 189 | 190 | @router.post("/cli/file-upload") 191 | async def file_upload( 192 | file: UploadFile = File(...), 193 | _: str = Security(auth), 194 | ) -> Any: 195 | try: 196 | input = UploadFileService(file=file) 197 | except ValidationError as e: 198 | error_messages = " ".join([str(err["msg"]) for err in e.errors()]) 199 | raise RequestError( 200 | ErrorCode.RequestError, 201 | ErrorMessage.RequestErrorMessage["requestBodyInvalid"], 202 | error_messages, 203 | ) 204 | return await file_upload_service(input) 205 | -------------------------------------------------------------------------------- /core/morph/api/plugin.py: -------------------------------------------------------------------------------- 1 | from fastapi import FastAPI 2 | 3 | plugin_app = FastAPI() 4 | -------------------------------------------------------------------------------- /core/morph/api/templates/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | {% inertia_head %} 7 | 8 | 9 | 10 | {% if local_dev_mode and frontend_url %} 11 | 18 | {% endif %} 19 | 20 | 21 | {% inertia_body %} 22 | 23 | 24 | -------------------------------------------------------------------------------- /core/morph/api/utils.py: -------------------------------------------------------------------------------- 1 | import json 2 | import sys 3 | from typing import Any, Dict, Literal, Optional, Union 4 | 5 | import pandas as pd 6 | from morph_lib.types import HtmlResponse, MarkdownResponse 7 | 8 | from morph.task.utils.run_backend.output import StreamChatResponse 9 | 10 | 11 | def convert_file_output( 12 | type: Literal["json", "html", "markdown"], 13 | output: Any, 14 | limit: Optional[int] = None, 15 | skip: Optional[int] = None, 16 | ) -> Union[str, Dict[str, Any], Any]: 17 | transformed_output: Any = output 18 | 19 | if type == "json": 20 | if isinstance(output, pd.DataFrame) or ( 21 | hasattr(output, "__class__") 22 | and 
output.__class__.__name__.endswith("DataFrame") 23 | ): 24 | df = transformed_output 25 | count = len(df) 26 | limit = limit if limit is not None else len(df) 27 | skip = skip if skip is not None else 0 28 | df = df.iloc[skip : skip + limit] 29 | df = df.replace({float("nan"): None, pd.NaT: None}).to_dict( 30 | orient="records" 31 | ) 32 | return {"count": count, "items": df} 33 | elif isinstance(output, dict) or isinstance(output, list): 34 | transformed_output = json.dumps(output, indent=4, ensure_ascii=False) 35 | elif isinstance(output, StreamChatResponse): 36 | transformed_output = json.dumps( 37 | output.model_dump(), indent=4, ensure_ascii=False 38 | ) 39 | else: 40 | raise Exception(f"Invalid output type: type='json' value={output}") 41 | elif type == "html" or type == "markdown": 42 | if isinstance(output, HtmlResponse): 43 | return output.value 44 | elif isinstance(output, MarkdownResponse): 45 | return output.value 46 | else: 47 | return output 48 | raise Exception(f"Invalid output type: type={type}") 49 | 50 | 51 | def convert_variables_values(variables: Optional[Dict[str, Any]]) -> Dict[str, Any]: 52 | if variables is None: 53 | return {} 54 | variables_: Dict[str, Any] = {} 55 | for k, v in variables.items(): 56 | if isinstance(v, str): 57 | if v.isdigit(): 58 | variables_[k] = int(v) 59 | continue 60 | try: 61 | f_v = float(v) 62 | variables_[k] = f_v 63 | continue 64 | except ValueError: 65 | pass 66 | variables_[k] = v 67 | return variables_ 68 | 69 | 70 | def set_command_args(): 71 | if len(sys.argv) < 2: 72 | sys.argv = ["", "serve"] 73 | -------------------------------------------------------------------------------- /core/morph/cli/README.md: -------------------------------------------------------------------------------- 1 | # Adding a new command 2 | 3 | ## `main.py` 4 | 5 | Add the new command with all necessary decorators. Every command will need at minimum: 6 | 7 | - a decorator for the click group it belongs to which also names the command 8 | - the postflight decorator (must come before other decorators from the requires module for error handling) 9 | - the preflight decorator 10 | 11 | ```python 12 | @cli.command("my-new-command") 13 | @requires.postflight 14 | @requires.preflight 15 | def my_new_command(ctx, **kwargs): 16 | ... 
17 | ``` 18 | 19 | # Exception Handling 20 | 21 | ## `requires.py` 22 | 23 | ### `postflight` 24 | -------------------------------------------------------------------------------- /core/morph/cli/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/morph-data/morph/876d78e7c1b72d53d66e08197b5c5f02f389d2be/core/morph/cli/__init__.py -------------------------------------------------------------------------------- /core/morph/cli/main.py: -------------------------------------------------------------------------------- 1 | # type: ignore 2 | 3 | from __future__ import annotations 4 | 5 | import functools 6 | from typing import Callable, Dict, Optional, Tuple, Union 7 | 8 | import click 9 | 10 | from morph.cli import params, requires 11 | from morph.cli.flags import check_version_warning 12 | 13 | 14 | def global_flags( 15 | func: Callable[..., Tuple[Union[Dict[str, Union[str, int, bool]], None], bool]] 16 | ) -> Callable[..., Tuple[Union[Dict[str, Union[str, int, bool]], None], bool]]: 17 | @params.log_format 18 | @functools.wraps(func) 19 | def wrapper( 20 | *args: Tuple[Union[Dict[str, Union[str, int, bool]], None], bool], 21 | **kwargs: Dict[str, Union[str, int, bool]], 22 | ) -> Tuple[Union[Dict[str, Union[str, int, bool]], None], bool]: 23 | ctx = click.get_current_context() 24 | 25 | if ctx.info_name == "serve": 26 | # Warn about version before running the command 27 | check_version_warning() 28 | else: 29 | # Warn about version after running the command 30 | ctx.call_on_close(check_version_warning) 31 | 32 | return func(*args, **kwargs) 33 | 34 | return wrapper 35 | 36 | 37 | @click.group( 38 | context_settings={"help_option_names": ["-h", "--help"]}, 39 | invoke_without_command=True, 40 | no_args_is_help=True, 41 | epilog="Specify one of these sub-commands and you can find more help from there.", 42 | ) 43 | @click.version_option( 44 | package_name="morph-data", 45 | prog_name="morph", 46 | message="morph-data CLI version: %(version)s", 47 | ) 48 | @click.pass_context 49 | @global_flags 50 | def cli(ctx: click.Context, **kwargs: Dict[str, Union[str, int, bool]]) -> None: 51 | """A data analysis tool for transformations, visualization by using SQL and Python. 
52 | For more information on these commands, visit: docs.morph-data.io 53 | """ 54 | 55 | 56 | @cli.command("config") 57 | @params.profile 58 | @click.pass_context 59 | @global_flags 60 | @requires.preflight 61 | @requires.postflight 62 | def config( 63 | ctx: click.Context, **kwargs: Dict[str, Union[str, int, bool]] 64 | ) -> Tuple[Union[Dict[str, Union[str, int, bool]], None], bool]: 65 | """Configure morph credentials to run project.""" 66 | from morph.task.config import ConfigTask 67 | 68 | task = ConfigTask(ctx.obj["flags"]) 69 | results = task.run() 70 | return results, True 71 | 72 | 73 | @cli.command("new") 74 | @click.argument("directory_name", required=False) 75 | @params.project_id 76 | @click.pass_context 77 | @global_flags 78 | @requires.preflight 79 | @requires.postflight 80 | def new( 81 | ctx: click.Context, 82 | directory_name: Optional[str], 83 | **kwargs: Dict[str, Union[str, int, bool]], 84 | ) -> Tuple[Union[Dict[str, Union[str, int, bool]], None], bool]: 85 | """Create a new morph project.""" 86 | from morph.task.new import NewTask 87 | 88 | task = NewTask(ctx.obj["flags"], directory_name) 89 | results = task.run() 90 | return results, True 91 | 92 | 93 | @cli.command("compile") 94 | @click.option("--force", "-f", is_flag=True, help="Force compile.") 95 | @click.pass_context 96 | @global_flags 97 | @params.verbose 98 | @requires.preflight 99 | @requires.postflight 100 | def compile( 101 | ctx: click.Context, force: bool, **kwargs: Dict[str, Union[str, int, bool]] 102 | ) -> Tuple[None, bool]: 103 | """Analyse morph functions into indexable objects.""" 104 | from morph.task.compile import CompileTask 105 | 106 | task = CompileTask(ctx.obj["flags"], force=force) 107 | task.run() 108 | return None, True 109 | 110 | 111 | @cli.command("run") 112 | @click.argument("filename", required=True) 113 | @click.pass_context 114 | @global_flags 115 | @params.data 116 | @params.run_id 117 | @params.dag 118 | @requires.preflight 119 | @requires.postflight 120 | def run( 121 | ctx: click.Context, **kwargs: Dict[str, Union[str, int, bool]] 122 | ) -> Tuple[Union[Dict[str, Union[str, int, bool]], None], bool]: 123 | """Run sql and python file and bring the results in output file.""" 124 | from morph.task.run import RunTask 125 | 126 | task = RunTask(ctx.obj["flags"]) 127 | results = task.run() 128 | 129 | return results, True 130 | 131 | 132 | @cli.command("clean") 133 | @params.verbose 134 | @params.force 135 | @click.pass_context 136 | @global_flags 137 | @requires.preflight 138 | @requires.postflight 139 | def clean( 140 | ctx: click.Context, **kwargs: Dict[str, Union[str, int, bool]] 141 | ) -> Tuple[None, bool]: 142 | """Clean all the cache and garbage in Morph project.""" 143 | from morph.task.clean import CleanTask 144 | 145 | task = CleanTask(ctx.obj["flags"]) 146 | task.run() 147 | 148 | return None, True 149 | 150 | 151 | @cli.command("deploy") 152 | @params.no_cache 153 | @params.verbose 154 | @click.pass_context 155 | @global_flags 156 | @requires.preflight 157 | @requires.postflight 158 | def deploy( 159 | ctx: click.Context, no_cache: bool, **kwargs: Dict[str, Union[str, int, bool]] 160 | ) -> Tuple[Union[Dict[str, Union[str, int, bool]], None], bool]: 161 | """Deploy morph project to the cloud.""" 162 | from morph.task.deploy import DeployTask 163 | 164 | task = DeployTask(ctx.obj["flags"]) 165 | results = task.run() 166 | return results, True 167 | 168 | 169 | @cli.command("serve") 170 | @params.workdir 171 | @click.pass_context 172 | @global_flags 173 | 
@requires.preflight 174 | @requires.postflight 175 | def serve( 176 | ctx: click.Context, **kwargs: Dict[str, Union[str, int, bool]] 177 | ) -> Tuple[None, bool]: 178 | """Launch API server.""" 179 | from morph.task.api import ApiTask 180 | 181 | task = ApiTask(ctx.obj["flags"]) 182 | task.run() 183 | 184 | return None, True 185 | 186 | 187 | @cli.command("init") 188 | @click.pass_context 189 | @global_flags 190 | @requires.preflight 191 | @requires.postflight 192 | def init( 193 | ctx: click.Context, **kwargs: Dict[str, Union[str, int, bool]] 194 | ) -> Tuple[Union[Dict[str, Union[str, int, bool]], None], bool]: 195 | """Initialize morph connection setting to run project.""" 196 | from morph.task.init import InitTask 197 | 198 | task = InitTask(ctx.obj["flags"]) 199 | results = task.run() 200 | return results, True 201 | 202 | 203 | @cli.command("context") 204 | @params.output 205 | @click.pass_context 206 | @global_flags 207 | @requires.preflight 208 | @requires.postflight 209 | def context( 210 | ctx: click.Context, **kwargs: Dict[str, Union[str, int, bool]] 211 | ) -> Tuple[None, bool]: 212 | """Print or save the user information context.""" 213 | from morph.task.context import ContextTask 214 | 215 | task = ContextTask(ctx.obj["flags"]) 216 | task.run() 217 | 218 | return None, True 219 | 220 | 221 | @cli.command("add") 222 | @click.argument("plugin_name", required=True) 223 | @click.pass_context 224 | @global_flags 225 | @requires.preflight 226 | @requires.postflight 227 | def add_plugin( 228 | ctx: click.Context, **kwargs: Dict[str, Union[str, int, bool]] 229 | ) -> Tuple[None, bool]: 230 | """Add a plugin to your project.""" 231 | from morph.task.plugin import PluginTask 232 | 233 | task = PluginTask(ctx.obj["flags"]) 234 | task.run() 235 | 236 | return None, True 237 | -------------------------------------------------------------------------------- /core/morph/cli/params.py: -------------------------------------------------------------------------------- 1 | import ast 2 | import json 3 | import re 4 | from typing import Any 5 | 6 | import click 7 | 8 | log_format = click.option( 9 | "--log-format", 10 | envvar="MORPH_LOG_FORMAT", 11 | help="Specify the format of logging to the console and the log file. 
Use --log-format-file to configure the format for the log file differently than the console.", 12 | type=click.Choice(["text", "debug", "json", "default"], case_sensitive=False), 13 | default="default", 14 | ) 15 | 16 | 17 | def parse_key_value(ctx, param, value): 18 | data_dict: dict[str, Any] = {} 19 | 20 | def format_to_json(val: str) -> str: 21 | def add_quotes(match): 22 | key, val = match.groups() 23 | key = key.strip() 24 | val = val.strip() 25 | return f'"{key}": "{val}"' 26 | 27 | formatted = re.sub(r"\{(\w+):(\w+)\}", lambda m: "{" + add_quotes(m) + "}", val) 28 | return formatted 29 | 30 | def convert_value(val: str) -> Any: 31 | if val.lower() == "true": 32 | return True 33 | elif val.lower() == "false": 34 | return False 35 | if val.isdigit(): 36 | return int(val) 37 | try: 38 | return float(val) 39 | except ValueError: 40 | pass 41 | 42 | if val.startswith("[") and val.endswith("]"): 43 | try: 44 | parsed_list = json.loads(val.replace("'", '"')) 45 | if isinstance(parsed_list, list): 46 | return parsed_list 47 | except json.JSONDecodeError: 48 | pass 49 | 50 | try: 51 | parsed_list = ast.literal_eval(val) 52 | if isinstance(parsed_list, list): 53 | return parsed_list 54 | except (ValueError, SyntaxError): 55 | pass 56 | 57 | if re.match(r"\{(\w+):(\w+)\}", val): 58 | val = format_to_json(val) 59 | try: 60 | parsed_json = json.loads(val) 61 | if isinstance(parsed_json, dict): 62 | return parsed_json 63 | except json.JSONDecodeError: 64 | pass 65 | try: 66 | parsed_dict = ast.literal_eval(val) 67 | if isinstance(parsed_dict, dict): 68 | return parsed_dict 69 | except (ValueError, SyntaxError): 70 | pass 71 | 72 | return val 73 | 74 | for item in value: 75 | try: 76 | key, val = item.split("=", 1) 77 | data_dict[key.strip()] = convert_value(val.strip()) 78 | except ValueError: 79 | raise click.BadParameter(f"'{item}' is not a valid key=value pair") 80 | 81 | return data_dict 82 | 83 | 84 | data = click.option( 85 | "-d", 86 | "--data", 87 | multiple=True, 88 | callback=parse_key_value, 89 | help="Key-value pairs in the form key=value", 90 | ) 91 | 92 | run_id = click.option( 93 | "--run-id", 94 | "-c", 95 | help="Specify the run id.", 96 | ) 97 | 98 | dag = click.option( 99 | "--dag", 100 | is_flag=True, 101 | help="Run as a Directed Acyclic Graph (DAG).", 102 | ) 103 | 104 | verbose = click.option( 105 | "--verbose", 106 | "-v", 107 | is_flag=True, 108 | help="Enable verbose mode.", 109 | ) 110 | 111 | workdir = click.option( 112 | "--workdir", 113 | type=str, 114 | help="Specify the project workdir.", 115 | ) 116 | 117 | profile = click.option( 118 | "--profile", 119 | type=str, 120 | help="Specify the profile name.", 121 | ) 122 | 123 | project_id = click.option( 124 | "--project-id", 125 | type=str, 126 | help="Specify the project id.", 127 | ) 128 | 129 | no_cache = click.option( 130 | "--no-cache", 131 | is_flag=True, 132 | help="Disable cache.", 133 | ) 134 | 135 | output = click.option( 136 | "--output", 137 | "-o", 138 | type=str, 139 | help="Specify output file path.", 140 | ) 141 | 142 | force = click.option( 143 | "--force", 144 | "-f", 145 | is_flag=True, 146 | help="Force execution.", 147 | ) 148 | -------------------------------------------------------------------------------- /core/morph/cli/requires.py: -------------------------------------------------------------------------------- 1 | from functools import update_wrapper 2 | 3 | from click import Context 4 | 5 | from morph.cli.flags import Flags, set_flags 6 | 7 | 8 | def preflight(func): 9 | def 
wrapper(*args, **kwargs): 10 | ctx = args[0] 11 | assert isinstance(ctx, Context) 12 | ctx.obj = ctx.obj or {} 13 | 14 | # Flags 15 | flags = Flags(ctx) 16 | ctx.obj["flags"] = flags 17 | set_flags(flags) 18 | 19 | return func(*args, **kwargs) 20 | 21 | return update_wrapper(wrapper, func) 22 | 23 | 24 | def postflight(func): 25 | def wrapper(*args, **kwargs): 26 | # ctx = args[0] 27 | success = False 28 | 29 | try: 30 | result, success = func(*args, **kwargs) 31 | except Exception as e: 32 | raise e 33 | finally: 34 | pass 35 | return (result, success) 36 | 37 | return update_wrapper(wrapper, func) 38 | -------------------------------------------------------------------------------- /core/morph/cli/types.py: -------------------------------------------------------------------------------- 1 | from enum import Enum 2 | from typing import List 3 | 4 | 5 | class Command(Enum): 6 | INIT = "init" 7 | RUN = "run" 8 | 9 | @classmethod 10 | def from_str(cls, s: str) -> "Command": 11 | try: 12 | return cls(s) 13 | except ValueError: 14 | raise Exception(f"No value '{s}' exists in Command enum") 15 | 16 | def to_list(self) -> List[str]: 17 | return { 18 | Command.RUN: ["run", "--dag"], 19 | }.get(self, [self.value]) 20 | -------------------------------------------------------------------------------- /core/morph/config/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/morph-data/morph/876d78e7c1b72d53d66e08197b5c5f02f389d2be/core/morph/config/__init__.py -------------------------------------------------------------------------------- /core/morph/constants.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | 4 | class MorphConstant: 5 | """Directories""" 6 | 7 | INIT_DIR = os.path.expanduser("~/.morph") 8 | TMP_MORPH_DIR = "/tmp/morph" 9 | PLUGIN_DIR = "src/plugin" 10 | 11 | @staticmethod 12 | def frontend_dir(project_root: str) -> str: 13 | return os.path.join(project_root, ".morph", "frontend") 14 | 15 | """ Files """ 16 | MORPH_CRED_PATH = os.path.expanduser("~/.morph/credentials") 17 | MORPH_CONNECTION_PATH = os.path.expanduser("~/.morph/connections.yml") 18 | 19 | """ Others """ 20 | EXECUTABLE_EXTENSIONS = [".sql", ".py"] 21 | -------------------------------------------------------------------------------- /core/morph/include/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/morph-data/morph/876d78e7c1b72d53d66e08197b5c5f02f389d2be/core/morph/include/__init__.py -------------------------------------------------------------------------------- /core/morph/include/starter_template/.env: -------------------------------------------------------------------------------- 1 | # Environment variables defined here are loaded automatically by morph framework. 
2 | -------------------------------------------------------------------------------- /core/morph/include/starter_template/.gitignore: -------------------------------------------------------------------------------- 1 | # Morph config files and directories 2 | .morph 3 | 4 | # Morph build output 5 | dist 6 | 7 | # Node.js 8 | node_modules 9 | 10 | # Python 11 | __pycache__ 12 | .pytest_cache/ 13 | 14 | # Virtual environments 15 | env/ 16 | venv/ 17 | ENV/ 18 | env.bak/ 19 | venv.bak/ 20 | 21 | # Environment variables 22 | .env 23 | 24 | # IDE / Editor directories 25 | .vscode/ 26 | .idea/ 27 | 28 | # Git 29 | .gitconfig 30 | 31 | # OS 32 | .DS_Store 33 | -------------------------------------------------------------------------------- /core/morph/include/starter_template/.mock_user_context.json: -------------------------------------------------------------------------------- 1 | { 2 | "user_id": "cea122ea-b240-49d7-ae7f-8b1e3d40dd8f", 3 | "email": "mock_user@morph-data.io", 4 | "username": "mock_user", 5 | "first_name": "Mock", 6 | "last_name": "User", 7 | "roles": ["Admin"] 8 | } 9 | -------------------------------------------------------------------------------- /core/morph/include/starter_template/README.md: -------------------------------------------------------------------------------- 1 | Welcome to your new Morph project! 2 | 3 | ### Using the starter project 4 | 5 | Try running the following commands: 6 | 7 | - Run your data app 8 | 9 | ```bash 10 | morph serve 11 | ``` 12 | 13 | ### Resources 14 | - Learn more about Morph [in the docs](https://docs.morph-data.io) 15 | - Check out [the blog](https://www.morph-data.io/blogs) 16 | 17 | ### Getting started 18 | 19 | Write connection info in `~/.morph/config.yml` like this: 20 | 21 | ```yml 22 | connections: 23 | connection_name: 24 | type: postgres 25 | host: localhost 26 | port: 5432 27 | user: postgres 28 | password: postgres 29 | ``` 30 | 31 | You can set it up by running the following command: 32 | 33 | ```bash 34 | morph config 35 | ``` 36 | -------------------------------------------------------------------------------- /core/morph/include/starter_template/components.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "https://ui.shadcn.com/schema.json", 3 | "style": "new-york", 4 | "rsc": false, 5 | "tsx": true, 6 | "tailwind": { 7 | "config": "", 8 | "css": "src/pages/index.css", 9 | "baseColor": "neutral", 10 | "cssVariables": true, 11 | "prefix": "" 12 | }, 13 | "aliases": { 14 | "components": "@/pages/_components", 15 | "utils": "@/pages/_lib/utils", 16 | "ui": "@/pages/_components/ui", 17 | "lib": "@/pages/_lib", 18 | "hooks": "@/pages/_hooks" 19 | }, 20 | "iconLibrary": "lucide" 21 | } 22 | -------------------------------------------------------------------------------- /core/morph/include/starter_template/morph_project.yml: -------------------------------------------------------------------------------- 1 | default_connection: DUCKDB 2 | source_paths: 3 | - src 4 | -------------------------------------------------------------------------------- /core/morph/include/starter_template/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "morph-project", 3 | "private": true, 4 | "version": "0.0.0", 5 | "type": "module", 6 | "scripts": { 7 | "dev": "morph-frontend dev", 8 | "build": "morph-frontend build" 9 | }, 10 | "devDependencies": { 11 | "@tailwindcss/vite": "^4.0.14", 12 | "@types/react": "^18.3.18", 13 | 
"@types/react-dom": "^18.3.5", 14 | "vite": "^6.2.1" 15 | }, 16 | "dependencies": { 17 | "@morph-data/frontend": "0.3.0-beta.8", 18 | "class-variance-authority": "^0.7.1", 19 | "react": "^18.3.1", 20 | "react-dom": "^18.3.1", 21 | "react-error-boundary": "^5.0.0", 22 | "tailwind-merge": "^3.0.2", 23 | "tailwindcss": "^4.0.14", 24 | "tailwindcss-animate": "^1.0.7" 25 | } 26 | } 27 | -------------------------------------------------------------------------------- /core/morph/include/starter_template/src/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/morph-data/morph/876d78e7c1b72d53d66e08197b5c5f02f389d2be/core/morph/include/starter_template/src/__init__.py -------------------------------------------------------------------------------- /core/morph/include/starter_template/src/pages/404.tsx: -------------------------------------------------------------------------------- 1 | export default function NotFound() { 2 | return ( 3 | <> 4 |

404

5 |

Page not found

6 | 7 | ); 8 | } 9 | -------------------------------------------------------------------------------- /core/morph/include/starter_template/src/pages/_app.tsx: -------------------------------------------------------------------------------- 1 | import { Head } from "@morph-data/frontend/components"; 2 | import { TableOfContents } from "./_components/table-of-contents"; 3 | import { Header } from "./_components/header"; 4 | import { 5 | usePageMeta, 6 | MdxComponentsProvider, 7 | Outlet, 8 | useRefresh, 9 | extractComponents, 10 | } from "@morph-data/frontend/components"; 11 | import { ErrorBoundary } from "react-error-boundary"; 12 | import { Callout } from "@/pages/_components/ui/callout"; 13 | 14 | import "./index.css"; 15 | 16 | const uiComponents = extractComponents( 17 | import.meta.glob("./_components/ui/**/*.tsx", { 18 | eager: true, 19 | }) 20 | ); 21 | 22 | const morphComponents = extractComponents( 23 | import.meta.glob("./_components/*.tsx", { 24 | eager: true, 25 | }) 26 | ); 27 | 28 | export default function App() { 29 | const pageMeta = usePageMeta(); 30 | 31 | useRefresh(); 32 | 33 | return ( 34 | <> 35 | 36 | {pageMeta?.title} 37 | 38 | 39 | 42 |
43 | 44 | 45 | {pageMeta && } 46 | 47 | 48 | 49 |
50 |
51 |
52 | ( 54 | 55 | {typeof error.message === "string" 56 | ? error.message 57 | : "Something went wrong"} 58 | 59 | )} 60 | > 61 | 62 | 63 |
64 |
65 | 69 |
70 |
71 |
72 |
73 |
74 | 75 | ); 76 | } 77 | 78 | export const Catch = () => ( 79 | 80 | Something went wrong 81 | 82 | ); 83 | -------------------------------------------------------------------------------- /core/morph/include/starter_template/src/pages/_components/header.tsx: -------------------------------------------------------------------------------- 1 | import { 2 | DropdownMenu, 3 | DropdownMenuTrigger, 4 | DropdownMenuContent, 5 | DropdownMenuItem, 6 | } from "@/pages/_components/ui/dropdown-menu"; 7 | import { Button } from "@/pages/_components/ui/button"; 8 | import { usePages, Link } from "@morph-data/frontend/components"; 9 | import { PropsWithChildren } from "react"; 10 | 11 | const Root = ({ children }: PropsWithChildren) => { 12 | return
{children}
; 13 | }; 14 | 15 | const DropDownMenu = () => { 16 | const pages = usePages(); 17 | 18 | return ( 19 | 20 | 21 | 39 | 40 | 41 | {pages.map((page) => ( 42 | 43 | 44 | {page.title} 45 | 46 | 47 | ))} 48 | {/* {props.showAdminPage && ( 49 | <> 50 | 51 | 52 | Admin Page 53 | 54 | 55 | )} */} 56 | 57 | 58 | ); 59 | }; 60 | 61 | const PageTitle = ({ title }: { title: string }) => { 62 | return
{title}
; 63 | }; 64 | 65 | const Spacer = () =>
; 66 | 67 | const MorphLogo = () => ( 68 |
69 | Made with 70 | 71 | Morph 76 | 77 |
78 | ); 79 | 80 | export const Header = { 81 | Root, 82 | DropDownMenu, 83 | PageTitle, 84 | Spacer, 85 | MorphLogo, 86 | }; 87 | -------------------------------------------------------------------------------- /core/morph/include/starter_template/src/pages/_components/table-of-contents.tsx: -------------------------------------------------------------------------------- 1 | import { 2 | HoverCard, 3 | HoverCardTrigger, 4 | HoverCardContent, 5 | } from "@/pages/_components/ui/hover-card"; 6 | import { Button } from "@/pages/_components/ui/button"; 7 | import { LucideTableOfContents } from "lucide-react"; 8 | import { cn } from "@/pages/_lib/utils"; 9 | import { Toc } from "@morph-data/frontend/components"; 10 | 11 | export interface TocProps { 12 | toc?: Toc; 13 | className?: string; 14 | } 15 | 16 | export const TableOfContents: React.FC = ({ toc, className }) => { 17 | if (!toc) { 18 | return null; 19 | } 20 | 21 | return ( 22 | <> 23 | 30 |
31 | 32 | 33 | 36 | 37 | 38 |
39 | {toc.map((entry) => ( 40 | 41 | ))} 42 |
43 |
44 |
45 |
46 | 47 | ); 48 | }; 49 | 50 | const Heading = ({ entry }: { entry: Toc[number] }) => { 51 | return ( 52 | <> 53 | 57 | {entry.value} 58 | 59 | {entry.children?.map((child) => ( 60 | 61 | ))} 62 | 63 | ); 64 | }; 65 | -------------------------------------------------------------------------------- /core/morph/include/starter_template/src/pages/_lib/utils.ts: -------------------------------------------------------------------------------- 1 | import { clsx, type ClassValue } from "clsx"; 2 | import { twMerge } from "tailwind-merge"; 3 | 4 | export function cn(...inputs: ClassValue[]) { 5 | return twMerge(clsx(inputs)); 6 | } 7 | -------------------------------------------------------------------------------- /core/morph/include/starter_template/src/pages/index.css: -------------------------------------------------------------------------------- 1 | @import "tailwindcss"; 2 | 3 | @plugin "tailwindcss-animate"; 4 | 5 | @custom-variant dark (&:is(.dark *)); 6 | 7 | :root { 8 | --background: oklch(1 0 0); 9 | --foreground: oklch(0.145 0 0); 10 | --card: oklch(1 0 0); 11 | --card-foreground: oklch(0.145 0 0); 12 | --popover: oklch(1 0 0); 13 | --popover-foreground: oklch(0.145 0 0); 14 | --primary: oklch(0.205 0 0); 15 | --primary-foreground: oklch(0.985 0 0); 16 | --secondary: oklch(0.97 0 0); 17 | --secondary-foreground: oklch(0.205 0 0); 18 | --muted: oklch(0.97 0 0); 19 | --muted-foreground: oklch(0.556 0 0); 20 | --accent: oklch(0.97 0 0); 21 | --accent-foreground: oklch(0.205 0 0); 22 | --destructive: oklch(0.577 0.245 27.325); 23 | --destructive-foreground: oklch(0.577 0.245 27.325); 24 | --border: oklch(0.922 0 0); 25 | --input: oklch(0.922 0 0); 26 | --ring: oklch(0.708 0 0); 27 | --chart-1: oklch(0.646 0.222 41.116); 28 | --chart-2: oklch(0.6 0.118 184.704); 29 | --chart-3: oklch(0.398 0.07 227.392); 30 | --chart-4: oklch(0.828 0.189 84.429); 31 | --chart-5: oklch(0.769 0.188 70.08); 32 | --radius: 0.625rem; 33 | --sidebar: oklch(0.985 0 0); 34 | --sidebar-foreground: oklch(0.145 0 0); 35 | --sidebar-primary: oklch(0.205 0 0); 36 | --sidebar-primary-foreground: oklch(0.985 0 0); 37 | --sidebar-accent: oklch(0.97 0 0); 38 | --sidebar-accent-foreground: oklch(0.205 0 0); 39 | --sidebar-border: oklch(0.922 0 0); 40 | --sidebar-ring: oklch(0.708 0 0); 41 | } 42 | 43 | .dark { 44 | --background: oklch(0.145 0 0); 45 | --foreground: oklch(0.985 0 0); 46 | --card: oklch(0.145 0 0); 47 | --card-foreground: oklch(0.985 0 0); 48 | --popover: oklch(0.145 0 0); 49 | --popover-foreground: oklch(0.985 0 0); 50 | --primary: oklch(0.985 0 0); 51 | --primary-foreground: oklch(0.205 0 0); 52 | --secondary: oklch(0.269 0 0); 53 | --secondary-foreground: oklch(0.985 0 0); 54 | --muted: oklch(0.269 0 0); 55 | --muted-foreground: oklch(0.708 0 0); 56 | --accent: oklch(0.269 0 0); 57 | --accent-foreground: oklch(0.985 0 0); 58 | --destructive: oklch(0.396 0.141 25.723); 59 | --destructive-foreground: oklch(0.637 0.237 25.331); 60 | --border: oklch(0.269 0 0); 61 | --input: oklch(0.269 0 0); 62 | --ring: oklch(0.439 0 0); 63 | --chart-1: oklch(0.488 0.243 264.376); 64 | --chart-2: oklch(0.696 0.17 162.48); 65 | --chart-3: oklch(0.769 0.188 70.08); 66 | --chart-4: oklch(0.627 0.265 303.9); 67 | --chart-5: oklch(0.645 0.246 16.439); 68 | --sidebar: oklch(0.205 0 0); 69 | --sidebar-foreground: oklch(0.985 0 0); 70 | --sidebar-primary: oklch(0.488 0.243 264.376); 71 | --sidebar-primary-foreground: oklch(0.985 0 0); 72 | --sidebar-accent: oklch(0.269 0 0); 73 | --sidebar-accent-foreground: oklch(0.985 0 0); 74 | 
--sidebar-border: oklch(0.269 0 0); 75 | --sidebar-ring: oklch(0.439 0 0); 76 | } 77 | 78 | @theme inline { 79 | --color-background: var(--background); 80 | --color-foreground: var(--foreground); 81 | --color-card: var(--card); 82 | --color-card-foreground: var(--card-foreground); 83 | --color-popover: var(--popover); 84 | --color-popover-foreground: var(--popover-foreground); 85 | --color-primary: var(--primary); 86 | --color-primary-foreground: var(--primary-foreground); 87 | --color-secondary: var(--secondary); 88 | --color-secondary-foreground: var(--secondary-foreground); 89 | --color-muted: var(--muted); 90 | --color-muted-foreground: var(--muted-foreground); 91 | --color-accent: var(--accent); 92 | --color-accent-foreground: var(--accent-foreground); 93 | --color-destructive: var(--destructive); 94 | --color-destructive-foreground: var(--destructive-foreground); 95 | --color-border: var(--border); 96 | --color-input: var(--input); 97 | --color-ring: var(--ring); 98 | --color-chart-1: var(--chart-1); 99 | --color-chart-2: var(--chart-2); 100 | --color-chart-3: var(--chart-3); 101 | --color-chart-4: var(--chart-4); 102 | --color-chart-5: var(--chart-5); 103 | --radius-sm: calc(var(--radius) - 4px); 104 | --radius-md: calc(var(--radius) - 2px); 105 | --radius-lg: var(--radius); 106 | --radius-xl: calc(var(--radius) + 4px); 107 | --color-sidebar: var(--sidebar); 108 | --color-sidebar-foreground: var(--sidebar-foreground); 109 | --color-sidebar-primary: var(--sidebar-primary); 110 | --color-sidebar-primary-foreground: var(--sidebar-primary-foreground); 111 | --color-sidebar-accent: var(--sidebar-accent); 112 | --color-sidebar-accent-foreground: var(--sidebar-accent-foreground); 113 | --color-sidebar-border: var(--sidebar-border); 114 | --color-sidebar-ring: var(--sidebar-ring); 115 | --animate-accordion-down: accordion-down 0.2s ease-out; 116 | --animate-accordion-up: accordion-up 0.2s ease-out; 117 | 118 | @keyframes accordion-down { 119 | from { 120 | height: 0; 121 | } 122 | to { 123 | height: var(--radix-accordion-content-height); 124 | } 125 | } 126 | 127 | @keyframes accordion-up { 128 | from { 129 | height: var(--radix-accordion-content-height); 130 | } 131 | to { 132 | height: 0; 133 | } 134 | } 135 | } 136 | 137 | @layer base { 138 | * { 139 | @apply border-border outline-ring/50; 140 | 141 | /* Pre-defined Styles for user generated contents */ 142 | .morph-page { 143 | h1 { 144 | @apply mt-10 scroll-m-20 text-3xl font-bold tracking-tight lg:text-4xl; 145 | } 146 | h2 { 147 | @apply mt-8 scroll-m-20 pb-2 text-2xl font-semibold tracking-tight transition-colors first:mt-0; 148 | } 149 | h3 { 150 | @apply mt-6 scroll-m-20 text-xl font-semibold tracking-tight; 151 | } 152 | p { 153 | @apply leading-relaxed [&:not(:first-child)]:mt-6; 154 | } 155 | a { 156 | @apply font-medium underline underline-offset-4; 157 | &.x-underline { 158 | @apply no-underline; 159 | } 160 | } 161 | blockquote { 162 | @apply mt-6 border-l-2 pl-6 italic; 163 | } 164 | ul { 165 | @apply my-3 ml-3 list-disc list-inside [&>li]:mt-2; 166 | } 167 | table { 168 | @apply table-auto min-w-full text-sm text-left rtl:text-right py-4; 169 | } 170 | thead { 171 | @apply [&_tr]:border-b; 172 | } 173 | th { 174 | @apply py-2 px-2 text-left align-middle font-medium [&:has([role=checkbox])]:pr-0 [&>[role=checkbox]]:translate-y-[2px]; 175 | } 176 | tbody { 177 | @apply [&_tr:last-child]:border-0; 178 | } 179 | td { 180 | @apply p-2 align-middle [&:has([role=checkbox])]:pr-0 [&>[role=checkbox]]:translate-y-[2px]; 181 | } 182 
| figure[data-rehype-pretty-code-figure] { 183 | width: 100%; 184 | max-width: 100%; 185 | min-width: 100%; 186 | } 187 | pre { 188 | @apply my-4; 189 | width: 100%; 190 | max-width: 100%; 191 | min-width: 100%; 192 | overflow-x: auto; 193 | padding: 1rem 0; 194 | border-radius: 0.5rem; 195 | box-sizing: border-box; 196 | 197 | [data-line] { 198 | padding: 0 1rem; 199 | font-size: 0.8rem; 200 | } 201 | 202 | > code { 203 | display: block; 204 | } 205 | } 206 | :not(pre) > code { 207 | @apply font-mono text-sm rounded bg-gray-100 dark:bg-neutral-700 px-1.5 py-1; 208 | } 209 | } 210 | } 211 | body { 212 | @apply bg-background text-foreground; 213 | } 214 | } 215 | -------------------------------------------------------------------------------- /core/morph/include/starter_template/src/pages/index.mdx: -------------------------------------------------------------------------------- 1 | # Welcome to Morph 2 | 3 | Morph is a Python + Markdown framework for building internal AI apps. 4 | 5 | ## 📚 Resources 6 | 7 | - [🚀 Deploy Now](https://app.morph-data.io) 8 | - [📖 Documentation](https://docs.morph-data.io) 9 | - [💻 GitHub](https://github.com/morph-data/morph) 10 | 11 | ## 🛠️ Getting started 12 | 13 | ### Tutorials 14 | 15 | - [🚀 Quickstart for AI App](https://docs.morph-data.io/docs/en/quickstart/building-app) 16 | - [📊 Dashboard tutorial](https://docs.morph-data.io/docs/en/develop/tutorials/plotly) 17 | - [📈 Pygwalker tutorial](https://docs.morph-data.io/docs/en/develop/tutorials/pygwalker) 18 | -------------------------------------------------------------------------------- /core/morph/include/starter_template/static/favicon.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/morph-data/morph/876d78e7c1b72d53d66e08197b5c5f02f389d2be/core/morph/include/starter_template/static/favicon.ico -------------------------------------------------------------------------------- /core/morph/include/starter_template/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "@morph-data/frontend/tsconfig.app.json", 3 | "compilerOptions": { 4 | "composite": true, 5 | "baseUrl": ".", 6 | "paths": { 7 | "@/*": ["./src/*"] 8 | } 9 | }, 10 | "include": ["src"] 11 | } 12 | -------------------------------------------------------------------------------- /core/morph/include/starter_template/vite.config.ts: -------------------------------------------------------------------------------- 1 | import { morph } from "@morph-data/frontend/plugin"; 2 | import { defineConfig } from "vite"; 3 | import path from "path"; 4 | import tailwindcss from "@tailwindcss/vite"; 5 | 6 | export default defineConfig({ 7 | plugins: [morph(), tailwindcss()], 8 | resolve: { 9 | alias: { 10 | "@": path.resolve(__dirname, "./src"), 11 | }, 12 | }, 13 | }); 14 | -------------------------------------------------------------------------------- /core/morph/py.typed: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/morph-data/morph/876d78e7c1b72d53d66e08197b5c5f02f389d2be/core/morph/py.typed -------------------------------------------------------------------------------- /core/morph/task/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/morph-data/morph/876d78e7c1b72d53d66e08197b5c5f02f389d2be/core/morph/task/__init__.py 
-------------------------------------------------------------------------------- /core/morph/task/api.py: -------------------------------------------------------------------------------- 1 | import os 2 | import signal 3 | import socket 4 | import subprocess 5 | import sys 6 | import threading 7 | from pathlib import Path 8 | from typing import Any, List, Optional 9 | 10 | import click 11 | from dotenv import dotenv_values, load_dotenv 12 | from morph.cli.flags import Flags 13 | from morph.task.base import BaseTask 14 | from morph.task.utils.morph import find_project_root_dir 15 | 16 | 17 | class ApiTask(BaseTask): 18 | def __init__(self, args: Flags): 19 | super().__init__(args) 20 | self.args = args 21 | 22 | # port 23 | self.server_port = self._find_available_port(8080) 24 | os.environ["MORPH_SERVER_PORT"] = str(self.server_port) 25 | self.front_port = self._find_available_port(3000) 26 | os.environ["MORPH_FRONT_PORT"] = str(self.front_port) 27 | 28 | # change working directory if specified 29 | self.workdir = args.WORKDIR 30 | if self.workdir: 31 | os.chdir(self.workdir) 32 | else: 33 | self.workdir = os.getcwd() 34 | 35 | os.environ["MORPH_LOCAL_DEV_MODE"] = "true" 36 | 37 | # load environment variables from .env file 38 | self.project_root = find_project_root_dir() 39 | dotenv_path = os.path.join(self.project_root, ".env") 40 | load_dotenv(dotenv_path) 41 | env_vars = dotenv_values(dotenv_path) 42 | for e_key, e_val in env_vars.items(): 43 | os.environ[e_key] = str(e_val) 44 | 45 | # for managing subprocesses 46 | self.processes: List[subprocess.Popen[str]] = [] 47 | 48 | def _find_available_port(self, start_port: int, max_port: int = 65535) -> int: 49 | port = start_port 50 | 51 | while port <= max_port: 52 | with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s: 53 | if s.connect_ex(("0.0.0.0", port)) != 0: 54 | return port 55 | port += 1 56 | 57 | click.echo( 58 | click.style( 59 | f"Error: No available port found in range {start_port}-{max_port}.", 60 | fg="red", 61 | ) 62 | ) 63 | sys.exit(1) 64 | 65 | def run(self): 66 | current_dir = Path(__file__).resolve().parent 67 | server_script_path = os.path.join(current_dir, "server.py") 68 | 69 | signal.signal(signal.SIGINT, self._signal_handler) 70 | try: 71 | click.echo( 72 | click.style( 73 | "🚀 Starting Morph server...", 74 | fg="green", 75 | ) 76 | ) 77 | 78 | # run frontend 79 | self._run_frontend() 80 | 81 | # run server process 82 | self._run_process( 83 | [sys.executable, server_script_path] 84 | + sys.argv[1:] 85 | + ["--port", str(self.server_port)], 86 | ) 87 | 88 | click.echo( 89 | click.style( 90 | "✅ Done server setup", 91 | fg="green", 92 | ) 93 | ) 94 | 95 | running_url = f"http://localhost:{self.server_port}" 96 | click.echo( 97 | click.style( 98 | f"\nMorph is running!🚀\n\n -> Local: {running_url}\n", 99 | fg="yellow", 100 | ) 101 | ) 102 | if hasattr(signal, "pause"): 103 | signal.pause() 104 | else: 105 | import time 106 | 107 | while True: 108 | time.sleep(1) 109 | except KeyboardInterrupt: 110 | self._signal_handler(None, None) 111 | 112 | def _run_frontend(self) -> None: 113 | try: 114 | subprocess.run( 115 | "npm install", 116 | cwd=self.project_root, 117 | shell=True, 118 | check=True, 119 | ) 120 | except subprocess.CalledProcessError: 121 | click.echo( 122 | click.style("Failed to install frontend dependencies.", fg="yellow") 123 | ) 124 | exit(1) 125 | 126 | self._run_process( 127 | ["npm", "run", "dev", "--", "--port", f"{self.front_port}"], 128 | cwd=self.project_root, 129 | is_debug=True, 130 | ) 
131 | 132 | def _run_process( 133 | self, 134 | command: List[str], 135 | cwd: Optional[str] = None, 136 | is_debug: Optional[bool] = True, 137 | ) -> None: 138 | if sys.platform == "win32": 139 | process = subprocess.Popen( 140 | ["cmd.exe", "/c"] + command, 141 | cwd=cwd, 142 | stdout=subprocess.PIPE if is_debug else subprocess.DEVNULL, 143 | stderr=subprocess.PIPE if is_debug else subprocess.DEVNULL, 144 | text=True, 145 | ) 146 | else: 147 | process = subprocess.Popen( 148 | command, 149 | cwd=cwd, 150 | stdout=subprocess.PIPE if is_debug else subprocess.DEVNULL, 151 | stderr=subprocess.PIPE if is_debug else subprocess.DEVNULL, 152 | text=True, 153 | ) 154 | 155 | def log_output(pipe): 156 | for line in iter(pipe.readline, ""): 157 | color = _get_color_for_log_level(line) 158 | for sub_line in line.splitlines(): 159 | click.echo( 160 | click.style( 161 | sub_line, 162 | fg=color, 163 | ), 164 | err=False, 165 | ) 166 | 167 | def _get_color_for_log_level(line: str) -> str: 168 | if "ERROR" in line: 169 | return "red" 170 | elif "WARNING" in line: 171 | return "yellow" 172 | elif "DEBUG" in line: 173 | return "blue" 174 | elif "INFO" in line: 175 | return "green" 176 | else: 177 | return "white" 178 | 179 | if is_debug: 180 | threading.Thread(target=log_output, args=(process.stdout,)).start() 181 | threading.Thread(target=log_output, args=(process.stderr,)).start() 182 | 183 | self.processes.append(process) 184 | 185 | def _terminate_processes(self) -> None: 186 | for process in self.processes: 187 | try: 188 | process.terminate() 189 | process.wait(timeout=5) 190 | except Exception as e: 191 | click.echo( 192 | click.style( 193 | f"Error terminating process {process.pid}: {e}", 194 | fg="red", 195 | ), 196 | err=True, 197 | ) 198 | finally: 199 | try: 200 | process.kill() 201 | except: # noqa 202 | pass 203 | 204 | def _signal_handler(self, sig: Any, frame: Any) -> None: 205 | self._terminate_processes() 206 | sys.exit(0) 207 | -------------------------------------------------------------------------------- /core/morph/task/base.py: -------------------------------------------------------------------------------- 1 | from abc import ABCMeta, abstractmethod 2 | 3 | from morph.cli.flags import Flags 4 | 5 | 6 | class BaseTask(metaclass=ABCMeta): 7 | def __init__(self, args: Flags) -> None: 8 | self.args = args 9 | 10 | @abstractmethod 11 | def run(self): 12 | raise Exception("Not implemented") 13 | -------------------------------------------------------------------------------- /core/morph/task/clean.py: -------------------------------------------------------------------------------- 1 | import shutil 2 | from pathlib import Path 3 | 4 | import click 5 | 6 | from morph.cli.flags import Flags 7 | from morph.constants import MorphConstant 8 | from morph.task.base import BaseTask 9 | from morph.task.utils.morph import find_project_root_dir 10 | 11 | 12 | class CleanTask(BaseTask): 13 | def __init__(self, args: Flags): 14 | super().__init__(args) 15 | self.args = args 16 | self.force = args.FORCE 17 | 18 | try: 19 | self.project_root = find_project_root_dir() 20 | except FileNotFoundError as e: 21 | click.echo(click.style(str(e), fg="red")) 22 | raise e 23 | 24 | # Define directories for cleanup 25 | self.clean_dir = Path(self.project_root).joinpath(".morph") 26 | self.frontend_dir = Path(MorphConstant.frontend_dir(self.project_root)) 27 | 28 | def run(self): 29 | verbose = self.args.VERBOSE 30 | 31 | if self.clean_dir.exists(): 32 | # Iterate through the contents of the .morph directory and 
remove files/directories 33 | for item in self.clean_dir.iterdir(): 34 | if item.resolve() == self.frontend_dir.resolve(): 35 | # Remove frontend_dir only if force flag is set 36 | if not self.force: 37 | continue 38 | 39 | # Display removal message in verbose mode 40 | if verbose: 41 | click.echo(click.style(f"Removing {item}...", fg="yellow")) 42 | 43 | # Remove files or directories 44 | if item.is_dir(): 45 | shutil.rmtree(item) 46 | else: 47 | item.unlink() 48 | 49 | # Ensure the .morph directory exists even after cleaning 50 | self.clean_dir.mkdir(parents=True, exist_ok=True) 51 | else: 52 | if verbose: 53 | click.echo( 54 | click.style(f"Directory {self.clean_dir} not found", fg="yellow") 55 | ) 56 | 57 | click.echo( 58 | click.style( 59 | "Cache cleared! 🧹 Your workspace is fresh and ready.", fg="green" 60 | ) 61 | ) 62 | -------------------------------------------------------------------------------- /core/morph/task/compile.py: -------------------------------------------------------------------------------- 1 | import json 2 | import sys 3 | from pathlib import Path 4 | from typing import List 5 | 6 | import click 7 | import pydantic 8 | 9 | from morph.cli.flags import Flags 10 | from morph.config.project import load_project 11 | from morph.task.base import BaseTask 12 | from morph.task.utils.morph import find_project_root_dir 13 | from morph.task.utils.run_backend.inspection import get_checksum 14 | from morph.task.utils.run_backend.state import ( 15 | MorphFunctionMetaObjectCacheManager, 16 | MorphGlobalContext, 17 | ) 18 | 19 | 20 | class CompileTask(BaseTask): 21 | def __init__(self, args: Flags, force: bool = False): 22 | super().__init__(args) 23 | self.args = args 24 | self.force = force 25 | 26 | def run(self): 27 | try: 28 | project_root = find_project_root_dir() 29 | except FileNotFoundError as e: 30 | click.echo(click.style(str(e), fg="red")) 31 | raise e 32 | 33 | try: 34 | cache = MorphFunctionMetaObjectCacheManager().get_cache() 35 | except (pydantic.ValidationError, json.decoder.JSONDecodeError): 36 | click.echo( 37 | click.style( 38 | "Warning: Morph-cli project cache is corrupted. 
Recompiling...", 39 | fg="yellow", 40 | ) 41 | ) 42 | cache = None 43 | 44 | if cache is None: 45 | needs_compile = True 46 | elif len(cache.errors) > 0: 47 | needs_compile = True 48 | else: 49 | needs_compile = False 50 | project = load_project(project_root) 51 | if project is not None: 52 | source_paths = project.source_paths 53 | else: 54 | source_paths = [] 55 | 56 | extra_paths: List[str] = [] 57 | compare_dirs = [] 58 | if len(source_paths) == 0: 59 | compare_dirs.append(Path(project_root)) 60 | else: 61 | for source_path in source_paths: 62 | compare_dirs.append(Path(f"{project_root}/{source_path}")) 63 | 64 | for epath in extra_paths: 65 | epath_p = Path(epath) 66 | if not epath_p.exists() or not epath_p.is_dir(): 67 | continue 68 | compare_dirs.append(epath_p) 69 | 70 | for compare_dir in compare_dirs: 71 | if cache.directory_checksums.get( 72 | compare_dir.as_posix(), "" 73 | ) != get_checksum(Path(compare_dir)): 74 | needs_compile = True 75 | break 76 | 77 | if self.force: 78 | needs_compile = True 79 | 80 | if needs_compile: 81 | context = MorphGlobalContext.get_instance() 82 | errors = context.load(project_root) 83 | context.dump() 84 | 85 | if len(errors) > 0: 86 | for error in errors: 87 | click.echo( 88 | click.style( 89 | f"""Error occurred in {error.file_path}:{error.name} [{error.category}] {error.error}""", 90 | fg="red", 91 | ) 92 | ) 93 | click.echo( 94 | click.style( 95 | "Error: Failed to compile morph project.", fg="red", bg="yellow" 96 | ), 97 | err=True, 98 | ) 99 | sys.exit(1) 100 | 101 | if self.args.VERBOSE: 102 | info: dict = { 103 | "needs_compile": needs_compile, 104 | } 105 | if needs_compile: 106 | info["errors"] = errors 107 | elif cache is not None: 108 | info["errors"] = cache.errors 109 | 110 | click.echo(json.dumps(info, indent=2)) 111 | 112 | click.echo(click.style("Successfully compiled! 🎉", fg="green")) 113 | -------------------------------------------------------------------------------- /core/morph/task/config.py: -------------------------------------------------------------------------------- 1 | import configparser 2 | import os 3 | import socket 4 | import sys 5 | 6 | import click 7 | 8 | from morph.api.cloud.client import MorphApiClient, MorphApiKeyClientImpl 9 | from morph.constants import MorphConstant 10 | from morph.task.base import BaseTask 11 | 12 | 13 | class ConfigTask(BaseTask): 14 | def run(self): 15 | profile_name = self.args.PROFILE or "default" 16 | 17 | # Verify network connectivity 18 | if not self.check_network_connection(): 19 | click.echo("No network connection. 
Please check your internet settings.") 20 | return False 21 | 22 | # Check if the .morph directory exists in the user's home directory; create it if not 23 | morph_dir = MorphConstant.INIT_DIR 24 | if not os.path.exists(morph_dir): 25 | os.makedirs(morph_dir) 26 | click.echo(f"Created directory at {morph_dir}") 27 | 28 | # Request configuration settings from the user 29 | api_key = input("Please input your API Key on cloud: ") 30 | 31 | if not api_key: 32 | click.echo("Error: API key is required.") 33 | return False 34 | 35 | click.echo(click.style("Verifying the API Key...")) 36 | 37 | # set api key to environment variable 38 | os.environ["MORPH_API_KEY"] = api_key 39 | 40 | client = MorphApiClient(MorphApiKeyClientImpl) 41 | check_secret = client.req.check_api_secret() 42 | if check_secret.is_error(): 43 | click.echo( 44 | click.style( 45 | "Error: API key is invalid.", 46 | fg="red", 47 | ) 48 | ) 49 | sys.exit(1) 50 | click.echo(click.style("✅ Verified", fg="green")) 51 | 52 | # Load existing file or create new one if it doesn't exist 53 | config = configparser.ConfigParser() 54 | cred_file = os.path.join(morph_dir, "credentials") 55 | if os.path.exists(cred_file): 56 | config.read(cred_file) 57 | 58 | # Warn user if profile already exists and prompt for overwrite 59 | if config.has_section(profile_name): 60 | warning_message = click.style( 61 | f"Warning: Profile '{profile_name}' already exists. Overwrite?", 62 | fg="yellow", 63 | ) 64 | confirm = click.confirm(warning_message, default=False) 65 | if not confirm: 66 | click.echo( 67 | click.style( 68 | "Operation canceled. No credentials overwritten.", 69 | fg="red", 70 | ) 71 | ) 72 | sys.exit(1) 73 | click.echo("Overwriting existing credentials...") 74 | else: 75 | click.echo("Creating new credentials...") 76 | 77 | # Update the settings in the specific section 78 | config[profile_name] = { 79 | "api_key": api_key, 80 | } 81 | 82 | # Write the updated profile back to the file 83 | with open(cred_file, "w") as file: 84 | config.write(file) 85 | 86 | click.echo(f"Credentials saved to {cred_file}") 87 | click.echo( 88 | click.style( 89 | f"✅ Successfully setup! 
This profile can be accessed by profile name '{profile_name}' via morph cli.", 90 | fg="green", 91 | ) 92 | ) 93 | return True 94 | 95 | @staticmethod 96 | def check_network_connection(): 97 | try: 98 | # Attempt to connect to Cloudflare DNS server on port 53 99 | socket.create_connection(("1.1.1.1", 53), timeout=10) 100 | return True 101 | except OSError: 102 | return False 103 | -------------------------------------------------------------------------------- /core/morph/task/context.py: -------------------------------------------------------------------------------- 1 | import os 2 | import sys 3 | from pathlib import Path 4 | 5 | import click 6 | 7 | from morph.api.cloud.client import MorphApiKeyClientImpl 8 | from morph.api.cloud.types import UserInfo 9 | from morph.cli.flags import Flags 10 | from morph.config.project import load_project 11 | from morph.task.base import BaseTask 12 | from morph.task.utils.morph import find_project_root_dir 13 | 14 | 15 | class ContextTask(BaseTask): 16 | def __init__(self, args: Flags): 17 | super().__init__(args) 18 | self.args = args 19 | self.output = self.args.OUTPUT 20 | 21 | try: 22 | self.project_root = find_project_root_dir(os.getcwd()) 23 | except FileNotFoundError as e: 24 | click.echo(click.style(f"Error: {str(e)}", fg="red")) 25 | sys.exit(1) 26 | 27 | project = load_project(self.project_root) 28 | if not project: 29 | click.echo(click.style("Project configuration not found.", fg="red")) 30 | sys.exit(1) 31 | elif project.project_id is None: 32 | click.echo( 33 | click.style( 34 | "Error: No project id found. Please fill project_id in morph_project.yml.", 35 | fg="red", 36 | ) 37 | ) 38 | sys.exit(1) 39 | 40 | try: 41 | self.client = MorphApiKeyClientImpl() 42 | except ValueError as e: 43 | click.echo(click.style(f"Error: {str(e)}", fg="red")) 44 | sys.exit(1) 45 | 46 | def run(self): 47 | res = self.client.verify_api_secret() 48 | if res.is_error(): 49 | click.echo(click.style("Error: Could not find user info.", fg="red")) 50 | sys.exit(1) 51 | response_json = res.json() 52 | if "user" not in response_json: 53 | click.echo(click.style("Error: Could not find user info.", fg="red")) 54 | sys.exit(1) 55 | 56 | if self.output: 57 | if Path(self.output).parent != Path("."): 58 | os.makedirs(os.path.dirname(self.output), exist_ok=True) 59 | with open(self.output, "w") as f: 60 | f.write(UserInfo(**response_json["user"]).model_dump_json(indent=4)) 61 | else: 62 | click.echo(UserInfo(**response_json["user"]).model_dump_json(indent=4)) 63 | -------------------------------------------------------------------------------- /core/morph/task/plugin.py: -------------------------------------------------------------------------------- 1 | import io 2 | import os 3 | import sys 4 | import zipfile 5 | 6 | import click 7 | import requests 8 | 9 | from morph.cli.flags import Flags 10 | from morph.config.project import load_project 11 | from morph.constants import MorphConstant 12 | from morph.task.base import BaseTask 13 | from morph.task.utils.morph import find_project_root_dir 14 | 15 | 16 | class PluginTask(BaseTask): 17 | def __init__(self, args: Flags): 18 | super().__init__(args) 19 | self.args = args 20 | self.plugin_name: str = args.PLUGIN_NAME 21 | 22 | try: 23 | self.project_root = find_project_root_dir(os.getcwd()) 24 | except FileNotFoundError as e: 25 | click.echo(click.style(f"Error: {str(e)}", fg="red")) 26 | sys.exit(1) 27 | 28 | project = load_project(self.project_root) 29 | if not project: 30 | click.echo(click.style("Project configuration 
not found.", fg="red")) 31 | sys.exit(1) 32 | 33 | def run(self): 34 | branch = "main" 35 | package_name = "morph-plugins" 36 | organization = "morph-data" 37 | plugin_git_url = f"https://github.com/{organization}/{package_name}" 38 | plugin_dir = os.path.join(self.project_root, MorphConstant.PLUGIN_DIR) 39 | zip_url = f"{plugin_git_url}/archive/refs/heads/{branch}.zip" 40 | 41 | try: 42 | response = requests.get(zip_url) 43 | response.raise_for_status() 44 | with zipfile.ZipFile(io.BytesIO(response.content)) as zip_ref: 45 | if not any( 46 | file.startswith(f"{package_name}-{branch}/{self.plugin_name}/") 47 | for file in zip_ref.namelist() 48 | ): 49 | raise Exception(f"{self.plugin_name} not found in plugins.") 50 | for file in zip_ref.namelist(): 51 | if file.startswith(f"{package_name}-{branch}/{self.plugin_name}/"): 52 | relative_path = file.replace(f"{package_name}-{branch}/", "", 1) 53 | extract_path = os.path.join(plugin_dir, relative_path) 54 | 55 | if file.endswith("/"): 56 | os.makedirs(extract_path, exist_ok=True) 57 | continue 58 | 59 | os.makedirs(os.path.dirname(extract_path), exist_ok=True) 60 | 61 | with zip_ref.open(file) as source, open( 62 | extract_path, "wb" 63 | ) as target: 64 | target.write(source.read()) 65 | except Exception as e: 66 | click.echo( 67 | click.style( 68 | f"Error: {str(e)}\nFailed to fetch plugin {self.plugin_name}.", 69 | fg="red", 70 | ) 71 | ) 72 | sys.exit(1) 73 | 74 | click.echo( 75 | click.style( 76 | f"✅ Plugin {self.plugin_name} has been added to {plugin_dir}/{self.plugin_name}/.", 77 | fg="green", 78 | ) 79 | ) 80 | -------------------------------------------------------------------------------- /core/morph/task/resource.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import json 4 | import sys 5 | from pathlib import Path 6 | from typing import Any, List, Literal, cast 7 | 8 | import click 9 | import pydantic 10 | 11 | from morph.cli.flags import Flags 12 | from morph.config.project import load_project 13 | from morph.task.base import BaseTask 14 | from morph.task.utils.morph import Resource, find_project_root_dir 15 | from morph.task.utils.run_backend.inspection import get_checksum 16 | from morph.task.utils.run_backend.state import ( 17 | MorphFunctionMetaObjectCacheManager, 18 | MorphGlobalContext, 19 | ) 20 | 21 | 22 | class PrintResourceTask(BaseTask): 23 | def __init__(self, args: Flags): 24 | super().__init__(args) 25 | self.args = args 26 | 27 | target: str 28 | target_type: Literal["alias", "file", "all"] 29 | if args.ALL: 30 | target = "" 31 | target_type = "all" 32 | elif args.ALIAS: 33 | target = args.ALIAS 34 | target_type = "alias" 35 | elif args.FILE: 36 | target = args.FILE 37 | target_type = "file" 38 | else: 39 | click.echo("Either --alias, --file or --all must be provided.") 40 | raise click.Abort() 41 | 42 | self.target = target 43 | self.target_type = target_type 44 | 45 | try: 46 | self.project_root = find_project_root_dir() 47 | except FileNotFoundError as e: 48 | click.echo(click.style(str(e), fg="red")) 49 | raise e 50 | 51 | def run(self): 52 | try: 53 | cache = MorphFunctionMetaObjectCacheManager().get_cache() 54 | except (pydantic.ValidationError, json.decoder.JSONDecodeError): 55 | click.echo( 56 | click.style( 57 | "Warning: Morph-cli project cache is corrupted. 
Recompiling...", 58 | fg="yellow", 59 | ) 60 | ) 61 | cache = None 62 | 63 | output: dict[str, Any] = {} 64 | if cache is None: 65 | needs_compile = True 66 | elif len(cache.errors) > 0: 67 | needs_compile = True 68 | else: 69 | needs_compile = False 70 | project = load_project(self.project_root) 71 | if project is not None: 72 | source_paths = project.source_paths 73 | else: 74 | source_paths = [] 75 | extra_paths: List[str] = [] 76 | compare_dirs = [] 77 | if len(source_paths) == 0: 78 | compare_dirs.append(Path(self.project_root)) 79 | else: 80 | for source_path in source_paths: 81 | compare_dirs.append(Path(f"{self.project_root}/{source_path}")) 82 | 83 | for epath in extra_paths: 84 | epath_p = Path(epath) 85 | if not epath_p.exists() or not epath_p.is_dir(): 86 | continue 87 | compare_dirs.append(epath_p) 88 | 89 | for compare_dir in compare_dirs: 90 | if cache.directory_checksums.get( 91 | compare_dir.as_posix(), "" 92 | ) != get_checksum(Path(compare_dir)): 93 | needs_compile = True 94 | break 95 | 96 | if needs_compile or cache is None: 97 | context = MorphGlobalContext.get_instance() 98 | errors = context.load(self.project_root) 99 | if len(errors) > 0: 100 | output["errors"] = [error.model_dump() for error in errors] 101 | cache = context.dump() 102 | elif cache is not None and len(cache.errors) > 0: 103 | output["errors"] = [error.model_dump() for error in cache.errors] 104 | 105 | if self.target_type == "all": 106 | resource_dicts: list[dict] = [] 107 | for item in cache.items: 108 | # id is formatted as {filename}:{function_name} 109 | if not item.spec.id or not item.spec.name: 110 | continue 111 | 112 | if sys.platform == "win32": 113 | if len(item.spec.id.split(":")) > 2: 114 | filepath = ( 115 | item.spec.id.rsplit(":", 1)[0] if item.spec.id else "" 116 | ) 117 | else: 118 | filepath = item.spec.id if item.spec.id else "" 119 | else: 120 | filepath = item.spec.id.split(":")[0] 121 | resource_item = Resource( 122 | alias=item.spec.name, 123 | path=filepath, 124 | connection=(item.spec.connection if item.spec.connection else None), 125 | data_requirements=( 126 | cast(list, item.spec.data_requirements) 127 | if item.spec.data_requirements 128 | else None 129 | ), 130 | ) 131 | resource_dicts.append(resource_item.model_dump()) 132 | 133 | output["resources"] = resource_dicts 134 | click.echo(json.dumps(output, indent=2)) 135 | elif self.target_type == "alias": 136 | # NOTE: use Resource entity to keep backward compatibility with old output format 137 | resource: Resource | None = None 138 | for item in cache.items: 139 | if item.spec.name == self.target: 140 | # id is formatted as {filename}:{function_name} 141 | if not item.spec.id or not item.spec.name: 142 | continue 143 | filepath = item.spec.id.split(":")[0] 144 | resource = Resource( 145 | alias=item.spec.name, 146 | path=filepath, 147 | connection=( 148 | item.spec.connection if item.spec.connection else None 149 | ), 150 | data_requirements=( 151 | cast(list, item.spec.data_requirements) 152 | if item.spec.data_requirements 153 | else None 154 | ), 155 | ) 156 | break 157 | if resource: 158 | output["resources"] = [resource.model_dump()] 159 | click.echo(json.dumps(output, indent=2)) 160 | else: 161 | click.echo(f"Alias {self.target} not found.") 162 | elif self.target_type == "file": 163 | abs_path = Path(self.target).as_posix() 164 | resource = None 165 | for item in cache.items: 166 | # id is formatted as {filename}:{function_name} 167 | if not item.spec.id or not item.spec.name: 168 | continue 169 | filepath = 
item.spec.id.split(":")[0] 170 | if filepath == abs_path: 171 | resource = Resource( 172 | alias=item.spec.name, 173 | path=filepath, 174 | connection=( 175 | item.spec.connection if item.spec.connection else None 176 | ), 177 | data_requirements=( 178 | cast(list, item.spec.data_requirements) 179 | if item.spec.data_requirements 180 | else None 181 | ), 182 | ) 183 | break 184 | if resource: 185 | output["resources"] = [resource.model_dump()] 186 | click.echo(json.dumps(output, indent=2)) 187 | else: 188 | click.echo(f"File {self.target} not found.") 189 | -------------------------------------------------------------------------------- /core/morph/task/server.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import sys 3 | 4 | import click 5 | import uvicorn 6 | 7 | 8 | class UvicornLoggerHandler(logging.Handler): 9 | def emit(self, record): 10 | log_entry = self.format(record) 11 | click.echo(log_entry, err=False) 12 | 13 | 14 | logger = logging.getLogger("uvicorn") 15 | logger.setLevel(logging.INFO) 16 | handler = UvicornLoggerHandler() 17 | formatter = logging.Formatter("%(levelname)s: %(message)s") 18 | handler.setFormatter(formatter) 19 | logger.addHandler(handler) 20 | 21 | 22 | def parse_sys_argv(): 23 | port = 8080 24 | 25 | filtered_args = [] 26 | skip_next = False 27 | for i, arg in enumerate(sys.argv[1:]): 28 | if skip_next: 29 | skip_next = False 30 | continue 31 | 32 | if arg == "--port" and i + 1 < len(sys.argv): 33 | try: 34 | port = int(sys.argv[i + 2]) 35 | skip_next = True 36 | except ValueError: 37 | port = 8080 38 | continue 39 | 40 | filtered_args.append(arg) 41 | 42 | sys.argv = [sys.argv[0]] + filtered_args 43 | 44 | return port 45 | 46 | 47 | def start_server(port: int) -> None: 48 | uvicorn.run("morph.api.app:app", host="0.0.0.0", port=port, reload=True) 49 | 50 | 51 | if __name__ == "__main__": 52 | port = parse_sys_argv() 53 | start_server(port) 54 | -------------------------------------------------------------------------------- /core/morph/task/utils/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/morph-data/morph/876d78e7c1b72d53d66e08197b5c5f02f389d2be/core/morph/task/utils/__init__.py -------------------------------------------------------------------------------- /core/morph/task/utils/connections/athena/api.py: -------------------------------------------------------------------------------- 1 | import time 2 | from typing import Any, Dict, Optional 3 | 4 | 5 | class AthenaApi: 6 | @staticmethod 7 | def query( 8 | access_key: str, 9 | secret_key: str, 10 | session_token: str, 11 | region: str, 12 | sql: str, 13 | next_token: Optional[str] = None, 14 | limit: Optional[int] = None, 15 | catalog: Optional[str] = None, 16 | database: Optional[str] = None, 17 | work_group: Optional[str] = None, 18 | ) -> Optional[Dict[str, Any]]: 19 | from boto3 import client 20 | 21 | athena_client = client( 22 | "athena", 23 | aws_access_key_id=access_key, 24 | aws_secret_access_key=secret_key, 25 | aws_session_token=session_token, 26 | region_name=region, 27 | ) 28 | 29 | start_query_execution_input: Dict[str, Any] = { 30 | "QueryString": sql, 31 | "WorkGroup": work_group, 32 | } 33 | 34 | if catalog or database: 35 | start_query_execution_input["QueryExecutionContext"] = {} 36 | if catalog: 37 | start_query_execution_input["QueryExecutionContext"][ 38 | "Catalog" 39 | ] = catalog 40 | if database: 41 | 
start_query_execution_input["QueryExecutionContext"][ 42 | "Database" 43 | ] = database 44 | 45 | start_query: Dict[str, Any] = athena_client.start_query_execution( 46 | **start_query_execution_input 47 | ) 48 | query_execution_id = start_query.get("QueryExecutionId") 49 | 50 | if query_execution_id is None: 51 | return None 52 | 53 | while True: 54 | get_query_execution: Dict[str, Any] = athena_client.get_query_execution( 55 | QueryExecutionId=query_execution_id 56 | ) 57 | status = get_query_execution["QueryExecution"]["Status"]["State"] 58 | 59 | if status in ["RUNNING", "QUEUED"]: 60 | time.sleep(0.5) 61 | else: 62 | break 63 | 64 | if get_query_execution is None or "QueryExecution" not in get_query_execution: 65 | return None 66 | 67 | if get_query_execution["QueryExecution"]["Status"]["State"] == "FAILED": 68 | if "AthenaError" in get_query_execution["QueryExecution"]["Status"]: 69 | error_message = get_query_execution["QueryExecution"]["Status"][ 70 | "AthenaError" 71 | ]["ErrorMessage"] 72 | raise Exception(error_message) 73 | 74 | get_query_results_input = { 75 | "QueryExecutionId": query_execution_id, 76 | } 77 | if next_token: 78 | get_query_results_input["NextToken"] = next_token 79 | if limit: 80 | get_query_results_input["MaxResults"] = limit 81 | 82 | result: Dict[str, Any] = athena_client.get_query_results( 83 | **get_query_results_input 84 | ) 85 | return result 86 | -------------------------------------------------------------------------------- /core/morph/task/utils/connections/athena/usecase.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from typing import Any, Dict, List, Optional, Tuple 4 | 5 | from morph.task.utils.connections.athena.api import AthenaApi 6 | 7 | 8 | class AthenaUsecase: 9 | def __init__( 10 | self, 11 | access_key: str, 12 | secret_key: str, 13 | session_token: str, 14 | region: str, 15 | catalog: str, 16 | database: Optional[str], 17 | work_group: Optional[str], 18 | ): 19 | self.access_key = access_key 20 | self.secret_key = secret_key 21 | self.session_token = session_token 22 | self.region = region 23 | self.catalog = catalog 24 | self.database = database 25 | self.work_group = work_group 26 | 27 | def query(self, sql: str, limit: Optional[int] = None) -> List[Dict[str, Any]]: 28 | def _query( 29 | _limit: Optional[int] = None, 30 | _next_token: Optional[str] = None, 31 | _initial: Optional[bool] = False, 32 | ) -> Tuple[List[Dict[str, Any]], Optional[str], bool]: 33 | _result = AthenaApi.query( 34 | access_key=self.access_key, 35 | secret_key=self.secret_key, 36 | session_token=self.session_token, 37 | region=self.region, 38 | sql=sql, 39 | next_token=_next_token, 40 | limit=_limit, 41 | catalog=self.catalog, 42 | database=self.database, 43 | work_group=self.work_group, 44 | ) 45 | if _result is None: 46 | raise Exception("Failed to query") 47 | 48 | columns: Dict[Any, Any] = _result["ResultSet"]["ResultSetMetadata"][ 49 | "ColumnInfo" 50 | ] 51 | _rows: List[Dict[str, Any]] = [] 52 | for idx, _row in enumerate(_result["ResultSet"]["Rows"]): 53 | obj: Dict[str, Any] = {} 54 | for i, column in enumerate(columns): 55 | if i < len(_row["Data"]) and "VarCharValue" in _row["Data"][i]: 56 | obj[column["Name"]] = _row["Data"][i]["VarCharValue"] 57 | else: 58 | obj[column["Name"]] = None 59 | if idx == 0 and _initial: 60 | if all(key == value for key, value in obj.items()): 61 | continue 62 | _rows.append(obj) 63 | return _rows, _result.get("NextToken", None), False 64 
| 65 | items = [] 66 | next_token_ = None 67 | initial_ = True 68 | while True: 69 | rows, next_token, initial = _query(limit, next_token_, initial_) 70 | initial_ = initial 71 | items.extend(rows) 72 | if next_token is not None: 73 | next_token_ = next_token 74 | else: 75 | break 76 | return items 77 | -------------------------------------------------------------------------------- /core/morph/task/utils/connections/bigquery/api.py: -------------------------------------------------------------------------------- 1 | import json 2 | import time 3 | from typing import Any, Dict, Optional, Union, cast 4 | 5 | import requests 6 | from cryptography.hazmat.backends import default_backend 7 | from cryptography.hazmat.primitives.serialization import load_pem_private_key 8 | 9 | from morph.task.utils.connections.bigquery.types import ( 10 | BigqueryException, 11 | BigqueryQueryResponse, 12 | ) 13 | from morph.task.utils.connections.utils import normalize_newlines 14 | 15 | BASE_URL = "https://bigquery.googleapis.com/bigquery/v2" 16 | 17 | 18 | class BigqueryApi: 19 | @staticmethod 20 | def encode_next_token( 21 | job_id: str, page_token: str, location: Optional[str] = None 22 | ) -> str: 23 | return json.dumps( 24 | {"job_id": job_id, "page_token": page_token, "location": location} 25 | ) 26 | 27 | @staticmethod 28 | def decode_next_token(next_token: str) -> Any: 29 | return json.loads(next_token or "{}") 30 | 31 | @staticmethod 32 | def post_query( 33 | project_id: str, 34 | query: str, 35 | access_token: str, 36 | location: Optional[str] = "asia-northeast1", 37 | limit: Optional[int] = None, 38 | next_token: Optional[str] = None, 39 | ) -> BigqueryQueryResponse: 40 | url = f"{BASE_URL}/projects/{project_id}/queries" 41 | if next_token: 42 | job_id, page_token, location = BigqueryApi.decode_next_token( 43 | next_token 44 | ).values() 45 | return BigqueryApi.get_extra_query_result( 46 | project_id, job_id, access_token, page_token, location, limit 47 | ) 48 | 49 | body = {"query": query, "useLegacySql": False} 50 | if limit: 51 | body["maxResults"] = limit 52 | if location: 53 | body["location"] = location 54 | 55 | response = requests.post( 56 | url, json=body, headers={"Authorization": f"Bearer {access_token}"} 57 | ) 58 | response_json = response.json() 59 | 60 | if "error" in response_json: 61 | raise BigqueryException( 62 | response_json["error"]["message"], 63 | response_json["error"]["code"], 64 | response_json["error"]["errors"], 65 | response_json["error"]["status"], 66 | ) 67 | 68 | job_id = response_json["jobReference"]["jobId"] 69 | 70 | job_complete = cast(bool, response_json["jobComplete"]) 71 | schema = response_json["schema"] if "schema" in response_json else None 72 | if not job_complete and schema is None: 73 | time.sleep(1) 74 | return BigqueryApi.get_extra_query_result( 75 | project_id, job_id, access_token, None, location, limit 76 | ) 77 | 78 | page_token = ( 79 | response_json["pageToken"] if "pageToken" in response_json else None 80 | ) 81 | next_token = ( 82 | BigqueryApi.encode_next_token(job_id, page_token, location) 83 | if job_id and page_token 84 | else None 85 | ) 86 | if "rows" not in response_json: 87 | response_json["rows"] = [] 88 | 89 | return BigqueryQueryResponse.model_validate( 90 | {**response_json, "next_token": next_token} 91 | ) 92 | 93 | @staticmethod 94 | def get_extra_query_result( 95 | project_id: str, 96 | job_id: str, 97 | access_token: str, 98 | page_token: Optional[str] = None, 99 | location: Optional[str] = None, 100 | limit: Optional[int] = 
None, 101 | ) -> BigqueryQueryResponse: 102 | url = f"{BASE_URL}/projects/{project_id}/queries/{job_id}" 103 | 104 | params = {} 105 | if page_token: 106 | params["pageToken"] = page_token 107 | if limit: 108 | params["maxResults"] = str(limit) 109 | if location: 110 | params["location"] = location 111 | 112 | response = requests.get( 113 | url, params=params, headers={"Authorization": f"Bearer {access_token}"} 114 | ) 115 | response_json = response.json() 116 | 117 | if "error" in response_json: 118 | raise BigqueryException( 119 | response_json["error"]["message"], 120 | response_json["error"]["code"], 121 | response_json["error"]["errors"], 122 | response_json["error"]["status"], 123 | ) 124 | 125 | job_id = response_json["jobReference"]["jobId"] 126 | 127 | job_complete = cast(bool, response_json["jobComplete"]) 128 | schema = response_json["schema"] if "schema" in response_json else None 129 | if not job_complete and schema is None: 130 | time.sleep(1) 131 | return BigqueryApi.get_extra_query_result( 132 | project_id, job_id, access_token, page_token, location, limit 133 | ) 134 | 135 | page_token = ( 136 | response_json["pageToken"] if "pageToken" in response_json else None 137 | ) 138 | next_token = ( 139 | BigqueryApi.encode_next_token(job_id, page_token, location) 140 | if job_id and page_token 141 | else None 142 | ) 143 | if "rows" not in response_json: 144 | response_json["rows"] = [] 145 | 146 | return BigqueryQueryResponse.model_validate( 147 | {**response_json, "next_token": next_token} 148 | ) 149 | 150 | @staticmethod 151 | def get_access_token_from_service_account( 152 | service_account_info: Union[Dict[str, Any], str] 153 | ) -> str: 154 | import jwt 155 | 156 | url = "https://oauth2.googleapis.com/token" 157 | 158 | credential: Optional[Dict[str, Any]] = None 159 | try: 160 | if isinstance(service_account_info, str): 161 | try: 162 | credential_ = json.loads(service_account_info) 163 | credential = cast(Dict[str, Any], json.loads(credential_)) 164 | except Exception: # noqa 165 | credential_str = open(service_account_info, "r").read() 166 | credential = cast(Dict[str, Any], json.loads(credential_str)) 167 | else: 168 | credential = service_account_info 169 | except Exception as e: 170 | raise ValueError(f" Error invalid credential: {e}") 171 | 172 | if not credential: 173 | raise ValueError("Error Invalid credential") 174 | 175 | private_key = normalize_newlines(cast(str, credential["private_key"])) 176 | client_email = credential["client_email"] 177 | token_uri = credential["token_uri"] 178 | 179 | issued_at = int(time.time()) 180 | expiration_time = issued_at + 3600 181 | 182 | payload = { 183 | "iss": client_email, 184 | "scope": "https://www.googleapis.com/auth/bigquery", 185 | "aud": token_uri, 186 | "iat": issued_at, 187 | "exp": expiration_time, 188 | } 189 | 190 | private_key_obj = load_pem_private_key( 191 | private_key.encode("utf-8"), password=None, backend=default_backend() 192 | ) 193 | jwt_token = jwt.encode(payload, private_key_obj, algorithm="RS256") 194 | 195 | data = { 196 | "grant_type": "urn:ietf:params:oauth:grant-type:jwt-bearer", 197 | "assertion": jwt_token, 198 | } 199 | 200 | response = requests.post(url, data=data) 201 | 202 | if response.status_code == 200: 203 | return cast(str, response.json()["access_token"]) 204 | else: 205 | raise Exception(f"Error obtaining access token: {response.text}") 206 | 207 | @staticmethod 208 | def refresh_access_token( 209 | client_id: str, client_secret: str, refresh_token: str 210 | ) -> Dict[str, Any]: 211 
| url = "https://oauth2.googleapis.com/token" 212 | 213 | data = { 214 | "client_id": client_id, 215 | "client_secret": client_secret, 216 | "refresh_token": refresh_token, 217 | "grant_type": "refresh_token", 218 | } 219 | 220 | try: 221 | response = requests.post(url, data=data) 222 | response.raise_for_status() 223 | except requests.exceptions.RequestException as e: 224 | raise RuntimeError(f"Failed to refresh access token: {e}") 225 | 226 | access_token = response.json()["access_token"] 227 | expires_in = response.json()["expires_in"] 228 | 229 | return { 230 | "access_token": access_token, 231 | "refresh_token": refresh_token, 232 | "expires_in": expires_in, 233 | } 234 | -------------------------------------------------------------------------------- /core/morph/task/utils/connections/bigquery/types.py: -------------------------------------------------------------------------------- 1 | from enum import Enum 2 | from typing import Any, Dict, List, Optional 3 | 4 | from pydantic import BaseModel, Field 5 | 6 | 7 | class BigqueryException(Exception): 8 | def __init__( 9 | self, message: str, code: int, errors: List[Dict[str, Any]], status: str 10 | ): 11 | super().__init__(message) 12 | self.code = code 13 | self.errors = errors 14 | self.status = status 15 | 16 | 17 | class BigqueryFieldTypes(str, Enum): 18 | INTEGER = "INTEGER" 19 | INT64 = "INT64" 20 | FLOAT = "FLOAT" 21 | FLOAT64 = "FLOAT64" 22 | STRING = "STRING" 23 | BYTES = "BYTES" 24 | BOOLEAN = "BOOLEAN" 25 | BOOL = "BOOL" 26 | TIMESTAMP = "TIMESTAMP" 27 | DATE = "DATE" 28 | TIME = "TIME" 29 | DATETIME = "DATETIME" 30 | GEOGRAPHY = "GEOGRAPHY" 31 | RECORD = "RECORD" 32 | STRUCT = "STRUCT" 33 | NUMERIC = "NUMERIC" 34 | BIGNUMERIC = "BIGNUMERIC" 35 | JSON = "JSON" 36 | 37 | 38 | class BigqueryFieldModes(str, Enum): 39 | NULLABLE = "NULLABLE" 40 | REQUIRED = "REQUIRED" 41 | REPEATED = "REPEATED" 42 | 43 | 44 | class BigqueryTableFieldSchema(BaseModel): 45 | name: str 46 | type: BigqueryFieldTypes 47 | mode: BigqueryFieldModes 48 | fields: List["BigqueryTableFieldSchema"] = Field(default_factory=list) 49 | description: Optional[str] = None 50 | 51 | 52 | class BigqueryTableSchema(BaseModel): 53 | fields: List[BigqueryTableFieldSchema] = Field(default_factory=list) 54 | 55 | 56 | class BigqueryQueryResponse(BaseModel): 57 | schema_: BigqueryTableSchema = Field(..., alias="schema") 58 | rows: List[Dict[str, Any]] 59 | next_token: Optional[str] = None 60 | 61 | 62 | class BigqueryQueryErrorResponse(BaseModel): 63 | code: int 64 | message: str 65 | -------------------------------------------------------------------------------- /core/morph/task/utils/connections/bigquery/usecase.py: -------------------------------------------------------------------------------- 1 | from typing import Any, Dict, List, Optional, Tuple 2 | 3 | from morph.task.utils.connections.bigquery.api import BigqueryApi 4 | 5 | 6 | class BigqueryUsecase: 7 | def __init__( 8 | self, 9 | project_id: str, 10 | access_token: str, 11 | location: Optional[str] = "asia-northeast1", 12 | ): 13 | self.project_id = project_id 14 | self.access_token = access_token 15 | self.location = location 16 | 17 | def query(self, sql: str, limit: Optional[int] = None) -> List[Dict[str, Any]]: 18 | def _query( 19 | _limit: Optional[int] = None, _next_token: Optional[str] = None 20 | ) -> Tuple[List[Dict[str, Any]], Optional[str]]: 21 | _result = BigqueryApi.post_query( 22 | project_id=self.project_id, 23 | query=sql, 24 | access_token=self.access_token, 25 | location=self.location, 26 | 
limit=_limit, 27 | next_token=_next_token, 28 | ) 29 | _rows: List[Dict[str, Any]] = [] 30 | for _row in _result.rows: 31 | obj = {} 32 | for i, f in enumerate(_result.schema_.fields): 33 | obj[f.name] = _row["f"][i]["v"] 34 | _rows.append(obj) 35 | return _rows, _result.next_token 36 | 37 | items = [] 38 | next_token_ = None 39 | while True: 40 | rows, next_token = _query(limit, next_token_) 41 | items.extend(rows) 42 | if next_token is not None: 43 | next_token_ = next_token 44 | else: 45 | break 46 | return items 47 | -------------------------------------------------------------------------------- /core/morph/task/utils/connections/database/mssql.py: -------------------------------------------------------------------------------- 1 | import io 2 | import os 3 | from typing import Any, Optional, Union 4 | 5 | from colorama import Fore 6 | from sqlalchemy import create_engine, text 7 | from sqlalchemy.orm import Session, sessionmaker 8 | 9 | from morph.task.utils.connection import SQLServerConnection 10 | from morph.task.utils.connections.database.utils import is_sql_returning_result 11 | from morph.task.utils.connections.utils import normalize_newlines 12 | 13 | CONNECTION_TIMEOUT = 10 14 | 15 | 16 | class SQLServerConnector: 17 | def __init__( 18 | self, 19 | connection: SQLServerConnection, 20 | use_ssh: Optional[bool] = False, 21 | ): 22 | self.connection = connection 23 | self.use_ssh = use_ssh 24 | self.ssh_server: Optional[Any] = None 25 | self.session: Optional[sessionmaker[Session]] = None 26 | self.engine = self._create_engine() 27 | 28 | def _get_db_url(self, local_port: Optional[Union[str, int]] = None) -> str: 29 | user = self.connection.user 30 | password = self.connection.password 31 | host = "localhost" if local_port else self.connection.host 32 | port = local_port if local_port else self.connection.port 33 | database = self.connection.dbname 34 | return f"mssql+pytds://{user}:{password}@{host}:{port}/{database}" 35 | 36 | def _start_ssh_tunnel(self): 37 | from paramiko import RSAKey 38 | from sshtunnel import SSHTunnelForwarder 39 | 40 | ssh_host = self.connection.ssh_host 41 | ssh_port = int(self.connection.ssh_port) if self.connection.ssh_port else 22 42 | ssh_user = self.connection.ssh_user 43 | ssh_password = self.connection.ssh_password 44 | ssh_pkey = None 45 | if self.connection.ssh_private_key: 46 | _ssh_private_key = self.connection.ssh_private_key 47 | if _ssh_private_key.startswith("~"): 48 | _ssh_private_key = os.path.expanduser(_ssh_private_key) 49 | if os.path.exists(_ssh_private_key): 50 | ssh_pkey_str = open(_ssh_private_key).read() 51 | else: 52 | ssh_pkey_str = normalize_newlines(_ssh_private_key) 53 | private_key_file = io.StringIO(ssh_pkey_str) 54 | ssh_pkey = RSAKey.from_private_key(private_key_file) 55 | remote_bind_address = ( 56 | self.connection.host, 57 | int(self.connection.port) if self.connection.port else 3306, 58 | ) 59 | 60 | self.ssh_server = SSHTunnelForwarder( 61 | (ssh_host, ssh_port), 62 | ssh_username=ssh_user, 63 | ssh_password=ssh_password, 64 | ssh_pkey=ssh_pkey, 65 | remote_bind_address=remote_bind_address, 66 | local_bind_address=("localhost", 10001), 67 | ) 68 | if not self.ssh_server: 69 | raise RuntimeError("Failed to create SSH tunnel") 70 | 71 | self.ssh_server.start() 72 | 73 | return self.ssh_server.local_bind_port 74 | 75 | def _create_engine(self) -> Any: 76 | local_port = None 77 | if self.use_ssh: 78 | local_port = self._start_ssh_tunnel() 79 | db_url = self._get_db_url(local_port) 80 | return create_engine(db_url, 
echo=False) 81 | 82 | def get_session(self) -> Any: 83 | if not self.engine: 84 | self.engine = self._create_engine() 85 | if not self.session: 86 | Session = sessionmaker(bind=self.engine, expire_on_commit=False) 87 | self.session = Session() 88 | return self.session 89 | 90 | def close_session(self) -> None: 91 | if self.session: 92 | self.session.close() 93 | self.session = None 94 | if self.ssh_server: 95 | self.ssh_server.stop() 96 | self.ssh_server = None 97 | 98 | def execute_sql(self, sql: str) -> Any: 99 | session = self.get_session() 100 | try: 101 | if not is_sql_returning_result(sql): 102 | with session.begin(): 103 | session.execute(text(sql)) 104 | return None 105 | if sql.strip().lower().startswith("select"): 106 | result = session.execute(text(sql)) 107 | return result.fetchall(), result.keys() 108 | else: 109 | with session.begin(): 110 | result = session.execute(text(sql)) 111 | return result.fetchall(), result.keys() 112 | except Exception as e: 113 | print(Fore.RED + f"{e}" + Fore.RESET) 114 | print( 115 | Fore.RED 116 | + "\n\n=== Executed SQL ===\n" 117 | + f"{sql}" 118 | + "\n===========\n" 119 | + Fore.RESET 120 | ) 121 | if not sql.strip().lower().startswith("select"): 122 | session.rollback() 123 | raise RuntimeError(f"Error executing query: {e}") 124 | finally: 125 | self.close_session() 126 | -------------------------------------------------------------------------------- /core/morph/task/utils/connections/database/mysql.py: -------------------------------------------------------------------------------- 1 | import io 2 | import os 3 | from typing import Any, Optional, Union 4 | 5 | from colorama import Fore 6 | from sqlalchemy import create_engine, text 7 | from sqlalchemy.orm import Session, sessionmaker 8 | 9 | from morph.task.utils.connection import MysqlConnection 10 | from morph.task.utils.connections.database.utils import is_sql_returning_result 11 | from morph.task.utils.connections.utils import normalize_newlines 12 | 13 | CONNECTION_TIMEOUT = 10 14 | 15 | 16 | class MysqlConnector: 17 | def __init__( 18 | self, 19 | connection: MysqlConnection, 20 | use_ssh: Optional[bool] = False, 21 | ): 22 | self.connection = connection 23 | self.use_ssh = use_ssh 24 | self.ssh_server: Optional[Any] = None 25 | self.session: Optional[sessionmaker[Session]] = None 26 | self.engine = self._create_engine() 27 | 28 | def _get_db_url(self, local_port: Optional[Union[str, int]] = None) -> str: 29 | user = self.connection.user 30 | password = self.connection.password 31 | host = "localhost" if local_port else self.connection.host 32 | port = local_port if local_port else self.connection.port 33 | database = self.connection.dbname 34 | return f"mysql+pymysql://{user}:{password}@{host}:{port}/{database}?connect_timeout={CONNECTION_TIMEOUT}" 35 | 36 | def _start_ssh_tunnel(self): 37 | from paramiko import RSAKey 38 | from sshtunnel import SSHTunnelForwarder 39 | 40 | ssh_host = self.connection.ssh_host 41 | ssh_port = int(self.connection.ssh_port) if self.connection.ssh_port else 22 42 | ssh_user = self.connection.ssh_user 43 | ssh_password = self.connection.ssh_password 44 | ssh_pkey = None 45 | if self.connection.ssh_private_key: 46 | _ssh_private_key = self.connection.ssh_private_key 47 | if _ssh_private_key.startswith("~"): 48 | _ssh_private_key = os.path.expanduser(_ssh_private_key) 49 | if os.path.exists(_ssh_private_key): 50 | ssh_pkey_str = open(_ssh_private_key).read() 51 | else: 52 | ssh_pkey_str = normalize_newlines(_ssh_private_key) 53 | private_key_file = 
io.StringIO(ssh_pkey_str) 54 | ssh_pkey = RSAKey.from_private_key(private_key_file) 55 | remote_bind_address = ( 56 | self.connection.host, 57 | int(self.connection.port) if self.connection.port else 3306, 58 | ) 59 | 60 | self.ssh_server = SSHTunnelForwarder( 61 | (ssh_host, ssh_port), 62 | ssh_username=ssh_user, 63 | ssh_password=ssh_password, 64 | ssh_pkey=ssh_pkey, 65 | remote_bind_address=remote_bind_address, 66 | local_bind_address=("localhost", 10001), 67 | ) 68 | if not self.ssh_server: 69 | raise RuntimeError("Failed to create SSH tunnel") 70 | 71 | self.ssh_server.start() 72 | 73 | return self.ssh_server.local_bind_port 74 | 75 | def _create_engine(self) -> Any: 76 | local_port = None 77 | if self.use_ssh: 78 | local_port = self._start_ssh_tunnel() 79 | db_url = self._get_db_url(local_port) 80 | return create_engine(db_url, echo=False) 81 | 82 | def get_session(self) -> Any: 83 | if not self.engine: 84 | self.engine = self._create_engine() 85 | if not self.session: 86 | Session = sessionmaker(bind=self.engine, expire_on_commit=False) 87 | self.session = Session() 88 | return self.session 89 | 90 | def close_session(self) -> None: 91 | if self.session: 92 | self.session.close() 93 | self.session = None 94 | if self.ssh_server: 95 | self.ssh_server.stop() 96 | self.ssh_server = None 97 | 98 | def execute_sql(self, sql: str) -> Any: 99 | session = self.get_session() 100 | try: 101 | if not is_sql_returning_result(sql): 102 | with session.begin(): 103 | session.execute(text(sql)) 104 | return None 105 | if sql.strip().lower().startswith("select"): 106 | result = session.execute(text(sql)) 107 | return result 108 | else: 109 | with session.begin(): 110 | result = session.execute(text(sql)) 111 | return result 112 | except Exception as e: 113 | print(Fore.RED + f"{e}" + Fore.RESET) 114 | print( 115 | Fore.RED 116 | + "\n\n=== Executed SQL ===\n" 117 | + f"{sql}" 118 | + "\n===========\n" 119 | + Fore.RESET 120 | ) 121 | if not sql.strip().lower().startswith("select"): 122 | session.rollback() 123 | raise RuntimeError(f"Error executing query: {e}") 124 | finally: 125 | self.close_session() 126 | -------------------------------------------------------------------------------- /core/morph/task/utils/connections/database/postgres.py: -------------------------------------------------------------------------------- 1 | import io 2 | import os 3 | from typing import Any, Optional, Union 4 | 5 | from colorama import Fore 6 | from sqlalchemy import create_engine, text 7 | from sqlalchemy.orm import Session, sessionmaker 8 | 9 | from morph.task.utils.connection import PostgresqlConnection 10 | from morph.task.utils.connections.database.utils import is_sql_returning_result 11 | from morph.task.utils.connections.utils import normalize_newlines 12 | 13 | CONNECTION_TIMEOUT = 10 14 | 15 | 16 | class PostgresqlConnector: 17 | def __init__( 18 | self, 19 | connection: PostgresqlConnection, 20 | use_ssh: Optional[bool] = False, 21 | ): 22 | self.connection = connection 23 | self.use_ssh = use_ssh 24 | self.ssh_server: Optional[Any] = None 25 | self.session: Optional[sessionmaker[Session]] = None 26 | self.engine = self._create_engine() 27 | 28 | def _get_db_url(self, local_port: Optional[Union[str, int]] = None) -> str: 29 | user = self.connection.user 30 | password = self.connection.password 31 | host = "localhost" if local_port else self.connection.host 32 | port = local_port if local_port else self.connection.port 33 | database = self.connection.dbname 34 | db_url = 
f"postgresql+psycopg2://{user}:{password}@{host}:{port}/{database}?connect_timeout={CONNECTION_TIMEOUT}" 35 | if self.connection.timezone: 36 | db_url += f"&options=-c timezone={self.connection.timezone}" 37 | return db_url 38 | 39 | def _start_ssh_tunnel(self): 40 | from paramiko import RSAKey 41 | from sshtunnel import SSHTunnelForwarder 42 | 43 | ssh_host = self.connection.ssh_host 44 | ssh_port = int(self.connection.ssh_port) if self.connection.ssh_port else 22 45 | ssh_user = self.connection.ssh_user 46 | ssh_password = self.connection.ssh_password 47 | ssh_pkey = None 48 | if self.connection.ssh_private_key: 49 | _ssh_private_key = self.connection.ssh_private_key 50 | if _ssh_private_key.startswith("~"): 51 | _ssh_private_key = os.path.expanduser(_ssh_private_key) 52 | if os.path.exists(_ssh_private_key): 53 | ssh_pkey_str = open(_ssh_private_key).read() 54 | else: 55 | ssh_pkey_str = normalize_newlines(_ssh_private_key) 56 | private_key_file = io.StringIO(ssh_pkey_str) 57 | ssh_pkey = RSAKey.from_private_key(private_key_file) 58 | remote_bind_address = ( 59 | self.connection.host, 60 | int(self.connection.port) if self.connection.port else 5432, 61 | ) 62 | 63 | self.ssh_server = SSHTunnelForwarder( 64 | (ssh_host, ssh_port), 65 | ssh_username=ssh_user, 66 | ssh_password=ssh_password, 67 | ssh_pkey=ssh_pkey, 68 | remote_bind_address=remote_bind_address, 69 | local_bind_address=("localhost", 10002), 70 | ) 71 | if not self.ssh_server: 72 | raise RuntimeError("Failed to create SSH tunnel") 73 | 74 | self.ssh_server.start() 75 | 76 | return self.ssh_server.local_bind_port 77 | 78 | def _create_engine(self) -> Any: 79 | local_port = None 80 | if self.use_ssh: 81 | local_port = self._start_ssh_tunnel() 82 | db_url = self._get_db_url(local_port) 83 | return create_engine(db_url, echo=False) 84 | 85 | def get_session(self) -> Any: 86 | if not self.engine: 87 | self.engine = self._create_engine() 88 | if not self.session: 89 | Session = sessionmaker(bind=self.engine, expire_on_commit=False) 90 | self.session = Session() 91 | return self.session 92 | 93 | def close_session(self) -> None: 94 | if self.session: 95 | self.session.close() 96 | self.session = None 97 | if self.ssh_server: 98 | self.ssh_server.stop() 99 | self.ssh_server = None 100 | 101 | def execute_sql(self, sql: str) -> Any: 102 | session = self.get_session() 103 | try: 104 | if not is_sql_returning_result(sql): 105 | with session.begin(): 106 | session.execute(text(sql)) 107 | return None 108 | if sql.strip().lower().startswith("select"): 109 | result = session.execute(text(sql)) 110 | return result 111 | else: 112 | with session.begin(): 113 | result = session.execute(text(sql)) 114 | return result 115 | except Exception as e: 116 | print(Fore.RED + f"{e}" + Fore.RESET) 117 | print( 118 | Fore.RED 119 | + "\n\n=== Executed SQL ===\n" 120 | + f"{sql}" 121 | + "\n===========\n" 122 | + Fore.RESET 123 | ) 124 | if not sql.strip().lower().startswith("select"): 125 | session.rollback() 126 | raise RuntimeError(f"Error executing query: {e}") 127 | finally: 128 | self.close_session() 129 | -------------------------------------------------------------------------------- /core/morph/task/utils/connections/database/redshift.py: -------------------------------------------------------------------------------- 1 | import io 2 | import os 3 | from typing import Any, Optional, Union 4 | 5 | from colorama import Fore 6 | from sqlalchemy import create_engine, text 7 | from sqlalchemy.orm import Session, sessionmaker 8 | 9 | from 
morph.task.utils.connection import RedshiftConnection 10 | from morph.task.utils.connections.database.utils import is_sql_returning_result 11 | from morph.task.utils.connections.utils import normalize_newlines 12 | 13 | CONNECTION_TIMEOUT = 10 14 | 15 | 16 | class RedshiftConnector: 17 | def __init__( 18 | self, 19 | connection: RedshiftConnection, 20 | use_ssh: Optional[bool] = False, 21 | ): 22 | self.connection = connection 23 | self.use_ssh = use_ssh 24 | self.ssh_server: Optional[Any] = None 25 | self.session: Optional[sessionmaker[Session]] = None 26 | self.engine = self._create_engine() 27 | 28 | def _get_db_url(self, local_port: Optional[Union[str, int]] = None) -> str: 29 | user = self.connection.user 30 | password = self.connection.password 31 | host = "localhost" if local_port else self.connection.host 32 | port = local_port if local_port else self.connection.port 33 | database = self.connection.dbname 34 | return f"postgresql+psycopg2://{user}:{password}@{host}:{port}/{database}?connect_timeout={CONNECTION_TIMEOUT}" 35 | 36 | def _start_ssh_tunnel(self): 37 | from paramiko import RSAKey 38 | from sshtunnel import SSHTunnelForwarder 39 | 40 | ssh_host = self.connection.ssh_host 41 | ssh_port = int(self.connection.ssh_port) if self.connection.ssh_port else 22 42 | ssh_user = self.connection.ssh_user 43 | ssh_password = self.connection.ssh_password 44 | ssh_pkey = None 45 | if self.connection.ssh_private_key: 46 | _ssh_private_key = self.connection.ssh_private_key 47 | if _ssh_private_key.startswith("~"): 48 | _ssh_private_key = os.path.expanduser(_ssh_private_key) 49 | if os.path.exists(_ssh_private_key): 50 | ssh_pkey_str = open(_ssh_private_key).read() 51 | else: 52 | ssh_pkey_str = normalize_newlines(_ssh_private_key) 53 | private_key_file = io.StringIO(ssh_pkey_str) 54 | ssh_pkey = RSAKey.from_private_key(private_key_file) 55 | remote_bind_address = ( 56 | self.connection.host, 57 | int(self.connection.port) if self.connection.port else 5439, 58 | ) 59 | self.ssh_server = SSHTunnelForwarder( 60 | (ssh_host, ssh_port), 61 | ssh_username=ssh_user, 62 | ssh_password=ssh_password, 63 | ssh_pkey=ssh_pkey, 64 | remote_bind_address=remote_bind_address, 65 | local_bind_address=("localhost", 10003), 66 | ) 67 | if not self.ssh_server: 68 | raise RuntimeError("Failed to create SSH tunnel") 69 | 70 | self.ssh_server.start() 71 | 72 | return self.ssh_server.local_bind_port 73 | 74 | def _create_engine(self) -> Any: 75 | local_port = None 76 | if self.use_ssh: 77 | local_port = self._start_ssh_tunnel() 78 | db_url = self._get_db_url(local_port) 79 | return create_engine( 80 | db_url, 81 | echo=False, 82 | connect_args={"sslmode": "prefer"}, 83 | ) 84 | 85 | def get_session(self) -> Session: 86 | if not self.engine: 87 | self.engine = self._create_engine() 88 | if not self.session: 89 | Session = sessionmaker(bind=self.engine) 90 | self.session = Session() 91 | return self.session 92 | 93 | def close_session(self) -> None: 94 | if self.session: 95 | self.session.close() 96 | self.session = None 97 | if self.ssh_server: 98 | self.ssh_server.stop() 99 | self.ssh_server = None 100 | 101 | def execute_sql(self, sql: str) -> Any: 102 | session = self.get_session() 103 | try: 104 | if not is_sql_returning_result(sql): 105 | with session.begin(): 106 | session.execute(text(sql)) 107 | return None 108 | if sql.strip().lower().startswith("select"): 109 | result = session.execute(text(sql)) 110 | return result 111 | else: 112 | with session.begin(): 113 | result = session.execute(text(sql)) 114 | 
return result 115 | except Exception as e: 116 | print(Fore.RED + f"{e}" + Fore.RESET) 117 | print( 118 | Fore.RED 119 | + "\n\n=== Executed SQL ===\n" 120 | + f"{sql}" 121 | + "\n===========\n" 122 | + Fore.RESET 123 | ) 124 | if not sql.strip().lower().startswith("select"): 125 | session.rollback() 126 | raise RuntimeError(f"Error executing query: {e}") 127 | finally: 128 | self.close_session() 129 | -------------------------------------------------------------------------------- /core/morph/task/utils/connections/database/types.py: -------------------------------------------------------------------------------- 1 | from enum import Enum 2 | 3 | 4 | class DBType(Enum): 5 | POSTGRES = "postgres" 6 | MYSQL = "mysql" 7 | REDSHIFT = "redshift" 8 | -------------------------------------------------------------------------------- /core/morph/task/utils/connections/database/utils.py: -------------------------------------------------------------------------------- 1 | import re 2 | 3 | 4 | def is_sql_returning_result(sql: str) -> bool: 5 | if sql is None or sql == "": 6 | return False 7 | non_select_patterns = [ 8 | r"\bINSERT\b", 9 | r"\bUPDATE\b", 10 | r"\bDELETE\b", 11 | r"\bCREATE\b", 12 | r"\bALTER\b", 13 | r"\bDROP\b", 14 | r"\bTRUNCATE\b", 15 | r"\bSET\b", 16 | r"\bGRANT\b", 17 | r"\bREVOKE\b", 18 | ] 19 | for pattern in non_select_patterns: 20 | if re.search(pattern, sql, re.IGNORECASE): 21 | return False 22 | 23 | select_pattern = r"\bSELECT\b" 24 | if re.search(select_pattern, sql, re.IGNORECASE): 25 | return True 26 | 27 | return False 28 | -------------------------------------------------------------------------------- /core/morph/task/utils/connections/snowflake/types.py: -------------------------------------------------------------------------------- 1 | from enum import Enum 2 | from typing import Any, Dict, Generic, List, Optional, TypeVar, Union 3 | 4 | from pydantic import BaseModel, Field, field_validator 5 | 6 | T = TypeVar("T") 7 | 8 | 9 | class SnowflakeException(Exception): 10 | def __init__(self, message, code=None, sqlState=None, statementHandle=None): 11 | super().__init__(message) 12 | self.code = code 13 | self.sqlState = sqlState 14 | self.statementHandle = statementHandle 15 | 16 | 17 | class SnowflakeNetworkResponse(BaseModel, Generic[T]): 18 | data: T 19 | status: int 20 | 21 | 22 | class SnowflakeNetworkErrorResponse(BaseModel): 23 | code: str 24 | message: str 25 | sqlState: str 26 | statementHandle: str 27 | 28 | 29 | def is_snowflake_network_error(input: Any) -> bool: 30 | return isinstance(input, SnowflakeNetworkErrorResponse) or ( 31 | isinstance(input, dict) 32 | and "code" in input 33 | and "message" in input 34 | and "sqlState" in input 35 | and "statementHandle" in input 36 | ) 37 | 38 | 39 | class SnowflakeOAuthError(BaseModel): 40 | code: str 41 | message: str 42 | 43 | def __init__(self, **data): 44 | super().__init__(**data) 45 | if self.code != "390318": 46 | raise ValueError("code must be '390318'") 47 | 48 | 49 | def is_snowflake_oauth_error(input: Any) -> bool: 50 | return isinstance(input, SnowflakeOAuthError) or ( 51 | isinstance(input, dict) 52 | and "code" in input 53 | and input["code"] == "390318" 54 | and "message" in input 55 | and isinstance(input["message"], str) 56 | ) 57 | 58 | 59 | class SnowflakeRowTypeFieldType(str, Enum): 60 | NUMBER = "NUMBER" 61 | DECIMAL = "DECIMAL" 62 | NUMERIC = "NUMERIC" 63 | INT = "INT" 64 | INTEGER = "INTEGER" 65 | BIGINT = "BIGINT" 66 | SMALLINT = "SMALLINT" 67 | TINYINT = "TINYINT" 68 | BYTEINT = 
"BYTEINT" 69 | FLOAT = "FLOAT" 70 | FLOAT4 = "FLOAT4" 71 | FLOAT8 = "FLOAT8" 72 | FIXED = "FIXED" 73 | REAL = "REAL" 74 | DOUBLE = "DOUBLE" 75 | DOUBLE_PRECISION = "DOUBLE PRECISION" 76 | VARCHAR = "VARCHAR" 77 | CHAR = "CHAR" 78 | CHARACTER = "CHARACTER" 79 | STRING = "STRING" 80 | TEXT = "TEXT" 81 | BINARY = "BINARY" 82 | VARBINARY = "VARBINARY" 83 | BOOLEAN = "BOOLEAN" 84 | DATE = "DATE" 85 | DATETIME = "DATETIME" 86 | TIME = "TIME" 87 | TIMESTAMP = "TIMESTAMP" 88 | TIMESTAMP_LTZ = "TIMESTAMP_LTZ" 89 | TIMESTAMP_NTZ = "TIMESTAMP_NTZ" 90 | TIMESTAMP_TZ = "TIMESTAMP_TZ" 91 | VARIANT = "VARIANT" 92 | OBJECT = "OBJECT" 93 | ARRAY = "ARRAY" 94 | GEOGRAPHY = "GEOGRAPHY" 95 | 96 | 97 | class SnowflakeRowType(BaseModel): 98 | name: str 99 | database: str 100 | schema_: str = Field(..., alias="schema") 101 | table: str 102 | precision: Optional[int] 103 | byteLength: Optional[int] 104 | scale: Optional[int] 105 | type: SnowflakeRowTypeFieldType 106 | nullable: bool 107 | collation: Optional[str] 108 | length: Optional[int] 109 | 110 | @field_validator("type", mode="before") 111 | def convert_type_to_uppercase(cls, v): 112 | if isinstance(v, str): 113 | return v.upper() 114 | return v 115 | 116 | 117 | class PartitionInfo(BaseModel): 118 | rowCount: int 119 | uncompressedSize: int 120 | 121 | 122 | class ResultSetMetaData(BaseModel): 123 | numRows: int 124 | format: str 125 | partitionInfo: List[PartitionInfo] 126 | rowType: List[SnowflakeRowType] 127 | 128 | @classmethod 129 | def parse_obj(cls, data: Dict[str, Any]) -> "ResultSetMetaData": 130 | partitionInfo = [ 131 | PartitionInfo.model_validate(pi) for pi in data["partitionInfo"] 132 | ] 133 | rowType = [SnowflakeRowType.model_validate(rt) for rt in data["rowType"]] 134 | return cls( 135 | numRows=data["numRows"], 136 | format=data["format"], 137 | partitionInfo=partitionInfo, 138 | rowType=rowType, 139 | ) 140 | 141 | 142 | class SnowflakeExecuteSqlStatementsResponse(BaseModel): 143 | resultSetMetaData: ResultSetMetaData 144 | data: List[List[Union[str, int, float, bool, None]]] 145 | code: str 146 | statementStatusUrl: str 147 | requestId: str 148 | sqlState: str 149 | statementHandle: str 150 | message: str 151 | createdOn: int 152 | 153 | @classmethod 154 | def parse_obj(cls, data: Dict[str, Any]) -> "SnowflakeExecuteSqlStatementsResponse": 155 | resultSetMetaData = ResultSetMetaData.parse_obj(data["resultSetMetaData"]) 156 | return cls( 157 | resultSetMetaData=resultSetMetaData, 158 | data=data["data"], 159 | code=data["code"], 160 | statementStatusUrl=data["statementStatusUrl"], 161 | requestId=data["requestId"], 162 | sqlState=data["sqlState"], 163 | statementHandle=data["statementHandle"], 164 | message=data["message"], 165 | createdOn=data["createdOn"], 166 | ) 167 | 168 | 169 | class SnowflakeExecuteSqlImplResponse(BaseModel): 170 | data: SnowflakeExecuteSqlStatementsResponse 171 | status: int 172 | 173 | @classmethod 174 | def parse_obj(cls, data: Dict[str, Any]) -> "SnowflakeExecuteSqlImplResponse": 175 | data_ = SnowflakeExecuteSqlStatementsResponse.parse_obj(data["data"]) 176 | return cls(data=data_, status=data["status"]) 177 | -------------------------------------------------------------------------------- /core/morph/task/utils/connections/snowflake/usecase.py: -------------------------------------------------------------------------------- 1 | import json 2 | from typing import Any, Dict, List, Optional, Tuple 3 | 4 | from morph.task.utils.connections.snowflake.api import SnowflakeApi 5 | from 
morph.task.utils.connections.snowflake.types import ( 6 | SnowflakeRowType, 7 | is_snowflake_network_error, 8 | is_snowflake_oauth_error, 9 | ) 10 | 11 | 12 | class SnowflakeUsecase: 13 | def __init__( 14 | self, 15 | account: str, 16 | access_token: str, 17 | database: str, 18 | schema: Optional[str] = None, 19 | warehouse: Optional[str] = None, 20 | role: Optional[str] = None, 21 | ): 22 | self.account = account 23 | self.access_token = access_token 24 | self.database = database 25 | self.schema = schema 26 | self.warehouse = warehouse 27 | self.role = role 28 | 29 | def query( 30 | self, 31 | sql: str, 32 | statement_handle: Optional[str] = None, 33 | max_partition_num: Optional[int] = None, 34 | next_token: Optional[str] = None, 35 | row_type: Optional[List[SnowflakeRowType]] = [], 36 | ) -> List[Dict[str, Any]]: 37 | def _query( 38 | _statement_handle: Optional[str] = None, 39 | _max_partition_num: Optional[int] = None, 40 | _next_token: Optional[str] = None, 41 | _rowType: Optional[List[SnowflakeRowType]] = [], 42 | ) -> Tuple[ 43 | List[Dict[str, Any]], 44 | Optional[str], 45 | Optional[int], 46 | Optional[str], 47 | Optional[List[SnowflakeRowType]], 48 | ]: 49 | __statement_handle = _statement_handle 50 | __max_partition_num = _max_partition_num 51 | __partition = 0 if _next_token is None else int(_next_token) 52 | __next_token = _next_token 53 | __data = None 54 | __row_type = _rowType 55 | 56 | if __statement_handle is not None: 57 | result = SnowflakeApi.get_sql_statements( 58 | account=self.account, 59 | access_token=self.access_token, 60 | statement_handle=__statement_handle, 61 | partition=__partition, 62 | ) 63 | if is_snowflake_network_error(result) or is_snowflake_oauth_error( 64 | result 65 | ): 66 | raise Exception(json.dumps(result)) 67 | __data = result["data"]["data"] # type: ignore 68 | __partition += 1 69 | if __max_partition_num and __partition < __max_partition_num: 70 | __next_token = str(__partition) 71 | else: 72 | __next_token = None 73 | else: 74 | result = SnowflakeApi.execute_sql( # type: ignore 75 | account=self.account, 76 | access_token=self.access_token, 77 | statement=sql, 78 | database=self.database, 79 | schema=self.schema, 80 | warehouse=self.warehouse, 81 | role=self.role, 82 | ) 83 | if is_snowflake_network_error(result) or is_snowflake_oauth_error( 84 | result 85 | ): 86 | raise Exception(json.dumps(result)) 87 | __statement_handle = result["statementHandle"] # type: ignore 88 | __max_partition_num = result["partitionNum"] # type: ignore 89 | __rowType = result["data"]["resultSetMetaData"]["rowType"] # type: ignore 90 | __data = result["data"]["data"] # type: ignore 91 | 92 | __partition += 1 93 | if __max_partition_num and __partition < __max_partition_num: 94 | __next_token = str(__partition) 95 | else: 96 | __next_token = None 97 | 98 | rows = [] 99 | for row in __data: 100 | obj = {} 101 | for i, t in enumerate(__rowType): 102 | obj[t["name"]] = row[i] 103 | rows.append(obj) 104 | return ( 105 | rows, 106 | __statement_handle, 107 | __max_partition_num, 108 | __next_token, 109 | __row_type, 110 | ) 111 | 112 | items = [] 113 | statement_handle_ = statement_handle 114 | max_partition_num_ = max_partition_num 115 | next_token_ = next_token 116 | row_type_ = row_type 117 | while True: 118 | ( 119 | rows__, 120 | statement_handle__, 121 | max_partition_num__, 122 | next_token__, 123 | row_type__, 124 | ) = _query(statement_handle_, max_partition_num_, next_token_, row_type_) 125 | items.extend(rows__) 126 | if statement_handle__ is not None and 
next_token__ is not None: 127 | statement_handle_ = statement_handle__ 128 | max_partition_num_ = max_partition_num__ 129 | next_token_ = next_token__ 130 | row_type_ = row_type__ 131 | else: 132 | break 133 | return items 134 | -------------------------------------------------------------------------------- /core/morph/task/utils/connections/utils.py: -------------------------------------------------------------------------------- 1 | import re 2 | 3 | 4 | def normalize_newlines(raw: str) -> str: 5 | normalized_str = re.sub(r"\\+n", "\n", raw) 6 | return normalized_str 7 | -------------------------------------------------------------------------------- /core/morph/task/utils/file_upload.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | 4 | class FileWithProgress: 5 | def __init__(self, file_path, pbar): 6 | self._file_path = file_path 7 | self._f = open(file_path, "rb") 8 | self._pbar = pbar 9 | 10 | def __len__(self): 11 | return os.path.getsize(self._file_path) 12 | 13 | def read(self, size=-1): 14 | """ 15 | Read up to size bytes from the object and update the progress bar. 16 | @param size: 17 | @return: 18 | """ 19 | data = self._f.read(size) 20 | if data: 21 | self._pbar.update(len(data)) 22 | return data 23 | 24 | def close(self): 25 | if not self._f.closed: 26 | self._f.close() 27 | 28 | def __iter__(self): 29 | """ 30 | Iterate over the file and update the progress bar. 31 | @return: 32 | """ 33 | for chunk in iter(lambda: self._f.read(1024 * 1024), b""): 34 | self._pbar.update(len(chunk)) 35 | yield chunk 36 | 37 | def __enter__(self): 38 | return self 39 | 40 | def __exit__(self, *args): 41 | self.close() 42 | -------------------------------------------------------------------------------- /core/morph/task/utils/load_dockerfile.py: -------------------------------------------------------------------------------- 1 | from typing import Any, Dict, Optional, Tuple 2 | 3 | import requests 4 | 5 | 6 | def get_dockerfile_from_api( 7 | framework: str, 8 | provider: str, 9 | package_manager: Optional[str] = None, 10 | runtime: Optional[str] = None, 11 | ) -> Tuple[str, str]: 12 | """ 13 | Fetch dockerfile and dockerignore from the Morph API. 
14 | 15 | Args: 16 | framework: The framework to get the dockerfile for 17 | provider: The provider to get the dockerfile for 18 | package_manager: Optional package manager to use 19 | runtime: Optional runtime to use 20 | 21 | Returns: 22 | Tuple containing (dockerfile, dockerignore) 23 | """ 24 | url = f"https://dockerfile-template.morph-cb9.workers.dev/dockerfile/{framework}" 25 | 26 | params: Dict[str, Any] = { 27 | "provider": provider, 28 | } 29 | if package_manager: 30 | params["packageManager"] = package_manager 31 | if runtime: 32 | params["runtime"] = runtime 33 | 34 | response = requests.get(url, params=params) 35 | 36 | response.raise_for_status() 37 | 38 | data = response.json() 39 | 40 | if "error" in data: 41 | raise ValueError(data["error"]) 42 | 43 | return data["dockerfile"], data["dockerignore"] 44 | -------------------------------------------------------------------------------- /core/morph/task/utils/logging.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import sys 3 | from contextlib import asynccontextmanager, contextmanager 4 | 5 | import colorlog 6 | 7 | 8 | class LoggerStream: 9 | def __init__(self, logger, level): 10 | self.logger = logger 11 | self.level = level 12 | self.line_buffer = "" 13 | 14 | def write(self, message): 15 | self.line_buffer += message 16 | while "\n" in self.line_buffer: 17 | line, self.line_buffer = self.line_buffer.split("\n", 1) 18 | self.logger.log(self.level, line.strip()) 19 | 20 | def flush(self): 21 | if self.line_buffer: 22 | self.logger.log(self.level, self.line_buffer.strip()) 23 | self.line_buffer = "" 24 | 25 | 26 | @contextmanager 27 | def redirect_stdout_to_logger(logger, level): 28 | original_stdout = sys.stdout 29 | sys.stdout = LoggerStream(logger, level) # type: ignore 30 | try: 31 | yield 32 | finally: 33 | sys.stdout.flush() 34 | sys.stdout = original_stdout 35 | 36 | 37 | @asynccontextmanager 38 | async def redirect_stdout_to_logger_async(logger, level=logging.INFO): 39 | with redirect_stdout_to_logger(logger, level): 40 | yield 41 | 42 | 43 | def get_morph_logger() -> logging.Logger: 44 | logger = logging.getLogger("morph_logger") 45 | 46 | if not logger.hasHandlers(): 47 | logger.setLevel(logging.DEBUG) 48 | 49 | # Console handler with color formatting 50 | console_handler = colorlog.StreamHandler() 51 | console_handler.setLevel(logging.DEBUG) 52 | console_formatter = colorlog.ColoredFormatter( 53 | "%(log_color)s%(asctime)s [%(levelname)s] %(message)s", 54 | log_colors={ 55 | "DEBUG": "white", 56 | "INFO": "cyan", 57 | "WARNING": "yellow", 58 | "ERROR": "red", 59 | "CRITICAL": "bold_red", 60 | }, 61 | ) 62 | console_handler.setFormatter(console_formatter) 63 | logger.addHandler(console_handler) 64 | 65 | return logger 66 | -------------------------------------------------------------------------------- /core/morph/task/utils/morph.py: -------------------------------------------------------------------------------- 1 | import base64 2 | import logging 3 | import os 4 | import re 5 | from pathlib import Path 6 | from typing import List, Optional, Union 7 | 8 | from morph.constants import MorphConstant 9 | from pydantic import BaseModel 10 | 11 | IGNORE_DIRS = ["/private/tmp", "/tmp"] 12 | 13 | 14 | def find_project_root_dir(abs_filepath: Optional[str] = None) -> str: 15 | current_dir = ( 16 | abs_filepath if abs_filepath and os.path.isabs(abs_filepath) else os.getcwd() 17 | ) 18 | 19 | for ignore_dir in IGNORE_DIRS: 20 | if ignore_dir in current_dir: 21 | 
current_dir = os.getcwd() 22 | 23 | project_yaml_files = ["morph_project.yml", "morph_project.yaml"] 24 | while current_dir != os.path.dirname(current_dir): 25 | for project_yaml_file in project_yaml_files: 26 | if os.path.isfile(os.path.join(current_dir, project_yaml_file)): 27 | return os.path.abspath(current_dir) 28 | current_dir = os.path.dirname(current_dir) 29 | 30 | morph_project_path = os.path.join(Path.home(), "morph_project.yml") 31 | if os.path.isfile(morph_project_path): 32 | return os.path.abspath(os.path.dirname(morph_project_path)) 33 | morph_project_path = os.path.join(Path.home(), "morph_project.yaml") 34 | if os.path.isfile(morph_project_path): 35 | return os.path.abspath(os.path.dirname(morph_project_path)) 36 | 37 | raise FileNotFoundError( 38 | "morph_project.yml not found in the current directory or any parent directories." 39 | ) 40 | 41 | 42 | class Resource(BaseModel): 43 | alias: str 44 | path: str 45 | connection: Optional[str] = None 46 | output_paths: Optional[List[str]] = None 47 | public: Optional[bool] = None 48 | data_requirements: Optional[List[str]] = None 49 | 50 | def __init__( 51 | self, 52 | alias: str, 53 | path: str, 54 | connection: Optional[str] = None, 55 | output_paths: Optional[List[str]] = None, 56 | public: Optional[bool] = None, 57 | data_requirements: Optional[List[str]] = None, 58 | ): 59 | super().__init__( 60 | alias=alias, 61 | path=path, 62 | connection=connection, 63 | output_paths=output_paths, 64 | public=public, 65 | data_requirements=data_requirements, 66 | ) 67 | 68 | # Add attributes for executable files 69 | ext = os.path.splitext(path)[1] 70 | if ext in MorphConstant.EXECUTABLE_EXTENSIONS: 71 | self.connection = connection 72 | self.output_paths = output_paths 73 | else: 74 | self.connection = None 75 | self.output_paths = None 76 | 77 | @staticmethod 78 | def _write_output_file( 79 | output_file: str, 80 | output: Union[str, bytes], 81 | ) -> None: 82 | if not os.path.exists(os.path.dirname(output_file)): 83 | os.makedirs(os.path.dirname(output_file)) 84 | 85 | if os.path.exists(output_file) and ( 86 | output_file.startswith(MorphConstant.TMP_MORPH_DIR) 87 | or output_file.startswith("/private/tmp") 88 | ): 89 | os.unlink(output_file) 90 | 91 | mode = "wb" if isinstance(output, bytes) else "w" 92 | with open(output_file, mode) as f: 93 | f.write(output or "") 94 | 95 | def save_output_to_file( 96 | self, 97 | output: Union[str, bytes, List[Union[str, bytes]]], 98 | logger: logging.Logger = logging.getLogger(), 99 | ) -> "Resource": 100 | processed_output_paths = [] 101 | 102 | for original_output_path in self.output_paths or []: 103 | output_files = [original_output_path] 104 | for output_file in output_files: 105 | if isinstance(output, list): 106 | # For multiple outputs, HTML and PNG outputs are saved as files 107 | for raw_output in output: 108 | should_save_as_html = output_file.endswith(".html") 109 | should_save_as_png = output_file.endswith(".png") 110 | 111 | is_html_encoded = ( 112 | isinstance(raw_output, str) 113 | and re.compile(r"<[^>]+>").search(raw_output) is not None 114 | ) 115 | if should_save_as_html and not is_html_encoded: 116 | continue 117 | 118 | is_base64_encoded = ( 119 | isinstance(raw_output, str) 120 | and re.match(r"^[A-Za-z0-9+/=]*$", raw_output) is not None 121 | ) 122 | if should_save_as_png and not is_base64_encoded: 123 | continue 124 | 125 | if should_save_as_png: 126 | base64.b64decode(raw_output, validate=True) 127 | raw_output = base64.b64decode(raw_output) 128 | 129 | 
self._write_output_file(output_file, raw_output) 130 | processed_output_paths.append(output_file) 131 | logger.info( 132 | f"Output was saved to: {str(Path(output_file).resolve())}" 133 | ) 134 | else: 135 | self._write_output_file(output_file, output) 136 | processed_output_paths.append(output_file) 137 | logger.info( 138 | f"Output was saved to: {str(Path(output_file).resolve())}" 139 | ) 140 | 141 | self.output_paths = processed_output_paths 142 | return self 143 | -------------------------------------------------------------------------------- /core/morph/task/utils/run_backend/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/morph-data/morph/876d78e7c1b72d53d66e08197b5c5f02f389d2be/core/morph/task/utils/run_backend/__init__.py -------------------------------------------------------------------------------- /core/morph/task/utils/run_backend/cache.py: -------------------------------------------------------------------------------- 1 | from datetime import datetime, timedelta 2 | from typing import List 3 | 4 | 5 | class ExecutionCache: 6 | """ 7 | A class to maintain TTL-based caching of run results. 8 | If expiration_seconds == 0, caching is disabled (no cache entries are stored or retrieved). 9 | """ 10 | 11 | def __init__(self, expiration_seconds: int = 0): 12 | """ 13 | Initialize the execution cache. 14 | 15 | :param expiration_seconds: The number of seconds after which a cache entry expires. 16 | If set to 0, caching is disabled. 17 | """ 18 | self.cache: dict[str, dict[str, object]] = {} 19 | self.expiration_seconds = expiration_seconds 20 | 21 | def update_cache(self, function_name: str, cache_paths: List[str]) -> None: 22 | """ 23 | Update or add an entry to the cache. Replaces existing cache paths with the provided ones. 24 | If expiration_seconds == 0, do nothing (caching is disabled). 25 | """ 26 | if self.expiration_seconds == 0: 27 | return # Skip storing anything 28 | 29 | current_time = datetime.now().isoformat() 30 | cache_entry = self.cache.get(function_name) 31 | 32 | # Check if an existing cache entry is still valid 33 | if cache_entry: 34 | last_executed_at = cache_entry.get("last_executed_at") 35 | if isinstance(last_executed_at, str): 36 | last_executed_time = datetime.fromisoformat(last_executed_at) 37 | if datetime.now() - last_executed_time <= timedelta( 38 | seconds=self.expiration_seconds 39 | ): 40 | # Cache is still valid, only update the cache paths 41 | cache_entry["cache_paths"] = cache_paths 42 | return 43 | 44 | # Either no cache entry exists, or the cache has expired 45 | self.cache[function_name] = { 46 | "last_executed_at": current_time, 47 | "cache_paths": cache_paths, 48 | } 49 | 50 | def clear_cache(self, function_name: str) -> None: 51 | """ 52 | Remove the cache entry for a specific function. 53 | """ 54 | if function_name in self.cache: 55 | del self.cache[function_name] 56 | 57 | def clear_all_cache(self) -> None: 58 | """ 59 | Clear all cache entries. 60 | """ 61 | self.cache.clear() 62 | 63 | def list_all_cache(self) -> List[str]: 64 | """ 65 | List all function names currently cached. 
66 | """ 67 | return list(self.cache.keys()) 68 | -------------------------------------------------------------------------------- /core/morph/task/utils/run_backend/decorators.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import inspect 4 | from functools import wraps 5 | from typing import Any, Callable, List, Literal, Optional, TypeVar 6 | 7 | from typing_extensions import ParamSpec 8 | 9 | from .state import MorphFunctionMetaObject, MorphGlobalContext 10 | 11 | Param = ParamSpec("Param") 12 | RetType = TypeVar("RetType") 13 | F = TypeVar("F", bound=Callable) 14 | 15 | 16 | def _get_morph_function_id(func: Callable) -> str: 17 | if hasattr(func, "__morph_fid__"): 18 | return str(func.__morph_fid__) 19 | else: 20 | filename = inspect.getfile(func) 21 | function_name = func.__name__ 22 | new_fid = f"{filename}:{function_name}" 23 | func.__morph_fid__ = new_fid # type: ignore 24 | return new_fid 25 | 26 | 27 | def func( 28 | name: str | None = None, 29 | description: str | None = None, 30 | alias: str | None = None, 31 | **kwargs: dict[str, Any], 32 | ) -> Callable[[Callable[Param, RetType]], Callable[Param, RetType]]: 33 | name = alias or name 34 | 35 | context = MorphGlobalContext.get_instance() 36 | 37 | def decorator(func: Callable[Param, RetType]) -> Callable[Param, RetType]: 38 | fid = _get_morph_function_id(func) 39 | 40 | variables = kwargs.get("variables", {}) 41 | 42 | data_req_value = kwargs.get("data_requirements", []) # type: ignore 43 | data_requirements: List[str] = ( 44 | data_req_value if isinstance(data_req_value, list) else [] 45 | ) 46 | 47 | connection = kwargs.get("connection") 48 | if not isinstance(connection, (str, type(None))): 49 | connection = None 50 | 51 | meta_obj = MorphFunctionMetaObject( 52 | id=fid, 53 | name=name or func.__name__, 54 | function=func, 55 | description=description, 56 | variables=variables, 57 | data_requirements=data_requirements, 58 | connection=connection, 59 | ) 60 | context.update_meta_object(fid, meta_obj) 61 | 62 | @wraps(func) 63 | def wrapper(*args: Param.args, **kwargs: Param.kwargs) -> RetType: 64 | return func(*args, **kwargs) 65 | 66 | return wrapper 67 | 68 | # check if decorator is called with args 69 | if callable(name): 70 | func = name # type: ignore 71 | name = func.__name__ 72 | description = None 73 | return decorator(func) 74 | 75 | return decorator 76 | 77 | 78 | def variables( 79 | var_name: str, 80 | default: Optional[Any] = None, 81 | required: Optional[bool] = False, 82 | type: Optional[Literal["str", "bool", "int", "float", "dict", "list"]] = None, 83 | ) -> Callable[[Callable[Param, RetType]], Callable[Param, RetType]]: 84 | """ 85 | variables 86 | { 87 | "var_name": { 88 | "default": default, 89 | "required": required, 90 | "type": type, 91 | } 92 | } 93 | """ 94 | context = MorphGlobalContext.get_instance() 95 | 96 | def decorator(func: Callable[Param, RetType]) -> Callable[Param, RetType]: 97 | fid = _get_morph_function_id(func) 98 | meta = context.search_meta_object(fid) 99 | if meta and meta.variables: 100 | context.update_meta_object( 101 | fid, 102 | MorphFunctionMetaObject( 103 | id=fid, 104 | name=meta.name, 105 | function=meta.function, 106 | description=meta.description, 107 | title=meta.title, 108 | variables={ 109 | **meta.variables, 110 | **{ 111 | var_name: { 112 | "default": default, 113 | "required": required, 114 | "type": type, 115 | } 116 | }, 117 | }, 118 | data_requirements=meta.data_requirements, 119 | 
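A hypothetical use of the `func` decorator defined above (the import path follows the repo layout and the registration may also be re-exported elsewhere; the connection slug and alias names are made up):

```python
from morph.task.utils.run_backend.decorators import func

@func(
    name="monthly_summary",
    description="Aggregate orders by month",
    connection="warehouse",               # hypothetical connection slug
    data_requirements=["load_orders"],    # another alias this function reads
)
def monthly_summary():
    # The decorator registers metadata in MorphGlobalContext via
    # update_meta_object; the function body itself runs unchanged.
    return "ok"
```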
connection=meta.connection, 120 | ), 121 | ) 122 | else: 123 | context.update_meta_object( 124 | fid, 125 | MorphFunctionMetaObject( 126 | id=fid, 127 | name=func.__name__, 128 | function=func, 129 | description=None, 130 | title=None, 131 | variables={ 132 | var_name: { 133 | "default": default, 134 | "required": required, 135 | "type": type, 136 | } 137 | }, 138 | data_requirements=None, 139 | connection=None, 140 | ), 141 | ) 142 | 143 | @wraps(func) 144 | def wrapper(*args: Param.args, **kwargs: Param.kwargs) -> RetType: 145 | return func(*args, **kwargs) 146 | 147 | return wrapper 148 | 149 | return decorator 150 | 151 | 152 | def load_data( 153 | name: str, 154 | ) -> Callable[[Callable[Param, RetType]], Callable[Param, RetType]]: 155 | context = MorphGlobalContext.get_instance() 156 | 157 | def decorator(func: Callable[Param, RetType]) -> Callable[Param, RetType]: 158 | fid = _get_morph_function_id(func) 159 | meta = context.search_meta_object(fid) 160 | if meta and meta.data_requirements: 161 | context.update_meta_object( 162 | fid, 163 | MorphFunctionMetaObject( 164 | id=fid, 165 | name=meta.name, 166 | function=meta.function, 167 | description=meta.description, 168 | title=meta.title, 169 | variables=meta.variables, 170 | data_requirements=meta.data_requirements + [name], 171 | connection=meta.connection, 172 | ), 173 | ) 174 | else: 175 | context.update_meta_object( 176 | fid, 177 | MorphFunctionMetaObject( 178 | id=fid, 179 | name=func.__name__, 180 | function=func, 181 | description=None, 182 | title=None, 183 | variables=None, 184 | data_requirements=[name], 185 | connection=None, 186 | ), 187 | ) 188 | 189 | @wraps(func) 190 | def wrapper(*args: Param.args, **kwargs: Param.kwargs) -> RetType: 191 | return func(*args, **kwargs) 192 | 193 | return wrapper 194 | 195 | return decorator 196 | -------------------------------------------------------------------------------- /core/morph/task/utils/run_backend/errors.py: -------------------------------------------------------------------------------- 1 | import linecache 2 | import traceback 3 | from enum import Enum 4 | from typing import List 5 | 6 | from colorama import Fore 7 | from pydantic import BaseModel 8 | 9 | 10 | class MorphFunctionLoadErrorCategory(str, Enum): 11 | IMPORT_ERROR = "IMPORT_ERROR" 12 | DUPLICATED_ALIAS = "DUPLICATED_ALIAS" 13 | MISSING_ALIAS = "MISSING_ALIAS" 14 | CYCLIC_ALIAS = "CYCLIC_ALIAS" 15 | INVALID_SYNTAX = "INVALID_SYNTAX" 16 | 17 | 18 | class MorphFunctionLoadError(BaseModel): 19 | category: MorphFunctionLoadErrorCategory 20 | file_path: str 21 | name: str 22 | error: str 23 | 24 | @staticmethod 25 | def format_errors(errors: List["MorphFunctionLoadError"]) -> str: 26 | error_txt = [ 27 | "BOOM!💣 Failed to compile file before executing. No log data is saved in case of compilation errors. 
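`variables` and `load_data` can each be applied on their own; a sketch with made-up names (import path follows the repo layout):

```python
from morph.task.utils.run_backend.decorators import load_data, variables

@variables("target_month", default="2024-01", required=False, type="str")
def monthly_report():
    ...

@load_data("load_orders")
def orders_report():
    # Registers "load_orders" as a data requirement of this function.
    ...
```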
Please resolve the errors and try again.🔧\n" 28 | ] 29 | for i, error in enumerate(errors): 30 | error_txt.append( 31 | f"""{Fore.RED}[ERROR No.{i+1}] 32 | {Fore.RED}[error]: {error.category.value} 33 | {Fore.RED}[name]: {error.name} 34 | {Fore.RED}[filepath]: {error.file_path} 35 | {Fore.RED}[detail]: {error.error}""" 36 | ) 37 | return "\n".join(error_txt) 38 | 39 | 40 | def logging_file_error_exception(exc: BaseException, target_file: str) -> str: 41 | tb = exc.__traceback__ 42 | filtered_traceback = [] 43 | error_txt = [] 44 | 45 | error_txt.append(f"{type(exc).__name__}: {str(exc)}\n") 46 | 47 | while tb is not None: 48 | frame = tb.tb_frame 49 | code = frame.f_code 50 | if target_file in code.co_filename: 51 | filtered_traceback.append( 52 | { 53 | "filename": code.co_filename, 54 | "lineno": tb.tb_lineno, 55 | "name": code.co_name, 56 | "line": linecache.getline(code.co_filename, tb.tb_lineno).strip(), 57 | } 58 | ) 59 | tb = tb.tb_next 60 | 61 | for entry in filtered_traceback: 62 | error_txt.append( 63 | f'File "{entry["filename"]}", line {entry["lineno"]}, in {entry["name"]}\n' 64 | f' {entry["line"]}\n' 65 | ) 66 | 67 | error_txt.append("\nFull traceback:\n") 68 | error_txt.append("".join(traceback.format_tb(exc.__traceback__))) 69 | 70 | return "".join(error_txt) 71 | -------------------------------------------------------------------------------- /core/morph/task/utils/run_backend/types.py: -------------------------------------------------------------------------------- 1 | from enum import Enum 2 | from typing import List, Literal, Optional, Union 3 | 4 | from pydantic import BaseModel 5 | 6 | 7 | class RunStatus(str, Enum): 8 | DONE = "done" 9 | TIMEOUT = "timeout" 10 | IN_PROGRESS = "inProgress" 11 | FAILED = "failed" 12 | 13 | 14 | class StackTraceFrame(BaseModel): 15 | filename: str 16 | lineno: Optional[int] = None 17 | name: str 18 | line: Optional[str] = None 19 | 20 | 21 | class PythonError(BaseModel): 22 | type: str 23 | message: str 24 | code: str 25 | stacktrace: str 26 | structured_stacktrace: List[StackTraceFrame] 27 | 28 | 29 | GeneralError = str 30 | 31 | 32 | class CliError(BaseModel): 33 | type: Literal["python", "general"] 34 | details: Union[PythonError, GeneralError] 35 | -------------------------------------------------------------------------------- /core/morph_lib/__init__.py: -------------------------------------------------------------------------------- 1 | from .database import execute_sql 2 | 3 | __all__ = ["execute_sql"] 4 | -------------------------------------------------------------------------------- /core/morph_lib/api.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from typing import cast 4 | 5 | from morph_lib.error import MorphApiError 6 | 7 | from morph.api.cloud.client import MorphApiClient, MorphApiKeyClientImpl 8 | from morph.task.utils.connection import ( 9 | AirtableConnectionOAuth, 10 | AttioConnectionOAuth, 11 | BigqueryConnectionOAuth, 12 | ConnectionYaml, 13 | FreeeConnectionOAuth, 14 | GoogleAnalyticsConnectionOAuth, 15 | HubspotConnectionOAuth, 16 | IntercomConnectionOAuth, 17 | LinearConnectionOAuth, 18 | MailchimpConnectionOAuth, 19 | NotionConnectionOAuth, 20 | SalesforceConnectionOAuth, 21 | StripeConnectionOAuth, 22 | ) 23 | 24 | # =============================================== 25 | # 26 | # Functions 27 | # 28 | # =============================================== 29 | 30 | 31 | def get_auth_token(connection_slug: str) -> str: 32 | """ 33 | 
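To see how these loader errors render, a small illustrative call (the file path and error message are made up):

```python
from morph.task.utils.run_backend.errors import (
    MorphFunctionLoadError,
    MorphFunctionLoadErrorCategory,
)

errors = [
    MorphFunctionLoadError(
        category=MorphFunctionLoadErrorCategory.IMPORT_ERROR,
        file_path="src/pages/report.py",
        name="report",
        error="No module named 'pandas_profiling'",
    )
]
print(MorphFunctionLoadError.format_errors(errors))
```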
Get and refresh the authentication token from environment variables. 34 | Make sure to set the environment variables before calling this function. 35 | @param: connection_slug: The connection slug on morph app 36 | """ 37 | connection_yaml = ConnectionYaml.load_yaml() 38 | database_connection = ConnectionYaml.find_connection( 39 | connection_yaml, connection_slug 40 | ) 41 | 42 | if database_connection is not None and ( 43 | isinstance(database_connection, BigqueryConnectionOAuth) 44 | or isinstance(database_connection, GoogleAnalyticsConnectionOAuth) 45 | or isinstance(database_connection, SalesforceConnectionOAuth) 46 | or isinstance(database_connection, NotionConnectionOAuth) 47 | or isinstance(database_connection, StripeConnectionOAuth) 48 | or isinstance(database_connection, AttioConnectionOAuth) 49 | or isinstance(database_connection, AirtableConnectionOAuth) 50 | or isinstance(database_connection, FreeeConnectionOAuth) 51 | or isinstance(database_connection, HubspotConnectionOAuth) 52 | or isinstance(database_connection, IntercomConnectionOAuth) 53 | or isinstance(database_connection, LinearConnectionOAuth) 54 | or isinstance(database_connection, MailchimpConnectionOAuth) 55 | ): 56 | return database_connection.access_token or "" 57 | 58 | client = MorphApiClient(MorphApiKeyClientImpl) 59 | response = client.req.find_external_connection(connection_slug) 60 | if response.is_error(): 61 | raise MorphApiError(f"Failed to get auth token. {response.text}") 62 | 63 | response_json = response.json() 64 | if ( 65 | response_json["connectionType"] == "mysql" 66 | or response_json["connectionType"] == "postgres" 67 | or response_json["connectionType"] == "redshift" 68 | or response_json["connectionType"] == "mssql" 69 | ): 70 | raise MorphApiError(f"No auth token in db connection {connection_slug}") 71 | elif ( 72 | "accessToken" not in response_json["data"] 73 | or response_json["data"]["accessToken"] is None 74 | ): 75 | raise MorphApiError("Failed to get auth token") 76 | 77 | return cast(str, response_json["data"]["accessToken"]) 78 | -------------------------------------------------------------------------------- /core/morph_lib/error.py: -------------------------------------------------------------------------------- 1 | class MorphApiError(Exception): 2 | pass 3 | 4 | 5 | class RequestError(Exception): 6 | pass 7 | -------------------------------------------------------------------------------- /core/morph_lib/function.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from typing import Any, Dict, Optional 4 | 5 | from morph_lib.error import MorphApiError 6 | 7 | from morph.config.project import load_project 8 | from morph.task.utils.logging import get_morph_logger 9 | from morph.task.utils.morph import find_project_root_dir 10 | from morph.task.utils.run_backend.execution import run_cell 11 | from morph.task.utils.run_backend.state import ( 12 | MorphFunctionMetaObjectCacheManager, 13 | MorphGlobalContext, 14 | ) 15 | 16 | 17 | def load_data(alias: str, variables: Optional[Dict[str, Any]] = None) -> Any: 18 | """ 19 | Get execution result of the alias. 
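A hypothetical call to `get_auth_token` defined above (the connection slug is made up and must exist either in the local connections YAML or on Morph Cloud):

```python
from morph_lib.api import get_auth_token

token = get_auth_token("my_notion_connection")   # hypothetical slug
headers = {"Authorization": f"Bearer {token}"}
```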
20 | """ 21 | project_root = find_project_root_dir() 22 | project = load_project(project_root) 23 | if not project: 24 | raise MorphApiError("Project configuration not found.") 25 | 26 | context = MorphGlobalContext.get_instance() 27 | context.partial_load(project_root, alias) 28 | 29 | resource = context.search_meta_object_by_name(alias) 30 | if not resource: 31 | raise MorphApiError(f"Resource {alias} not found.") 32 | 33 | meta_obj_cache = MorphFunctionMetaObjectCacheManager().get_cache() 34 | 35 | vars = variables or {} 36 | logger = get_morph_logger() 37 | 38 | return run_cell( 39 | project, 40 | resource, 41 | vars, 42 | logger, 43 | None, 44 | meta_obj_cache, 45 | ).result 46 | -------------------------------------------------------------------------------- /core/morph_lib/py.typed: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/morph-data/morph/876d78e7c1b72d53d66e08197b5c5f02f389d2be/core/morph_lib/py.typed -------------------------------------------------------------------------------- /core/morph_lib/stream.py: -------------------------------------------------------------------------------- 1 | from typing import Optional 2 | 3 | from morph_lib.types import MorphChatStreamChunk 4 | 5 | 6 | def create_chunk( 7 | text: Optional[str] = None, content: Optional[str] = None 8 | ) -> MorphChatStreamChunk: 9 | """ 10 | Create a MorphChatStreamChunk object with the given text and content. 11 | @param text: The text of the chunk. 12 | @param content: The additional content of the chunk. ex.) html, markdown, etc. 13 | """ 14 | return MorphChatStreamChunk( 15 | text=text, 16 | content=content, 17 | ) 18 | 19 | 20 | def stream_chat(text: Optional[str] = None) -> MorphChatStreamChunk: 21 | """ 22 | Create a MorphChatStreamChunk object with the given text and content. 23 | @param text: The text of the chunk. 24 | @param content: The additional content of the chunk. ex.) html, markdown, etc. 
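Usage sketch for `load_data` above (the alias and variable names are made up):

```python
from morph_lib.function import load_data

result = load_data("load_orders", variables={"target_month": "2024-01"})
```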
25 | """ 26 | return MorphChatStreamChunk(text=text, content=None) 27 | -------------------------------------------------------------------------------- /core/morph_lib/types.py: -------------------------------------------------------------------------------- 1 | from typing import Any, Dict, Optional 2 | 3 | from pydantic import BaseModel, Field 4 | 5 | 6 | class HtmlResponse(BaseModel): 7 | value: str 8 | 9 | def __init__(self, value: str): 10 | super().__init__(value=value) 11 | 12 | 13 | class MarkdownResponse(BaseModel): 14 | value: str 15 | 16 | def __init__(self, value: str): 17 | super().__init__(value=value) 18 | 19 | 20 | class ImageResponse(BaseModel): 21 | value: str 22 | 23 | def __init__(self, value: str): 24 | super().__init__(value=value) 25 | 26 | 27 | class MorphChatStreamChunk(BaseModel): 28 | text: Optional[str] = Field(default="") 29 | content: Optional[str] = Field(default="") 30 | 31 | @staticmethod 32 | def is_chat_stream_chunk_json(data: Dict[str, Any]) -> bool: 33 | return ( 34 | isinstance(data, dict) 35 | and "data" in data 36 | and isinstance(data["data"], list) 37 | and all("text" in item and "content" in item for item in data["data"]) 38 | ) 39 | -------------------------------------------------------------------------------- /core/morph_lib/utils/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/morph-data/morph/876d78e7c1b72d53d66e08197b5c5f02f389d2be/core/morph_lib/utils/__init__.py -------------------------------------------------------------------------------- /core/morph_lib/utils/db_connector.py: -------------------------------------------------------------------------------- 1 | from typing import Any, Dict, Union 2 | 3 | from morph.task.utils.connection import ( 4 | MysqlConnection, 5 | PostgresqlConnection, 6 | RedshiftConnection, 7 | SQLServerConnection, 8 | ) 9 | from morph.task.utils.connections.database.mssql import SQLServerConnector 10 | from morph.task.utils.connections.database.mysql import MysqlConnector 11 | from morph.task.utils.connections.database.postgres import PostgresqlConnector 12 | from morph.task.utils.connections.database.redshift import RedshiftConnector 13 | 14 | 15 | class DBConnector: 16 | """ 17 | Database connector implements sqlalchemy connection to the database 18 | Available mysql, postgres, redshift connection 19 | @param connection: Connection object from `get_service_connection` method or dict params 20 | """ 21 | 22 | def __init__( 23 | self, 24 | connection: Union[ 25 | PostgresqlConnection, 26 | MysqlConnection, 27 | SQLServerConnection, 28 | RedshiftConnection, 29 | Dict[str, Any], 30 | ], 31 | ): 32 | if isinstance(connection, dict): 33 | connection_type = connection.get("type") 34 | if connection_type == "mysql": 35 | connection = MysqlConnection(**connection) 36 | elif connection_type == "postgres": 37 | connection = PostgresqlConnection(**connection) 38 | elif connection_type == "mssql": 39 | connection = SQLServerConnection(**connection) 40 | elif connection_type == "redshift": 41 | connection = RedshiftConnection(**connection) 42 | else: 43 | raise ValueError(f"Invalid connection type: {connection_type}") 44 | self.connection = connection 45 | 46 | def execute_sql(self, sql: str) -> Any: 47 | """ 48 | Execute sql query on the specified database 49 | """ 50 | if isinstance(self.connection, PostgresqlConnection): 51 | pg_connection = PostgresqlConnector( 52 | self.connection, 53 | use_ssh=self.connection.ssh_host is not None 54 | 
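A sketch of the streaming helpers above, yielding chunks from a generator-style function (the prompt handling is illustrative):

```python
from morph_lib.stream import create_chunk, stream_chat

def chat(prompt: str):
    yield stream_chat(f"Thinking about: {prompt}")
    yield create_chunk(text="Here is the result.", content="<b>42</b>")
```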
and self.connection.ssh_host != "", 55 | ) 56 | return pg_connection.execute_sql(sql) 57 | elif isinstance(self.connection, MysqlConnection): 58 | mysql_connection = MysqlConnector( 59 | self.connection, 60 | use_ssh=self.connection.ssh_host is not None 61 | and self.connection.ssh_host != "", 62 | ) 63 | return mysql_connection.execute_sql(sql) 64 | elif isinstance(self.connection, SQLServerConnection): 65 | mssql_connection = SQLServerConnector( 66 | self.connection, 67 | use_ssh=self.connection.ssh_host is not None 68 | and self.connection.ssh_host != "", 69 | ) 70 | return mssql_connection.execute_sql(sql) 71 | elif isinstance(self.connection, RedshiftConnection): 72 | redshift_connection = RedshiftConnector(self.connection) 73 | return redshift_connection.execute_sql(sql) 74 | else: 75 | raise ValueError("Invalid connection type") 76 | 77 | def get_connection( 78 | self, 79 | ) -> Union[ 80 | PostgresqlConnector, MysqlConnector, SQLServerConnector, RedshiftConnector 81 | ]: 82 | """ 83 | Get db connection object 84 | """ 85 | if isinstance(self.connection, PostgresqlConnection): 86 | pg_connection = PostgresqlConnector( 87 | self.connection, 88 | use_ssh=self.connection.ssh_host is not None 89 | and self.connection.ssh_host != "", 90 | ) 91 | return pg_connection 92 | elif isinstance(self.connection, MysqlConnection): 93 | mysql_connection = MysqlConnector( 94 | self.connection, 95 | use_ssh=self.connection.ssh_host is not None 96 | and self.connection.ssh_host != "", 97 | ) 98 | return mysql_connection 99 | elif isinstance(self.connection, SQLServerConnection): 100 | mssql_connection = SQLServerConnector( 101 | self.connection, 102 | use_ssh=self.connection.ssh_host is not None 103 | and self.connection.ssh_host != "", 104 | ) 105 | return mssql_connection 106 | elif isinstance(self.connection, RedshiftConnection): 107 | redshift_connection = RedshiftConnector(self.connection) 108 | return redshift_connection 109 | else: 110 | raise ValueError("Invalid connection type") 111 | -------------------------------------------------------------------------------- /mypy.ini: -------------------------------------------------------------------------------- 1 | [mypy] 2 | ignore_missing_imports = True 3 | namespace_packages = True 4 | check_untyped_defs = True 5 | disallow_any_unimported = False 6 | disallow_any_expr = False 7 | disallow_any_decorated = False 8 | disallow_any_explicit = False 9 | disallow_any_generics = False 10 | disallow_incomplete_defs = True 11 | disallow_subclassing_any = True 12 | warn_return_any = True 13 | warn_unused_ignores = True 14 | warn_unused_configs = True 15 | warn_redundant_casts = True 16 | warn_unreachable = True 17 | warn_incomplete_stub = True 18 | strict_optional = True 19 | no_implicit_optional = True 20 | show_error_context = True 21 | show_column_numbers = True 22 | show_error_codes = True 23 | pretty = True 24 | incremental = False 25 | follow_imports = silent 26 | 27 | [mypy-paramiko] 28 | ignore_missing_imports = True 29 | 30 | [mypy-test.*] 31 | ignore_errors = False 32 | -------------------------------------------------------------------------------- /poetry.toml: -------------------------------------------------------------------------------- 1 | [virtualenvs] 2 | in-project = true 3 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.poetry] 2 | name = "morph-data" 3 | version = "0.3.1" 4 | description = 
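A hypothetical use of `DBConnector` above with raw parameters passed as a dict. The exact field names accepted by `PostgresqlConnection` are defined elsewhere (morph/task/utils/connection.py) and are assumed here; credentials are placeholders:

```python
from morph_lib.utils.db_connector import DBConnector

connector = DBConnector({
    "type": "postgres",        # routes to PostgresqlConnection(**params)
    "host": "localhost",       # field names below are assumptions
    "port": 5432,
    "user": "analytics",
    "password": "***",
    "database": "warehouse",
})
rows = connector.execute_sql("SELECT 1")
```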
"Morph is a python-centric full-stack framework for building and deploying data apps." 5 | authors = ["Morph "] 6 | packages = [ 7 | { include = "morph", from = "core" }, 8 | { include = "morph_lib", from = "core" } 9 | ] 10 | license = "Apache License 2.0" 11 | repository = "https://github.com/morph-data/morph" 12 | homepage = "https://www.morph-data.io" 13 | readme = "README.md" 14 | classifiers=[ 15 | "Development Status :: 5 - Production/Stable", 16 | "Environment :: Console", 17 | "Environment :: Web Environment", 18 | "Intended Audience :: Developers", 19 | "Intended Audience :: Science/Research", 20 | "License :: OSI Approved :: Apache Software License", 21 | "Programming Language :: Python :: 3.9", 22 | "Programming Language :: Python :: 3.10", 23 | "Programming Language :: Python :: 3.11", 24 | "Programming Language :: Python :: 3.12", 25 | "Programming Language :: Python :: 3.13", 26 | "Topic :: Database :: Front-Ends", 27 | "Topic :: Office/Business :: Financial :: Spreadsheet", 28 | "Topic :: Scientific/Engineering :: Information Analysis", 29 | "Topic :: Scientific/Engineering :: Visualization", 30 | "Topic :: Software Development :: Libraries :: Application Frameworks", 31 | "Topic :: Software Development :: Widget Sets", 32 | ] 33 | 34 | [tool.poetry.dependencies] 35 | python = ">=3.9.2,<3.13" 36 | click = "^8.1.7" 37 | pandas = "^2.1.3" 38 | requests = "^2.31.0" 39 | boto3 = "^1.26.80" 40 | pydantic = "^2.5.3" 41 | pyyaml = "^6.0.1" 42 | python-dotenv = "^1.0.1" 43 | colorlog = "^6.8.2" 44 | sqlalchemy = "^2.0.36" 45 | sshtunnel = "^0.4.0" 46 | cryptography = "^44.0.1" 47 | snowflake-connector-python = "^3.11.0" 48 | aiomysql = "^0.2.0" 49 | paramiko = "^3.4.0" 50 | psycopg2-binary = "^2.9.9" 51 | google = "^3.0.0" 52 | gspread = "^6.1.2" 53 | google-api-python-client = "^2.139.0" 54 | google-cloud-bigquery = "^3.25.0" 55 | duckdb = "^1.0.0" 56 | Jinja2 = "^3.1.6" 57 | pyarrow = "^17.0.0" 58 | sqlglot = "^25.22.0" 59 | colorama = "^0.4.6" 60 | fastapi = "^0.115.4" 61 | uvicorn = "^0.32.0" 62 | typing-extensions = "^4.12.2" 63 | python-multipart = "^0.0.18" 64 | fastapi-inertia = "^1.0.3" 65 | itsdangerous = "^2.2.0" 66 | starlette = "^0.41.3" 67 | sqlalchemy-pytds = "^1.0.2" 68 | tabulate = "^0.9.0" 69 | httpx = "^0.27.0" 70 | tqdm = ">=4.7.4,<5.0.0" 71 | 72 | [tool.mypy] 73 | exclude = ["^.*\b(example)\b.*$"] 74 | 75 | [tool.poetry.group.dev.dependencies] 76 | types-requests = "^2.28.11.13" 77 | pytest = "^7.4.4" 78 | flake8 = "^5.0.4" 79 | mypy = "^1.8.0" 80 | pre-commit = "^3.6.0" 81 | types-pytz = "^2023.3.1.1" 82 | pytest-asyncio = "^0.23.3" 83 | pydantic = {extras = ["mypy"], version = "^2.8.2"} 84 | pandas-stubs = "^2.2.2.240603" 85 | types-pyyaml = "^6.0.12.20240311" 86 | types-paramiko = "^3.4.0.20240423" 87 | types-tabulate = "^0.9.0.20241207" 88 | 89 | [tool.poetry.scripts] 90 | morph = "morph.cli.main:cli" 91 | 92 | [build-system] 93 | requires = ["poetry-core"] 94 | build-backend = "poetry.core.masonry.api" 95 | -------------------------------------------------------------------------------- /pytest.ini: -------------------------------------------------------------------------------- 1 | [pytest] 2 | asyncio_mode=auto 3 | pythonpath=. 4 | testpaths=testUnknown config option: asyncio_mode 5 | --------------------------------------------------------------------------------