├── .github ├── ISSUE_TEMPLATE │ ├── bug.md │ └── other.md └── workflows │ ├── checks.yaml │ └── deploy.yaml ├── .gitignore ├── .pre-commit-config.yaml ├── .secrets.baseline ├── CONTRIBUTING.md ├── LICENSE ├── README.md ├── astronomer_starship ├── .eslintrc.cjs ├── .stylelintrc.json ├── __init__.py ├── compat │ ├── __init__.py │ └── starship_compatability.py ├── index.html ├── package-lock.json ├── package.json ├── prettierrc.js ├── providers │ ├── __init__.py │ └── starship │ │ ├── __init__.py │ │ ├── hooks │ │ ├── __init__.py │ │ └── starship.py │ │ └── operators │ │ ├── __init__.py │ │ └── starship.py ├── src │ ├── App.jsx │ ├── State.jsx │ ├── component │ │ ├── AppLoading.jsx │ │ ├── DataTable.jsx │ │ ├── HiddenValue.jsx │ │ ├── MigrateButton.jsx │ │ ├── PageLoading.jsx │ │ ├── StarshipPage.jsx │ │ ├── TooltipHeader.jsx │ │ └── ValidatedUrlCheckbox.jsx │ ├── constants.js │ ├── index.css │ ├── index.jsx │ ├── pages │ │ ├── ConnectionsPage.jsx │ │ ├── DAGHistoryPage.jsx │ │ ├── EnvVarsPage.jsx │ │ ├── PoolsPage.jsx │ │ ├── SetupPage.jsx │ │ ├── TelescopePage.jsx │ │ └── VariablesPage.jsx │ └── util.js ├── starship.py ├── starship_api.py ├── templates │ └── index.html ├── tests │ └── src │ │ └── pages │ │ └── DAGHistoryPage.test.js └── vite.config.js ├── docs ├── api.md ├── index.md ├── migration_source │ ├── gcc.md │ ├── gcc_pkg.png │ ├── gcc_pkg_starship.png │ ├── mwaa.md │ └── mwaa_pkg.png ├── operator.md ├── starship.png ├── starship.svg └── starship_diagram.svg ├── justfile ├── mkdocs.yml ├── pyproject.toml └── tests ├── __init__.py ├── api_integration_test.py ├── conftest.py ├── docker_test ├── README.md ├── dag.py ├── docker_test.py └── run_container_test.sh ├── e2e ├── gcc │ └── config │ │ └── pip │ │ └── pip.conf ├── justfile └── mwaa │ └── mwaa.yaml └── validation_test.py /.github/ISSUE_TEMPLATE/bug.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Bug 3 | about: Create a report to help us improve 4 | 
title: "[BUG]" 5 | labels: bug 6 | assignees: '' 7 | 8 | --- 9 | 10 | **Source**: [e.g. MWAA] 11 | **Destination**: [e.g. Astronomer Software] 12 | **Airflow Version**: [e.g. 2.7.2] 13 | **Starship Version**: [e.g. 2.0.0] 14 | 15 | **Desktop (please complete the following information):** 16 | - OS: [e.g. iOS] 17 | - Browser [e.g. chrome, safari] 18 | - Version [e.g. 22] 19 | 20 | **Screenshots** 21 | If applicable, add screenshots to help explain your problem. 22 | 23 | **Logs** 24 | If applicable, add logs from the web server or web request to help explain your problem. 25 | 26 | **Additional context** 27 | Add any other context about the problem here. 28 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/other.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Other 3 | about: Create an issue 4 | title: '' 5 | labels: '' 6 | assignees: '' 7 | 8 | --- 9 | 10 | 11 | -------------------------------------------------------------------------------- /.github/workflows/checks.yaml: -------------------------------------------------------------------------------- 1 | name: Check 2 | on: [pull_request] 3 | permissions: 4 | contents: write 5 | jobs: 6 | check: 7 | runs-on: ubuntu-latest 8 | steps: 9 | - uses: actions/checkout@v4 10 | - uses: actions/setup-python@v5 11 | with: 12 | python-version: '3.10' 13 | cache: 'pip' 14 | - uses: extractions/setup-just@v2 15 | - run: just install 16 | - run: | 17 | git fetch origin 18 | pre-commit run --from-ref origin/${{ github.event.pull_request.base.ref }} --to-ref ${{ github.event.pull_request.head.sha }} 19 | 20 | test: 21 | runs-on: ubuntu-latest 22 | strategy: 23 | fail-fast: false 24 | matrix: 25 | python-version: [ "3.10", "3.11", "3.12" ] 26 | steps: 27 | - uses: actions/checkout@v4 28 | - uses: actions/setup-python@v5 29 | with: 30 | python-version: ${{ matrix.python-version }} 31 | cache: 'pip' 32 | - uses: 
extractions/setup-just@v2 33 | - run: just install 34 | - run: just test-with-coverage 35 | - uses: codecov/codecov-action@v4 36 | with: 37 | token: ${{ secrets.CODECOV_TOKEN }} 38 | -------------------------------------------------------------------------------- /.github/workflows/deploy.yaml: -------------------------------------------------------------------------------- 1 | name: Deploy 2 | on: 3 | push: 4 | tags: 5 | - "v*.*.*" 6 | workflow_dispatch: {} 7 | jobs: 8 | publish: 9 | runs-on: ubuntu-latest 10 | environment: 11 | name: pypi 12 | url: https://pypi.org/p/astronomer-starship 13 | permissions: 14 | id-token: write 15 | steps: 16 | - uses: actions/checkout@v4 17 | - uses: actions/setup-python@v5 18 | with: 19 | python-version: '3.10' 20 | cache: 'pip' 21 | - uses: extractions/setup-just@v2 22 | - run: just build 23 | - uses: pypa/gh-action-pypi-publish@release/v1 24 | with: 25 | skip-existing: true 26 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | site 2 | venv 3 | venv* 4 | /build/ 5 | __pycache__ 6 | /.idea/ 7 | *.whl 8 | .env* 9 | *.egg-info/ 10 | dist/ 11 | .DS_Store 12 | .pytest_cache/ 13 | lib/ 14 | .ruff_cache 15 | dist 16 | node_modules 17 | static 18 | *.test.log 19 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | default_language_version: 2 | python: python3 3 | 4 | default_stages: [commit, push] 5 | 6 | repos: 7 | - repo: https://github.com/pre-commit/pre-commit-hooks 8 | rev: v3.2.0 9 | hooks: 10 | - id: trailing-whitespace 11 | - id: end-of-file-fixer 12 | - id: check-yaml 13 | exclude: tests 14 | - id: check-added-large-files 15 | - id: check-merge-conflict 16 | - id: check-vcs-permalinks 17 | - id: debug-statements 18 | - id: detect-private-key 19 | - id: 
name-tests-test 20 | exclude: tests/docker_test 21 | - id: trailing-whitespace 22 | - id: detect-aws-credentials 23 | args: ["--allow-missing-credentials"] 24 | 25 | - repo: https://github.com/Yelp/detect-secrets 26 | rev: v1.4.0 27 | hooks: 28 | - id: detect-secrets 29 | args: ['--baseline', '.secrets.baseline'] 30 | 31 | - repo: https://github.com/sirosen/texthooks 32 | rev: 0.5.0 33 | hooks: [ { id: fix-smartquotes }, { id: fix-ligatures } ] 34 | 35 | - repo: https://github.com/frnmst/md-toc 36 | rev: 8.1.9 37 | hooks: [ { id: md-toc } ] 38 | 39 | - repo: https://github.com/charliermarsh/ruff-pre-commit 40 | rev: 'v0.0.261' 41 | hooks: 42 | - id: ruff 43 | args: [ --fix, --exit-non-zero-on-fix ] 44 | 45 | - repo: https://github.com/psf/black 46 | rev: 23.3.0 47 | hooks: [ { id: black, args: [--config=pyproject.toml] } ] 48 | 49 | - repo: https://github.com/PyCQA/bandit/ 50 | rev: 1.7.4 51 | hooks: 52 | - id: bandit 53 | args: ["-c", "pyproject.toml"] 54 | additional_dependencies: ["bandit[toml]"] 55 | 56 | - repo: https://github.com/adamchainz/blacken-docs 57 | rev: 1.13.0 58 | hooks: 59 | - id: blacken-docs 60 | additional_dependencies: 61 | - black==22.12.0 62 | -------------------------------------------------------------------------------- /.secrets.baseline: -------------------------------------------------------------------------------- 1 | { 2 | "version": "1.4.0", 3 | "plugins_used": [ 4 | { 5 | "name": "ArtifactoryDetector" 6 | }, 7 | { 8 | "name": "AWSKeyDetector" 9 | }, 10 | { 11 | "name": "AzureStorageKeyDetector" 12 | }, 13 | { 14 | "name": "Base64HighEntropyString", 15 | "limit": 4.5 16 | }, 17 | { 18 | "name": "BasicAuthDetector" 19 | }, 20 | { 21 | "name": "CloudantDetector" 22 | }, 23 | { 24 | "name": "DiscordBotTokenDetector" 25 | }, 26 | { 27 | "name": "GitHubTokenDetector" 28 | }, 29 | { 30 | "name": "HexHighEntropyString", 31 | "limit": 3.0 32 | }, 33 | { 34 | "name": "IbmCloudIamDetector" 35 | }, 36 | { 37 | "name": "IbmCosHmacDetector" 38 
| }, 39 | { 40 | "name": "JwtTokenDetector" 41 | }, 42 | { 43 | "name": "KeywordDetector", 44 | "keyword_exclude": "" 45 | }, 46 | { 47 | "name": "MailchimpDetector" 48 | }, 49 | { 50 | "name": "NpmDetector" 51 | }, 52 | { 53 | "name": "PrivateKeyDetector" 54 | }, 55 | { 56 | "name": "SendGridDetector" 57 | }, 58 | { 59 | "name": "SlackDetector" 60 | }, 61 | { 62 | "name": "SoftlayerDetector" 63 | }, 64 | { 65 | "name": "SquareOAuthDetector" 66 | }, 67 | { 68 | "name": "StripeDetector" 69 | }, 70 | { 71 | "name": "TwilioKeyDetector" 72 | } 73 | ], 74 | "filters_used": [ 75 | { 76 | "path": "detect_secrets.filters.allowlist.is_line_allowlisted" 77 | }, 78 | { 79 | "path": "detect_secrets.filters.common.is_baseline_file", 80 | "filename": ".secrets.baseline" 81 | }, 82 | { 83 | "path": "detect_secrets.filters.common.is_ignored_due_to_verification_policies", 84 | "min_level": 2 85 | }, 86 | { 87 | "path": "detect_secrets.filters.heuristic.is_indirect_reference" 88 | }, 89 | { 90 | "path": "detect_secrets.filters.heuristic.is_likely_id_string" 91 | }, 92 | { 93 | "path": "detect_secrets.filters.heuristic.is_lock_file" 94 | }, 95 | { 96 | "path": "detect_secrets.filters.heuristic.is_not_alphanumeric_string" 97 | }, 98 | { 99 | "path": "detect_secrets.filters.heuristic.is_potential_uuid" 100 | }, 101 | { 102 | "path": "detect_secrets.filters.heuristic.is_prefixed_with_dollar_sign" 103 | }, 104 | { 105 | "path": "detect_secrets.filters.heuristic.is_sequential_string" 106 | }, 107 | { 108 | "path": "detect_secrets.filters.heuristic.is_swagger_file" 109 | }, 110 | { 111 | "path": "detect_secrets.filters.heuristic.is_templated_secret" 112 | } 113 | ], 114 | "results": { 115 | "tests/astronomer/starship/services/local_airflow_client_test.py": [ 116 | { 117 | "type": "Hex High Entropy String", 118 | "filename": "tests/astronomer/starship/services/local_airflow_client_test.py", 119 | "hashed_secret": "113bd5f1c1923af7b874e409d5153278cb1002a3", 120 | "is_verified": false, 121 | 
"line_number": 56 122 | } 123 | ] 124 | }, 125 | "generated_at": "2023-10-13T18:24:55Z" 126 | } 127 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | This project welcomes contributions. All Pull Requests should include proper testing, documentation, and follow all existing checks and practices. 2 | 3 | 4 | 5 | - [Project Structure](#project-structure) 6 | - [Development Workflow](#development-workflow) 7 | - [Versioning](#versioning) 8 | - [Linting](#linting) 9 | - [Testing](#testing) 10 | - [Development](#development) 11 | - [Pre-Commit](#pre-commit) 12 | - [IDE Setup](#ide-setup) 13 | - [Pre-commit](#pre-commit-1) 14 | - [Other helpful commands](#other-helpful-commands) 15 | - [Test](#test) 16 | - [Developing CICD](#developing-cicd) 17 | - [Easily test the plugin in a local astro project](#easily-test-the-plugin-in-a-local-astro-project) 18 | - [Alternatively](#alternatively) 19 | - [Issues](#issues) 20 | - [SSL CERTIFICATE_VERIFY_FAILED](#ssl-certificate_verify_failed) 21 | - [Pytest Debugging](#pytest-debugging) 22 | 23 | 24 | # Project Structure 25 | - [`astronomer_starship/index.html`](astronomer_starship/index.html) is used for development, 26 | and `vite build` uses it to compile the Javascript and CSS into the `static` folder 27 | - [`astronomer_starship/src`](astronomer_starship/src) contains the React App 28 | - [`astronomer_starship/starship.py`](astronomer_starship/starship.py) contains the Airflow Plugin to inject the React App 29 | - [`astronomer_starship/template/index.html`](astronomer_starship/templates/index.html) contains the Flask View HTML 30 | - [`astronomer_starship/starship_api.py`](astronomer_starship/starship_api.py) contains the Airflow Plugin to that provides 31 | the Starship API Routes 32 | 33 | # Development Workflow 34 | 35 | 1. Create a branch off `main` 36 | 2. 
Develop, add tests, ensure all tests are passing 37 | 3. Push up to GitHub (running pre-commit) 38 | 4. Create a PR, get approval 39 | 5. Merge the PR to `main` 40 | 6. On `main`: Create a tag 41 | ```shell 42 | VERSION="v$(python -c 'import astronomer_starship; print(astronomer_starship.__version__)')"; git tag -d $VERSION; git tag $VERSION 43 | ``` 44 | 7. Do any manual or integration testing 45 | 8. Push the tag to GitHub `git push origin --tag`, which will create 46 | a `Draft` [release](https://github.com/astronomer/astronomer-starship/releases) and upload 47 | to [test.pypi.org](https://test.pypi.org/project/astronomer-starship/) via CICD 48 | 9. Approve the [release](https://github.com/astronomer/astronomer-starship/releases) on GitHub, which 49 | will upload to [pypi.org](https://pypi.org/project/astronomer-starship/) via CICD 50 | 51 | ## Versioning 52 | This project follows [Semantic Versioning](https://semver.org/) 53 | 54 | ## Linting 55 | 56 | This project 57 | uses [`black` (link)](https://black.readthedocs.io/en/stable/), [`blacken-docs` (link)](https://github.com/adamchainz/blacken-docs), 58 | and [`ruff` (link)](https://beta.ruff.rs/). They run with pre-commit but you can run them directly with `ruff check .` 59 | in the root. 60 | 61 | ## Testing 62 | 63 | This project utilizes [Doctests](https://docs.python.org/3/library/doctest.html) and `pytest`. 64 | With the `dev` extras installed, you can run all tests with `pytest` in the root of the project. It will automatically 65 | pick up it's configuration in `pyproject.toml` 66 | 67 | ## Development 68 | 69 | ### Pre-Commit 70 | 71 | Pre-commit is utilized to run common checks or linting. 
72 | Install it locally to prevent needing to fix things after they fail in CICD 73 | 74 | - This project uses pre-commit 75 | - Install it with 76 | 77 | ```shell 78 | pre-commit install 79 | ``` 80 | 81 | ### IDE Setup 82 | 83 | Install `ruff` and `black` plugins for your IDE 84 | 85 | # Pre-commit 86 | 87 | ```shell 88 | make pre-commit-install 89 | ``` 90 | 91 | It will run when you commit, or you can run 92 | 93 | ```shell 94 | pre-commit run 95 | ``` 96 | 97 | # Other helpful commands 98 | 99 | Check out what is in the [Justfile](./justfile) 100 | You can run `just help` for an overview 101 | 102 | # Test 103 | 104 | The package can be built and manually pushed to Test PyPi. 105 | Note: `twine` must be installed 106 | Further instructions are [here](https://packaging.python.org/en/latest/specifications/pypirc/#the-pypirc-file) 107 | and [here](https://packaging.python.org/en/latest/guides/using-testpypi/) 108 | 109 | Test by adding this to a `requirements.txt` 110 | 111 | ```shell 112 | --index-url https://test.pypi.org/simple/ --extra-index-url https://pypi.org/simple/ astronomer-starship 113 | ``` 114 | 115 | and add this to `airflow_settings.yaml` and then run `astro d object import` 116 | 117 | ```shell 118 | airflow: 119 | connections: 120 | - conn_id: aws 121 | conn_type: aws 122 | conn_host: 123 | conn_schema: 124 | conn_login: 125 | conn_password: 126 | conn_port: 127 | conn_extra: 128 | pools: 129 | - pool_name: foo 130 | pool_slot: 999 131 | pool_description: 132 | variables: 133 | - variable_name: foo 134 | variable_value: bar 135 | ``` 136 | 137 | # Developing CICD 138 | 139 | Use https://github.com/nektos/act to run and test CICD changes locally. 140 | 141 | # Easily test the plugin in a local astro project 142 | 143 | 1. Make a new project `astro dev init` 144 | 2. Symlink in starship `ln -s /path/to/starship starship` 145 | 3. 
add the file `docker-compose.override.yml` 146 | ```yaml 147 | version: "3.1" 148 | services: 149 | webserver: 150 | volumes: 151 | - ./starship:/usr/local/airflow/starship:rw 152 | command: > 153 | bash -c 'if [[ -z "$$AIRFLOW__API__AUTH_BACKEND" ]] && [[ $$(pip show -f apache-airflow | grep basic_auth.py) ]]; 154 | then export AIRFLOW__API__AUTH_BACKEND=airflow.api.auth.backend.basic_auth ; 155 | else export AIRFLOW__API__AUTH_BACKEND=airflow.api.auth.backend.default ; fi && 156 | { airflow users create "$$@" || airflow create_user "$$@" ; } && 157 | { airflow sync-perm || airflow sync_perm ;} && 158 | airflow webserver -d' -- -r Admin -u admin -e admin@example.com -f admin -l user -p admin 159 | ``` 160 | 4. Edit the file `Dockerfile` 161 | ```Dockerfile 162 | FROM quay.io/astronomer/astro-runtime:8.4.0 163 | 164 | COPY --chown=astro:astro --chmod=777 starship starship 165 | USER root 166 | RUN pip install --upgrade pip && pip install ./starship 167 | USER astro 168 | ``` 169 | 5. Build with your symlink starship `tar -czh . | docker build -t local -` 170 | 6. Start (or restart) the astro project `astro dev start -i local` 171 | 1. 
Quickly restart just the webserver with `docker restart $(docker container ls --filter name=webserver --format="{{.ID}}")` 172 | 173 | # Alternatively 174 | 175 | you may be able to run flask directly, 176 | see [this](https://airflow.apache.org/docs/apache-airflow/stable/authoring-and-scheduling/plugins.html#troubleshooting) 177 | 178 | 179 | # Issues 180 | ## SSL CERTIFICATE_VERIFY_FAILED 181 | If you see a message like `E jwt.exceptions.PyJWKClientConnectionError: Fail to fetch data from the url, err: ""`, do this: https://stackoverflow.com/a/58525755 182 | 183 | ## Pytest Debugging 184 | `pytest-xdist` can prevent a debugger from attaching correctly due to it's distributed/non-local behavior 185 | You can fix this by commenting out `--num-processes=auto` from `pyproject.toml` or running with `--dist no` to return to normal sequential pytest behavior 186 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | All Rights Reserved 2 | 3 | Copyright (c) 2021 Astronomer, LLC 4 | 5 | Created by Astronomer, LLC 6 | 7 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 8 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 9 | FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE 10 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 11 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 12 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN 13 | THE SOFTWARE. 14 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | 2 |

3 | Logo of Spaceship 8 |

9 |

10 | Astronomer Starship can send your Airflow workloads to new places! 11 |

12 | 13 | # What is it? 14 | 15 | Starship is a utility to migrate Airflow metadata such as Airflow Variables, 16 | Connections, Environment Variables, Pools, and DAG History between two Airflow instances. 17 | 18 | [Read more at the official documentation!](https://astronomer.github.io/starship) 19 | 20 | --- 21 | 22 | **Artwork** 23 | Starship logo [by Lorenzo](https://thenounproject.com/lorenzo.verdenelli/) used with permission 24 | from [The Noun Project](https://thenounproject.com/icon/starship-6088295/) 25 | under [Creative Commons](https://creativecommons.org/licenses/by/3.0/us/legalcode). 26 | -------------------------------------------------------------------------------- /astronomer_starship/.eslintrc.cjs: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | env: { 3 | browser: true, 4 | es2021: true, 5 | }, 6 | extends: 'airbnb', 7 | overrides: [ 8 | { 9 | env: { 10 | node: true, 11 | }, 12 | files: [ 13 | '.eslintrc.{js,cjs}', 14 | ], 15 | parserOptions: { 16 | sourceType: 'script', 17 | }, 18 | }, 19 | ], 20 | parserOptions: { 21 | ecmaVersion: 'latest', 22 | sourceType: 'module', 23 | }, 24 | rules: { 25 | }, 26 | }; 27 | -------------------------------------------------------------------------------- /astronomer_starship/.stylelintrc.json: -------------------------------------------------------------------------------- 1 | { "extends": ["stylelint-config-standard"] } 2 | -------------------------------------------------------------------------------- /astronomer_starship/__init__.py: -------------------------------------------------------------------------------- 1 | __version__ = "2.2.3" 2 | 3 | 4 | def get_provider_info(): 5 | return { 6 | "package-name": "astronomer-starship", # Required 7 | "name": "Astronomer Starship", # Required 8 | "description": "Airflow Migration Utility", # Required 9 | "versions": [__version__], # Required 10 | } 11 | 
-------------------------------------------------------------------------------- /astronomer_starship/compat/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/astronomer/starship/0b1c6cafc4f09e5917072ae0e97dc8568e1b1d0a/astronomer_starship/compat/__init__.py -------------------------------------------------------------------------------- /astronomer_starship/index.html: -------------------------------------------------------------------------------- 1 | {# This should only be for dev #} 2 |
3 | 4 | -------------------------------------------------------------------------------- /astronomer_starship/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "starship", 3 | "private": true, 4 | "version": "0.0.0", 5 | "type": "module", 6 | "dependencies": { 7 | "@chakra-ui/icons": "^2.1.1", 8 | "@chakra-ui/react": "^2.8.2", 9 | "@emotion/react": "^11.11.3", 10 | "@emotion/styled": "^11.11.0", 11 | "@tanstack/react-table": "^8.11.3", 12 | "axios": "^1.6.5", 13 | "framer-motion": "^10.17.9", 14 | "history": "^5.3.0", 15 | "human-format": "^1.2.0", 16 | "lodash.debounce": "^4.0.8", 17 | "lodash.throttle": "^4.1.1", 18 | "prop-types": "^15.8.1", 19 | "qs": "^6.11.2", 20 | "react": "^18.2.0", 21 | "react-dom": "^18.2.0", 22 | "react-icons": "^4.12.0", 23 | "react-router": "^6.21.2", 24 | "react-router-dom": "^6.21.2" 25 | }, 26 | "devDependencies": { 27 | "@types/react": "^18.2.43", 28 | "@types/react-dom": "^18.2.17", 29 | "@vitejs/plugin-react": "^4.2.1", 30 | "eslint": "^8.56.0", 31 | "eslint-config-airbnb": "^19.0.4", 32 | "eslint-plugin-import": "^2.29.1", 33 | "eslint-plugin-jsx-a11y": "^6.8.0", 34 | "eslint-plugin-react": "^7.33.2", 35 | "eslint-plugin-react-hooks": "^4.6.0", 36 | "eslint-plugin-react-refresh": "^0.4.5", 37 | "stylelint": "^16.1.0", 38 | "stylelint-config-standard": "^36.0.0", 39 | "vite": "^5.0.8", 40 | "vitest": "^1.6.1" 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /astronomer_starship/prettierrc.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | "trailingComma": "all", 3 | "tabWidth": 2, 4 | "semi": true, 5 | "singleQuote": true, 6 | "printWidth": 120, 7 | "bracketSpacing": true 8 | } 9 | -------------------------------------------------------------------------------- /astronomer_starship/providers/__init__.py: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/astronomer/starship/0b1c6cafc4f09e5917072ae0e97dc8568e1b1d0a/astronomer_starship/providers/__init__.py -------------------------------------------------------------------------------- /astronomer_starship/providers/starship/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/astronomer/starship/0b1c6cafc4f09e5917072ae0e97dc8568e1b1d0a/astronomer_starship/providers/starship/__init__.py -------------------------------------------------------------------------------- /astronomer_starship/providers/starship/hooks/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/astronomer/starship/0b1c6cafc4f09e5917072ae0e97dc8568e1b1d0a/astronomer_starship/providers/starship/hooks/__init__.py -------------------------------------------------------------------------------- /astronomer_starship/providers/starship/hooks/starship.py: -------------------------------------------------------------------------------- 1 | """ 2 | Hooks for interacting with Starship migrations 3 | """ 4 | from abc import ABC, abstractmethod 5 | 6 | from typing import List 7 | 8 | from airflow.providers.http.hooks.http import HttpHook 9 | from airflow.hooks.base import BaseHook 10 | 11 | from astronomer_starship.starship_api import starship_compat 12 | 13 | POOLS_ROUTE = "/api/starship/pools" 14 | CONNECTIONS_ROUTE = "/api/starship/connections" 15 | VARIABLES_ROUTE = "/api/starship/variables" 16 | DAGS_ROUTE = "/api/starship/dags" 17 | DAG_RUNS_ROUTE = "/api/starship/dag_runs" 18 | TASK_INSTANCES_ROUTE = "/api/starship/task_instances" 19 | 20 | 21 | class StarshipHook(ABC): 22 | @abstractmethod 23 | def get_variables(self): 24 | pass 25 | 26 | @abstractmethod 27 | def set_variable(self, **kwargs): 28 | pass 29 | 30 | @abstractmethod 31 | def 
get_pools(self): 32 | pass 33 | 34 | @abstractmethod 35 | def set_pool(self, **kwargs): 36 | pass 37 | 38 | @abstractmethod 39 | def get_connections(self): 40 | pass 41 | 42 | @abstractmethod 43 | def set_connection(self, **kwargs): 44 | pass 45 | 46 | @abstractmethod 47 | def get_dags(self): 48 | pass 49 | 50 | @abstractmethod 51 | def set_dag_is_paused(self, dag_id: str, is_paused: bool): 52 | pass 53 | 54 | @abstractmethod 55 | def get_dag_runs(self, dag_id: str, offset: int = 0, limit: int = 10) -> dict: 56 | pass 57 | 58 | @abstractmethod 59 | def set_dag_runs(self, dag_runs: list): 60 | pass 61 | 62 | @abstractmethod 63 | def get_task_instances(self, dag_id: str, offset: int = 0, limit: int = 10): 64 | pass 65 | 66 | @abstractmethod 67 | def set_task_instances(self, task_instances: list): 68 | pass 69 | 70 | 71 | class StarshipLocalHook(BaseHook, StarshipHook): 72 | """Hook to retrieve local Airflow data, which can then be sent to the Target Starship instance.""" 73 | 74 | def get_variables(self): 75 | """ 76 | Get all variables from the local Airflow instance. 77 | """ 78 | return starship_compat.get_variables() 79 | 80 | def set_variable(self, **kwargs): 81 | raise RuntimeError("Setting local data is not supported") 82 | 83 | def get_pools(self): 84 | """ 85 | Get all pools from the local Airflow instance. 86 | """ 87 | return starship_compat.get_pools() 88 | 89 | def set_pool(self, **kwargs): 90 | raise RuntimeError("Setting local data is not supported") 91 | 92 | # noinspection PyMethodOverriding 93 | def get_connections(self): 94 | """ 95 | Get all connections from the local Airflow instance. 96 | """ 97 | return starship_compat.get_connections() 98 | 99 | def set_connection(self, **kwargs): 100 | raise RuntimeError("Setting local data is not supported") 101 | 102 | def get_dags(self) -> dict: 103 | """ 104 | Get all DAGs from the local Airflow instance. 
105 | """ 106 | return starship_compat.get_dags() 107 | 108 | def set_dag_is_paused(self, dag_id: str, is_paused: bool): 109 | """ 110 | Set the paused status of a DAG in the local Airflow instance. 111 | """ 112 | return starship_compat.set_dag_is_paused(dag_id, is_paused) 113 | 114 | def get_dag_runs(self, dag_id: str, offset: int = 0, limit: int = 10) -> dict: 115 | """ 116 | Get DAG runs from the local Airflow instance. 117 | """ 118 | return starship_compat.get_dag_runs(dag_id, offset=offset, limit=limit) 119 | 120 | def set_dag_runs(self, dag_runs: list): 121 | raise RuntimeError("Setting local data is not supported") 122 | 123 | def get_task_instances(self, dag_id: str, offset: int = 0, limit: int = 10): 124 | """ 125 | Get task instances from the local Airflow instance. 126 | """ 127 | return starship_compat.get_task_instances(dag_id, offset=offset, limit=limit) 128 | 129 | def set_task_instances(self, task_instances: list): 130 | raise RuntimeError("Setting local data is not supported") 131 | 132 | 133 | class StarshipHttpHook(HttpHook, StarshipHook): 134 | def get_variables(self): 135 | """ 136 | Get all variables from the Target Starship instance. 137 | """ 138 | conn = self.get_conn() 139 | url = self.url_from_endpoint(VARIABLES_ROUTE) 140 | res = conn.get(url) 141 | res.raise_for_status() 142 | return res.json() 143 | 144 | def set_variable(self, **kwargs): 145 | """ 146 | Set a variable in the Target Starship instance. 147 | """ 148 | conn = self.get_conn() 149 | url = self.url_from_endpoint(VARIABLES_ROUTE) 150 | res = conn.post(url, json=kwargs) 151 | res.raise_for_status() 152 | return res.json() 153 | 154 | def get_pools(self): 155 | """ 156 | Get all pools from the Target Starship instance. 
157 | """ 158 | conn = self.get_conn() 159 | url = self.url_from_endpoint(POOLS_ROUTE) 160 | res = conn.get(url) 161 | res.raise_for_status() 162 | return res.json() 163 | 164 | def set_pool(self, **kwargs): 165 | """ 166 | Set a pool in the Target Starship instance. 167 | """ 168 | conn = self.get_conn() 169 | url = self.url_from_endpoint(POOLS_ROUTE) 170 | res = conn.post(url, json=kwargs) 171 | res.raise_for_status() 172 | return res.json() 173 | 174 | # noinspection PyMethodOverriding 175 | def get_connections(self): 176 | """ 177 | Get all connections from the Target Starship instance. 178 | """ 179 | conn = self.get_conn() 180 | url = self.url_from_endpoint(CONNECTIONS_ROUTE) 181 | res = conn.get(url) 182 | res.raise_for_status() 183 | return res.json() 184 | 185 | def set_connection(self, **kwargs): 186 | """ 187 | Set a connection in the Target Starship instance. 188 | """ 189 | conn = self.get_conn() 190 | url = self.url_from_endpoint(CONNECTIONS_ROUTE) 191 | res = conn.post(url, json=kwargs) 192 | res.raise_for_status() 193 | return res.json() 194 | 195 | def get_dags(self) -> dict: 196 | """ 197 | Get all DAGs from the Target Starship instance. 198 | """ 199 | conn = self.get_conn() 200 | url = self.url_from_endpoint(DAGS_ROUTE) 201 | res = conn.get(url) 202 | res.raise_for_status() 203 | return res.json() 204 | 205 | def set_dag_is_paused(self, dag_id: str, is_paused: bool): 206 | """ 207 | Set the paused status of a DAG in the Target Starship instance. 208 | """ 209 | conn = self.get_conn() 210 | url = self.url_from_endpoint(DAGS_ROUTE) 211 | res = conn.patch(url, json={"dag_id": dag_id, "is_paused": is_paused}) 212 | res.raise_for_status() 213 | return res.json() 214 | 215 | def get_dag_runs(self, dag_id: str, offset: int = 0, limit: int = 10) -> dict: 216 | """ 217 | Get DAG runs from the Target Starship instance. 
218 | """ 219 | conn = self.get_conn() 220 | url = self.url_from_endpoint(DAG_RUNS_ROUTE) 221 | res = conn.get(url, params={"dag_id": dag_id, "limit": limit}) 222 | res.raise_for_status() 223 | return res.json() 224 | 225 | def set_dag_runs(self, dag_runs: List[dict]) -> dict: 226 | """ 227 | Set DAG runs in the Target Starship instance. 228 | """ 229 | conn = self.get_conn() 230 | url = self.url_from_endpoint(DAG_RUNS_ROUTE) 231 | res = conn.post(url, json={"dag_runs": dag_runs}) 232 | res.raise_for_status() 233 | return res.json() 234 | 235 | def get_task_instances(self, dag_id: str, offset: int = 0, limit: int = 10): 236 | """ 237 | Get task instances from the Target Starship instance. 238 | """ 239 | conn = self.get_conn() 240 | url = self.url_from_endpoint(TASK_INSTANCES_ROUTE) 241 | res = conn.get(url, params={"dag_id": dag_id, "limit": limit}) 242 | res.raise_for_status() 243 | return res.json() 244 | 245 | def set_task_instances(self, task_instances: list[dict]) -> dict: 246 | """ 247 | Set task instances in the Target Starship instance. 
248 | """ 249 | conn = self.get_conn() 250 | url = self.url_from_endpoint(TASK_INSTANCES_ROUTE) 251 | res = conn.post(url, json={"task_instances": task_instances}) 252 | res.raise_for_status() 253 | return res.json() 254 | -------------------------------------------------------------------------------- /astronomer_starship/providers/starship/operators/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/astronomer/starship/0b1c6cafc4f09e5917072ae0e97dc8568e1b1d0a/astronomer_starship/providers/starship/operators/__init__.py -------------------------------------------------------------------------------- /astronomer_starship/providers/starship/operators/starship.py: -------------------------------------------------------------------------------- 1 | """Operators, TaskGroups, and DAGs for interacting with the Starship migrations.""" 2 | import logging 3 | from datetime import datetime 4 | from typing import Any, Union, List 5 | 6 | import airflow 7 | from airflow import DAG 8 | from airflow.decorators import task 9 | from airflow.exceptions import AirflowSkipException 10 | from airflow.models.baseoperator import BaseOperator 11 | from airflow.utils.task_group import TaskGroup 12 | 13 | from astronomer_starship.providers.starship.hooks.starship import ( 14 | StarshipLocalHook, 15 | StarshipHttpHook, 16 | ) 17 | 18 | 19 | # Compatability Notes: 20 | # - @task() is >=AF2.0 21 | # - @task_group is >=AF2.1 22 | # - Dynamic Task Mapping is >=AF2.3 23 | # - Dynamic Task Mapping labelling is >=AF2.9 24 | 25 | 26 | class StarshipMigrationOperator(BaseOperator): 27 | def __init__(self, http_conn_id=None, **kwargs): 28 | super().__init__(**kwargs) 29 | self.source_hook = StarshipLocalHook() 30 | self.target_hook = StarshipHttpHook(http_conn_id=http_conn_id) 31 | 32 | 33 | class StarshipVariableMigrationOperator(StarshipMigrationOperator): 34 | """Operator to migrate a single Variable from one Airflow 
def starship_variables_migration(variables: List[str] = None, **kwargs):
    """TaskGroup to fetch and migrate Variables from one Airflow instance to another.

    :param variables: Variable keys to migrate; ``None`` migrates everything,
        an empty list makes the fetch task skip (nothing to migrate).
    :param kwargs: forwarded to StarshipVariableMigrationOperator
        (e.g. ``http_conn_id``).
    :return: the assembled TaskGroup.
    """
    with TaskGroup("variables") as tg:

        @task()
        def get_variables():
            _variables = StarshipLocalHook().get_variables()

            _variables = (
                [k["key"] for k in _variables if k["key"] in variables]
                if variables is not None
                else [k["key"] for k in _variables]
            )

            if not len(_variables):
                raise AirflowSkipException("Nothing to migrate")
            return _variables

        variables_results = get_variables()
        # Compare numerically: a plain string comparison misorders versions
        # such as "2.10.0" < "2.3.0" and would take the legacy branch.
        airflow_version = tuple(
            int(part) for part in airflow.__version__.split(".")[:2] if part.isdigit()
        )
        if airflow_version >= (2, 3):
            # Dynamic Task Mapping is available on Airflow >= 2.3.
            StarshipVariableMigrationOperator.partial(
                task_id="migrate_variables", **kwargs
            ).expand(variable_key=variables_results)
        else:
            for variable in variables_results.output:
                variables_results >> StarshipVariableMigrationOperator(
                    task_id="migrate_variable_" + variable,
                    variable_key=variable,
                    **kwargs,
                )
    return tg
def starship_pools_migration(pools: List[str] = None, **kwargs):
    """TaskGroup to fetch and migrate Pools from one Airflow instance to another.

    :param pools: Pool names to migrate; ``None`` migrates everything,
        an empty list makes the fetch task skip (nothing to migrate).
    :param kwargs: forwarded to StarshipPoolMigrationOperator
        (e.g. ``http_conn_id``).
    :return: the assembled TaskGroup.
    """
    with TaskGroup("pools") as tg:

        @task()
        def get_pools():
            _pools = StarshipLocalHook().get_pools()
            _pools = (
                [k["name"] for k in _pools if k["name"] in pools]
                if pools is not None
                else [k["name"] for k in _pools]
            )

            if not len(_pools):
                raise AirflowSkipException("Nothing to migrate")
            return _pools

        pools_result = get_pools()
        # Compare numerically: a plain string comparison misorders versions
        # such as "2.10.0" < "2.3.0" and would take the legacy branch.
        airflow_version = tuple(
            int(part) for part in airflow.__version__.split(".")[:2] if part.isdigit()
        )
        if airflow_version >= (2, 3):
            # Dynamic Task Mapping is available on Airflow >= 2.3.
            StarshipPoolMigrationOperator.partial(
                task_id="migrate_pools", **kwargs
            ).expand(pool_name=pools_result)
        else:
            for pool in pools_result.output:
                pools_result >> StarshipPoolMigrationOperator(
                    task_id="migrate_pool_" + pool, pool_name=pool, **kwargs
                )
    return tg
def starship_connections_migration(connections: List[str] = None, **kwargs):
    """TaskGroup to fetch and migrate Connections from one Airflow instance to another.

    :param connections: conn_ids to migrate; ``None`` migrates everything,
        an empty list makes the fetch task skip (nothing to migrate).
    :param kwargs: forwarded to StarshipConnectionMigrationOperator
        (e.g. ``http_conn_id``).
    :return: the assembled TaskGroup.
    """
    with TaskGroup("connections") as tg:

        @task()
        def get_connections():
            _connections = StarshipLocalHook().get_connections()
            _connections = (
                [k["conn_id"] for k in _connections if k["conn_id"] in connections]
                if connections is not None
                else [k["conn_id"] for k in _connections]
            )

            if not len(_connections):
                raise AirflowSkipException("Nothing to migrate")
            return _connections

        connections_result = get_connections()
        # Compare numerically: a plain string comparison misorders versions
        # such as "2.10.0" < "2.3.0" and would take the legacy branch.
        airflow_version = tuple(
            int(part) for part in airflow.__version__.split(".")[:2] if part.isdigit()
        )
        if airflow_version >= (2, 3):
            # Dynamic Task Mapping is available on Airflow >= 2.3.
            StarshipConnectionMigrationOperator.partial(
                task_id="migrate_connections", **kwargs
            ).expand(connection_id=connections_result)
        else:
            for connection in connections_result.output:
                # `connection` is already the conn_id string here; the old
                # code dereferenced `connection.conn_id` and raised
                # AttributeError on str.
                connections_result >> StarshipConnectionMigrationOperator(
                    task_id="migrate_connection_" + connection,
                    connection_id=connection,
                    **kwargs,
                )
    return tg
def starship_dag_history_migration(dag_ids: List[str] = None, **kwargs):
    """TaskGroup to fetch and migrate DAGs with their history from one Airflow instance to another.

    :param dag_ids: DAG ids to migrate; ``None`` migrates all DAGs (except
        the migration DAG itself), an empty list makes the fetch task skip.
    :param kwargs: forwarded to StarshipDagHistoryMigrationOperator
        (e.g. ``http_conn_id``).
    :return: the assembled TaskGroup.
    """
    with TaskGroup("dag_history") as tg:

        @task()
        def get_dags():
            _dags = StarshipLocalHook().get_dags()
            # Never migrate the migration DAG itself.
            _dags = (
                [
                    k["dag_id"]
                    for k in _dags
                    if k["dag_id"] in dag_ids
                    and k["dag_id"] != "StarshipAirflowMigrationDAG"
                ]
                if dag_ids is not None
                else [
                    k["dag_id"]
                    for k in _dags
                    if k["dag_id"] != "StarshipAirflowMigrationDAG"
                ]
            )

            if not len(_dags):
                raise AirflowSkipException("Nothing to migrate")
            return _dags

        dags_result = get_dags()
        # Compare numerically: plain string comparisons misorder versions,
        # e.g. "2.10.0" < "2.3.0" and "2.10.0" < "2.9.0", so Airflow 2.10+
        # would both take the legacy branch and lose map-index labelling.
        airflow_version = tuple(
            int(part) for part in airflow.__version__.split(".")[:2] if part.isdigit()
        )
        if airflow_version >= (2, 3):
            StarshipDagHistoryMigrationOperator.partial(
                task_id="migrate_dag_ids",
                # Mapped-task labelling is only available on Airflow >= 2.9.
                **(
                    {"map_index_template": "{{ task.target_dag_id }}"}
                    if airflow_version >= (2, 9)
                    else {}
                ),
                **kwargs,
            ).expand(target_dag_id=dags_result)
        else:
            for dag_id in dags_result.output:
                dags_result >> StarshipDagHistoryMigrationOperator(
                    task_id="migrate_dag_" + dag_id, target_dag_id=dag_id, **kwargs
                )
    return tg
313 | 314 | ## Setup: 315 | Make a connection in Airflow with the following details: 316 | - **Conn ID**: `starship_default` 317 | - **Conn Type**: `HTTP` 318 | - **Host**: the URL of the homepage of Airflow (excluding `/home` on the end of the URL) 319 | - For example, if your deployment URL is `https://astronomer.astronomer.run/abcdt4ry/home`, you'll use `https://astronomer.astronomer.run/abcdt4ry` 320 | - **Schema**: `https` 321 | - **Extras**: `{"Authorization": "Bearer "}` 322 | 323 | ## Usage: 324 | ```python 325 | from astronomer_starship.providers.starship.operators.starship import ( 326 | StarshipAirflowMigrationDAG, 327 | ) 328 | 329 | globals()["starship_airflow_migration_dag"] = StarshipAirflowMigrationDAG( 330 | http_conn_id="starship_default", 331 | variables=None, # None to migrate all, or ["var1", "var2"] to migrate specific items, or empty list to skip all 332 | pools=None, # None to migrate all, or ["pool1", "pool2"] to migrate specific items, or empty list to skip all 333 | connections=None, # None to migrate all, or ["conn1", "conn2"] to migrate specific items, or empty list to skip all 334 | dag_ids=None, # None to migrate all, or ["dag1", "dag2"] to migrate specific items, or empty list to skip all 335 | ) 336 | ``` 337 | """, # noqa: E501 338 | ) 339 | with dag: 340 | starship_variables_migration( 341 | variables=variables, http_conn_id=http_conn_id, **kwargs 342 | ) 343 | starship_pools_migration(pools=pools, http_conn_id=http_conn_id, **kwargs) 344 | starship_connections_migration( 345 | connections=connections, http_conn_id=http_conn_id, **kwargs 346 | ) 347 | starship_dag_history_migration( 348 | dag_ids=dag_ids, http_conn_id=http_conn_id, **kwargs 349 | ) 350 | return dag 351 | -------------------------------------------------------------------------------- /astronomer_starship/src/App.jsx: -------------------------------------------------------------------------------- 1 | /* eslint-disable no-nested-ternary */ 2 | import React, { 
useEffect, useReducer } from 'react'; 3 | import { 4 | Box, Button, Divider, Flex, Heading, Icon, 5 | } from '@chakra-ui/react'; 6 | import { GoRocket } from 'react-icons/go'; 7 | import { 8 | Outlet, NavLink, Route, Navigate, createHashRouter, createRoutesFromElements, 9 | } from 'react-router-dom'; 10 | import { RouterProvider } from 'react-router'; 11 | import VariablesPage from './pages/VariablesPage'; 12 | import ConnectionsPage from './pages/ConnectionsPage'; 13 | import PoolsPage from './pages/PoolsPage'; 14 | import EnvVarsPage from './pages/EnvVarsPage'; 15 | import DAGHistoryPage from './pages/DAGHistoryPage'; 16 | import SetupPage from './pages/SetupPage'; 17 | import { 18 | getInitialState, initialState, reducer, 19 | } from './State'; 20 | import './index.css'; 21 | import AppLoading from './component/AppLoading'; 22 | import TelescopePage from './pages/TelescopePage'; 23 | 24 | export default function App() { 25 | const [state, dispatch] = useReducer(reducer, initialState, getInitialState); 26 | useEffect(() => { 27 | localStorage.setItem('state', JSON.stringify(state)); 28 | }, [state]); 29 | const router = createHashRouter( 30 | createRoutesFromElements( 31 | 35 | 40 | 48 | 58 | 68 | 78 | 88 | 98 | 99 | 100 | 101 | 102 | Starship 103 | {' '} 104 | 105 | 106 | By Astronomer 107 | 108 | 109 | 110 | 111 | 112 | 113 | )} 114 | > 115 | } /> 116 | } /> 117 | } /> 118 | } /> 119 | } /> 120 | } /> 121 | } /> 122 | } /> 123 | , 124 | ), 125 | ); 126 | return ( 127 | 128 | ); 129 | } 130 | -------------------------------------------------------------------------------- /astronomer_starship/src/State.jsx: -------------------------------------------------------------------------------- 1 | import qs from 'qs'; 2 | // eslint-disable-next-line import/no-extraneous-dependencies 3 | import merge from 'lodash.merge'; 4 | import { getTargetUrlFromParts } from './util'; 5 | 6 | export function getHashState() { 7 | const o = 
qs.parse(window.parent.location.search.substring(1)); 8 | return o?.s ? JSON.parse(atob(o.s)) : {}; 9 | } 10 | 11 | export function getInitialState(initial) { 12 | return { ...initial, ...JSON.parse(localStorage.getItem('state')), ...getHashState() }; 13 | } 14 | 15 | /** Initial state of the application 16 | * @typedef {Object} State 17 | * @type {{targetUrl: null}} 18 | */ 19 | export const initialState = { 20 | DEBUG: false, 21 | // ### SETUP PAGE #### 22 | targetUrl: '', 23 | isSetupComplete: false, 24 | isTouched: false, 25 | isValidUrl: false, 26 | urlDeploymentPart: '', 27 | urlOrgPart: '', 28 | isAstro: true, 29 | isStarship: false, 30 | isAirflow: false, 31 | isProductSelected: false, 32 | isTokenTouched: false, 33 | token: null, 34 | deploymentId: null, 35 | telescopeOrganizationId: '', 36 | telescopePresignedUrl: '', 37 | 38 | // Software Specific: 39 | releaseName: null, 40 | workspaceId: null, 41 | 42 | // ### VARIABLES PAGE #### 43 | variablesLocalData: [], 44 | variablesRemoteData: [], 45 | variablesLoading: false, 46 | variablesError: null, 47 | // ### CONNECTIONS PAGE #### 48 | connectionsLocalData: [], 49 | connectionsRemoteData: [], 50 | connectionsLoading: false, 51 | connectionsError: null, 52 | // ### POOLS PAGE #### 53 | poolsLocalData: [], 54 | poolsRemoteData: [], 55 | poolsLoading: false, 56 | poolsError: null, 57 | // ### ENV PAGE #### 58 | envLocalData: [], 59 | envRemoteData: [], 60 | envLoading: false, 61 | envError: null, 62 | organizationId: null, 63 | // ### DAGS PAGE #### 64 | dagsData: {}, 65 | dagsLoading: false, 66 | dagsError: null, 67 | limit: 10, 68 | }; 69 | 70 | /** 71 | * Reducer for the application, handles setting state "type" commands 72 | * @param state 73 | * @param action 74 | * @returns State 75 | */ 76 | export const reducer = (state, action) => { 77 | if (state.DEBUG) { 78 | // eslint-disable-next-line no-console 79 | console.log(`Received action=${JSON.stringify(action)}`); 80 | } 81 | switch (action.type) { 82 | 
// ### SETUP PAGE #### 83 | case 'set-url': { 84 | return { 85 | ...state, 86 | isTouched: true, 87 | targetUrl: action.targetUrl, 88 | urlDeploymentPart: action.urlDeploymentPart, 89 | urlOrgPart: action.urlOrgPart, 90 | isValidUrl: action.urlOrgPart && action.urlDeploymentPart, 91 | isSetupComplete: state.isStarship && state.isAirflow && state.token && action.urlOrgPart && action.urlDeploymentPart, 92 | }; 93 | } 94 | case 'set-token': { 95 | return { 96 | ...state, 97 | isTokenTouched: true, 98 | token: action.token, 99 | isSetupComplete: state.isStarship && state.isAirflow && action.token && state.isValidUrl, 100 | }; 101 | } 102 | case 'toggle-is-astro': { 103 | return { 104 | ...state, 105 | isAstro: !state.isAstro, 106 | isProductSelected: true, 107 | targetUrl: getTargetUrlFromParts(state.urlOrgPart, state.urlDeploymentPart, !state.isAstro), 108 | token: null, 109 | isSetupComplete: false, 110 | }; 111 | } 112 | case 'set-is-product-selected': { 113 | return { ...state, isProductSelected: true }; 114 | } 115 | case 'set-is-starship': { 116 | return { 117 | ...state, 118 | isStarship: action.isStarship, 119 | isSetupComplete: action.isStarship && state.isAirflow && state.token && state.isValidUrl, 120 | }; 121 | } 122 | case 'set-is-airflow': { 123 | return { 124 | ...state, 125 | isAirflow: action.isAirflow, 126 | isSetupComplete: action.isAirflow && state.isStarship && state.token && state.isValidUrl, 127 | }; 128 | } 129 | case 'set-software-info': { 130 | return { 131 | ...state, 132 | releaseName: action.releaseName, 133 | workspaceId: action.workspaceId, 134 | deploymentId: action.deploymentId, 135 | }; 136 | } 137 | 138 | // ### Telescope ### 139 | case 'set-telescope-org': { 140 | return { 141 | ...state, 142 | telescopeOrganizationId: action.telescopeOrganizationId, 143 | }; 144 | } 145 | case 'set-telescope-presigned-url': { 146 | return { 147 | ...state, 148 | telescopePresignedUrl: action.telescopePresignedUrl, 149 | }; 150 | } 151 | 152 | // ### 
VARIABLES PAGE #### 153 | case 'set-variables-loading': { 154 | return { 155 | ...state, 156 | variablesLocalData: [], 157 | variablesRemoteData: [], 158 | variablesLoading: true, 159 | variablesError: null, 160 | }; 161 | } 162 | case 'set-variables-data': { 163 | return { 164 | ...state, 165 | variablesLocalData: action.variablesLocalData, 166 | variablesRemoteData: action.variablesRemoteData, 167 | variablesLoading: false, 168 | }; 169 | } 170 | case 'set-variables-error': { 171 | return action.error.response.status === 401 ? { 172 | ...state, 173 | variablesError: action.error, 174 | variablesLoading: false, 175 | isSetupComplete: false, 176 | isTokenTouched: false, 177 | token: null, 178 | } : { ...state, variablesError: action.error }; 179 | } 180 | 181 | // ### CONNECTIONS PAGE #### 182 | case 'set-connections-loading': { 183 | return { 184 | ...state, 185 | connectionsLocalData: [], 186 | connectionsRemoteData: [], 187 | connectionsError: null, 188 | connectionsLoading: true, 189 | }; 190 | } 191 | case 'set-connections-data': { 192 | return { 193 | ...state, 194 | connectionsLocalData: action.connectionsLocalData, 195 | connectionsRemoteData: action.connectionsRemoteData, 196 | connectionsLoading: false, 197 | }; 198 | } 199 | case 'set-connections-error': { 200 | return action.error.response.status === 401 ? 
{ 201 | ...state, 202 | connectionsError: action.error, 203 | connectionsLoading: false, 204 | isSetupComplete: false, 205 | isTokenTouched: false, 206 | token: null, 207 | } : { ...state, connectionsError: action.error }; 208 | } 209 | 210 | // ### POOLS PAGE #### 211 | case 'set-pools-loading': { 212 | return { 213 | ...state, 214 | poolsLocalData: [], 215 | poolsRemoteData: [], 216 | poolsLoading: true, 217 | poolsError: null, 218 | }; 219 | } 220 | case 'set-pools-data': { 221 | return { 222 | ...state, 223 | poolsLocalData: action.poolsLocalData, 224 | poolsRemoteData: action.poolsRemoteData, 225 | poolsLoading: false, 226 | }; 227 | } 228 | case 'set-pools-error': { 229 | return action.error.response.status === 401 ? { 230 | ...state, 231 | poolsError: action.error, 232 | poolsLoading: false, 233 | isSetupComplete: false, 234 | isTokenTouched: false, 235 | token: null, 236 | } : { ...state, poolsError: action.error }; 237 | } 238 | 239 | // ### ENV PAGE #### 240 | case 'set-env-loading': { 241 | return { 242 | ...state, 243 | envLocalData: [], 244 | envRemoteData: [], 245 | envLoading: true, 246 | envError: null, 247 | }; 248 | } 249 | case 'set-env-data': { 250 | return { 251 | ...state, 252 | envLocalData: action.envLocalData, 253 | envRemoteData: action.envRemoteData, 254 | organizationId: action.envRemoteData['ASTRO_ORGANIZATION_ID'] || state.organizationId, 255 | deploymentId: action.envRemoteData['ASTRO_DEPLOYMENT_ID'] || state.deploymentId, 256 | envLoading: false, 257 | }; 258 | } 259 | case 'set-env-error': { 260 | return action.error.response.status === 401 ? 
{ 261 | ...state, 262 | envError: action.error, 263 | envLoading: false, 264 | isSetupComplete: false, 265 | isTokenTouched: false, 266 | token: null, 267 | } : { ...state, envError: action.error }; 268 | } 269 | 270 | // ### DAG PAGE #### 271 | case 'set-dags-loading': { 272 | return { 273 | ...state, 274 | dagsData: {}, 275 | dagsLoading: true, 276 | dagsError: null, 277 | }; 278 | } 279 | case 'set-dags-data': { 280 | return { 281 | ...state, 282 | dagsData: merge(state.dagsData, action.dagsData), 283 | dagsLoading: false, 284 | }; 285 | } 286 | case 'set-dags-error': { 287 | return action.error.response.status === 401 ? { 288 | ...state, 289 | dagsError: action.error, 290 | dagsLoading: false, 291 | isSetupComplete: false, 292 | isTokenTouched: false, 293 | token: null, 294 | } : { ...state, dagsError: action.error }; 295 | } 296 | case 'set-limit': { 297 | return { 298 | ...state, 299 | limit: action.limit, 300 | }; 301 | } 302 | 303 | // ### GENERAL #### 304 | case 'reset': { 305 | return initialState; 306 | } 307 | default: { 308 | // eslint-disable-next-line no-console 309 | console.log(`Received unknown action.type=${action.type}`); 310 | return state; 311 | } 312 | } 313 | }; 314 | -------------------------------------------------------------------------------- /astronomer_starship/src/component/AppLoading.jsx: -------------------------------------------------------------------------------- 1 | import { useNavigation } from 'react-router-dom'; 2 | import { Center, Spinner } from '@chakra-ui/react'; 3 | import React from 'react'; 4 | 5 | export default function AppLoading() { 6 | const navigation = useNavigation(); 7 | return navigation.state === 'loading' ? ( 8 |
9 | 16 |
17 | ) : null; 18 | } 19 | -------------------------------------------------------------------------------- /astronomer_starship/src/component/DataTable.jsx: -------------------------------------------------------------------------------- 1 | import * as React from 'react'; 2 | import { 3 | Table, Thead, Tbody, Tr, Th, Td, chakra, TableContainer, 4 | } from '@chakra-ui/react'; 5 | import { TriangleDownIcon, TriangleUpIcon } from '@chakra-ui/icons'; 6 | import { 7 | useReactTable, 8 | flexRender, 9 | getCoreRowModel, 10 | getSortedRowModel, 11 | } from '@tanstack/react-table'; 12 | 13 | export default function DataTable({ 14 | data, 15 | columns, 16 | }) { 17 | const [sorting, setSorting] = React.useState([]); 18 | const table = useReactTable({ 19 | columns, 20 | data, 21 | getCoreRowModel: getCoreRowModel(), 22 | onSortingChange: setSorting, 23 | getSortedRowModel: getSortedRowModel(), 24 | state: {sorting}, 25 | }); 26 | 27 | return ( 28 | 29 | 30 | 31 | {table.getHeaderGroups().map((headerGroup) => ( 32 | 33 | {headerGroup.headers.map((header) => { 34 | // see https://tanstack.com/table/v8/docs/api/core/column-def#meta to type this correctly 35 | const {meta} = header.column.columnDef; 36 | return ( 37 | 57 | ); 58 | })} 59 | 60 | ))} 61 | 62 | 63 | {table.getRowModel().rows.map((row) => ( 64 | 65 | {row.getVisibleCells().map((cell) => { 66 | // see https://tanstack.com/table/v8/docs/api/core/column-def#meta to type this correctly 67 | const {meta} = cell.column.columnDef; 68 | return ( 69 | 72 | ); 73 | })} 74 | 75 | ))} 76 | 77 |
42 | {flexRender( 43 | header.column.columnDef.header, 44 | header.getContext(), 45 | )} 46 | 47 | 48 | {header.column.getIsSorted() ? ( 49 | header.column.getIsSorted() === 'desc' ? ( 50 | 51 | ) : ( 52 | 53 | ) 54 | ) : null} 55 | 56 |
70 | {flexRender(cell.column.columnDef.cell, cell.getContext())} 71 |
78 |
79 | ); 80 | } 81 | -------------------------------------------------------------------------------- /astronomer_starship/src/component/HiddenValue.jsx: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | import { 3 | Button, Input, InputGroup, InputRightElement, 4 | } from '@chakra-ui/react'; 5 | import PropTypes from 'prop-types'; 6 | 7 | export default function HiddenValue({ value }) { 8 | const [show, setShow] = React.useState(false); 9 | const handleClick = () => setShow(!show); 10 | 11 | if (!value) { 12 | return null; 13 | } 14 | return ( 15 | 16 | 23 | 24 | 27 | 28 | 29 | ); 30 | } 31 | HiddenValue.propTypes = { value: PropTypes.string }; 32 | HiddenValue.defaultProps = { value: '' }; 33 | -------------------------------------------------------------------------------- /astronomer_starship/src/component/MigrateButton.jsx: -------------------------------------------------------------------------------- 1 | /* eslint-disable no-nested-ternary */ 2 | import React, { useState } from 'react'; 3 | import axios from 'axios'; 4 | import { Button, useToast } from '@chakra-ui/react'; 5 | import { MdErrorOutline, MdDeleteForever } from 'react-icons/md'; 6 | import { GoUpload } from 'react-icons/go'; 7 | import PropTypes from 'prop-types'; 8 | 9 | function checkStatus(status, exists) { 10 | if (status === 204) 11 | return false; 12 | return status === 200 || exists; 13 | } 14 | 15 | export default function MigrateButton({ 16 | route, headers, existsInRemote, sendData, isDisabled, 17 | }) { 18 | const [loading, setLoading] = useState(false); 19 | const [error, setError] = useState(null); 20 | const toast = useToast(); 21 | const [exists, setExists] = useState(existsInRemote); 22 | function handleClick() { 23 | setLoading(true); 24 | axios({ 25 | method: exists ? 
'delete' : 'post', 26 | url: route, 27 | headers, 28 | data: sendData, 29 | }) 30 | .then((res) => { 31 | setLoading(false); 32 | setExists(checkStatus(res.status, exists)); 33 | toast({ 34 | title: 'Success', 35 | status: 'success', 36 | isClosable: true, 37 | }) 38 | }) 39 | .catch((err) => { 40 | setExists(exists); 41 | setLoading(false); 42 | toast({ 43 | title: err.response?.data?.error || err.response?.data || err.message, 44 | status: 'error', 45 | isClosable: true, 46 | }); 47 | setError(err); 48 | }); 49 | } 50 | return ( 51 | 66 | ); 67 | } 68 | 69 | MigrateButton.propTypes = { 70 | route: PropTypes.string.isRequired, 71 | headers: PropTypes.objectOf(PropTypes.string), 72 | existsInRemote: PropTypes.bool, 73 | // eslint-disable-next-line react/forbid-prop-types 74 | sendData: PropTypes.object.isRequired, 75 | isDisabled: PropTypes.bool, 76 | }; 77 | MigrateButton.defaultProps = { 78 | headers: {}, 79 | existsInRemote: false, 80 | isDisabled: false, 81 | }; 82 | -------------------------------------------------------------------------------- /astronomer_starship/src/component/PageLoading.jsx: -------------------------------------------------------------------------------- 1 | import { 2 | Alert, AlertDescription, AlertIcon, AlertTitle, Center, Spinner, 3 | } from '@chakra-ui/react'; 4 | import React from 'react'; 5 | import PropTypes from 'prop-types'; 6 | 7 | export default function PageLoading({ loading, error }) { 8 | // eslint-disable-next-line no-nested-ternary 9 | return loading ? ( 10 |
11 | 18 |
19 | ) : error ? ( 20 | 21 | 22 | Error Fetching Local Data! 23 | {error.message} 24 | 25 | ) : null; 26 | } 27 | // eslint-disable-next-line react/forbid-prop-types 28 | PageLoading.propTypes = { loading: PropTypes.bool.isRequired, error: PropTypes.object }; 29 | PageLoading.defaultProps = { error: null }; 30 | -------------------------------------------------------------------------------- /astronomer_starship/src/component/StarshipPage.jsx: -------------------------------------------------------------------------------- 1 | import { 2 | Box, Divider, 3 | } from '@chakra-ui/react'; 4 | import React from 'react'; 5 | import PropTypes from 'prop-types'; 6 | import PageLoading from './PageLoading'; 7 | import DataTable from './DataTable'; 8 | 9 | export default function StarshipPage({ 10 | description, loading, error, data, columns, 11 | }) { 12 | return ( 13 | 14 | {description} 15 | 16 | {loading || error ? ( 17 | 18 | ) : ( 19 | 23 | )} 24 | 25 | ); 26 | } 27 | StarshipPage.propTypes = { 28 | description: PropTypes.element, 29 | loading: PropTypes.bool, 30 | // eslint-disable-next-line react/forbid-prop-types 31 | error: PropTypes.object, 32 | // eslint-disable-next-line react/forbid-prop-types 33 | data: PropTypes.array, 34 | // eslint-disable-next-line react/forbid-prop-types 35 | columns: PropTypes.array.isRequired, 36 | }; 37 | StarshipPage.defaultProps = { 38 | description: '', 39 | error: null, 40 | loading: false, 41 | data: [], 42 | }; 43 | -------------------------------------------------------------------------------- /astronomer_starship/src/component/TooltipHeader.jsx: -------------------------------------------------------------------------------- 1 | import { IconButton, Tooltip } from '@chakra-ui/react'; 2 | import { QuestionIcon } from '@chakra-ui/icons'; 3 | import React from 'react'; 4 | import PropTypes from 'prop-types'; 5 | 6 | export default function TooltipHeader({ tooltip }) { 7 | return ( 8 | 9 | } /> 10 | 11 | ); 12 | } 13 | 
TooltipHeader.propTypes = { 14 | tooltip: PropTypes.string.isRequired, 15 | }; 16 | -------------------------------------------------------------------------------- /astronomer_starship/src/component/ValidatedUrlCheckbox.jsx: -------------------------------------------------------------------------------- 1 | import { Checkbox, useBoolean, useToast } from '@chakra-ui/react'; 2 | import React, { useEffect } from 'react'; 3 | import axios from 'axios'; 4 | import PropTypes from 'prop-types'; 5 | import { proxyHeaders, proxyUrl } from '../util'; 6 | 7 | export default function ValidatedUrlCheckbox({ 8 | text, url, valid, setValid, token, ...props 9 | }) { 10 | const [loading, setLoading] = useBoolean(true); 11 | const toast = useToast(); 12 | useEffect(() => { 13 | // noinspection JSCheckFunctionSignatures 14 | axios.get(proxyUrl(url), { headers: proxyHeaders(token) }) 15 | .then((res) => { 16 | // Valid if it's a 200, has data, and is JSON 17 | const isValid = ( 18 | res.status === 200 && 19 | res.data && 20 | (res.headers['content-type'] === 'application/json' || res.data === "OK") 21 | ); 22 | setValid(isValid); 23 | }) 24 | .catch((err) => { 25 | if (err.response.status === 404) { 26 | toast({ 27 | title: 'Not found', 28 | status: 'error', 29 | isClosable: true, 30 | }); 31 | } else { 32 | toast({ 33 | title: err.response?.data?.error || err.message || err.response?.data, 34 | status: 'error', 35 | isClosable: true, 36 | }); 37 | } 38 | setValid(false); 39 | }) 40 | .finally(() => setLoading.off()); 41 | }, [url, token]); 42 | 43 | return ( 44 | 51 | {text} 52 | 53 | ); 54 | } 55 | ValidatedUrlCheckbox.propTypes = { 56 | text: PropTypes.string.isRequired, 57 | url: PropTypes.string.isRequired, 58 | valid: PropTypes.bool.isRequired, 59 | setValid: PropTypes.func.isRequired, 60 | token: PropTypes.string.isRequired, 61 | }; 62 | -------------------------------------------------------------------------------- /astronomer_starship/src/constants.js: 
// Routes exposed by the Starship Airflow plugin API; each page fetches the
// same route against both the local webserver and the remote target.
const constants = {
  TELESCOPE_ROUTE: '/api/starship/telescope',
  ENV_VAR_ROUTE: '/api/starship/env_vars',
  POOL_ROUTE: '/api/starship/pools',
  CONNECTIONS_ROUTE: '/api/starship/connections',
  VARIABLES_ROUTE: '/api/starship/variables',
  DAGS_ROUTE: '/api/starship/dags',
  DAG_RUNS_ROUTE: '/api/starship/dag_runs',
  TASK_INSTANCE_ROUTE: '/api/starship/task_instances',
};
export default constants;
{ 31 | deploymentVariables( 32 | deploymentUuid: $deploymentUuid 33 | releaseName: $releaseName 34 | ) { 35 | key 36 | value 37 | isSecret 38 | } 39 | }`; 40 | 41 | export const getWorkspaceDeploymentsQuery = ` 42 | query workspaces { 43 | workspaces { 44 | id 45 | deployments { 46 | id 47 | releaseName 48 | } 49 | } 50 | }`; 51 | -------------------------------------------------------------------------------- /astronomer_starship/src/index.css: -------------------------------------------------------------------------------- 1 | .container:has(.row > .starship-main) { 2 | padding: 0; 3 | width: 100%; 4 | height: 100%; 5 | } 6 | 7 | .starship-main { 8 | flex-grow: 1; 9 | margin: 0; 10 | padding: 0; 11 | border: 0; 12 | overflow: hidden; 13 | } 14 | 15 | .starship-page { 16 | padding: 15px; 17 | border-radius: 1px; 18 | width: 95%; 19 | margin: 30px; 20 | } 21 | 22 | #is-astro:not(:checked) + .chakra-switch__track { 23 | background-color: teal; 24 | } 25 | 26 | #is-astro:checked + .chakra-switch__track { 27 | background-color: mediumpurple; 28 | } 29 | 30 | #is-astro + .chakra-switch__track { 31 | border-radius: 0; 32 | } 33 | 34 | .data-table { 35 | overflow-x: scroll; 36 | } 37 | 38 | #starship-navbar a { 39 | margin: 0 1px; 40 | border-radius: 0; 41 | } 42 | 43 | #starship-navbar .active { 44 | box-shadow: mediumpurple 0 5px 0 0; 45 | } 46 | -------------------------------------------------------------------------------- /astronomer_starship/src/index.jsx: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | import ReactDOM from 'react-dom/client'; 3 | import { ChakraProvider } from '@chakra-ui/react'; 4 | import App from './App'; 5 | 6 | ReactDOM.createRoot(document.getElementById('root')).render( 7 | 8 | 9 | 10 | 11 | , 12 | ); 13 | -------------------------------------------------------------------------------- /astronomer_starship/src/pages/ConnectionsPage.jsx: 
/**
 * Annotate each local Airflow connection with an `exists` flag that is true
 * when a connection with the same `conn_id` already exists on the remote.
 *
 * @param {Array<Object>} localData  connections from the local instance
 * @param {Array<Object>} remoteData connections from the target instance
 * @returns {Array<Object>} local rows, each extended with `exists`
 */
function setConnectionsData(localData, remoteData) {
  // FIX: the original re-mapped remoteData into an array for every local row
  // and scanned it with .includes(), i.e. O(local x remote). Build the id set
  // once and test membership in O(1).
  // eslint-disable-next-line camelcase
  const remoteIds = new Set(remoteData.map(({ conn_id: connId }) => connId));
  return localData.map((d) => ({
    ...d,
    exists: remoteIds.has(d.conn_id),
  }));
}
setData(setConnectionsData(state.connectionsLocalData, state.connectionsRemoteData)), 53 | [state], 54 | ); 55 | 56 | // noinspection JSCheckFunctionSignatures 57 | const columns = [ 58 | columnHelper.accessor('conn_id'), 59 | columnHelper.accessor('conn_type'), 60 | columnHelper.accessor('host'), 61 | columnHelper.accessor('port'), 62 | columnHelper.accessor('schema'), 63 | columnHelper.accessor('login'), 64 | passwordColumn, 65 | extraColumn, 66 | columnHelper.display({ 67 | id: 'migrate', 68 | header: 'Migrate', 69 | // eslint-disable-next-line react/no-unstable-nested-components 70 | cell: (info) => ( 71 | 77 | ), 78 | }), 79 | ]; 80 | return ( 81 | 84 | 85 | Airflow Connection objects are used for storing credentials and other information 86 | necessary for connecting to external services. 87 | Connections can be defined via multiple mechanisms, 88 | Starship only migrates values stored via the Airflow UI. 89 | 90 | 91 | 92 | 93 | )} 94 | loading={state.connectionsLoading} 95 | data={data} 96 | columns={columns} 97 | error={state.error} 98 | /> 99 | ); 100 | } 101 | ConnectionsPage.propTypes = { 102 | // eslint-disable-next-line react/forbid-prop-types 103 | state: PropTypes.object.isRequired, 104 | dispatch: PropTypes.func.isRequired, 105 | }; 106 | -------------------------------------------------------------------------------- /astronomer_starship/src/pages/DAGHistoryPage.jsx: -------------------------------------------------------------------------------- 1 | /* eslint-disable no-nested-ternary,react/no-unstable-nested-components */ 2 | // noinspection JSUnusedLocalSymbols 3 | import React, { useEffect, useState } from 'react'; 4 | import { createColumnHelper } from '@tanstack/react-table'; 5 | import { 6 | Badge, 7 | Button, 8 | CircularProgress, 9 | FormControl, 10 | HStack, 11 | InputGroup, InputLeftAddon, Link, 12 | NumberDecrementStepper, NumberIncrementStepper, NumberInput, 13 | NumberInputField, NumberInputStepper, Spacer, 14 | Switch, 15 | Tag, 16 
| Text, 17 | Tooltip, 18 | useToast, VStack, 19 | } from '@chakra-ui/react'; 20 | import PropTypes from 'prop-types'; 21 | import axios from 'axios'; 22 | import { MdErrorOutline, MdDeleteForever } from 'react-icons/md'; 23 | import { GrDocumentMissing } from 'react-icons/gr'; 24 | import { GoUpload } from 'react-icons/go'; 25 | import humanFormat from 'human-format'; 26 | import { ExternalLinkIcon, RepeatIcon } from '@chakra-ui/icons'; 27 | import StarshipPage from '../component/StarshipPage'; 28 | import { 29 | fetchData, localRoute, proxyHeaders, proxyUrl, remoteRoute, 30 | } from '../util'; 31 | import constants from '../constants'; 32 | import TooltipHeader from '../component/TooltipHeader'; 33 | 34 | // eslint-disable-next-line react/prop-types 35 | function WithTooltip({ isDisabled, children }) { 36 | return isDisabled 37 | ? {children} 38 | : children; 39 | } 40 | 41 | function DAGHistoryMigrateButton({ 42 | url, 43 | token, 44 | dagId, 45 | limit, 46 | existsInRemote, 47 | isDisabled, 48 | dispatch, 49 | }) { 50 | // noinspection DuplicatedCode 51 | const [loadPerc, setLoadPerc] = useState(0); 52 | const [error, setError] = useState(null); 53 | const toast = useToast(); 54 | const [exists, setExists] = useState(existsInRemote); 55 | const percent = 100; 56 | 57 | function handleClick() { 58 | 59 | function deleteRuns() { 60 | setLoadPerc(percent * 0.5); 61 | axios({ 62 | method: 'delete', 63 | url: proxyUrl(url + constants.DAG_RUNS_ROUTE), 64 | headers: proxyHeaders(token), 65 | params: { dag_id: dagId }, 66 | }).then((res) => { 67 | setExists(!(res.status === 204)); 68 | dispatch({ 69 | type: 'set-dags-data', 70 | dagsData: { 71 | [dagId]: { 72 | remote: { 73 | dag_run_count: 0, 74 | }, 75 | }, 76 | }, 77 | }); 78 | setLoadPerc(percent * 1); 79 | setLoadPerc(0); 80 | }).catch((err) => { 81 | setExists(false); 82 | setLoadPerc(percent * 0); 83 | toast({ 84 | title: err.response?.data?.error || err.response?.data || err.message, 85 | status: 'error', 86 | 
isClosable: true, 87 | }); 88 | setError(err); 89 | }); 90 | } 91 | 92 | if (exists) { 93 | deleteRuns(); 94 | return; 95 | } 96 | const errFn = (err) => { 97 | setExists(false); 98 | // noinspection PointlessArithmeticExpressionJS 99 | setLoadPerc(percent * 0); 100 | toast({ 101 | title: err.response?.data?.error || err.response?.data || err.message, 102 | status: 'error', 103 | isClosable: true, 104 | }); 105 | setError(err); 106 | }; 107 | setLoadPerc(percent * 0.05); 108 | Promise.all([ 109 | // Get both DAG Runs and Task Instances locally 110 | axios.get(localRoute(constants.DAG_RUNS_ROUTE), { params: { dag_id: dagId, limit } }), 111 | axios.get(localRoute(constants.TASK_INSTANCE_ROUTE), { params: { dag_id: dagId, limit } }), 112 | ]).then( 113 | axios.spread((dagRunsRes, taskInstanceRes) => { 114 | setLoadPerc(percent * 0.5); 115 | // Then create DAG Runs 116 | axios.post( 117 | proxyUrl(url + constants.DAG_RUNS_ROUTE), 118 | { dag_runs: dagRunsRes.data.dag_runs }, 119 | { params: { dag_id: dagId }, headers: proxyHeaders(token) }, 120 | ).then((dagRunCreateRes) => { 121 | if (dagRunCreateRes.status !== 200) { 122 | errFn({ err: { response: dagRunCreateRes } }); 123 | return; 124 | } 125 | dispatch({ 126 | type: 'set-dags-data', 127 | dagsData: { 128 | [dagId]: { 129 | remote: { 130 | dag_run_count: dagRunCreateRes.data.dag_run_count, 131 | }, 132 | }, 133 | }, 134 | }); 135 | setLoadPerc(percent * 0.75); 136 | // Then create Task Instances 137 | axios.post( 138 | proxyUrl(url + constants.TASK_INSTANCE_ROUTE), 139 | { task_instances: taskInstanceRes.data.task_instances }, 140 | { params: { dag_id: dagId }, headers: proxyHeaders(token) }, 141 | ).then( 142 | (taskInstanceCreateRes) => { 143 | // noinspection PointlessArithmeticExpressionJS 144 | setLoadPerc(percent * 1); 145 | setLoadPerc(0); 146 | setExists(taskInstanceCreateRes.status === 200); 147 | }, 148 | ).catch(errFn); 149 | }).catch(errFn); 150 | }), 151 | ).catch(errFn); 152 | } 153 | 154 | return ( 
/**
 * Merge the local and remote DAG lists into a single map keyed by `key`
 * (default `dag_id`): `{ [id]: { local: <row>, remote: <row> } }`.
 * Remote rows with no matching local row are logged and dropped; local rows
 * with no remote counterpart simply have no `remote` entry.
 */
export function setDagData(localData, remoteData, key = 'dag_id') {
  const merged = {};
  for (const row of localData) {
    const id = row[key];
    if (!(id in merged)) merged[id] = {};
    merged[id].local = row;
  }
  for (const row of remoteData) {
    const id = row[key];
    if (id in merged) {
      merged[id].remote = row;
    } else {
      // A remote-only DAG cannot be rendered (every column reads .local),
      // so surface it in the console instead of adding a broken row.
      // eslint-disable-next-line no-console
      console.log(`Found dag_id=${id} in Remote missing in Local!`);
    }
  }
  return merged;
}
'local' : 'remote']: { is_paused_loading: true } } }, 236 | }); 237 | axios 238 | .patch(url, { dag_id: dagId, is_paused: isPaused }, { headers: proxyHeaders(token) }) 239 | .then((res) => { 240 | // update global state 241 | dispatch({ 242 | type: 'set-dags-data', 243 | dagsData: { 244 | [res.data.dag_id]: { 245 | [isLocal ? 'local' : 'remote']: { 246 | is_paused: res.data.is_paused, 247 | is_paused_loading: false, 248 | }, 249 | }, 250 | }, 251 | }); 252 | }) 253 | .catch((err) => err); 254 | } 255 | 256 | useEffect(() => fetchPageData(), []); 257 | useEffect(() => setData(Object.values(state.dagsData)), [state]); 258 | 259 | // noinspection JSUnusedLocalSymbols,JSCheckFunctionSignatures,JSUnresolvedReference 260 | const columns = [ 261 | columnHelper.accessor( 262 | (row) => row.local.dag_id, 263 | { 264 | id: 'dagId', 265 | header: 'ID', 266 | cell: (info) => ( 267 | 268 | 269 | 273 | {info.getValue()} 274 | 275 | 276 | 277 | 278 | {info.row.original.local.tags.map( 279 | (tag) => {tag}, 280 | )} 281 | 282 | 283 | ), 284 | }, 285 | ), 286 | columnHelper.accessor( 287 | (row) => row.local.schedule_interval, 288 | { 289 | id: 'schedule', 290 | header: 'Schedule', 291 | cell: (info) => {info.getValue()}, 292 | }, 293 | ), 294 | columnHelper.accessor( 295 | (row) => row.local.description, 296 | { 297 | id: 'description', 298 | header: 'Description', 299 | cell: (info) => info.getValue(), 300 | }, 301 | ), 302 | columnHelper.accessor( 303 | (row) => row.local.owners, 304 | { 305 | id: 'owners', 306 | header: 'Owners', 307 | cell: (info) => info.getValue(), 308 | }, 309 | ), 310 | columnHelper.display({ 311 | id: 'local_is_paused', 312 | header: ( 313 | <> 314 | Local 315 | {' '} 316 | 322 | 323 | ), 324 | cell: (info) => ( 325 | <> 326 | 327 | 0 ? 
'teal' : 'red'} 332 | > 333 | {humanFormat(info.row.original.local.dag_run_count)} 334 | 335 | 336 | handlePausedClick( 342 | localRoute(constants.DAGS_ROUTE), 343 | state.token, 344 | info.row.original.local.dag_id, 345 | !info.row.original.local.is_paused, 346 | true, 347 | )} 348 | /> 349 | 350 | ), 351 | }), 352 | columnHelper.display({ 353 | id: 'migrate', 354 | header: 'Migrate', 355 | // eslint-disable-next-line react/no-unstable-nested-components 356 | cell: (info) => ( 357 | 370 | ), 371 | 372 | }), 373 | columnHelper.display({ 374 | id: 'remote_is_paused', 375 | header: ( 376 | <> 377 | Remote 378 | {' '} 379 | 385 | 386 | ), 387 | cell: (info) => ((info.row.original.remote || false) ? ( 388 | <> 389 | handlePausedClick( 395 | proxyUrl(state.targetUrl + constants.DAGS_ROUTE), 396 | state.token, 397 | info.row.original.local.dag_id, 398 | !info.row.original.remote?.is_paused, 399 | false, 400 | )} 401 | /> 402 | 403 | 0 ? 'teal' : 'red'} 408 | > 409 | {humanFormat(info.row.original.remote.dag_run_count)} 410 | 411 | 412 | 417 | ( 418 | 419 | ) 420 | 421 | 422 | ) : null), 423 | }), 424 | ]; 425 | return ( 426 | 429 | 430 | DAGs and Task History can be migrated to prevent 431 | Airflow from rescheduling existing runs. 432 | DAGs can be paused or un-paused on either Airflow instance. 
433 | 434 | 435 | 436 | 437 | 438 | # DAG Runs 439 | dispatch({ type: 'set-limit', limit: Number(e) })} 442 | > 443 | 444 | 445 | 446 | 447 | 448 | 449 | 450 | 451 | 452 | 453 | 454 | )} 455 | loading={state.dagsLoading} 456 | data={data} 457 | columns={columns} 458 | error={state.dagsError} 459 | resetFn={fetchPageData} 460 | /> 461 | ); 462 | } 463 | DAGHistoryPage.propTypes = { 464 | // eslint-disable-next-line react/forbid-prop-types 465 | state: PropTypes.object.isRequired, 466 | dispatch: PropTypes.func.isRequired, 467 | }; 468 | -------------------------------------------------------------------------------- /astronomer_starship/src/pages/EnvVarsPage.jsx: -------------------------------------------------------------------------------- 1 | import React, { useEffect, useState } from 'react'; 2 | import { createColumnHelper } from '@tanstack/react-table'; 3 | import { Button, HStack, Spacer, Text, useToast, } from '@chakra-ui/react'; 4 | import PropTypes from 'prop-types'; 5 | import axios from 'axios'; 6 | import { MdErrorOutline } from 'react-icons/md'; 7 | import { FaCheck } from 'react-icons/fa'; 8 | import { GoUpload } from 'react-icons/go'; 9 | import { RepeatIcon } from '@chakra-ui/icons'; 10 | import StarshipPage from '../component/StarshipPage'; 11 | import { 12 | fetchData, 13 | getAstroEnvVarRoute, 14 | getHoustonRoute, 15 | localRoute, 16 | proxyHeaders, 17 | proxyUrl, 18 | remoteRoute, 19 | } from '../util'; 20 | import constants, { getDeploymentsQuery, updateDeploymentVariablesMutation } from '../constants'; 21 | import HiddenValue from "../component/HiddenValue.jsx"; 22 | 23 | 24 | function EnvVarMigrateButton({ 25 | route, headers, existsInRemote, sendData, isAstro, deploymentId, releaseName 26 | }) { 27 | const [loading, setLoading] = useState(false); 28 | const [error, setError] = useState(null); 29 | const toast = useToast(); 30 | const [exists, setExists] = useState(existsInRemote); 31 | 32 | const errFn = (err) => { 33 | setExists(false); 
34 | setLoading(false); 35 | toast({ 36 | title: err.response?.data?.error || err.response?.data || err.message, 37 | status: 'error', 38 | isClosable: true, 39 | }); 40 | setError(err); 41 | } 42 | 43 | function handleSoftwareClick() { 44 | // POST https://houston.BASEDOMAIN/v1 45 | setLoading(true); 46 | axios.post( 47 | route, 48 | { 49 | operationName: "deploymentVariables", 50 | query: getDeploymentsQuery, 51 | variables: { 52 | "deploymentUuid": deploymentId, 53 | "releaseName": releaseName, 54 | } 55 | }, 56 | { headers } 57 | ) 58 | .then((res) => { 59 | let variables = res.data?.data?.deploymentVariables || []; 60 | // TODO - DEDUPE? Check if key already exists and reject 61 | variables.push(sendData); 62 | axios.post( 63 | route, 64 | { 65 | operationName: "UpdateDeploymentVariables", 66 | query: updateDeploymentVariablesMutation, 67 | variables: { 68 | "deploymentUuid": deploymentId, 69 | "releaseName": releaseName, 70 | "environmentVariables": variables, 71 | } 72 | }, 73 | { headers } 74 | ) 75 | .then((res) => { 76 | setLoading(false); 77 | setExists(res.status === 200); 78 | }) 79 | .catch(errFn); 80 | }) 81 | .catch(errFn); 82 | } 83 | 84 | function handleAstroClick() { 85 | setLoading(true); 86 | // GET/POST https://api.astronomer.io/platform/v1beta1/organizations/:organizationId/deployments/:deploymentId 87 | axios.get(route, { headers }) 88 | .then((res) => { 89 | // TODO - DEDUPE? 
/**
 * Flatten the local env-var mapping into table rows, flagging each key that
 * is already present in the remote mapping with `exists: true`.
 *
 * @param {Object} localData  local env vars as a key -> value map
 * @param {Object} remoteData remote env vars as a key -> value map
 * @returns {Array<{key: string, value: *, exists: boolean}>}
 */
function setEnvData(localData, remoteData) {
  // Single pass over the entries; the original chained two .map() calls.
  return Object.entries(localData).map(([key, value]) => ({
    key,
    value,
    exists: key in remoteData,
  }));
}
/**
 * Annotate each local Airflow pool with an `exists` flag that is true when a
 * pool with the same `name` already exists on the remote.
 *
 * @param {Array<Object>} localData  pools from the local instance
 * @param {Array<Object>} remoteData pools from the target instance
 * @returns {Array<Object>} local rows, each extended with `exists`
 */
function setPoolsData(localData, remoteData) {
  // FIX: the original rebuilt the remote name array and scanned it with
  // .includes() for every local row (O(local x remote)). Build a Set once —
  // same fix as setConnectionsData in ConnectionsPage.
  const remoteNames = new Set(remoteData.map(({ name }) => name));
  return localData.map((d) => ({
    ...d,
    exists: remoteNames.has(d.name),
  }));
}
74 | 75 | 76 | 77 | 78 | )} 79 | loading={state.poolsLoading} 80 | data={data} 81 | columns={columns} 82 | error={state.poolsError} 83 | resetFn={fetchPageData} 84 | /> 85 | ); 86 | } 87 | PoolsPage.propTypes = { 88 | // eslint-disable-next-line react/forbid-prop-types 89 | state: PropTypes.object.isRequired, 90 | dispatch: PropTypes.func.isRequired, 91 | }; 92 | -------------------------------------------------------------------------------- /astronomer_starship/src/pages/SetupPage.jsx: -------------------------------------------------------------------------------- 1 | import { 2 | Box, 3 | Button, 4 | Divider, 5 | Fade, 6 | FormControl, 7 | FormErrorMessage, 8 | FormHelperText, 9 | FormLabel, 10 | HStack, 11 | Input, 12 | InputGroup, 13 | InputLeftAddon, 14 | InputRightAddon, 15 | InputRightElement, 16 | Link, 17 | SlideFade, 18 | Spacer, 19 | Switch, 20 | Text, 21 | VStack, 22 | } from '@chakra-ui/react'; 23 | import React, { useEffect } from 'react'; 24 | import PropTypes from 'prop-types'; 25 | import { CheckIcon, ExternalLinkIcon, RepeatIcon, } from '@chakra-ui/icons'; 26 | import { IoTelescopeOutline } from 'react-icons/io5'; 27 | import { NavLink } from 'react-router-dom'; 28 | import { getHoustonRoute, getTargetUrlFromParts, proxyHeaders, proxyUrl, tokenUrlFromAirflowUrl } from '../util'; 29 | import ValidatedUrlCheckbox from '../component/ValidatedUrlCheckbox'; 30 | import axios from "axios"; 31 | import { getWorkspaceDeploymentsQuery } from "../constants.js"; 32 | 33 | export default function SetupPage({ state, dispatch }) { 34 | // Get the workspace ID & etc. 
if it's software and setup is completed 35 | useEffect( 36 | () => { 37 | if ( 38 | state.isSetupComplete && // setup is completed 39 | !state.isAstro && // it's Software 40 | !(state.releaseName && state.workspaceId && state.deploymentId) // one or more of three isn't set 41 | ){ 42 | axios.post( 43 | proxyUrl(getHoustonRoute(state.urlOrgPart)), 44 | { 45 | operationName: "workspaces", 46 | query: getWorkspaceDeploymentsQuery, 47 | variables: {} 48 | }, 49 | { 50 | headers: proxyHeaders(state.token) 51 | } 52 | ) 53 | .then((res) => { 54 | let found = false; 55 | for (let workspace of res.data?.data?.workspaces) { 56 | if (found) break; 57 | for (let deployment of workspace.deployments) { 58 | if (found) break; 59 | if (deployment.releaseName === state.urlDeploymentPart) { 60 | dispatch({ 61 | type: 'set-software-info', 62 | deploymentId: deployment.id, 63 | releaseName: deployment.releaseName, 64 | workspaceId: workspace.id 65 | }); 66 | } 67 | } 68 | } 69 | res.data?.data?.workspaces 70 | }) 71 | .catch((err) => {}); 72 | } 73 | }, 74 | [state], 75 | ); 76 | 77 | return ( 78 | 79 | 80 | Starship is a utility to migrate Airflow metadata between instances 81 | 82 | 90 | 97 | 98 | 99 | 100 | 101 | 102 | {/* ==== PRODUCT SELECTOR ==== */} 103 | 104 | 105 | 106 | 107 | Astronomer Product 108 | 109 | 110 | Software 111 | dispatch({ type: 'toggle-is-astro' })} 117 | /> 118 | Astro 119 | 120 | 121 | 127 | 128 | 129 | 130 | The Astronomer Product you are 131 | migrating to. 132 | 133 | 134 | 135 | {/* ==== URL INPUT ==== */} 136 | 137 | 138 | Airflow URL 139 | {state.isAstro ? 
( 140 | // Astro URL Template: https://claaabbbcccddd.astronomer.run/aabbccdd/ 141 | 142 | https:// 143 | dispatch({ 151 | type: 'set-url', 152 | targetUrl: getTargetUrlFromParts( 153 | e.target.value, 154 | state.urlDeploymentPart, 155 | state.isAstro, 156 | ), 157 | urlDeploymentPart: state.urlDeploymentPart, 158 | urlOrgPart: e.target.value, 159 | })} 160 | /> 161 | .astronomer.run/ 162 | dispatch({ 169 | type: 'set-url', 170 | targetUrl: getTargetUrlFromParts( 171 | state.urlOrgPart, 172 | e.target.value, 173 | state.isAstro, 174 | ), 175 | urlOrgPart: state.urlOrgPart, 176 | urlDeploymentPart: e.target.value, 177 | })} 178 | /> 179 | /home 180 | 181 | ) : ( 182 | // Software URL Template: https://deployments.basedomain.com/space-name-1234/airflow/home 183 | 184 | https://deployments. 185 | dispatch({ 192 | type: 'set-url', 193 | targetUrl: getTargetUrlFromParts( 194 | e.target.value, 195 | state.urlDeploymentPart, 196 | state.isAstro, 197 | ), 198 | urlOrgPart: e.target.value, 199 | urlDeploymentPart: state.urlDeploymentPart, 200 | })} 201 | /> 202 | / 203 | dispatch({ 210 | type: 'set-url', 211 | targetUrl: getTargetUrlFromParts( 212 | state.urlOrgPart, 213 | e.target.value, 214 | state.isAstro, 215 | ), 216 | urlOrgPart: state.urlOrgPart, 217 | urlDeploymentPart: e.target.value, 218 | })} 219 | /> 220 | /airflow/home 221 | 222 | )} 223 | 224 | Enter the URL of the Airflow you are migrating to. 225 | 226 | Please fill both parts. 227 | 228 | 229 | 230 | {/* ==== TOKEN INPUT ==== */} 231 | 232 | 233 | Token 234 | 235 | dispatch({ type: 'set-token', token: e.target.value })} 241 | /> 242 | {state.isTokenTouched && state.token ? ( 243 | 244 | 245 | 246 | ) : null} 247 | 248 | {state.isAstro ? ( 249 | 250 | Provide a token: 251 | {' '} 252 | 256 | Organization 257 | 258 | 259 | , 260 | {' '} 261 | 265 | Workspace 266 | 267 | 268 | , 269 | {' '} 270 | 274 | Personal 275 | 276 | 277 | . 
278 | 279 | ) : ( 280 | 281 | Provide a token: 282 | {' '} 283 | 287 | Workspace 288 | 289 | 290 | , 291 | {' '} 292 | 296 | Deployment 297 | 298 | 299 | {state.targetUrl.startsWith('https://') && state.isValidUrl ? ( 300 | <> 301 | , 302 | {' '} 303 | 307 | Personal 308 | 309 | 310 | 311 | 312 | ) : null} 313 | . 314 | 315 | )} 316 | Please input a token. 317 | 318 | 319 | 320 | {/* ==== CHECK AIRFLOW AND STARSHIP ==== */} 321 | 322 | 323 | 324 | Target Airflow 325 | 326 | 327 | {state.targetUrl.startsWith('http') && state.token && state.isValidUrl && state.isProductSelected 328 | ? ( 329 | 330 | dispatch({ type: 'set-is-airflow', isAirflow: value })} 336 | url={`${state.targetUrl}/api/v1/health`} 337 | token={state.token} 338 | /> 339 | dispatch({ type: 'set-is-starship', isStarship: value })} 345 | url={`${state.targetUrl}/api/starship/health`} 346 | token={state.token} 347 | /> 348 | 349 | ) : null} 350 | 351 | 352 | 353 | 354 | 355 | 356 | ); 357 | } 358 | // eslint-disable-next-line react/forbid-prop-types 359 | SetupPage.propTypes = { state: PropTypes.object.isRequired, dispatch: PropTypes.func.isRequired }; 360 | -------------------------------------------------------------------------------- /astronomer_starship/src/pages/TelescopePage.jsx: -------------------------------------------------------------------------------- 1 | import { 2 | Box, Button, 3 | Divider, FormControl, 4 | FormErrorMessage, 5 | FormHelperText, 6 | FormLabel, Link, 7 | Input, InputGroup, 8 | Text, Tooltip, VStack, CircularProgress, useToast, 9 | } from '@chakra-ui/react'; 10 | import React, { useEffect, useMemo, useState } from 'react'; 11 | import { GoDownload, GoUpload } from 'react-icons/go'; 12 | import axios from "axios"; 13 | import constants from "../constants.js"; 14 | import { localRoute } from "../util.js"; 15 | import { FaCheck } from "react-icons/fa"; 16 | 17 | 18 | export default function TelescopePage({ state, dispatch }) { 19 | const [isUploading, setIsUploading] = 
useState(false); 20 | const [isUploadComplete, setIsUploadComplete] = useState(false); 21 | const toast = useToast(); 22 | const [route, setRoute] = useState(''); 23 | const [filename, setFilename] = useState(''); 24 | const [error, setError] = useState(null); 25 | useEffect(() => { 26 | const _route = localRoute( 27 | constants.TELESCOPE_ROUTE + 28 | ( 29 | state.telescopeOrganizationId ? ( 30 | `?organization=${state.telescopeOrganizationId}` + 31 | (state.telescopePresignedUrl ? `&presigned_url=${encodeURIComponent(state.telescopePresignedUrl)}` : '') 32 | ) : '' 33 | ) 34 | ) 35 | setRoute(_route); 36 | const _filename = `${state.telescopeOrganizationId}.${(new Date()).toISOString().slice(0,10)}.data.json` 37 | setFilename(_filename); 38 | }, [state]); 39 | return ( 40 | 41 | 42 | Telescope is a tool for gathering metadata from an Airflow instance which can be processed to collect insights. 43 | 44 | 45 | 46 | 47 | 48 | 49 | Organization 50 | 51 | 52 | dispatch({ 58 | type: 'set-telescope-org', 59 | telescopeOrganizationId: e.target.value, 60 | })} 61 | /> 62 | 63 | 64 | Organization name 65 | 66 | 67 | 68 | 69 | Pre-signed URL 70 | 71 | 72 | dispatch({ 78 | type: 'set-telescope-presigned-url', 79 | telescopePresignedUrl: e.target.value, 80 | })} 81 | /> 82 | 83 | 84 | (Optional) Enter a pre-signed URL to submit the report, 85 | or contact an Astronomer Representative to receive one. 86 | 87 | Please fill both parts. 
88 | 89 | 90 | 129 | 130 | 131 | 142 | 143 | 144 | 145 | 146 | ); 147 | } 148 | -------------------------------------------------------------------------------- /astronomer_starship/src/pages/VariablesPage.jsx: -------------------------------------------------------------------------------- 1 | import React, { useEffect, useState } from 'react'; 2 | import { createColumnHelper } from '@tanstack/react-table'; 3 | import { 4 | Button, HStack, Spacer, Text, 5 | } from '@chakra-ui/react'; 6 | import PropTypes from 'prop-types'; 7 | import { RepeatIcon } from '@chakra-ui/icons'; 8 | import MigrateButton from '../component/MigrateButton'; 9 | import StarshipPage from '../component/StarshipPage'; 10 | import { 11 | fetchData, localRoute, objectWithoutKey, proxyHeaders, proxyUrl, remoteRoute, 12 | } from '../util'; 13 | import constants from '../constants'; 14 | 15 | const columnHelper = createColumnHelper(); 16 | 17 | function setVariablesData(localData, remoteData) { 18 | return localData.map( 19 | (d) => ({ 20 | ...d, 21 | exists: remoteData.map( 22 | ({ key }) => key, 23 | ).includes(d.key), 24 | }), 25 | ); 26 | } 27 | 28 | export default function VariablesPage({ state, dispatch }) { 29 | const [data, setData] = useState( 30 | setVariablesData(state.variablesLocalData, state.variablesRemoteData), 31 | ); 32 | const fetchPageData = () => fetchData( 33 | localRoute(constants.VARIABLES_ROUTE), 34 | remoteRoute(state.targetUrl, constants.VARIABLES_ROUTE), 35 | state.token, 36 | () => dispatch({ type: 'set-variables-loading' }), 37 | (res, rRes) => dispatch({ 38 | type: 'set-variables-data', variablesLocalData: res.data, variablesRemoteData: rRes.data, 39 | }), 40 | (err) => dispatch({ type: 'set-variables-error', error: err }), 41 | ); 42 | useEffect(() => fetchPageData(), []); 43 | useEffect( 44 | () => setData(setVariablesData(state.variablesLocalData, state.variablesRemoteData)), 45 | [state], 46 | ); 47 | 48 | // noinspection JSCheckFunctionSignatures 49 | const 
columns = [ 50 | columnHelper.accessor('key'), 51 | columnHelper.accessor('val'), 52 | // columnHelper.accessor('exists'), 53 | columnHelper.display({ 54 | id: 'migrate', 55 | header: 'Migrate', 56 | // eslint-disable-next-line react/no-unstable-nested-components 57 | cell: (info) => ( 58 | 64 | ), 65 | }), 66 | ]; 67 | return ( 68 | 71 | 72 | Variables are a generic way to store and retrieve arbitrary content or settings 73 | as a simple key value store within Airflow. 74 | Variables can be defined via multiple mechanisms, 75 | Starship only migrates values stored via the Airflow UI. 76 | 77 | 78 | 79 | 80 | )} 81 | loading={state.variablesLoading} 82 | data={data} 83 | columns={columns} 84 | error={state.error} 85 | resetFn={fetchPageData} 86 | /> 87 | ); 88 | } 89 | VariablesPage.propTypes = { 90 | // eslint-disable-next-line react/forbid-prop-types 91 | state: PropTypes.object.isRequired, 92 | dispatch: PropTypes.func.isRequired, 93 | }; 94 | -------------------------------------------------------------------------------- /astronomer_starship/src/util.js: -------------------------------------------------------------------------------- 1 | import axios from 'axios'; 2 | 3 | /** 4 | * Returns the PAT URL for a given Airflow URL 5 | * for Astro that's https://cloud.astronomer.io/token 6 | * for Software (like https://deployments.basedomain.com/airflow/...) 
it's https://basedomain.com/token 7 | * @param targetUrl 8 | * @returns {string} 9 | * 10 | * > tokenUrlFromAirflowUrl('https://cloud.astronomer.io/...') 11 | * 'https://cloud.astronomer.io/token' 12 | */ 13 | export function tokenUrlFromAirflowUrl(targetUrl) { 14 | // Software 15 | if (!targetUrl.includes('astronomer.run')) { 16 | const urlBody = targetUrl.split('://')[1]; 17 | if (urlBody) { 18 | const url = urlBody.split('/', 1)[0] || urlBody; 19 | const basedomain = url.split('deployments.', 2)[1] || url; 20 | return `https://${basedomain}/token`; 21 | } 22 | } 23 | // Astro 24 | return 'https://cloud.astronomer.io/token'; 25 | } 26 | 27 | /** 28 | * Returns the target URL from the URL parts for Astro or Software 29 | * @param urlOrgPart - the org part for astro, or basedomain for software 30 | * @param urlDeploymentPart - the deployment hash for astro or space-name-1234 for software 31 | * @param isAstro - bool for whether it's astro 32 | * @returns {string} - url 33 | */ 34 | export function getTargetUrlFromParts(urlOrgPart, urlDeploymentPart, isAstro) { 35 | return isAstro 36 | ? 
`https://${urlOrgPart}.astronomer.run/${urlDeploymentPart}` 37 | : `https://deployments.${urlOrgPart}/${urlDeploymentPart}/airflow`; 38 | } 39 | 40 | /** 41 | * Returns the local URL for a given route by splitting at 'starship 42 | * @param route 43 | @returns {string} 44 | */ 45 | export function localRoute(route) { 46 | const localUrl = window.location.href.split('/starship', 1)[0]; 47 | return localUrl + route; 48 | } 49 | 50 | /** 51 | * Returns the remote URL for a given route by combining the target URL and route 52 | * @param targetUrl 53 | * @param route 54 | @returns {string} 55 | */ 56 | export function remoteRoute(targetUrl, route) { 57 | return targetUrl + route; 58 | } 59 | 60 | /** 61 | * Returns the local proxy URL for a given URL (to avoid CORS issues) 62 | * @param url 63 | * @returns {string} 64 | */ 65 | export function proxyUrl(url) { 66 | return localRoute(`/starship/proxy?url=${encodeURIComponent(url)}`); 67 | } 68 | /** 69 | * Returns the headers for the proxy (to avoid CORS issues) 70 | * @param token 71 | * @returns {{STARSHIP_PROXY_TOKEN}} 72 | */ 73 | export function proxyHeaders(token) { 74 | return { 75 | 'Starship-Proxy-Token': token, 76 | }; 77 | } 78 | 79 | /** 80 | * Fetches data from both the local and remote endpoints 81 | * @param localRouteUrl 82 | * @param remoteRouteUrl 83 | * @param token 84 | * @param loadingDispatch - a dispatch route to call to set the loading variable 85 | * @param dataDispatch - dispatch route to call to set the data variables 86 | * @param errorDispatch - dispatch route to call to set the error variable 87 | */ 88 | export function fetchData( 89 | localRouteUrl, 90 | remoteRouteUrl, 91 | token, 92 | loadingDispatch, 93 | dataDispatch, 94 | errorDispatch, 95 | ) { 96 | if (loadingDispatch) { 97 | loadingDispatch(); 98 | } 99 | axios 100 | .get(localRouteUrl) 101 | .then((res) => { 102 | axios 103 | .get(proxyUrl(remoteRouteUrl), { headers: proxyHeaders(token) }) 104 | .then((rRes) => { 105 | if ( 106 | 
res.status === 200 && res.headers['content-type'] === 'application/json' && 107 | rRes.status === 200 && res.headers['content-type'] === 'application/json' 108 | ){ 109 | dataDispatch(res, rRes) 110 | } else { 111 | errorDispatch('Invalid response'); 112 | } 113 | }) // , dispatch)) 114 | .catch((err) => errorDispatch(err)); // , dispatch)); 115 | }) 116 | .catch((err) => errorDispatch(err)); // , dispatch)); 117 | } 118 | 119 | export function objectWithoutKey(object, key) { 120 | const { [key]: _, ...otherKeys } = object; 121 | return otherKeys; 122 | } 123 | 124 | /** 125 | * Constructs and returns the URL for the Astro Deployment Environment Variable API route 126 | * 127 | * @param {string} organizationId 128 | * @param {string} deploymentId 129 | * @returns {string} - The URL for the Astro Environment Variable service. 130 | */ 131 | export function getAstroEnvVarRoute(organizationId, deploymentId) { 132 | return `https://api.astronomer.io/platform/v1beta1/organizations/${organizationId}/deployments/${deploymentId}`; 133 | } 134 | 135 | /** 136 | * Constructs and returns the URL for a Houston API 137 | * 138 | * @param {string} basedomain 139 | * @returns {string} - The URL for the Houston service. 
import os

import requests
from airflow.plugins_manager import AirflowPlugin
from airflow.www.app import csrf
from flask import Blueprint, request, Response
from flask_appbuilder import BaseView
from flask_appbuilder import expose

# INCOMPATIBILITY: Airflow 1.10.15
# File "/home/airflow/.local/lib/python3.6/site-packages/astronomer_starship/starship.py", line 9, in
# from airflow.security import permissions
# ImportError: cannot import name 'permissions'
from airflow.security import permissions
from airflow.www import auth

# HTTP methods the proxy endpoint is willing to forward to the target Airflow.
ALLOWED_PROXY_METHODS = ["GET", "POST", "PATCH", "DELETE"]


class Starship(BaseView):
    """Webserver view that serves the Starship React app and its API proxy."""

    default_view = "main"

    @expose("/")
    @auth.has_access([(permissions.ACTION_CAN_READ, permissions.RESOURCE_CONFIG)])
    def main(self):
        """Main view - just bootstraps the React app."""
        return self.render_template("index.html")

    @expose("/proxy", methods=ALLOWED_PROXY_METHODS)
    @csrf.exempt
    @auth.has_access([(permissions.ACTION_CAN_READ, permissions.RESOURCE_CONFIG)])
    def proxy(self):
        """Proxy for the React app to use to access the (target) Airflow API.

        Expects a ``Starship-Proxy-Token`` header, which is forwarded to the
        target as a Bearer token, and a ``url`` query parameter naming the
        target endpoint. Returns the target's response body/status/headers,
        with ``Starship-Proxy-Status: OK`` added.
        """
        request_method = request.method
        if request_method not in ALLOWED_PROXY_METHODS:
            return Response(
                "Method not in " + ", ".join(ALLOWED_PROXY_METHODS), status=405
            )

        # Werkzeug header lookups are case-insensitive, so one lookup per
        # spelling (hyphenated and underscored) replaces the previous six
        # hand-written case variants; this accepts a superset of them.
        token = request.headers.get("Starship-Proxy-Token") or request.headers.get(
            "Starship_Proxy_Token"
        )
        if not token:
            return Response("No Token Provided", status=400)
        # Headers forwarded to the target: only auth plus (when a body is
        # present) a JSON content type -- never the caller's own headers.
        request_headers = dict(
            Authorization=f"Bearer {token}",
            **({"Content-type": "application/json"} if request.data else {}),
        )

        url = request.args.get("url")
        if not url:
            return Response("No URL Provided", status=400)

        # NOTE(review): request.args (including the `url` param itself) is
        # forwarded as the query string of the proxied request -- confirm
        # the target API tolerates the extra parameter.
        response = requests.request(
            request_method,
            url,
            headers=request_headers,
            params=request.args if request.args else None,
            **({"data": request.data} if request.data else {}),
        )
        response_headers = dict(response.headers)
        if not response.ok:
            print(response.content)

        # NOTE: os.getenv returns a string, so any non-empty DEBUG value
        # (including "False") enables this logging.
        if os.getenv("DEBUG", False):
            print(
                f"url={url}\n"
                f"request_method={request_method}\n"
                f"request_headers={request_headers}\n"
                f"request.data={request.data}\n"
                "==========="
                f"response_headers={response_headers}\n"
                f"response.status_code={response.status_code}\n"
                f"response.content={response.content}\n"
            )
        response_headers["Starship-Proxy-Status"] = "OK"
        return Response(
            response.content, status=response.status_code, headers=response_headers
        )


starship_view = Starship()
starship_bp = Blueprint(
    "starship",
    __name__,
    static_folder="static",  # should be default, just being explicit
    template_folder="templates",  # should be default, just being explicit
    static_url_path="/starship/static",  # so static/foo.html is at /starship/static/foo.html
)


class StarshipPlugin(AirflowPlugin):
    """Registers the Starship blueprint and menu entry with Airflow."""

    name = "starship"
    flask_blueprints = [starship_bp]
    appbuilder_views = [
        {
            "name": "Migration Tool 🚀 Starship",
            "category": "Astronomer",
            "view": starship_view,
        }
    ]
/astronomer_starship/templates/index.html: -------------------------------------------------------------------------------- 1 | {% extends base_template %} 2 | 3 | {% block head_js %} 4 | {{ super() }} 5 | {##} 9 | 10 | {% endblock %} 11 | 12 | {% block head_css %} 13 | {{ super() }} 14 | 15 | {% endblock %} 16 | 17 | {% block page_title %} 18 | Starship Migration Plugin 19 | {% endblock %} 20 | 21 | {% block content %} 22 |
23 | {##} 24 | {% endblock %} 25 | -------------------------------------------------------------------------------- /astronomer_starship/tests/src/pages/DAGHistoryPage.test.js: -------------------------------------------------------------------------------- 1 | // eslint-disable-next-line import/no-extraneous-dependencies 2 | import { expect, test } from 'vitest'; 3 | import { setDagData } from '../../../src/pages/DAGHistoryPage'; 4 | 5 | test('setDagData creates {: {local: {...}, remote: {...}}', () => { 6 | const local = [{ dag_id: 'foo', other: 2 }]; 7 | const remote = [{ dag_id: 'foo', other: 2 }]; 8 | const expected = { 9 | foo: { 10 | local: { dag_id: 'foo', other: 2 }, 11 | remote: { dag_id: 'foo', other: 2 }, 12 | }, 13 | }; 14 | expect(setDagData(local, remote)).toStrictEqual(expected); 15 | }); 16 | -------------------------------------------------------------------------------- /astronomer_starship/vite.config.js: -------------------------------------------------------------------------------- 1 | /* eslint-disable import/no-extraneous-dependencies */ 2 | /// 3 | import { defineConfig } from 'vite'; 4 | import react from '@vitejs/plugin-react'; 5 | 6 | export default defineConfig({ 7 | plugins: [react()], 8 | base: '/starship/static', 9 | build: { 10 | outDir: 'static', 11 | emptyOutDir: true, 12 | // Write all the files without a hash, which prevents cache-busting, 13 | // but means we don't need to modify the template index.html 14 | rollupOptions: { 15 | output: { 16 | entryFileNames: 'assets/[name].js', 17 | chunkFileNames: 'assets/[name].js', 18 | assetFileNames: 'assets/[name].[ext]', 19 | }, 20 | }, 21 | }, 22 | // proxies calls to a running `astro dev` project 23 | server: { 24 | proxy: { 25 | // string shorthand: http://localhost:5173/foo -> http://localhost:4567/foo 26 | '/api': 'http://localhost:8080', 27 | '/starship/proxy': 'http://localhost:8080', 28 | }, 29 | }, 30 | }); 31 | 
-------------------------------------------------------------------------------- /docs/api.md: -------------------------------------------------------------------------------- 1 | # API 2 | 3 | ## Error Responses 4 | In the event of an error, the API will return a JSON response with an `error` key 5 | and an HTTP `status_code`. The `error` key will contain a message describing the error. 6 | 7 | | **Type** | **Status Code** | **Response Example** | 8 | |-----------------------------------|-----------------|---------------------------------------------------------------------------------------------| 9 | | **Request kwargs - RuntimeError** | 400 | ```{"error": "..."}``` | 10 | | **Request kwargs - Exception** | 500 | ```{"error": "Unknown Error in kwargs_fn - ..."}``` | 11 | | **Unknown Error** | 500 | ```{"error": "Unknown Error", "error_type": ..., "error_message": ..., "kwargs": ...}``` | 12 | | **`POST` Integrity Error** | 409 | ```{"error": "Integrity Error (Duplicate Record?)", "error_message": ..., "kwargs": ...}``` | 13 | | **`POST` Data Error** | 400 | ```{"error": "Data Error", "error_message": ..., "kwargs": ...}``` | 14 | | **`POST` SQL Error** | 400 | ```{"error": "SQL Error", "error_message": ..., "kwargs": ...}``` | 15 | 16 | 17 | ## Airflow Version 18 | ::: astronomer_starship.starship_api.StarshipApi.airflow_version 19 | options: 20 | show_root_toc_entry: false 21 | show_root_heading: false 22 | show_source: false 23 | show_header: false 24 | 25 | ## Health 26 | ::: astronomer_starship.starship_api.StarshipApi.health 27 | options: 28 | show_root_toc_entry: false 29 | show_root_heading: false 30 | show_source: false 31 | show_header: false 32 | 33 | ## Environment Variables 34 | ::: astronomer_starship.starship_api.StarshipApi.env_vars 35 | options: 36 | show_root_toc_entry: false 37 | show_root_heading: false 38 | show_source: false 39 | show_header: false 40 | 41 | 42 | ## Variable 43 | ::: astronomer_starship.starship_api.StarshipApi.variables 44 
| options: 45 | show_root_toc_entry: false 46 | show_root_heading: false 47 | show_source: false 48 | show_header: false 49 | 50 | ## Pools 51 | ::: astronomer_starship.starship_api.StarshipApi.pools 52 | options: 53 | show_root_toc_entry: false 54 | show_root_heading: false 55 | show_source: false 56 | show_header: false 57 | 58 | ## Connections 59 | ::: astronomer_starship.starship_api.StarshipApi.connections 60 | options: 61 | show_root_toc_entry: false 62 | show_root_heading: false 63 | show_source: false 64 | show_header: false 65 | 66 | ## DAGs 67 | ::: astronomer_starship.starship_api.StarshipApi.dags 68 | options: 69 | show_root_toc_entry: false 70 | show_root_heading: false 71 | show_source: false 72 | show_header: false 73 | 74 | ## DAG Runs 75 | ::: astronomer_starship.starship_api.StarshipApi.dag_runs 76 | options: 77 | show_root_toc_entry: false 78 | show_root_heading: false 79 | show_source: false 80 | show_header: false 81 | 82 | ## Task Instances 83 | ::: astronomer_starship.starship_api.StarshipApi.task_instances 84 | options: 85 | show_root_toc_entry: false 86 | show_root_heading: false 87 | show_source: false 88 | show_header: false 89 | -------------------------------------------------------------------------------- /docs/index.md: -------------------------------------------------------------------------------- 1 |

2 | Logo of Spaceship 7 |

8 |

9 | Astronomer Starship can send your Airflow workloads to new places! 10 |

11 | 12 | ## What is it? 13 | 14 | Starship is a utility to migrate Airflow metadata such as Airflow Variables, 15 | Connections, Environment Variables, Pools, and DAG History between two Airflow instances. 16 | 17 |

18 | Logo of Spaceship 23 |

24 | 25 | ## Installation 26 | ```shell 27 | pip install astronomer-starship 28 | ``` 29 | 30 | ## Usage 31 | 1. Create a [Workspace](https://docs.astronomer.io/astro/manage-workspaces) in [Astro](https://cloud.astronomer.io/) or [Software](https://docs.astronomer.io/software) to hold Astro Deployments 32 | 2. [Create an Astro Deployment](https://docs.astronomer.io/astro/create-deployment) matching the source Airflow deployment configuration as possible 33 | 3. Run `astro dev init` with the [Astro CLI](https://docs.astronomer.io/astro/cli/overview) to create a [Astro Project](https://docs.astronomer.io/astro/cli/develop-project) locally in your terminal 34 | 4. Add any DAGs to the `/dags` folder in the Astro Project 35 | 5. Complete any additional setup required to convert your existing Airflow deployment to an Astro Project 36 | 5. [Install Starship](#installation) (and any additional Python Dependencies) to the Astro Project 37 | 6. [Install Starship](#installation) to your existing Airflow Deployment 38 | 4. [Deploy the Astro Project](https://docs.astronomer.io/astro/cli/astro-deploy) to the Astro Deployment with `astro deploy` 39 | 7. In the Airflow UI of the source Airflow deployment, navigate to the new `Astronomer` menu and select the `Migration Tool 🚀` option 40 | 8. 
Follow the UI prompts to migrate, or if needed, look at the instructions to use the Operator 41 | 42 | ## Compatability 43 | 44 | | Source | Compatible | 45 | |---------------------|------------------------| 46 | | Airflow 1 | ❌ | 47 | | GCC 1 - Airflow 2.x | [Operator](./operator) | 48 | | GCC 2 - Airflow 2.x | ✅ | 49 | | MWAA v2.0.2 | [Operator](./operator) | 50 | | MWAA ≥ v2.2.2 | ✅ | 51 | | OSS Airflow VM | ✅ | 52 | | Astronomer Products | ✅ | 53 | 54 | 55 | ## FAQ 56 | - **I'm on Airflow 1, can I use Starship?** 57 | 58 | _No, Starship is only compatible with Airflow 2.x and above_, see [Compatibility](#compatability) 59 | 60 | - **I'm on Airflow>=2.7 and can't test connections?** 61 | 62 | _You must have `AIRFLOW__CORE__TEST_CONNECTION` set. See notes [here](https://airflow.apache.org/docs/apache-airflow/stable/release_notes.html#disable-default-allowing-the-testing-of-connections-in-ui-api-and-cli-32052)_ 63 | 64 | - **I'm using Google Cloud Composer 2.x and Airflow 2.x and do not see the `Astronomer` menu and/or the Starship Airflow Plugin?** 65 | 66 | _Run the following to ensure you are a privileged user._ 67 | ``` 68 | gcloud config set project 69 | gcloud composer environments run --location users add-role -- -e -r Admin 70 | ``` 71 | 72 | ## Security Notice 73 | This project is an Airflow Plugin that adds custom API routes. Ensure your environments are correctly secured. 74 | 75 | --- 76 | 77 | **Artwork** 78 | Starship logo [by Lorenzo](https://thenounproject.com/lorenzo.verdenelli/) used with permission 79 | from [The Noun Project](https://thenounproject.com/icon/starship-6088295/) 80 | under [Creative Commons](https://creativecommons.org/licenses/by/3.0/us/legalcode). 
81 | -------------------------------------------------------------------------------- /docs/migration_source/gcc.md: -------------------------------------------------------------------------------- 1 | # Google Cloud Composer 2 | 3 | ## Compatability 4 | 5 | | Source | Compatible | 6 | |---------------------|------------------------| 7 | | Airflow 1 | ❌ | 8 | | GCC 1 - Airflow 2.x | [Operator](./operator) | 9 | | GCC 2 - Airflow 2.x | ✅ | 10 | 11 | ## Notes 12 | 13 | You must be an Admin to see Plugins on GCC. 14 | 15 | ## Installation 16 | 1. Navigate to your [Environments](https://console.cloud.google.com/composer/environments/) 17 | 2. Go to PyPi Packages 18 | ![PyPi Packages Tab](./gcc_pkg.png) 19 | 3. Click `+ Add Package` and put `astronomer-starship` under `Package name` 20 | ![PyPi Packages Tab, Adding a Package](./gcc_pkg_starship.png) 21 | 22 | 23 | ## FAQ 24 | - **I'm using Google Cloud Composer 2.x and Airflow 2.x and do not see the `Astronomer` menu and/or the Starship Airflow Plugin?** 25 | 26 | _Run the following to ensure you are a privileged user._ 27 | ``` 28 | gcloud config set project 29 | gcloud composer environments run --location users add-role -- -e -r Admin 30 | ``` 31 | -------------------------------------------------------------------------------- /docs/migration_source/gcc_pkg.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/astronomer/starship/0b1c6cafc4f09e5917072ae0e97dc8568e1b1d0a/docs/migration_source/gcc_pkg.png -------------------------------------------------------------------------------- /docs/migration_source/gcc_pkg_starship.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/astronomer/starship/0b1c6cafc4f09e5917072ae0e97dc8568e1b1d0a/docs/migration_source/gcc_pkg_starship.png -------------------------------------------------------------------------------- /docs/migration_source/mwaa.md: 
-------------------------------------------------------------------------------- 1 | # (AWS) Managed Apache Airflow 2 | 3 | ## Compatability 4 | 5 | | Source | Compatible | 6 | |---------------------|------------------------| 7 | | Airflow 1 | ❌ | 8 | | MWAA v2.0.2 | [Operator](./operator) | 9 | | MWAA ≥ v2.2.2 | ✅ | 10 | 11 | ## Installation 12 | 1. Navigate to your [Environments](https://console.aws.amazon.com/mwaa/home) 13 | 2. Download your existing `requirements.txt` 14 | 3. Add `astronomer-starship` to the file, save it, and re-upload it to S3 15 | 4. Click `Edit`, and pick the newer version of your Requirements File 16 | ![MWAA Requirements](./mwaa_pkg.png) 17 | 5. Click `Next`, then eventually `Save`, and then wait for your deployment to restart and dependencies to install 18 | -------------------------------------------------------------------------------- /docs/migration_source/mwaa_pkg.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/astronomer/starship/0b1c6cafc4f09e5917072ae0e97dc8568e1b1d0a/docs/migration_source/mwaa_pkg.png -------------------------------------------------------------------------------- /docs/operator.md: -------------------------------------------------------------------------------- 1 | # Starship Migration DAG 2 | The `StarshipAirflowMigrationDAG` can be used to migrate Airflow Variables, Pools, Connections, 3 | and DAG History from one Airflow instance to another. 4 | 5 | The `StarshipAirflowMigrationDAG` should be used in instances where the **source** Airflow Webserver 6 | is unable to correctly host a Plugin. The Target must still have a functioning Starship Plugin installed, 7 | be running the same version of Airflow, and have the same set of DAGs deployed. 8 | 9 | The `StarshipAirflowMigrationDAG` should be used if migrating from a 10 | Google Cloud Composer 1 (with Airflow 2.x) or MWAA v2.0.2 environment. 
11 | These environments do not support webserver plugins and will require using the `StarshipAirflowMigrationDAG` 12 | to migrate data. 13 | 14 | ## Installation 15 | Add the following line to your `requirements.txt` in your source environment: 16 | 17 | ``` 18 | astronomer-starship[provider] 19 | ``` 20 | 21 | ## Setup 22 | Make a connection in Airflow with the following details: 23 | - **Conn ID**: `starship_default` 24 | - **Conn Type**: `HTTP` 25 | - **Host**: the URL of the homepage of Airflow (excluding `/home` on the end of the URL) 26 | - For example, if your deployment URL is `https://astronomer.astronomer.run/abcdt4ry/home`, you'll use `https://astronomer.astronomer.run/abcdt4ry` 27 | - **Schema**: `https` 28 | - **Extras**: `{"Authorization": "Bearer "}` 29 | 30 | ## Usage 31 | 1. Add the following DAG to your source environment: 32 | 33 | ```python title="dags/starship_airflow_migration_dag.py" 34 | from astronomer_starship.providers.starship.operators.starship import StarshipAirflowMigrationDAG 35 | 36 | globals()['starship_airflow_migration_dag'] = StarshipAirflowMigrationDAG(http_conn_id="starship_default") 37 | ``` 38 | 39 | 2. Unpause the DAG in the Airflow UI 40 | 3. 
Once the DAG successfully runs, your connections, variables, and environment variables should all be migrated to Astronomer 41 | 42 | ## Configuration 43 | 44 | The `StarshipAirflowMigrationDAG` can be configured as follows: 45 | 46 | ```python 47 | StarshipAirflowMigrationDAG( 48 | http_conn_id="starship_default", 49 | variables=None, # None to migrate all, or ["var1", "var2"] to migrate specific items, or empty list to skip all 50 | pools=None, # None to migrate all, or ["pool1", "pool2"] to migrate specific items, or empty list to skip all 51 | connections=None, # None to migrate all, or ["conn1", "conn2"] to migrate specific items, or empty list to skip all 52 | dag_ids=None, # None to migrate all, or ["dag1", "dag2"] to migrate specific items, or empty list to skip all 53 | ) 54 | ``` 55 | 56 | You can use this DAG to migrate all items, or specific items by providing a list of names. 57 | 58 | You can skip migration by providing an empty list. 59 | 60 | ## Python API 61 | 62 | ### Hooks 63 | 64 | ::: astronomer_starship.providers.starship.hooks.starship 65 | options: 66 | heading_level: 4 67 | show_root_toc_entry: false 68 | show_root_heading: false 69 | inherited_members: true 70 | show_source: false 71 | 72 | ### Operators, TaskGroups, DAG 73 | 74 | ::: astronomer_starship.providers.starship.operators.starship 75 | options: 76 | heading_level: 4 77 | show_root_toc_entry: false 78 | show_root_heading: false 79 | inherited_members: true 80 | show_source: false 81 | -------------------------------------------------------------------------------- /docs/starship.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/astronomer/starship/0b1c6cafc4f09e5917072ae0e97dc8568e1b1d0a/docs/starship.png -------------------------------------------------------------------------------- /docs/starship.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 
-------------------------------------------------------------------------------- /justfile: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env just --justfile 2 | set dotenv-load := true 3 | SRC_DIR := "astronomer_starship" 4 | DOCS_DIR := "docs" 5 | VERSION := `echo $(python -c 'from astronomer_starship import __version__; print(__version__)')` 6 | 7 | default: 8 | @just --choose 9 | 10 | # Print this help text 11 | help: 12 | @just --list 13 | 14 | # Install pre-commit 15 | install-precommit: 16 | pre-commit install 17 | 18 | 19 | # install frontend requirements 20 | install-frontend: 21 | cd astronomer_starship && npm install 22 | 23 | # install backend requirements 24 | install-backend EDITABLE="": 25 | pip install {{EDITABLE}} '.[dev]' 26 | 27 | # Install the project 28 | install: clean install-frontend install-backend 29 | 30 | # Run container test via docker 31 | test-run-container-test IMAGE="apache/airflow:2.3.4": 32 | docker run --entrypoint=bash -e DOCKER_TEST=True \ 33 | -v "$HOME/starship:/usr/local/airflow/starship:rw" \ 34 | {{IMAGE}} -- \ 35 | /usr/local/airflow/starship/tests/docker_test/run_container_test.sh \ 36 | {{IMAGE}} 37 | 38 | # Test Starship Python API 39 | test-backend: 40 | pytest -c pyproject.toml 41 | 42 | # Test Starship Python API with coverage 43 | test-backend-with-coverage: 44 | pytest -c pyproject.toml --cov=./ --cov-report=xml 45 | 46 | # Test Starship Webapp Frontend 47 | test-frontend: 48 | cd astronomer_starship && npx vitest run 49 | 50 | # Test the frontend and retest while watching for changes 51 | test-frontend-watch: 52 | cd astronomer_starship && npx vitest watch 53 | 54 | # Run unit tests 55 | test: test-frontend test-backend 56 | 57 | # Run unit tests with coverage 58 | test-with-coverage: test-frontend test-backend-with-coverage 59 | 60 | # Run integration tests 61 | test-integration $MANUAL_TESTS="true": 62 | @just test 63 | 64 | # Test CICD setup with `act` 65 | 
test-cicd: 66 | act pull-request -W .github/workflows/checks.yml --container-architecture linux/amd64 67 | 68 | # Lint the frontend code 69 | lint-frontend: 70 | cd astronomer_starship && npx eslint .. --ext js,jsx --report-unused-disable-directives --max-warnings 0 71 | 72 | # Lint the backend code 73 | lint-backend: 74 | ruff -c pyproject.toml 75 | 76 | # Run all linting 77 | lint: lint-frontend lint-backend 78 | 79 | # Build Starship Webapp Frontend 80 | build-frontend: clean-frontend-build 81 | cd astronomer_starship && npx vite build 82 | 83 | # Build the frontend and rebuild while watching for changes 84 | build-frontend-watch: 85 | cd astronomer_starship && npx vite build --watch 86 | 87 | # Build Starship Package 88 | build-backend: clean-backend-build 89 | python -m build 90 | 91 | # Build the project 92 | build: install clean build-frontend build-backend 93 | 94 | # Clean up any temp and dependency directories 95 | clean-backend-build: 96 | rm -rf dist dist 97 | rm -rf *.egg-info 98 | 99 | # Clean artifacts created after building the frontend 100 | clean-frontend-build: 101 | rm -rf astronomer_starship/static 102 | 103 | # Clean artifacts used by the frontend 104 | clean-frontend-install: 105 | rm -rf astronomer_starship/node_modules 106 | 107 | # Clean everything 108 | clean: clean-backend-build clean-frontend-build clean-frontend-install 109 | 110 | # Tag as v$(.__version__) and push to GH 111 | tag: 112 | # Delete tag if it already exists 113 | git tag -d v{{VERSION}} || true 114 | # Tag and push 115 | git tag v{{VERSION}} 116 | 117 | # Deploy the project 118 | deploy-tag: tag 119 | git push origin v{{VERSION}} 120 | 121 | # Deploy the project 122 | deploy: deploy-tag 123 | 124 | # Upload to TestPyPi for testing (note: you can only use each version once) 125 | upload-testpypi: clean install build 126 | python -m twine check dist/* 127 | TWINE_USER=${TWINE_USER} TWINE_PASS=${TWINE_PASS} python -m twine upload --repository testpypi dist/* 128 | 129 | # 
Upload to PyPi - DO NOT USE THIS, GHA DOES THIS AUTOMATICALLY 130 | upload-pypi: clean install build 131 | python -m twine check dist/* 132 | TWINE_USER=${TWINE_USER} TWINE_PASS=${TWINE_PASS} python -m twine upload dist/* 133 | 134 | # create a test project at a path 135 | create-test TESTPATH: 136 | #!/usr/bin/env bash 137 | set -euxo pipefail 138 | if [[ -d {{TESTPATH}} ]] 139 | then 140 | echo "starship_scratch already exists" 141 | else 142 | mkdir -p {{TESTPATH}} 143 | ln -sf $(pwd) {{ TESTPATH }}/starship 144 | cd {{TESTPATH}} 145 | astro dev init <<< 'y' 146 | echo "COPY --chown=astro:astro --chmod=777 starship /usr/local/starship" >> Dockerfile 147 | echo "USER root" >> Dockerfile 148 | echo "RUN pip install --upgrade pip && pip install -e '/usr/local/starship'" >> Dockerfile 149 | echo "USER astro" >> Dockerfile 150 | echo "# ALTERNATIVELY, COMMENT THIS OUT AND ADD TO REQUIREMENTS.TXT" >> Dockerfile 151 | echo "# --extra-index-url=https://test.pypi.org/simple/" >> requirements.txt 152 | echo "# astronomer-starship==?.?.?" >> requirements.txt 153 | echo "version: \"3.1\"" >> docker-compose.override.yml 154 | echo "services:" >> docker-compose.override.yml 155 | echo " webserver:" >> docker-compose.override.yml 156 | echo " volumes:" >> docker-compose.override.yml 157 | echo " - ./starship:/usr/local/starship:rw" >> docker-compose.override.yml 158 | fi 159 | 160 | ## (Start/Restart) a test astro project with Starship installed 161 | start-test TESTPATH START="start": 162 | cd {{TESTPATH}} && tar --exclude='node_modules' -czh . 
| docker build -t test - && astro dev {{START}} -i test 163 | just logs-test {{TESTPATH}} 164 | 165 | ## (Start/Restart) a test astro project with Starship installed without symlinks 166 | start-test-no-symlink TESTPATH START="start": 167 | cd {{TESTPATH}} && astro dev {{START}} 168 | 169 | ## (Stop/Kill) a test astro project with Starship installed 170 | stop-test TESTPATH STOP="stop": 171 | cd {{TESTPATH}} && astro dev {{STOP}} 172 | 173 | # Deploy a test astro project with Starship installed 174 | deploy-test TESTPATH: 175 | cd {{TESTPATH}} && tar --exclude='node_modules' -czh . | docker build -t test --platform=linux/x86_64 - && astro deploy -i test 176 | 177 | # Get logs from a test astro project with Starship installed 178 | logs-test TESTPATH: 179 | cd {{TESTPATH}} && astro dev logs -f 180 | 181 | # Deploy a test astro project with Starship installed without symlinks 182 | deploy-test-no-symlink TESTPATH: 183 | cd {{TESTPATH}} && astro deploy 184 | 185 | # Serve Webapp on localhost 186 | serve-frontend: build 187 | cd astronomer_starship && npx vite 188 | 189 | # Restart just the webserver container (e.g. 
to reload the plugin) 190 | restart-webserver: 191 | docker restart $(docker container ls --filter name=webserver --format="{{{{.ID}}") \ 192 | && docker logs -f $(docker container ls --filter name=webserver --format="{{{{.ID}}") --since 1m 193 | 194 | 195 | # Update the baseline for detect-secrets / pre-commit 196 | update-secrets: 197 | detect-secrets scan > .secrets.baseline # pragma: allowlist secret 198 | 199 | # Render and serve documentation locally 200 | serve-docs: 201 | mkdocs serve -w {{DOCS_DIR}} -w {{SRC_DIR}} 202 | 203 | # Build documentation locally (likely unnecessary) 204 | build-docs: clean 205 | mkdocs build 206 | 207 | # Deploy the documentation to GitHub Pages 208 | deploy-docs UPSTREAM="origin": clean 209 | mkdocs gh-deploy -r {{UPSTREAM}} 210 | -------------------------------------------------------------------------------- /mkdocs.yml: -------------------------------------------------------------------------------- 1 | site_name: Starship 2 | repo_url: https://github.com/astronomer/starship/ 3 | edit_uri: edit/main/docs/ 4 | site_url: https://astronomer.github.io/starship/ 5 | 6 | theme: 7 | name: material 8 | 9 | logo: starship.svg 10 | favicon: starship.png 11 | 12 | palette: 13 | # Palette toggle for light mode 14 | - media: "(prefers-color-scheme: light)" 15 | scheme: default 16 | primary: black 17 | toggle: 18 | icon: material/brightness-7 19 | name: Switch to dark mode 20 | 21 | # Palette toggle for dark mode 22 | - media: "(prefers-color-scheme: dark)" 23 | scheme: slate 24 | primary: black 25 | toggle: 26 | icon: material/brightness-4 27 | name: Switch to light mode 28 | 29 | font: 30 | text: Roboto 31 | code: Roboto Mono 32 | 33 | features: 34 | - navigation.path 35 | - navigation.sections 36 | - navigation.tabs 37 | - navigation.tabs.sticky 38 | - navigation.top 39 | - navigation.instant 40 | - navigation.instant.progress 41 | - toc.follow 42 | - toc.integrate 43 | - content.tabs.link 44 | - content.action.edit 45 | - 
content.code.copy 46 | - search.share 47 | - search.highlight 48 | - search.suggest 49 | 50 | markdown_extensions: 51 | - toc: 52 | permalink: true 53 | 54 | - pymdownx.superfences 55 | - pymdownx.highlight: 56 | use_pygments: true 57 | anchor_linenums: true 58 | 59 | plugins: 60 | - search 61 | - mkdocstrings: 62 | handlers: 63 | python: 64 | options: 65 | docstring_style: sphinx 66 | show_source: true 67 | show_root_heading: true 68 | separate_signature: true 69 | show_signature_annotations: true 70 | signature_crossrefs: true 71 | unwrap_annotated: true 72 | show_bases: false 73 | show_object_full_path: false 74 | show_symbol_type_toc: true 75 | merge_init_into_class: true 76 | parameter_headings: true 77 | summary: true 78 | group_by_category: true 79 | copyright: "Apache Airflow® is a trademark of the Apache Software Foundation" 80 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ["setuptools", "wheel"] 3 | build-backend = "setuptools.build_meta" 4 | 5 | [project] 6 | name = "astronomer-starship" 7 | dynamic = ["version"] 8 | description = "Migrations to Astro" 9 | authors = [ 10 | { name = "Fritz Davenport", email = "fritz@astronomer.io" }, 11 | { name = "CETA Team", email = "ceta@astronomer.io" }, 12 | { name = "Astronomer", email = "humans@astronomer.io" } 13 | ] 14 | readme = "README.md" 15 | license = { text = "PROPRIETARY" } 16 | requires-python = ">=3.6" 17 | dependencies = [] 18 | 19 | [project.urls] 20 | Homepage = "https://astronomer.io" 21 | Source = "https://github.com/astronomer/starship/" 22 | 23 | [tool.setuptools] 24 | include-package-data = false 25 | 26 | [tool.setuptools.package-data] 27 | "astronomer_starship" = ["*.js", "*.html", "*.css"] 28 | "astronomer_starship.static" = ["*.js", "*.html", "*.css"] 29 | "astronomer_starship.static.assets" = ["*.js", "*.html", "*.css"] 30 | 
"astronomer_starship.templates" = ["*.js", "*.html", "*.css"] 31 | 32 | [tool.setuptools.dynamic] 33 | version = { attr = "astronomer_starship.__version__" } 34 | 35 | [tool.setuptools.packages.find] 36 | include = ["astronomer_starship", "astronomer_starship.*"] 37 | exclude = [ 38 | "*venv*", "*venv*.*", 39 | "*tests.*", "*tests", 40 | "*build", "*build.*", 41 | "*dist", "*dist.*", 42 | "*node_modules", "*node_modules.*", 43 | ] 44 | 45 | [project.optional-dependencies] 46 | provider = [ 47 | "apache-airflow-providers-http" 48 | ] 49 | 50 | dev = [ 51 | # package 52 | "twine", 53 | "build", 54 | 55 | # test 56 | "apache-airflow", 57 | "pytest>=7", 58 | "pytest-cov>=4.0", 59 | "pytest-integration>=0.2", 60 | "pytest-sugar>=0.9", 61 | 62 | "docker>=6", 63 | 64 | # docs 65 | "mkdocs", 66 | "mkdocs-material", 67 | "mkdocstrings[python]", 68 | "pygments", 69 | 70 | # precommit 71 | "pre-commit>=3.3; python_version>'3.7'", 72 | "detect-secrets>=1.4; python_version>'3.6'", 73 | 74 | # lint 75 | "black>=22", 76 | "ruff>=0.0.261; python_version>'3.6'", 77 | "pylint>=2", 78 | "isort>=5", 79 | ] 80 | 81 | 82 | # for pip installing this pyproject.toml 83 | [project.entry-points."airflow.plugins"] 84 | "starship" = "astronomer_starship.starship:StarshipPlugin" 85 | "starship_api" = "astronomer_starship.starship_api:StarshipAPIPlugin" 86 | 87 | [project.entry-points.apache_airflow_provider] 88 | provider_info = "astronomer_starship.__init__:get_provider_info" 89 | 90 | [tool.black] 91 | # https://github.com/psf/black 92 | color = true 93 | 94 | [tool.bandit] 95 | exclude_dirs = ["tests"] 96 | skips = [ 97 | "B301", 98 | "B403", 99 | "B310", # urlopen in Aeroscope 100 | "B608" # SQL Injection in DAG History Migration 101 | ] 102 | 103 | [tool.ruff] 104 | line-length = 120 105 | 106 | [tool.pytest.ini_options] 107 | norecursedirs = [ 108 | "tests/docker_test/", "tests/resources/", "hooks", "*.egg", ".eggs", 109 | "dist", "build", "docs", ".tox", ".git", "__pycache__" 110 | ] 111 
def get_extras(deployment_url: str, token: str) -> Dict[str, Any]:
    """Build the keyword arguments for ``requests`` calls against a deployment.

    A local deployment (URL contains ``localhost``) uses the ``admin:admin``
    basic auth that ``astro dev`` ships with; any other deployment is assumed
    to be Astro and authenticates with a Bearer token header.
    """
    if "localhost" in deployment_url:
        return {"auth": ("admin", "admin")}
    return {
        "headers": {
            "Authorization": f"Bearer {token}",
        }
    }
def delete(route, test_input, token, url):
    """Delete a record by POSTing its identifying fields as query params.

    NOTE(review): the Starship API appears to handle deletion through POST
    with request args rather than an HTTP DELETE — confirm against the API.
    Expects 204 NO CONTENT on success.
    """
    extras = get_extras(url, token)
    response = requests.post(f"{url}/{route}", params=test_input, **extras)
    assert response.status_code == 204, response.text
@manual_tests
def test_integration_dag_runs_and_task_instances(url_and_token_and_starship):
    """Round-trip DAG runs and task instances through a live Starship API.

    Posts synthetic DAG-run data, reads it back, normalizes timestamp
    serialization differences, and compares; then repeats the same cycle
    for task instances. Only runs when the MANUAL_TESTS env var is set
    (see the ``manual_tests`` marker in conftest).
    """
    (url, token, starship) = url_and_token_and_starship
    route = "api/starship/dag_runs"
    run_id = "manual__1970-01-01T00:00:00+00:00"
    dag_id = "dag_0"

    # delete dag (best-effort cleanup so the POST below starts from a clean slate)
    requests.delete(f"{url}/api/v1/dags/{dag_id}", **get_extras(url, token))

    test_input = get_test_data(method="POST", attrs=starship.dag_runs_attrs())
    # Round-trip through JSON (default=str) so datetimes become strings,
    # matching what the API serializes.
    test_input = json.loads(json.dumps(test_input, default=str))

    # Set DAG Runs
    actual = requests.post(f"{url}/{route}", json=test_input, **get_extras(url, token))
    # 409 is acceptable: the record may already exist from a prior run.
    assert actual.status_code in [200, 409], actual.text
    if actual.status_code == 409:
        assert (
            actual.json()["error"] == "Integrity Error (Duplicate Record?)"
        ), actual.text
    else:
        # This key gets deleted (server drops "conf" from its response)
        del test_input["dag_runs"][0]["conf"]
        assert actual.json()["dag_runs"] == test_input["dag_runs"], actual.text

    # Get DAG Runs
    actual = requests.get(f"{url}/{route}?dag_id={dag_id}", **get_extras(url, token))
    assert actual.status_code == 200, actual.text
    actual_dag_runs = [
        dag_run for dag_run in actual.json()["dag_runs"] if dag_run["run_id"] == run_id
    ]
    assert len(actual_dag_runs) == 1, actual.json()
    if "conf" in actual_dag_runs[0]:
        del actual_dag_runs[0]["conf"]
    actual_dag_run = actual_dag_runs[0]
    # Normalize "T"-separated ISO timestamps to the space-separated form so
    # the comparison ignores serialization style (run_id is left untouched
    # because it legitimately contains the "T" form).
    actual_dag_run = {
        k: (
            v.replace("1970-01-01T00", "1970-01-01 00")
            if isinstance(v, str) and k != "run_id"
            else v
        )
        for k, v in actual_dag_run.items()
    }
    assert test_input["dag_runs"][0] == actual_dag_run, actual_dag_run

    # Delete test
    delete(route, test_input, token, url)

    route = "api/starship/task_instances"

    test_input = get_test_data(method="POST", attrs=starship.task_instances_attrs())
    test_input = json.loads(json.dumps(test_input, default=str))

    # Set Task Instances
    actual = requests.post(f"{url}/{route}", json=test_input, **get_extras(url, token))
    assert actual.status_code in [200, 409], actual.text
    if actual.status_code == 409:
        assert (
            actual.json()["error"] == "Integrity Error (Duplicate Record?)"
        ), actual.text
    else:
        assert (
            actual.json()["task_instances"] == test_input["task_instances"]
        ), actual.text

    # Get Task Instances
    actual = requests.get(f"{url}/{route}?dag_id={dag_id}", **get_extras(url, token))
    assert actual.status_code == 200, actual.text
    actual_task_instances = [
        task_instance
        for task_instance in actual.json()["task_instances"]
        if task_instance["run_id"] == test_input["task_instances"][0]["run_id"]
    ]
    assert len(actual_task_instances) == 1, actual.json()
    actual_task_instance = actual_task_instances[0]
    # Same timestamp normalization as for DAG runs above.
    actual_task_instance = {
        k: (
            v.replace("1970-01-01T00", "1970-01-01 00")
            if isinstance(v, str) and k != "run_id"
            else v
        )
        for k, v in actual_task_instance.items()
    }
    # gets blanked out (server nulls executor_config on write)
    test_input["task_instances"][0]["executor_config"] = None

    # trigger_timeout is not stable across versions — drop from both sides.
    if "trigger_timeout" in actual_task_instance:
        del actual_task_instance["trigger_timeout"]
    if "trigger_timeout" in test_input["task_instances"][0]:
        del test_input["task_instances"][0]["trigger_timeout"]

    assert actual_task_instance == test_input["task_instances"][0], actual_task_instance
- It tries to import the DAGBag directly in python (this doesn't seem to work, likely due to config differences — no big deal, as it also serves as a delay that gives the scheduler time to do the import)
"""Minimal fixture DAG used by the in-container docker tests."""
from datetime import datetime

from airflow import DAG
from airflow.operators.bash import BashOperator

# A single-task DAG, created in the paused state, owned by "baz".
with DAG(
    dag_id="dag_0",
    schedule_interval="@once",
    start_date=datetime(1970, 1, 1),
    is_paused_upon_creation=True,
    default_args={"owner": "baz"},
    tags=["foo", "bar"],
) as dag:
    echo_task = BashOperator(task_id="operator_0", bash_command="echo hi")
@docker_test
def test_dags(starship):
    """PATCH a DAG's paused state, then read it back via ``get_dags()``.

    ``fileloc`` and ``tags`` are stripped from both sides before comparing:
    the file location depends on the container layout and tag ordering is
    not guaranteed.
    """
    patch_payload = get_test_data(method="PATCH", attrs=starship.dag_attrs())
    patched = starship.set_dag_is_paused(**patch_payload)
    assert patched == patch_payload, patched

    expected = get_test_data(attrs=starship.dag_attrs())
    matching = [d for d in starship.get_dags() if d["dag_id"] == expected["dag_id"]]
    assert len(matching) == 1, matching
    found = matching[0]

    # Not predictable in the test environment, so drop from both sides.
    for volatile_key in ("fileloc", "tags"):
        del found[volatile_key]
        del expected[volatile_key]

    assert found == expected, found
test_input = get_test_data(method="POST", attrs=starship.dag_runs_attrs()) 98 | dag_id = test_input["dag_runs"][0]["dag_id"] 99 | 100 | # Set Dag Runs 101 | actual = starship.set_dag_runs(**test_input) 102 | expected = dict({"dag_run_count": 1}, **test_input) 103 | assert actual == expected, actual 104 | 105 | # Get Dag Runs 106 | run_id = test_input["dag_runs"][0]["run_id"] 107 | actual = starship.get_dag_runs(dag_id) 108 | actual_dag_runs = [ 109 | dag_run for dag_run in actual["dag_runs"] if dag_run["run_id"] == run_id 110 | ] 111 | assert len(actual_dag_runs) == 1, actual 112 | if "conf" in actual_dag_runs[0]: 113 | del actual_dag_runs[0]["conf"] 114 | assert json.dumps(actual_dag_runs[0], default=str) in json.dumps( 115 | test_input["dag_runs"], default=str 116 | ), actual_dag_runs 117 | 118 | # Set Task Instances 119 | test_input = get_test_data(method="POST", attrs=starship.task_instances_attrs()) 120 | actual = starship.set_task_instances(**test_input) 121 | assert actual == test_input, actual 122 | 123 | # Get Task Instances 124 | actual = starship.get_task_instances(dag_id) 125 | actual_task_instances = actual["task_instances"] 126 | assert len(actual_task_instances) == 1, actual 127 | test_input["task_instances"][0]["executor_config"] = None 128 | if "trigger_timeout" in actual_task_instances[0]: 129 | del actual_task_instances[0]["trigger_timeout"] 130 | if "trigger_timeout" in test_input["task_instances"][0]: 131 | del test_input["task_instances"][0]["trigger_timeout"] 132 | assert json.dumps(actual_task_instances, default=str) in json.dumps( 133 | test_input["task_instances"], default=str 134 | ), actual_task_instances 135 | 136 | test_input = get_test_data(method="DELETE", attrs=starship.dag_runs_attrs()) 137 | actual = starship.delete_dag_runs(**test_input) 138 | assert actual.status_code == HTTPStatus.NO_CONTENT, actual 139 | -------------------------------------------------------------------------------- /tests/docker_test/run_container_test.sh: 
#!/usr/bin/env bash
# Run inside an Airflow container under test: install starship from a wheel,
# start Airflow, and run the in-container pytest suite. Exits non-zero at the
# first failed stage; every stage brackets its output with [STARSHIP-*] log
# markers so the host-side test can grep the container logs.

IMAGE=$1   # image name, used only in the log markers
WHEEL=$2   # path/spec of the astronomer-starship wheel to install

echo -e "[STARSHIP-INSTALL-START image=$IMAGE]"
pushd /usr/local/airflow/
# BUGFIX: was `pip install pytest $2` — use the named $WHEEL variable.
INSTALL_OUTPUT=$( \
  python -m pip install --upgrade pip && \
  python -m pip install pytest "$WHEEL" 2>&1 \
)
echo -e "$INSTALL_OUTPUT"
popd

# if we don't find 'Successfully installed' or if we find 'ERROR'
# (quoted here-strings so pip's multi-line output is not word-split)
grep -q 'Successfully installed astronomer-starship' <<< "$INSTALL_OUTPUT"
SUCCESSFULLY_INSTALLED=$?
grep -q 'ERROR' <<< "$INSTALL_OUTPUT"
ERROR=$?
if [ $SUCCESSFULLY_INSTALLED -eq 0 ] && [ $ERROR -ne 0 ]; then
  echo -e "[STARSHIP-INSTALL-SUCCESS image=$IMAGE]"
else
  echo -e "[STARSHIP-INSTALL-ERROR image=$IMAGE]"
  exit 1
fi

echo -e "[STARSHIP-COPY-DAG-START image=$IMAGE]"
mkdir -p dags || exit 1
cp /usr/local/airflow/starship/tests/docker_test/dag.py dags || exit 1
ls -R dags
echo -e "[STARSHIP-COPY-DAG-SUCCESS image=$IMAGE]"

echo "[STARSHIP-DB-INIT-START image=$IMAGE]"
airflow db init || exit 1
echo -e "[STARSHIP-DB-INIT-SUCCESS image=$IMAGE]"

echo "[STARSHIP-AIRFLOW-STARTUP-START image=$IMAGE]"
touch airflow.log
# BUGFIX: was `touch airflow.scheduler.log`, which never matched the
# `airflow-scheduler.log` file the scheduler actually tees into below.
touch airflow-scheduler.log
airflow webserver --workers 1 2>&1 | tee -a airflow.log &
airflow scheduler 2>&1 | tee -a airflow-scheduler.log &

# Block (up to 300s) until gunicorn reports the webserver is listening.
( timeout --signal=SIGINT 300 tail -f -n0 airflow.log & ) | grep -q "Listening at: http://0.0.0.0:8080"
if [ $? -eq 0 ]; then
  echo -e "[STARSHIP-AIRFLOW-STARTUP-SUCCESS image=$IMAGE]"
else
  echo -e "[STARSHIP-AIRFLOW-STARTUP-ERROR image=$IMAGE]"
  exit 1
fi

echo -e "[STARSHIP-AIRFLOW-DAGBAG-START image=$IMAGE]"
# NOTE(review): this import rarely works in-container (config differences),
# but it doubles as a delay that gives the scheduler time to parse the DAG.
python -c "from airflow.models.dagbag import DagBag; print(DagBag(store_serialized_dags=True).dags)"
if [ $? -eq 0 ]; then
  echo -e "[STARSHIP-AIRFLOW-DAGBAG-SUCCESS image=$IMAGE]"
else
  echo -e "[STARSHIP-AIRFLOW-DAGBAG-ERROR image=$IMAGE]"
  exit 1
fi

echo -e "[STARSHIP-PYTEST-START image=$IMAGE]"
pytest \
  --no-header --disable-warnings --tb=short --strict-markers \
  /usr/local/airflow/starship/tests/docker_test/docker_test.py
if [ $? -eq 0 ]; then
  echo -e "[STARSHIP-PYTEST-SUCCESS image=$IMAGE]"
else
  echo -e "[STARSHIP-PYTEST-ERROR image=$IMAGE]"
  exit 1
fi

#standalone | Airflow is ready
# webserver | [2024-02-08 23:22:33 +0000] [3228] [INFO] Listening at: http://0.0.0.0:8080 (3228)
{{PATH}} && astro dev init 21 | -------------------------------------------------------------------------------- /tests/e2e/mwaa/mwaa.yaml: -------------------------------------------------------------------------------- 1 | # https://docs.aws.amazon.com/mwaa/latest/userguide/quick-start.html#quick-start-createstack 2 | AWSTemplateFormatVersion: "2010-09-09" 3 | 4 | Parameters: 5 | 6 | EnvironmentName: 7 | Description: An environment name that is prefixed to resource names 8 | Type: String 9 | Default: starship 10 | 11 | VpcCIDR: 12 | Description: The IP range (CIDR notation) for this VPC 13 | Type: String 14 | Default: 10.192.0.0/16 15 | 16 | PublicSubnet1CIDR: 17 | Description: The IP range (CIDR notation) for the public subnet in the first Availability Zone 18 | Type: String 19 | Default: 10.192.10.0/24 20 | 21 | PublicSubnet2CIDR: 22 | Description: The IP range (CIDR notation) for the public subnet in the second Availability Zone 23 | Type: String 24 | Default: 10.192.11.0/24 25 | 26 | PrivateSubnet1CIDR: 27 | Description: The IP range (CIDR notation) for the private subnet in the first Availability Zone 28 | Type: String 29 | Default: 10.192.20.0/24 30 | PrivateSubnet2CIDR: 31 | Description: The IP range (CIDR notation) for the private subnet in the second Availability Zone 32 | Type: String 33 | Default: 10.192.21.0/24 34 | MaxWorkerNodes: 35 | Description: The maximum number of workers that can run in the environment 36 | Type: Number 37 | Default: 2 38 | DagProcessingLogs: 39 | Description: Log level for DagProcessing 40 | Type: String 41 | Default: INFO 42 | SchedulerLogsLevel: 43 | Description: Log level for SchedulerLogs 44 | Type: String 45 | Default: INFO 46 | TaskLogsLevel: 47 | Description: Log level for TaskLogs 48 | Type: String 49 | Default: INFO 50 | WorkerLogsLevel: 51 | Description: Log level for WorkerLogs 52 | Type: String 53 | Default: INFO 54 | WebserverLogsLevel: 55 | Description: Log level for WebserverLogs 56 | Type: String 57 | Default: 
INFO 58 | 59 | Resources: 60 | ##################################################################################################################### 61 | # CREATE VPC 62 | ##################################################################################################################### 63 | 64 | VPC: 65 | Type: AWS::EC2::VPC 66 | Properties: 67 | CidrBlock: !Ref VpcCIDR 68 | EnableDnsSupport: true 69 | EnableDnsHostnames: true 70 | Tags: 71 | - Key: Name 72 | Value: MWAAEnvironment 73 | 74 | InternetGateway: 75 | Type: AWS::EC2::InternetGateway 76 | Properties: 77 | Tags: 78 | - Key: Name 79 | Value: MWAAEnvironment 80 | 81 | InternetGatewayAttachment: 82 | Type: AWS::EC2::VPCGatewayAttachment 83 | Properties: 84 | InternetGatewayId: !Ref InternetGateway 85 | VpcId: !Ref VPC 86 | 87 | PublicSubnet1: 88 | Type: AWS::EC2::Subnet 89 | Properties: 90 | VpcId: !Ref VPC 91 | AvailabilityZone: !Select [ 0, !GetAZs '' ] 92 | CidrBlock: !Ref PublicSubnet1CIDR 93 | MapPublicIpOnLaunch: true 94 | Tags: 95 | - Key: Name 96 | Value: !Sub ${EnvironmentName} Public Subnet (AZ1) 97 | 98 | PublicSubnet2: 99 | Type: AWS::EC2::Subnet 100 | Properties: 101 | VpcId: !Ref VPC 102 | AvailabilityZone: !Select [ 1, !GetAZs '' ] 103 | CidrBlock: !Ref PublicSubnet2CIDR 104 | MapPublicIpOnLaunch: true 105 | Tags: 106 | - Key: Name 107 | Value: !Sub ${EnvironmentName} Public Subnet (AZ2) 108 | 109 | PrivateSubnet1: 110 | Type: AWS::EC2::Subnet 111 | Properties: 112 | VpcId: !Ref VPC 113 | AvailabilityZone: !Select [ 0, !GetAZs '' ] 114 | CidrBlock: !Ref PrivateSubnet1CIDR 115 | MapPublicIpOnLaunch: false 116 | Tags: 117 | - Key: Name 118 | Value: !Sub ${EnvironmentName} Private Subnet (AZ1) 119 | 120 | PrivateSubnet2: 121 | Type: AWS::EC2::Subnet 122 | Properties: 123 | VpcId: !Ref VPC 124 | AvailabilityZone: !Select [ 1, !GetAZs '' ] 125 | CidrBlock: !Ref PrivateSubnet2CIDR 126 | MapPublicIpOnLaunch: false 127 | Tags: 128 | - Key: Name 129 | Value: !Sub ${EnvironmentName} Private Subnet 
(AZ2) 130 | 131 | NatGateway1EIP: 132 | Type: AWS::EC2::EIP 133 | DependsOn: InternetGatewayAttachment 134 | Properties: 135 | Domain: vpc 136 | 137 | NatGateway2EIP: 138 | Type: AWS::EC2::EIP 139 | DependsOn: InternetGatewayAttachment 140 | Properties: 141 | Domain: vpc 142 | 143 | NatGateway1: 144 | Type: AWS::EC2::NatGateway 145 | Properties: 146 | AllocationId: !GetAtt NatGateway1EIP.AllocationId 147 | SubnetId: !Ref PublicSubnet1 148 | 149 | NatGateway2: 150 | Type: AWS::EC2::NatGateway 151 | Properties: 152 | AllocationId: !GetAtt NatGateway2EIP.AllocationId 153 | SubnetId: !Ref PublicSubnet2 154 | 155 | PublicRouteTable: 156 | Type: AWS::EC2::RouteTable 157 | Properties: 158 | VpcId: !Ref VPC 159 | Tags: 160 | - Key: Name 161 | Value: !Sub ${EnvironmentName} Public Routes 162 | 163 | DefaultPublicRoute: 164 | Type: AWS::EC2::Route 165 | DependsOn: InternetGatewayAttachment 166 | Properties: 167 | RouteTableId: !Ref PublicRouteTable 168 | DestinationCidrBlock: 0.0.0.0/0 169 | GatewayId: !Ref InternetGateway 170 | 171 | PublicSubnet1RouteTableAssociation: 172 | Type: AWS::EC2::SubnetRouteTableAssociation 173 | Properties: 174 | RouteTableId: !Ref PublicRouteTable 175 | SubnetId: !Ref PublicSubnet1 176 | 177 | PublicSubnet2RouteTableAssociation: 178 | Type: AWS::EC2::SubnetRouteTableAssociation 179 | Properties: 180 | RouteTableId: !Ref PublicRouteTable 181 | SubnetId: !Ref PublicSubnet2 182 | 183 | 184 | PrivateRouteTable1: 185 | Type: AWS::EC2::RouteTable 186 | Properties: 187 | VpcId: !Ref VPC 188 | Tags: 189 | - Key: Name 190 | Value: !Sub ${EnvironmentName} Private Routes (AZ1) 191 | 192 | DefaultPrivateRoute1: 193 | Type: AWS::EC2::Route 194 | Properties: 195 | RouteTableId: !Ref PrivateRouteTable1 196 | DestinationCidrBlock: 0.0.0.0/0 197 | NatGatewayId: !Ref NatGateway1 198 | 199 | PrivateSubnet1RouteTableAssociation: 200 | Type: AWS::EC2::SubnetRouteTableAssociation 201 | Properties: 202 | RouteTableId: !Ref PrivateRouteTable1 203 | SubnetId: !Ref 
PrivateSubnet1 204 | 205 | PrivateRouteTable2: 206 | Type: AWS::EC2::RouteTable 207 | Properties: 208 | VpcId: !Ref VPC 209 | Tags: 210 | - Key: Name 211 | Value: !Sub ${EnvironmentName} Private Routes (AZ2) 212 | 213 | DefaultPrivateRoute2: 214 | Type: AWS::EC2::Route 215 | Properties: 216 | RouteTableId: !Ref PrivateRouteTable2 217 | DestinationCidrBlock: 0.0.0.0/0 218 | NatGatewayId: !Ref NatGateway2 219 | 220 | PrivateSubnet2RouteTableAssociation: 221 | Type: AWS::EC2::SubnetRouteTableAssociation 222 | Properties: 223 | RouteTableId: !Ref PrivateRouteTable2 224 | SubnetId: !Ref PrivateSubnet2 225 | 226 | SecurityGroup: 227 | Type: AWS::EC2::SecurityGroup 228 | Properties: 229 | GroupName: "mwaa-security-group" 230 | GroupDescription: "Security group with a self-referencing inbound rule." 231 | VpcId: !Ref VPC 232 | 233 | SecurityGroupIngress: 234 | Type: AWS::EC2::SecurityGroupIngress 235 | Properties: 236 | GroupId: !Ref SecurityGroup 237 | IpProtocol: "-1" 238 | SourceSecurityGroupId: !Ref SecurityGroup 239 | 240 | EnvironmentBucket: 241 | Type: AWS::S3::Bucket 242 | Properties: 243 | VersioningConfiguration: 244 | Status: Enabled 245 | PublicAccessBlockConfiguration: 246 | BlockPublicAcls: true 247 | BlockPublicPolicy: true 248 | IgnorePublicAcls: true 249 | RestrictPublicBuckets: true 250 | 251 | ##################################################################################################################### 252 | # CREATE MWAA 253 | ##################################################################################################################### 254 | 255 | MwaaEnvironment: 256 | Type: AWS::MWAA::Environment 257 | DependsOn: MwaaExecutionPolicy 258 | Properties: 259 | Name: !Sub "${AWS::StackName}-MwaaEnvironment" 260 | SourceBucketArn: !GetAtt EnvironmentBucket.Arn 261 | ExecutionRoleArn: !GetAtt MwaaExecutionRole.Arn 262 | DagS3Path: dags 263 | NetworkConfiguration: 264 | SecurityGroupIds: 265 | - !GetAtt SecurityGroup.GroupId 266 | SubnetIds: 267 
| - !Ref PrivateSubnet1 268 | - !Ref PrivateSubnet2 269 | WebserverAccessMode: PUBLIC_ONLY 270 | MaxWorkers: !Ref MaxWorkerNodes 271 | LoggingConfiguration: 272 | DagProcessingLogs: 273 | LogLevel: !Ref DagProcessingLogs 274 | Enabled: true 275 | SchedulerLogs: 276 | LogLevel: !Ref SchedulerLogsLevel 277 | Enabled: true 278 | TaskLogs: 279 | LogLevel: !Ref TaskLogsLevel 280 | Enabled: true 281 | WorkerLogs: 282 | LogLevel: !Ref WorkerLogsLevel 283 | Enabled: true 284 | WebserverLogs: 285 | LogLevel: !Ref WebserverLogsLevel 286 | Enabled: true 287 | SecurityGroup: 288 | Type: AWS::EC2::SecurityGroup 289 | Properties: 290 | VpcId: !Ref VPC 291 | GroupDescription: !Sub "Security Group for Amazon MWAA Environment ${AWS::StackName}-MwaaEnvironment" 292 | GroupName: !Sub "airflow-security-group-${AWS::StackName}-MwaaEnvironment" 293 | 294 | SecurityGroupIngress: 295 | Type: AWS::EC2::SecurityGroupIngress 296 | Properties: 297 | GroupId: !Ref SecurityGroup 298 | IpProtocol: "-1" 299 | SourceSecurityGroupId: !Ref SecurityGroup 300 | 301 | SecurityGroupEgress: 302 | Type: AWS::EC2::SecurityGroupEgress 303 | Properties: 304 | GroupId: !Ref SecurityGroup 305 | IpProtocol: "-1" 306 | CidrIp: "0.0.0.0/0" 307 | 308 | MwaaExecutionRole: 309 | Type: AWS::IAM::Role 310 | Properties: 311 | AssumeRolePolicyDocument: 312 | Version: 2012-10-17 313 | Statement: 314 | - Effect: Allow 315 | Principal: 316 | Service: 317 | - airflow-env.amazonaws.com 318 | - airflow.amazonaws.com 319 | Action: 320 | - "sts:AssumeRole" 321 | Path: "/service-role/" 322 | 323 | MwaaExecutionPolicy: 324 | DependsOn: EnvironmentBucket 325 | Type: AWS::IAM::ManagedPolicy 326 | Properties: 327 | Roles: 328 | - !Ref MwaaExecutionRole 329 | PolicyDocument: 330 | Version: 2012-10-17 331 | Statement: 332 | - Effect: Allow 333 | Action: airflow:PublishMetrics 334 | Resource: 335 | - !Sub "arn:aws:airflow:${AWS::Region}:${AWS::AccountId}:environment/${EnvironmentName}" 336 | - Effect: Deny 337 | Action: 
s3:ListAllMyBuckets 338 | Resource: 339 | - !Sub "${EnvironmentBucket.Arn}" 340 | - !Sub "${EnvironmentBucket.Arn}/*" 341 | 342 | - Effect: Allow 343 | Action: 344 | - "s3:GetObject*" 345 | - "s3:GetBucket*" 346 | - "s3:List*" 347 | Resource: 348 | - !Sub "${EnvironmentBucket.Arn}" 349 | - !Sub "${EnvironmentBucket.Arn}/*" 350 | - Effect: Allow 351 | Action: 352 | - logs:DescribeLogGroups 353 | Resource: "*" 354 | 355 | - Effect: Allow 356 | Action: 357 | - logs:CreateLogStream 358 | - logs:CreateLogGroup 359 | - logs:PutLogEvents 360 | - logs:GetLogEvents 361 | - logs:GetLogRecord 362 | - logs:GetLogGroupFields 363 | - logs:GetQueryResults 364 | - logs:DescribeLogGroups 365 | Resource: 366 | - !Sub "arn:aws:logs:${AWS::Region}:${AWS::AccountId}:log-group:airflow-${AWS::StackName}*" 367 | - Effect: Allow 368 | Action: cloudwatch:PutMetricData 369 | Resource: "*" 370 | - Effect: Allow 371 | Action: 372 | - sqs:ChangeMessageVisibility 373 | - sqs:DeleteMessage 374 | - sqs:GetQueueAttributes 375 | - sqs:GetQueueUrl 376 | - sqs:ReceiveMessage 377 | - sqs:SendMessage 378 | Resource: 379 | - !Sub "arn:aws:sqs:${AWS::Region}:*:airflow-celery-*" 380 | - Effect: Allow 381 | Action: 382 | - kms:Decrypt 383 | - kms:DescribeKey 384 | - "kms:GenerateDataKey*" 385 | - kms:Encrypt 386 | NotResource: !Sub "arn:aws:kms:*:${AWS::AccountId}:key/*" 387 | Condition: 388 | StringLike: 389 | "kms:ViaService": 390 | - !Sub "sqs.${AWS::Region}.amazonaws.com" 391 | Outputs: 392 | VPC: 393 | Description: A reference to the created VPC 394 | Value: !Ref VPC 395 | 396 | PublicSubnets: 397 | Description: A list of the public subnets 398 | Value: !Join [ ",", [ !Ref PublicSubnet1, !Ref PublicSubnet2 ]] 399 | 400 | PrivateSubnets: 401 | Description: A list of the private subnets 402 | Value: !Join [ ",", [ !Ref PrivateSubnet1, !Ref PrivateSubnet2 ]] 403 | 404 | PublicSubnet1: 405 | Description: A reference to the public subnet in the 1st Availability Zone 406 | Value: !Ref PublicSubnet1 407 | 408 
| PublicSubnet2: 409 | Description: A reference to the public subnet in the 2nd Availability Zone 410 | Value: !Ref PublicSubnet2 411 | 412 | PrivateSubnet1: 413 | Description: A reference to the private subnet in the 1st Availability Zone 414 | Value: !Ref PrivateSubnet1 415 | 416 | PrivateSubnet2: 417 | Description: A reference to the private subnet in the 2nd Availability Zone 418 | Value: !Ref PrivateSubnet2 419 | 420 | SecurityGroupIngress: 421 | Description: Security group with self-referencing inbound rule 422 | Value: !Ref SecurityGroupIngress 423 | 424 | MwaaApacheAirflowUI: 425 | Description: MWAA Environment 426 | Value: !Sub "https://${MwaaEnvironment.WebserverUrl}" 427 | -------------------------------------------------------------------------------- /tests/validation_test.py: -------------------------------------------------------------------------------- 1 | import os 2 | import subprocess 3 | from pathlib import Path 4 | 5 | from asyncio import FIRST_EXCEPTION, Future 6 | from concurrent import futures 7 | from concurrent.futures import ThreadPoolExecutor 8 | 9 | import pytest 10 | from docker.errors import ImageNotFound 11 | 12 | IS_ARM = os.uname().machine == "arm64" 13 | 14 | ASTRO_IMAGES = [ 15 | # Not used, harder to install on 16 | "quay.io/astronomer/ap-airflow:2.0.2-buster-onbuild", 17 | "quay.io/astronomer/ap-airflow:2.1.4-buster-onbuild", 18 | "quay.io/astronomer/ap-airflow:2.2.5-onbuild", 19 | "quay.io/astronomer/ap-airflow:2.3.4-onbuild", 20 | "quay.io/astronomer/astro-runtime:4.2.8", 21 | "quay.io/astronomer/ap-airflow:2.4.3-onbuild", 22 | "quay.io/astronomer/astro-runtime:5.4.0", 23 | "quay.io/astronomer/astro-runtime:6.6.0", 24 | "quay.io/astronomer/astro-runtime:7.6.0", 25 | "quay.io/astronomer/astro-runtime:8.5.0", 26 | "quay.io/astronomer/astro-runtime:9.2.0", 27 | ] 28 | 29 | IMAGES = [ 30 | "apache/airflow:slim-2.10.3", 31 | "apache/airflow:slim-2.9.3", 32 | "apache/airflow:slim-2.8.1", 33 | "apache/airflow:slim-2.7.3", 34 | 
"apache/airflow:slim-2.6.0", 35 | "apache/airflow:slim-2.5.3", 36 | "apache/airflow:slim-2.4.0", 37 | "apache/airflow:2.3.4", 38 | "apache/airflow:2.2.4", 39 | "apache/airflow:2.1.3", 40 | "apache/airflow:2.0.2", 41 | # # "apache/airflow:1.10.15", 42 | # # "apache/airflow:1.10.10", 43 | ] 44 | 45 | 46 | def skip_no_docker(has_docker): 47 | """Skips this test if we don't have docker""" 48 | if not has_docker: 49 | pytest.skip("skipped, no docker") 50 | 51 | 52 | @pytest.fixture 53 | def local_version(project_root): 54 | from astronomer_starship import __version__ 55 | 56 | return __version__ 57 | 58 | 59 | @pytest.fixture(scope="session") 60 | def docker_client(): 61 | import docker 62 | 63 | return docker.from_env() 64 | 65 | 66 | @pytest.fixture(scope="session") 67 | def has_docker(): 68 | from shutil import which 69 | 70 | return which("docker") is not None 71 | 72 | 73 | def run_in_container( 74 | docker_client, 75 | image: str, 76 | command: str, 77 | volumes: list, 78 | environment: dict, 79 | platform: str, 80 | ) -> tuple[int, bytes]: 81 | from docker.models.containers import Container 82 | 83 | container: Container = docker_client.containers.run( 84 | image=image, 85 | platform=platform, 86 | # user="root", 87 | entrypoint="/bin/bash", 88 | environment=environment, 89 | command=command, 90 | volumes=volumes, 91 | stdout=True, 92 | stderr=True, 93 | detach=True, 94 | ) 95 | exit_code = container.wait() 96 | logs = container.logs() 97 | container.remove() 98 | return exit_code, logs 99 | 100 | 101 | def test_docker_pytest(has_docker, docker_client, project_root, local_version): 102 | skip_no_docker(has_docker) 103 | version = local_version.replace("v", "") 104 | 105 | with ThreadPoolExecutor() as executor: 106 | 107 | def run_test_for_image(image: str): 108 | if IS_ARM: 109 | try: 110 | docker_client.images.pull(image, platform="linux/amd64") 111 | platform = "linux/amd64" 112 | except ImageNotFound: 113 | docker_client.images.pull(image, 
platform="linux/x86_64") 114 | platform = "linux/x86_64" 115 | else: 116 | docker_client.images.pull(image, platform="linux/x86_64") 117 | platform = "linux/x86_64" 118 | 119 | exit_code, logs = run_in_container( 120 | docker_client, 121 | image=image, 122 | platform=platform, 123 | command=f"/usr/local/airflow/starship/tests/docker_test/run_container_test.sh " 124 | f"{image} " 125 | f"starship/dist/astronomer_starship-{version}-py3-none-any.whl", 126 | volumes=[ 127 | f"{project_root}/dist/:/usr/local/airflow/starship/dist/:rw", 128 | f"{project_root}/tests/docker_test:/usr/local/airflow/starship/tests/docker_test:rw", 129 | ], 130 | environment={ 131 | "AIRFLOW__SCHEDULER__USE_JOB_SCHEDULE": "False", 132 | "DOCKER_TEST": "True", 133 | "PYTHON_PATH": "$PYTHON_PATH:/usr/local/airflow/:/op/airflow/", 134 | }, 135 | ) 136 | if exit_code["StatusCode"] != 0: 137 | print(f"[IMAGE={image}] exit code: {exit_code}\n{logs.decode()}") 138 | log_file_name = f'{image.rsplit(":", maxsplit=1)[-1]}.test.log' 139 | Path(log_file_name).write_bytes(logs) 140 | assert exit_code == {"StatusCode": 0}, f"exit code: {exit_code}\n{logs}" 141 | assert ( 142 | b"[STARSHIP-PYTEST-SUCCESS" in logs 143 | ), f"Looking for success in {exit_code}\n{logs}" 144 | 145 | print("Building...") 146 | build_logs = subprocess.run( 147 | "just build-backend", shell=True, capture_output=True, check=True 148 | ) 149 | print(build_logs) 150 | tests = [executor.submit(run_test_for_image, image) for image in IMAGES] 151 | for test in futures.wait(tests, return_when=FIRST_EXCEPTION)[0]: 152 | test: Future 153 | if test.exception(): 154 | raise test.exception() 155 | 156 | 157 | def test_version(local_version): 158 | import requests 159 | 160 | package = "astronomer-starship" 161 | 162 | releases = requests.get(f"https://pypi.org/pypi/{package}/json").json()["releases"] 163 | shipped_versions = [] 164 | [shipped_versions.append(version) for version, details in releases.items()] 165 | 166 | assert local_version 
not in shipped_versions 167 | --------------------------------------------------------------------------------