├── .gitignore ├── LICENSE ├── README.md ├── package-lock.json ├── package.json ├── public ├── component_library.yaml ├── favicon.ico ├── index.html ├── logo192.png ├── logo24.png ├── logo48.png ├── logo512.png ├── manifest.json ├── pipeline_library.yaml └── robots.txt ├── src ├── App.css ├── App.tsx ├── AppFooter.tsx ├── DragNDrop │ ├── AppSettingsDialog.tsx │ ├── ArgumentsEditor.tsx │ ├── ArgumentsEditorDialog.tsx │ ├── ComponentLibrary.tsx │ ├── ComponentSearch.tsx │ ├── ComponentTaskNode.tsx │ ├── DraggableComponent.tsx │ ├── GoogleCloud.tsx │ ├── GraphComponentExporter.tsx │ ├── GraphComponentLink.tsx │ ├── GraphComponentSpecFlow.tsx │ ├── KubeflowPipelinesSubmitter.tsx │ ├── PipelineAutoSaver.ts │ ├── PipelineLibrary.tsx │ ├── PipelineSubmitter.tsx │ ├── SamplePipelineLibrary.tsx │ ├── Sidebar.tsx │ ├── UserComponentLibrary.tsx │ ├── VertexAiExporter.tsx │ ├── dnd.css │ ├── index.tsx │ └── testData │ │ └── name_collision_test.pipeline.component.yaml ├── appSettings.ts ├── cacheUtils.ts ├── compilers │ ├── Argo │ │ ├── argo-ui │ │ │ └── src │ │ │ │ └── models │ │ │ │ └── kubernetes.ts │ │ ├── argo-workflows │ │ │ └── ui │ │ │ │ └── src │ │ │ │ └── models │ │ │ │ └── workflows.ts │ │ ├── argoCompiler.test.ts │ │ ├── argoCompiler.ts │ │ └── testData │ │ │ ├── Data_passing_pipeline │ │ │ └── argo_workflow.yaml │ │ │ ├── Name_collision_pipeline │ │ │ └── argo_workflow.yaml │ │ │ └── XGBoost_pipeline │ │ │ └── argo_workflow.yaml │ ├── GoogleCloudVertexAIPipelines │ │ ├── testData │ │ │ ├── Data_passing_pipeline │ │ │ │ └── google_cloud_vertex_pipeline.json │ │ │ ├── Name_collision_pipeline │ │ │ │ └── google_cloud_vertex_pipeline.json │ │ │ └── XGBoost_pipeline │ │ │ │ └── google_cloud_vertex_pipeline.json │ │ ├── vertexAiCompiler.test.ts │ │ ├── vertexAiCompiler.ts │ │ └── vertexPipelineSpec.ts │ └── testData │ │ ├── Data_passing_pipeline │ │ └── pipeline.component.yaml │ │ ├── Name_collision_pipeline │ │ └── pipeline.component.yaml │ │ └── XGBoost_pipeline │ │ 
└── pipeline.component.yaml ├── componentSpec.ts ├── componentStore.ts ├── github.ts ├── index.css ├── index.tsx ├── logo.svg ├── react-app-env.d.ts ├── reportWebVitals.ts ├── service-worker.ts ├── serviceWorkerRegistration.ts ├── setupTests.ts ├── userDataMigration.ts └── utils.ts └── tsconfig.json /.gitignore: -------------------------------------------------------------------------------- 1 | # See https://help.github.com/articles/ignoring-files/ for more about ignoring files. 2 | 3 | # dependencies 4 | /node_modules 5 | /.pnp 6 | .pnp.js 7 | 8 | # testing 9 | /coverage 10 | 11 | # production 12 | /build 13 | 14 | # misc 15 | .DS_Store 16 | .env.local 17 | .env.development.local 18 | .env.test.local 19 | .env.production.local 20 | 21 | npm-debug.log* 22 | yarn-debug.log* 23 | yarn-error.log* 24 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 
22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. 
For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. 
If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. 
You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. 
(Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Cloud Pipelines Editor 2 | 3 | Cloud Pipelines Editor is a web app that allows the users to build and run Machine Learning pipelines using drag and drop without having to set up development environment. 4 | 5 | ## Video 6 | 7 | Please take a look at the short video demonstrating the visual pipeline editor. 8 | 9 | [Cloud Pipelines - Build machine learning pipelines without writing code](https://www.youtube.com/watch?v=7g22nupCDes) 10 | [![image](https://user-images.githubusercontent.com/1829149/127566707-fceb9e41-1126-4588-b94a-c69e87fe0488.png)](https://www.youtube.com/watch?v=7g22nupCDes) 11 | 12 | ## Demo 13 | 14 | [Demo](https://cloud-pipelines.net/pipeline-editor) 15 | 16 | The early alpha version of the Cloud Pipelines Editor app shown in this video is now available at . The app is open and standalone. 
No registration is required. 17 | 18 | Please check it out and report any bugs you find using [GitHub Issues](https://github.com/Cloud-Pipelines/pipeline-editor/issues). 19 | 20 | The app is under active development, so expect some breakages as I work on the app and do not rely on the app for production. 21 | 22 | App features: 23 | 24 | * Build pipeline using drag and drop 25 | * Edit component arguments 26 | * Submit the pipeline to [Google Cloud Vertex Pipelines](https://cloud.google.com/vertex-ai/docs/pipelines/) for execution. 27 | * Fully compatible with the Kubeflow Pipelines' components (`component.yaml` files) You can find some components here: [Ark-kun/pipeline_components](https://github.com/Ark-kun/pipeline_components/tree/master/components) or [kubeflow/pipelines/components](https://github.com/kubeflow/pipelines/tree/master/components#index-of-components) 28 | * Preloaded component library 29 | * User component library (add private components) 30 | * Component search 31 | * Import and export pipelines 32 | 33 | There are many features that I want to add, but I want to prioritize them based on your feedback. 
34 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "pipeline-editor", 3 | "homepage": "https://cloud-pipelines.net/pipeline-editor", 4 | "version": "0.1.0", 5 | "description": "Cloud Pipelines Editor is a web app that allows the users to build and run Machine Learning pipelines using drag and drop without having to set up development environment.", 6 | "keywords": [ 7 | "ML", 8 | "AI", 9 | "Machine Learning", 10 | "pipelines", 11 | "containers", 12 | "cloud", 13 | "Kubernetes", 14 | "MLOps" 15 | ], 16 | "bugs": "https://github.com/Cloud-Pipelines/pipeline-editor/issues", 17 | "license": "Apache-2.0", 18 | "author": { 19 | "name": "Alexey Volkov", 20 | "email": "alexey.volkov+oss@ark-kun.com", 21 | "url": "http://ark-kun.com/" 22 | }, 23 | "repository": "github:Cloud-Pipelines/pipeline-editor", 24 | "private": true, 25 | "dependencies": { 26 | "@material-ui/core": "^4.12.2", 27 | "@material-ui/icons": "^4.11.2", 28 | "@testing-library/jest-dom": "^5.14.1", 29 | "@testing-library/react": "^11.2.7", 30 | "@testing-library/user-event": "^12.8.3", 31 | "@types/gapi": "^0.0.39", 32 | "@types/jest": "^26.0.23", 33 | "@types/js-yaml": "^4.0.1", 34 | "@types/react": "^17.0.2", 35 | "@types/react-dom": "^17.0.2", 36 | "gh-pages": "^3.2.2", 37 | "js-yaml": "^4.1.0", 38 | "localforage": "^1.9.0", 39 | "react-dropzone": "^11.3.4", 40 | "react-flow-renderer": "^9.6.0", 41 | "react-scripts": "4.0.3", 42 | "typescript": "^4.3.4", 43 | "web-vitals": "^1.1.2" 44 | }, 45 | "scripts": { 46 | "predeploy": "npm run build", 47 | "deploy": "gh-pages -d build", 48 | "predeploy-prod": "npm run build", 49 | "deploy-prod": "gh-pages -d build --dest pipeline-editor --repo https://github.com/Cloud-Pipelines/pipeline-editor-github-pages-prod.git", 50 | "start": "react-scripts start", 51 | "build": "react-scripts build", 52 | "test": "react-scripts 
test", 53 | "eject": "react-scripts eject" 54 | }, 55 | "eslintConfig": { 56 | "extends": [ 57 | "react-app", 58 | "react-app/jest" 59 | ] 60 | }, 61 | "browserslist": { 62 | "production": [ 63 | ">0.2%", 64 | "not dead", 65 | "not op_mini all" 66 | ], 67 | "development": [ 68 | "last 1 chrome version", 69 | "last 1 firefox version", 70 | "last 1 safari version" 71 | ] 72 | } 73 | } 74 | -------------------------------------------------------------------------------- /public/component_library.yaml: -------------------------------------------------------------------------------- 1 | annotations: {} 2 | folders: 3 | - name: "Quick start" 4 | components: 5 | # - url: "https://raw.githubusercontent.com/Ark-kun/pipelines/60a2612541ec08c6a85c237d2ec7525b12543a43/components/datasets/Chicago_Taxi_Trips/component.yaml" 6 | - url: "https://raw.githubusercontent.com/Ark-kun/pipelines/2463ecda532517462590d75e6e14a8af6b55869a/components/datasets/Chicago_Taxi_Trips/component.yaml" 7 | - url: "https://raw.githubusercontent.com/Ark-kun/pipeline_components/d8c4cf5e6403bc65bcf8d606e6baf87e2528a3dc/components/XGBoost/Train/component.yaml" 8 | - url: "https://raw.githubusercontent.com/Ark-kun/pipeline_components/d8c4cf5e6403bc65bcf8d606e6baf87e2528a3dc/components/XGBoost/Predict/component.yaml" 9 | 10 | - name: "Datasets" 11 | components: 12 | # - url: "https://raw.githubusercontent.com/Ark-kun/pipeline_components/8dda6ec74d859a0112907fab8bc987a177b9fa4b/components/datasets/Chicago_Taxi_Trips/component.yaml" 13 | # Has modified default column set. TODO: Perhaps replace with a graph component. 
14 | - url: "https://raw.githubusercontent.com/Ark-kun/pipelines/2463ecda532517462590d75e6e14a8af6b55869a/components/datasets/Chicago_Taxi_Trips/component.yaml" 15 | # - url: "https://raw.githubusercontent.com/Ark-kun/pipeline_components/d8c4cf5e6403bc65bcf8d606e6baf87e2528a3dc/components/datasets/HuggingFace/Load_dataset/component.yaml" 16 | 17 | - name: "Data manipulation" 18 | folders: 19 | - name: JSON 20 | components: 21 | - url: "https://raw.githubusercontent.com/Ark-kun/pipeline_components/dcf4fdde4876e8d76aa0131ad4d67c47b2b5591a/components/json/Get_element_by_index/component.yaml" 22 | - url: "https://raw.githubusercontent.com/Ark-kun/pipeline_components/dcf4fdde4876e8d76aa0131ad4d67c47b2b5591a/components/json/Get_element_by_key/component.yaml" 23 | - url: "https://raw.githubusercontent.com/Ark-kun/pipeline_components/d8c4cf5e6403bc65bcf8d606e6baf87e2528a3dc/components/json/Query/component.yaml" 24 | - url: "https://raw.githubusercontent.com/Ark-kun/pipeline_components/d8c4cf5e6403bc65bcf8d606e6baf87e2528a3dc/components/json/Build_dict/component.yaml" 25 | - url: "https://raw.githubusercontent.com/Ark-kun/pipeline_components/d8c4cf5e6403bc65bcf8d606e6baf87e2528a3dc/components/json/Build_list/component.yaml" 26 | - url: "https://raw.githubusercontent.com/Ark-kun/pipeline_components/aecac18d4023c73c561d7f21192253e9593b9932/components/json/Build_list_of_strings/component.yaml" 27 | - url: "https://raw.githubusercontent.com/Ark-kun/pipeline_components/bb9d7518b3a23e945c8cc1663942063c6b92c20f/components/json/Build_list_of_integers/component.yaml" 28 | - url: "https://raw.githubusercontent.com/Ark-kun/pipeline_components/bb9d7518b3a23e945c8cc1663942063c6b92c20f/components/json/Build_list_of_floats/component.yaml" 29 | - url: "https://raw.githubusercontent.com/Ark-kun/pipeline_components/d8c4cf5e6403bc65bcf8d606e6baf87e2528a3dc/components/json/Combine_lists/component.yaml" 30 | components: 31 | - url: 
"https://raw.githubusercontent.com/Ark-kun/pipeline_components/0f0650b8446277b10f7ab48d220e413eef04ec69/components/pandas/Select_columns/in_CSV_format/component.yaml" 32 | - url: "https://raw.githubusercontent.com/Ark-kun/pipeline_components/151411a5b719916b47505cd21c4541c1a5b62400/components/pandas/Fill_all_missing_values/in_CSV_format/component.yaml" 33 | - url: "https://raw.githubusercontent.com/Ark-kun/pipeline_components/0c7b4ea8c7048cc5cd59c161bcbfa5b742738e99/components/pandas/Binarize_column/in_CSV_format/component.yaml" 34 | - url: "https://raw.githubusercontent.com/Ark-kun/pipeline_components/d8c4cf5e6403bc65bcf8d606e6baf87e2528a3dc/components/pandas/Transform_DataFrame/in_CSV_format/component.yaml" 35 | - url: "https://raw.githubusercontent.com/Ark-kun/pipeline_components/d8c4cf5e6403bc65bcf8d606e6baf87e2528a3dc/components/dataset_manipulation/split_data_into_folds/in_CSV/component.yaml" 36 | 37 | - name: "Upload/Download" 38 | components: 39 | - url: "https://raw.githubusercontent.com/Ark-kun/pipeline_components/d8c4cf5e6403bc65bcf8d606e6baf87e2528a3dc/components/web/Download/component.yaml" 40 | - url: "https://raw.githubusercontent.com/Ark-kun/pipeline_components/d8c4cf5e6403bc65bcf8d606e6baf87e2528a3dc/components/google-cloud/storage/download/component.yaml" 41 | - url: "https://raw.githubusercontent.com/Ark-kun/pipeline_components/6210648f30b2b3a8c01cc10be338da98300efb6b/components/google-cloud/storage/upload_to_unique_uri/component.yaml" 42 | - url: "https://raw.githubusercontent.com/Ark-kun/pipeline_components/6210648f30b2b3a8c01cc10be338da98300efb6b/components/google-cloud/storage/upload_to_explicit_uri/component.yaml" 43 | - url: "https://raw.githubusercontent.com/Ark-kun/pipeline_components/cca2d8569d01b527df10c629258be04d52eacc43/components/Download_and_upload/IPFS/Download/component.yaml" 44 | 45 | - name: ML frameworks 46 | folders: 47 | - name: "Scikit learn" 48 | components: 49 | - url: 
"https://raw.githubusercontent.com/Ark-kun/pipeline_components/f807e02b54d4886c65a05f40848fd51c72407f40/components/ML_frameworks/Scikit_learn/Train_linear_regression_model/from_CSV/component.yaml" 50 | - url: "https://raw.githubusercontent.com/Ark-kun/pipeline_components/cb44b75c9c062fcc40c2b905b2024b4493dbc62b/components/ML_frameworks/Scikit_learn/Train_logistic_regression_model/from_CSV/component.yaml" 51 | 52 | - name: "XGBoost" 53 | components: 54 | - url: "https://raw.githubusercontent.com/Ark-kun/pipeline_components/d8c4cf5e6403bc65bcf8d606e6baf87e2528a3dc/components/XGBoost/Train/component.yaml" 55 | - url: "https://raw.githubusercontent.com/Ark-kun/pipeline_components/d8c4cf5e6403bc65bcf8d606e6baf87e2528a3dc/components/XGBoost/Predict/component.yaml" 56 | 57 | - name: "PyTorch" 58 | components: 59 | - url: "https://raw.githubusercontent.com/Ark-kun/pipeline_components/d8c4cf5e6403bc65bcf8d606e6baf87e2528a3dc/components/PyTorch/Create_fully_connected_network/component.yaml" 60 | - url: "https://raw.githubusercontent.com/Ark-kun/pipeline_components/d8c4cf5e6403bc65bcf8d606e6baf87e2528a3dc/components/PyTorch/Train_PyTorch_model/from_CSV/component.yaml" 61 | - url: "https://raw.githubusercontent.com/Ark-kun/pipeline_components/d8c4cf5e6403bc65bcf8d606e6baf87e2528a3dc/components/PyTorch/Convert_to_OnnxModel_from_PyTorchScriptModule/component.yaml" 62 | - url: "https://raw.githubusercontent.com/Ark-kun/pipeline_components/46d51383e6554b7f3ab4fd8cf614d8c2b422fb22/components/PyTorch/Create_PyTorch_Model_Archive/with_base_handler/component.yaml" 63 | 64 | - name: "Tensorflow" 65 | components: 66 | - url: "https://raw.githubusercontent.com/Ark-kun/pipeline_components/f3a9769d35a057c31a498e0667cae2e4a830c5b0/components/tensorflow/Create_fully_connected_network/component.yaml" 67 | - url: "https://raw.githubusercontent.com/Ark-kun/pipeline_components/c504a4010348c50eaaf6d4337586ccc008f4dcef/components/tensorflow/Train_model_using_Keras/on_CSV/component.yaml" 68 | 69 | 
- name: "CatBoost" 70 | components: 71 | - url: "https://raw.githubusercontent.com/Ark-kun/pipeline_components/d8c4cf5e6403bc65bcf8d606e6baf87e2528a3dc/components/CatBoost/Train_regression/from_CSV/component.yaml" 72 | - url: "https://raw.githubusercontent.com/Ark-kun/pipeline_components/d8c4cf5e6403bc65bcf8d606e6baf87e2528a3dc/components/CatBoost/Train_classifier/from_CSV/component.yaml" 73 | - url: "https://raw.githubusercontent.com/Ark-kun/pipeline_components/d8c4cf5e6403bc65bcf8d606e6baf87e2528a3dc/components/CatBoost/Predict_values/from_CSV/component.yaml" 74 | - url: "https://raw.githubusercontent.com/Ark-kun/pipeline_components/d8c4cf5e6403bc65bcf8d606e6baf87e2528a3dc/components/CatBoost/Predict_classes/from_CSV/component.yaml" 75 | - url: "https://raw.githubusercontent.com/Ark-kun/pipeline_components/d8c4cf5e6403bc65bcf8d606e6baf87e2528a3dc/components/CatBoost/Predict_class_probabilities/from_CSV/component.yaml" 76 | - url: "https://raw.githubusercontent.com/Ark-kun/pipeline_components/d8c4cf5e6403bc65bcf8d606e6baf87e2528a3dc/components/CatBoost/convert_CatBoostModel_to_ONNX/component.yaml" 77 | 78 | - name: "Vowpal_Wabbit" 79 | components: 80 | - url: "https://raw.githubusercontent.com/Ark-kun/pipeline_components/a2a629e776d5fa0204ce71370cab23282d3e4278/components/ML_frameworks/Vowpal_Wabbit/Create_JSON_dataset/from_CSV/component.yaml" 81 | - url: "https://raw.githubusercontent.com/Ark-kun/pipeline_components/a2a629e776d5fa0204ce71370cab23282d3e4278/components/ML_frameworks/Vowpal_Wabbit/Train_regression_model/from_VowpalWabbitJsonDataset/component.yaml" 82 | - url: "https://raw.githubusercontent.com/Ark-kun/pipeline_components/a2a629e776d5fa0204ce71370cab23282d3e4278/components/ML_frameworks/Vowpal_Wabbit/Predict/from_VowpalWabbitJsonDataset/component.yaml" 83 | 84 | - name: TFX 85 | components: 86 | - url: 
"https://raw.githubusercontent.com/Ark-kun/pipeline_components/d8c4cf5e6403bc65bcf8d606e6baf87e2528a3dc/components/deprecated/tfx/ExampleGen/CsvExampleGen/component.yaml" 87 | - url: "https://raw.githubusercontent.com/Ark-kun/pipeline_components/d8c4cf5e6403bc65bcf8d606e6baf87e2528a3dc/components/deprecated/tfx/StatisticsGen/component.yaml" 88 | - url: "https://raw.githubusercontent.com/Ark-kun/pipeline_components/d8c4cf5e6403bc65bcf8d606e6baf87e2528a3dc/components/deprecated/tfx/SchemaGen/component.yaml" 89 | - url: "https://raw.githubusercontent.com/Ark-kun/pipeline_components/d8c4cf5e6403bc65bcf8d606e6baf87e2528a3dc/components/deprecated/tfx/ExampleValidator/component.yaml" 90 | - url: "https://raw.githubusercontent.com/Ark-kun/pipeline_components/d8c4cf5e6403bc65bcf8d606e6baf87e2528a3dc/components/deprecated/tfx/Transform/component.yaml" 91 | - url: "https://raw.githubusercontent.com/Ark-kun/pipeline_components/d8c4cf5e6403bc65bcf8d606e6baf87e2528a3dc/components/deprecated/tfx/Trainer/component.yaml" 92 | - url: "https://raw.githubusercontent.com/Ark-kun/pipeline_components/d8c4cf5e6403bc65bcf8d606e6baf87e2528a3dc/components/deprecated/tfx/Evaluator/component.yaml" 93 | 94 | - name: "Google Cloud" 95 | folders: 96 | - name: Vertex AI 97 | folders: 98 | - name: AutoML 99 | components: 100 | - url: "https://raw.githubusercontent.com/Ark-kun/pipeline_components/47f3621344c884666a926c8a15d77562f1cc5e0a/components/google-cloud/Vertex_AI/AutoML/Tables/Create_dataset/from_CSV/component.yaml" 101 | - url: "https://raw.githubusercontent.com/Ark-kun/pipeline_components/00d020c29a144cee7fd35f2d05053addb942f536/components/google-cloud/Vertex_AI/AutoML/Tables/Create_dataset/from_GCS/component.yaml" 102 | - url: "https://raw.githubusercontent.com/Ark-kun/pipeline_components/00d020c29a144cee7fd35f2d05053addb942f536/components/google-cloud/Vertex_AI/AutoML/Tables/Create_dataset/from_BigQuery/component.yaml" 103 | - url: 
"https://raw.githubusercontent.com/Ark-kun/pipeline_components/ab85ecc9c30d4d68a2993ca87861f5e531a4f41b/components/google-cloud/Vertex_AI/AutoML/Tables/Train_model/component.yaml" 104 | - url: "https://raw.githubusercontent.com/Ark-kun/pipeline_components/a31b7c9652646f2cd035a0b3a23e0723c632521b/components/google-cloud/Vertex_AI/AutoML/Tables/Get_model_tuning_trials/component.yaml" 105 | - name: Models 106 | components: 107 | - url: "https://raw.githubusercontent.com/Ark-kun/pipeline_components/2c24c0c0730c818b89f676c4dc5c9d6cb90ab01d/components/google-cloud/Vertex_AI/Models/Upload_XGBoost_model/component.yaml" 108 | - url: "https://raw.githubusercontent.com/Ark-kun/pipeline_components/25dc317e649a19a53139a08ccbe496a248693fe4/components/google-cloud/Vertex_AI/Models/Upload_Scikit-learn_pickle_model/component.yaml" 109 | - url: "https://raw.githubusercontent.com/Ark-kun/pipeline_components/719783ef44c04348ea23e247a93021d91cfe602d/components/google-cloud/Vertex_AI/Models/Upload_Tensorflow_model/component.yaml" 110 | - url: "https://raw.githubusercontent.com/Ark-kun/pipeline_components/d1e7a3ccf8f8e0324e15922d6fd90d667fc5281b/components/google-cloud/Vertex_AI/Models/Upload_PyTorch_model_archive/component.yaml" 111 | - url: "https://raw.githubusercontent.com/Ark-kun/pipeline_components/b2cdd60fe93d609111729ef64e79a8b8a2713435/components/google-cloud/Vertex_AI/Models/Deploy_to_endpoint/component.yaml" 112 | - url: "https://raw.githubusercontent.com/Ark-kun/pipeline_components/d45e011ad8b62b4fe36c12289a624e5e1573c68d/components/google-cloud/Vertex_AI/Models/Export/to_GCS/component.yaml" 113 | - name: Storage 114 | components: 115 | - url: "https://raw.githubusercontent.com/Ark-kun/pipeline_components/d8c4cf5e6403bc65bcf8d606e6baf87e2528a3dc/components/google-cloud/storage/download/component.yaml" 116 | - url: 
"https://raw.githubusercontent.com/Ark-kun/pipeline_components/6210648f30b2b3a8c01cc10be338da98300efb6b/components/google-cloud/storage/upload_to_explicit_uri/component.yaml" 117 | - url: "https://raw.githubusercontent.com/Ark-kun/pipeline_components/6210648f30b2b3a8c01cc10be338da98300efb6b/components/google-cloud/storage/upload_to_unique_uri/component.yaml" 118 | - url: "https://raw.githubusercontent.com/Ark-kun/pipeline_components/d8c4cf5e6403bc65bcf8d606e6baf87e2528a3dc/components/google-cloud/storage/list/component.yaml" 119 | - name: AI Platform (legacy) 120 | folders: 121 | - name: Optimizer 122 | components: 123 | - url: "https://raw.githubusercontent.com/Ark-kun/pipeline_components/1b87c0bdfde5d7ec039401af8561783432731402/components/google-cloud/Optimizer/Suggest_parameter_sets_based_on_measurements/component.yaml" 124 | # # !!! Google Cloud AI Platform AutoML API cannot be called from Google Cloud Vertex AI Pipelines (~by design) 125 | # - name: AutoML 126 | # components: 127 | # - url: "https://raw.githubusercontent.com/Ark-kun/pipeline_components/3862f752bb825bb8cdcae842f0b294794028376b/components/google-cloud/AutoML/Tables/Create_dataset/from_CSV/component.yaml" 128 | # - url: "https://raw.githubusercontent.com/Ark-kun/pipeline_components/46b2a4ef4ac530404958839ae397b9fd533cf154/components/google-cloud/AutoML/Tables/Create_dataset/from_GCS/component.yaml" 129 | # - url: "https://raw.githubusercontent.com/Ark-kun/pipeline_components/247a0e0cf55c28b978c7f8e44fcedba356c12e93/components/google-cloud/AutoML/Tables/Create_dataset/from_BigQuery/component.yaml" 130 | # - url: "https://raw.githubusercontent.com/Ark-kun/pipeline_components/d8c4cf5e6403bc65bcf8d606e6baf87e2528a3dc/components/gcp/automl/create_model_for_tables/component.yaml" 131 | -------------------------------------------------------------------------------- /public/favicon.ico: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/Cloud-Pipelines/pipeline-editor/17501ccc1a4e865fa4c38e1434afb0910c127f41/public/favicon.ico -------------------------------------------------------------------------------- /public/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 14 | 15 | 16 | 20 | 21 | 30 | Cloud Pipelines - Pipeline editor 31 | 32 | 33 | 34 | 35 | 41 | 42 | 43 | 44 |
45 | 55 | 56 | 57 | -------------------------------------------------------------------------------- /public/logo192.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Cloud-Pipelines/pipeline-editor/17501ccc1a4e865fa4c38e1434afb0910c127f41/public/logo192.png -------------------------------------------------------------------------------- /public/logo24.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Cloud-Pipelines/pipeline-editor/17501ccc1a4e865fa4c38e1434afb0910c127f41/public/logo24.png -------------------------------------------------------------------------------- /public/logo48.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Cloud-Pipelines/pipeline-editor/17501ccc1a4e865fa4c38e1434afb0910c127f41/public/logo48.png -------------------------------------------------------------------------------- /public/logo512.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Cloud-Pipelines/pipeline-editor/17501ccc1a4e865fa4c38e1434afb0910c127f41/public/logo512.png -------------------------------------------------------------------------------- /public/manifest.json: -------------------------------------------------------------------------------- 1 | { 2 | "short_name": "Pipeline editor", 3 | "name": "Cloud Pipelines - Pipeline editor", 4 | "icons": [ 5 | { 6 | "src": "logo48.png", 7 | "sizes": "48x48", 8 | "type": "image/png" 9 | }, 10 | { 11 | "src": "favicon.ico", 12 | "sizes": "48x48", 13 | "type": "image/x-icon" 14 | }, 15 | { 16 | "src": "logo192.png", 17 | "type": "image/png", 18 | "sizes": "192x192" 19 | }, 20 | { 21 | "src": "logo512.png", 22 | "type": "image/png", 23 | "sizes": "512x512" 24 | } 25 | ], 26 | "start_url": ".", 27 | "display": "standalone", 28 | "theme_color": "#000000", 29 
| "background_color": "#ffffff" 30 | } 31 | -------------------------------------------------------------------------------- /public/pipeline_library.yaml: -------------------------------------------------------------------------------- 1 | annotations: {} 2 | components: 3 | - url: "https://raw.githubusercontent.com/Ark-kun/pipelines/2edfd25b5ee3a4aa149c24a225a50041fbd3662d/components/XGBoost/_samples/sample_pipeline.pipeline.component.yaml" 4 | - url: "https://raw.githubusercontent.com/Ark-kun/pipelines/237cd6bc0b6db26f615c22897be20aad77270b50/components/PyTorch/_samples/Train_fully-connected_network.pipeline.component.yaml" 5 | - url: "https://raw.githubusercontent.com/Ark-kun/pipeline_components/44b0543525ab6149ce995a411f88997e7131a53d/components/google-cloud/Vertex_AI/AutoML/Tables/_samples/VertexAI.AutoML.Tables.pipeline.component.yaml" 6 | - url: "https://raw.githubusercontent.com/Ark-kun/pipelines/2765b13699ac28de523f499eeaa9eb2ed9b8798a/components/deprecated/tfx/_samples/TFX.pipeline.component.yaml" 7 | -------------------------------------------------------------------------------- /public/robots.txt: -------------------------------------------------------------------------------- 1 | # https://www.robotstxt.org/robotstxt.html 2 | User-agent: * 3 | Disallow: 4 | -------------------------------------------------------------------------------- /src/App.css: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Cloud-Pipelines/pipeline-editor/17501ccc1a4e865fa4c38e1434afb0910c127f41/src/App.css -------------------------------------------------------------------------------- /src/App.tsx: -------------------------------------------------------------------------------- 1 | /** 2 | * @license 3 | * Copyright 2021 Alexey Volkov 4 | * SPDX-License-Identifier: Apache-2.0 5 | * @author Alexey Volkov 6 | * @copyright 2021 Alexey Volkov 7 | */ 8 | 9 | import './App.css'; 10 | import DnDFlow from 
'./DragNDrop/index'; 11 | import AppFooter from "./AppFooter" 12 | 13 | function App() { 14 | return ( 15 |
16 | 17 | 18 |
19 | ); 20 | } 21 | 22 | export default App; 23 | -------------------------------------------------------------------------------- /src/AppFooter.tsx: -------------------------------------------------------------------------------- 1 | /** 2 | * @license 3 | * Copyright 2021 Alexey Volkov 4 | * SPDX-License-Identifier: Apache-2.0 5 | * @author Alexey Volkov 6 | * @copyright 2021 Alexey Volkov 7 | */ 8 | 9 | import { Link } from "@material-ui/core"; 10 | 11 | function AppFooter() { 12 | return ( 13 |
23 |
24 | 31 | About 32 | 33 | 40 | Give feedback 41 | 42 | 49 | Privacy policy 50 | 51 |
52 |
53 | ); 54 | } 55 | 56 | export default AppFooter; 57 | -------------------------------------------------------------------------------- /src/DragNDrop/AppSettingsDialog.tsx: -------------------------------------------------------------------------------- 1 | /** 2 | * @license 3 | * Copyright 2022 Alexey Volkov 4 | * SPDX-License-Identifier: Apache-2.0 5 | * @author Alexey Volkov 6 | * @copyright 2022 Alexey Volkov 7 | */ 8 | 9 | import { 10 | Button, 11 | Dialog, 12 | DialogActions, 13 | DialogContent, 14 | DialogTitle, 15 | TextField, 16 | } from "@material-ui/core"; 17 | import { useState } from "react"; 18 | import { getMutableAppSettings } from "../appSettings"; 19 | 20 | type AppSettingsDialogProps = { 21 | isOpen: boolean; 22 | handleClose: () => void; 23 | }; 24 | 25 | const AppSettingsDialog = ({ isOpen, handleClose }: AppSettingsDialogProps) => { 26 | const appSettings = getMutableAppSettings(); 27 | 28 | const [componentLibraryUrl, setComponentLibraryUrl] = useState( 29 | appSettings.componentLibraryUrl.value 30 | ); 31 | const [pipelineLibraryUrl, setPipelineLibraryUrl] = useState( 32 | appSettings.pipelineLibraryUrl.value 33 | ); 34 | const [defaultPipelineUrl, setDefaultPipelineUrl] = useState( 35 | appSettings.defaultPipelineUrl.value 36 | ); 37 | const [componentFeedUrls, setComponentFeedUrls] = useState( 38 | appSettings.componentFeedUrls.value 39 | ); 40 | const [gitHubSearchLocations, setGitHubSearchLocations] = useState( 41 | appSettings.gitHubSearchLocations.value 42 | ); 43 | const [googleCloudOAuthClientId, setGoogleCloudOAuthClientId] = useState( 44 | appSettings.googleCloudOAuthClientId.value 45 | ); 46 | 47 | const handleSave = () => { 48 | appSettings.componentLibraryUrl.value = componentLibraryUrl; 49 | appSettings.pipelineLibraryUrl.value = pipelineLibraryUrl; 50 | appSettings.defaultPipelineUrl.value = defaultPipelineUrl; 51 | appSettings.componentFeedUrls.value = componentFeedUrls; 52 | appSettings.gitHubSearchLocations.value = 
gitHubSearchLocations; 53 | appSettings.googleCloudOAuthClientId.value = googleCloudOAuthClientId; 54 | handleClose(); 55 | }; 56 | 57 | const handleReset = () => { 58 | setComponentLibraryUrl(appSettings.componentLibraryUrl.resetToDefault()); 59 | setPipelineLibraryUrl(appSettings.pipelineLibraryUrl.resetToDefault()); 60 | setDefaultPipelineUrl(appSettings.defaultPipelineUrl.resetToDefault()); 61 | setComponentFeedUrls(appSettings.componentFeedUrls.resetToDefault()); 62 | setGitHubSearchLocations( 63 | appSettings.gitHubSearchLocations.resetToDefault() 64 | ); 65 | setGoogleCloudOAuthClientId( 66 | appSettings.googleCloudOAuthClientId.resetToDefault() 67 | ); 68 | }; 69 | 70 | return ( 71 | 72 | Settings 73 | 74 | {/* Application settings */} 75 | setComponentLibraryUrl(e.target.value)} 83 | /> 84 | setPipelineLibraryUrl(e.target.value)} 92 | /> 93 | setDefaultPipelineUrl(e.target.value)} 101 | /> 102 | setComponentFeedUrls(e.target.value.split("\n"))} 111 | /> 112 | setGitHubSearchLocations(e.target.value.split("\n"))} 121 | /> 122 | setGoogleCloudOAuthClientId(e.target.value)} 130 | /> 131 | 132 | 133 | 136 | 139 | 142 | 143 | 144 | ); 145 | }; 146 | 147 | export default AppSettingsDialog; 148 | -------------------------------------------------------------------------------- /src/DragNDrop/ArgumentsEditor.tsx: -------------------------------------------------------------------------------- 1 | /** 2 | * @license 3 | * Copyright 2021 Alexey Volkov 4 | * SPDX-License-Identifier: Apache-2.0 5 | * @author Alexey Volkov 6 | * @copyright 2021 Alexey Volkov 7 | */ 8 | 9 | import { ArgumentType, ComponentSpec, TypeSpecType } from "../componentSpec"; 10 | 11 | interface ArgumentsEditorProps { 12 | componentSpec: ComponentSpec; 13 | componentArguments: Record; 14 | setComponentArguments: (args: Record) => void; 15 | shrinkToWidth?: boolean; 16 | } 17 | 18 | const getPatternForTypeSpec = (typeSpec?: TypeSpecType) => { 19 | // TODO: Implement 20 | return undefined; 21 | }; 
22 | 23 | const typeSpecToString = (typeSpec?: TypeSpecType): string => { 24 | if (typeSpec === undefined) { 25 | return "Any"; 26 | } 27 | if (typeof typeSpec === "string") { 28 | return typeSpec; 29 | } 30 | return JSON.stringify(typeSpec); 31 | }; 32 | 33 | const ArgumentsEditor = ({ 34 | componentSpec, 35 | componentArguments, 36 | setComponentArguments, 37 | shrinkToWidth = false, 38 | }: ArgumentsEditorProps) => { 39 | return ( 40 |
51 | {(componentSpec.inputs ?? []).map((inputSpec) => { 52 | const inputName = inputSpec.name; 53 | let value: string | undefined = undefined; 54 | let placeholder: string | undefined = undefined; 55 | const argument = componentArguments[inputName]; 56 | if (argument === undefined) { 57 | value = inputSpec.default; 58 | } else { 59 | if (typeof argument === "string") { 60 | value = argument; 61 | } else if ("taskOutput" in argument) { 62 | placeholder = ``; 63 | } else if ("graphInput" in argument) { 64 | placeholder = ``; 65 | } else { 66 | placeholder = ""; 67 | } 68 | } 69 | 70 | const argumentIsRequiredButMissing = 71 | !(inputName in componentArguments) && 72 | inputSpec.optional !== true && 73 | inputSpec.default === undefined; 74 | 75 | const typeSpecString = 76 | typeSpecToString(inputSpec.type) + 77 | (inputSpec.optional === true ? "?" : ""); 78 | 79 | const inputTitle = `${inputName} (${typeSpecString})\n${ 80 | inputSpec.description || "" 81 | }`; 82 | 83 | return ( 84 |
90 | 117 | { 131 | componentArguments[inputName] = e.target.value; 132 | setComponentArguments({ ...componentArguments }); 133 | }} 134 | /> 135 |
143 | 154 |
155 |
156 | ); 157 | })} 158 |
159 | ); 160 | }; 161 | 162 | export default ArgumentsEditor; 163 | -------------------------------------------------------------------------------- /src/DragNDrop/ArgumentsEditorDialog.tsx: -------------------------------------------------------------------------------- 1 | /** 2 | * @license 3 | * Copyright 2021 Alexey Volkov 4 | * SPDX-License-Identifier: Apache-2.0 5 | * @author Alexey Volkov 6 | * @copyright 2021 Alexey Volkov 7 | */ 8 | 9 | import { useState } from "react"; 10 | import { ArgumentType, TaskSpec } from "../componentSpec"; 11 | import ArgumentsEditor from "./ArgumentsEditor"; 12 | 13 | interface ArgumentsEditorDialogProps { 14 | taskSpec: TaskSpec; 15 | closeEditor?: () => void; 16 | setArguments?: (args: Record) => void; 17 | } 18 | 19 | const ArgumentsEditorDialog = ({ 20 | taskSpec, 21 | closeEditor, 22 | setArguments, 23 | }: ArgumentsEditorDialogProps) => { 24 | const [currentArguments, setCurrentArguments] = useState< 25 | Record 26 | >({ ...taskSpec.arguments }); 27 | 28 | const componentSpec = taskSpec.componentRef.spec; 29 | if (componentSpec === undefined) { 30 | console.error( 31 | "ArgumentsEditor called with missing taskSpec.componentRef.spec", 32 | taskSpec 33 | ); 34 | return <>; 35 | } 36 | 37 | return ( 38 |
{ 40 | e.preventDefault(); 41 | }} 42 | // Does not work 43 | // draggable={false} 44 | style={{ 45 | position: "fixed", 46 | background: "white", 47 | border: "1px solid black", 48 | borderRadius: "4px", 49 | padding: "15px", 50 | // Does not work 51 | // zIndex: 11, 52 | }} 53 | > 54 | Input arguments for {componentSpec.name} 55 | 60 | 63 | 72 | 73 | ); 74 | }; 75 | 76 | export default ArgumentsEditorDialog; 77 | -------------------------------------------------------------------------------- /src/DragNDrop/ComponentLibrary.tsx: -------------------------------------------------------------------------------- 1 | /** 2 | * @license 3 | * Copyright 2021 Alexey Volkov 4 | * SPDX-License-Identifier: Apache-2.0 5 | * @author Alexey Volkov 6 | * @copyright 2021 Alexey Volkov 7 | */ 8 | 9 | import { useEffect, useState } from "react"; 10 | import { 11 | DownloadDataType, 12 | downloadDataWithCache, 13 | loadObjectFromYamlData, 14 | } from "../cacheUtils"; 15 | import { ComponentReference } from "../componentSpec"; 16 | import { 17 | ComponentReferenceWithSpec, 18 | fullyLoadComponentRef, 19 | } from "../componentStore"; 20 | import DraggableComponent from "./DraggableComponent"; 21 | 22 | export type ComponentLibraryFolder = { 23 | name: string; 24 | folders: ComponentLibraryFolder[]; 25 | components: ComponentReference[]; 26 | }; 27 | 28 | export type ComponentLibraryStruct = { 29 | annotations?: { 30 | [k: string]: unknown; 31 | }; 32 | folders: ComponentLibraryFolder[]; 33 | }; 34 | 35 | export const isValidComponentLibraryStruct = ( 36 | obj: object 37 | ): obj is ComponentLibraryStruct => "folders" in obj; 38 | 39 | interface DraggableComponentRowProps { 40 | componentRef: ComponentReference; 41 | downloadData: DownloadDataType; 42 | } 43 | 44 | export const DraggableComponentRow = ({ 45 | componentRef, 46 | downloadData = downloadDataWithCache, 47 | }: DraggableComponentRowProps) => { 48 | const [componentRefWithSpec, setComponentRefWithSpec] = useState< 49 | 
ComponentReferenceWithSpec | undefined 50 | >(undefined); 51 | useEffect(() => { 52 | // TODO: Validate the component 53 | // Loading the component (preloading the graph component children as well). 54 | fullyLoadComponentRef(componentRef, downloadData).then( 55 | setComponentRefWithSpec 56 | ); 57 | }, [componentRef, downloadData]); 58 | 59 | if (componentRefWithSpec === undefined) { 60 | return
Loading...
; 61 | } else { 62 | return ; 63 | } 64 | }; 65 | 66 | export const FoldersAndComponentsVis = ({ 67 | folder, 68 | isOpen = false, 69 | downloadData = downloadDataWithCache, 70 | }: { 71 | folder: ComponentLibraryFolder; 72 | isOpen?: boolean; 73 | downloadData: DownloadDataType; 74 | }) => { 75 | return ( 76 | <> 77 | {folder.folders && 78 | Array.from(folder.folders).map((componentFolder, index) => ( 79 | 85 | ))} 86 | {folder.components && 87 | Array.from(folder.components).map((componentReference) => ( 88 | 97 | ))} 98 | 99 | ); 100 | }; 101 | 102 | export const SingleFolderVis = ({ 103 | folder, 104 | isOpen = false, 105 | downloadData = downloadDataWithCache, 106 | }: { 107 | folder: ComponentLibraryFolder; 108 | isOpen?: boolean; 109 | downloadData: DownloadDataType; 110 | }) => { 111 | return ( 112 |
122 | causes the summary marker and the text to be on different lines. 128 | textOverflow: "ellipsis", 129 | //maxWidth: "90%", 130 | overflow: "hidden", 131 | whiteSpace: "nowrap", 132 | //display: "block", 133 | }} 134 | title={folder.name} 135 | > 136 | {folder.name} 137 | 138 | 143 |
144 | ); 145 | }; 146 | 147 | export const ComponentLibraryVisFromStruct = ({ 148 | componentLibraryStruct, 149 | downloadData = downloadDataWithCache, 150 | }: { 151 | componentLibraryStruct: ComponentLibraryStruct; 152 | downloadData: DownloadDataType; 153 | }) => { 154 | return ( 155 | <> 156 | {Array.from(componentLibraryStruct.folders).map( 157 | (componentFolder, index) => ( 158 | 164 | ) 165 | )} 166 | 167 | ); 168 | }; 169 | 170 | const loadComponentLibraryStructFromData = async (data: ArrayBuffer) => { 171 | const componentLibrary = loadObjectFromYamlData(data); 172 | if (!isValidComponentLibraryStruct(componentLibrary)) { 173 | throw Error( 174 | `Invalid Component library data structure: ${componentLibrary}` 175 | ); 176 | } 177 | return componentLibrary; 178 | }; 179 | 180 | const loadComponentLibraryStructFromUrl = async ( 181 | url: string, 182 | downloadData: DownloadDataType = downloadDataWithCache 183 | ) => { 184 | const componentLibrary = await downloadData( 185 | url, 186 | loadComponentLibraryStructFromData 187 | ); 188 | return componentLibrary; 189 | }; 190 | 191 | interface ComponentLibraryVisFromUrlProps { 192 | url: string; 193 | downloadData: DownloadDataType; 194 | } 195 | 196 | const ComponentLibraryVisFromUrl = ({ 197 | url, 198 | downloadData = downloadDataWithCache, 199 | }: ComponentLibraryVisFromUrlProps) => { 200 | const [componentLibraryStruct, setComponentLibraryStruct] = useState< 201 | ComponentLibraryStruct | undefined 202 | >(); 203 | 204 | useEffect(() => { 205 | if (componentLibraryStruct === undefined) { 206 | (async () => { 207 | try { 208 | const loadedComponentLibrary = 209 | await loadComponentLibraryStructFromUrl(url, downloadData); 210 | setComponentLibraryStruct(loadedComponentLibrary); 211 | } catch (err) { 212 | console.error(err); 213 | } 214 | })(); 215 | } 216 | }, [componentLibraryStruct, url, downloadData]); 217 | 218 | return componentLibraryStruct === undefined ? 
( 219 | <>"The library is not loaded" 220 | ) : ( 221 | 225 | ); 226 | }; 227 | 228 | export default ComponentLibraryVisFromUrl; 229 | -------------------------------------------------------------------------------- /src/DragNDrop/ComponentSearch.tsx: -------------------------------------------------------------------------------- 1 | /** 2 | * @license 3 | * Copyright 2021 Alexey Volkov 4 | * SPDX-License-Identifier: Apache-2.0 5 | * @author Alexey Volkov 6 | * @copyright 2021 Alexey Volkov 7 | */ 8 | 9 | import { useState } from "react"; 10 | import { DownloadDataType, downloadDataWithCache } from "../cacheUtils"; 11 | import { ComponentReference } from "../componentSpec"; 12 | import { 13 | isComponentDbEmpty, 14 | refreshComponentDb, 15 | searchComponentsByName, 16 | } from "../github"; 17 | import DraggableComponent from "./DraggableComponent"; 18 | 19 | interface ComponentSearchProps { 20 | componentFeedUrls?: string[], 21 | gitHubSearchLocations?: string[], 22 | downloadData: DownloadDataType; 23 | } 24 | 25 | const SearchPanel = ({ 26 | componentFeedUrls, 27 | gitHubSearchLocations, 28 | downloadData = downloadDataWithCache, 29 | }: ComponentSearchProps) => { 30 | const [error, setError] = useState(undefined); 31 | const [firstTime, setFirstTime] = useState(true); 32 | const [isLoaded, setIsLoaded] = useState(false); 33 | const [query, setQuery] = useState(""); 34 | const [items, setItems] = useState([]); 35 | 36 | const onQueryChange = (e: any) => { 37 | setQuery(e.target.value); 38 | }; 39 | 40 | async function fetchData( 41 | query: string, 42 | ) { 43 | // If the DB is populated, return results immediately, then refresh the DB and update the results. 44 | try { 45 | if (!(await isComponentDbEmpty())) { 46 | const componentRefs = await searchComponentsByName(query); 47 | setIsLoaded(true); 48 | setItems(componentRefs); 49 | } else { 50 | console.debug("Component DB is empty. 
Need to populate the DB first."); 51 | } 52 | await refreshComponentDb( 53 | { 54 | ComponentFeedUrls: componentFeedUrls, 55 | GitHubSearchLocations: gitHubSearchLocations, 56 | }, 57 | downloadData 58 | ); 59 | setIsLoaded(true); 60 | const componentRefs = await searchComponentsByName(query); 61 | setItems(componentRefs); 62 | } catch (error: any) { 63 | setError(error.message); 64 | } 65 | } 66 | 67 | const onSubmit = (e: React.FormEvent) => { 68 | e.preventDefault(); 69 | if (query !== "") { 70 | setFirstTime(false); 71 | fetchData(query); 72 | } 73 | (window as any).gtag?.("event", "ComponentSearch_search", {}); 74 | }; 75 | 76 | let results = ; 77 | if (firstTime) { 78 | results =
Enter search query
; 79 | } else if (error !== undefined) { 80 | results =
Error: {error}
; 81 | } else if (!firstTime && !isLoaded) { 82 | results =
Searching...
; 83 | } else if (items !== undefined) { 84 | const componentElements = items.map((componentRef) => ( 85 | 89 | )); 90 | results = <>{componentElements}; 91 | } 92 | return ( 93 |
94 |
95 | 96 | 97 |
98 |
{results}
99 |
100 | ); 101 | }; 102 | 103 | export default SearchPanel; 104 | -------------------------------------------------------------------------------- /src/DragNDrop/ComponentTaskNode.tsx: -------------------------------------------------------------------------------- 1 | /** 2 | * @license 3 | * Copyright 2021 Alexey Volkov 4 | * SPDX-License-Identifier: Apache-2.0 5 | * @author Alexey Volkov 6 | * @copyright 2021 Alexey Volkov 7 | */ 8 | 9 | import { CSSProperties, memo, useState } from 'react'; 10 | import { 11 | ArgumentType, 12 | InputSpec, 13 | OutputSpec, 14 | TaskSpec, 15 | } from '../componentSpec'; 16 | 17 | import { Handle, Position, Node, NodeProps, HandleType } from 'react-flow-renderer'; 18 | 19 | import ArgumentsEditorDialog from './ArgumentsEditorDialog'; 20 | 21 | const inputHandlePosition = Position.Top; 22 | const outputHandlePosition = Position.Bottom; 23 | 24 | type InputOrOutputSpec = InputSpec | OutputSpec; 25 | 26 | const MISSING_ARGUMENT_CLASS_NAME = "missing-argument"; 27 | 28 | const NODE_WIDTH_IN_PX = 180; 29 | 30 | export const isComponentTaskNode = (node: Node): node is Node => 31 | node.type === "task" && node.data !== undefined && "taskSpec" in node.data; 32 | 33 | function generateHandles( 34 | ioSpecs: InputOrOutputSpec[], 35 | handleType: HandleType, 36 | position: Position, 37 | idPrefix: string, 38 | inputsWithMissingArguments?: string[], 39 | ): JSX.Element[] { 40 | let handleComponents = []; 41 | const numHandles = ioSpecs.length; 42 | for (let i = 0; i < numHandles; i++) { 43 | const ioSpec = ioSpecs[i]; 44 | const id = idPrefix + ioSpec.name; 45 | const relativePosition = (i + 1) / (numHandles + 1); 46 | const positionPercentString = String(100 * relativePosition) + "%"; 47 | const style = 48 | position === Position.Top || position === Position.Bottom 49 | ? { left: positionPercentString } 50 | : { top: positionPercentString }; 51 | // TODO: Handle complex type specs 52 | const ioTypeName = ioSpec.type?.toString() ?? 
"Any"; 53 | let classNames = [`handle_${idPrefix}${ioTypeName}`.replace(/ /g, "_")]; 54 | const isInvalid = (inputsWithMissingArguments ?? []).includes(ioSpec.name); 55 | if (isInvalid) { 56 | classNames.push(MISSING_ARGUMENT_CLASS_NAME); 57 | } 58 | classNames = classNames.map((className) => className.replace(/ /g, "_")); 59 | 60 | const [labelClasses, labelStyle] = generateLabelStyle(position, numHandles); 61 | const handleTitle = 62 | ioSpec.name + " : " + ioTypeName + "\n" + (ioSpec.description || ""); 63 | handleComponents.push( 64 | 74 |
75 | {ioSpec.name} 76 |
77 |
78 | ); 79 | } 80 | return handleComponents; 81 | } 82 | 83 | 84 | function generateLabelStyle( 85 | position: Position, 86 | numHandles: number 87 | ): [string, CSSProperties] { 88 | let maxLabelWidthPx = NODE_WIDTH_IN_PX; 89 | // By default, we want to place the label on the same side of the handle as the handle is on the side of the node. 90 | let labelClasses = "label"; 91 | // When there are too many inputs/outputs, we need to move the label so it starts from the handle. 92 | // Based on my tests, we always want this for >4 handles (top/bottom), so the rotated default placement is never used at all. 93 | 94 | if (position === Position.Top || position === Position.Bottom) { 95 | if (numHandles > 1) { 96 | // For single handle max width is the node width, while the formula would give half of that 97 | maxLabelWidthPx = NODE_WIDTH_IN_PX / (numHandles + 1); 98 | } 99 | //if (numHandles > 4) { 100 | if (maxLabelWidthPx < 35) { 101 | maxLabelWidthPx = 50; 102 | labelClasses += " label-angled"; 103 | } 104 | } else { 105 | maxLabelWidthPx = 60; 106 | } 107 | 108 | const labelStyle: CSSProperties = { maxWidth: `${maxLabelWidthPx}px` }; 109 | return [labelClasses, labelStyle]; 110 | } 111 | 112 | function generateInputHandles(inputSpecs: InputSpec[], inputsWithInvalidArguments?: string[]): JSX.Element[] { 113 | return generateHandles(inputSpecs, "target", inputHandlePosition, "input_", inputsWithInvalidArguments); 114 | } 115 | 116 | function generateOutputHandles(outputSpecs: OutputSpec[]): JSX.Element[] { 117 | return generateHandles(outputSpecs, "source", outputHandlePosition, "output_"); 118 | } 119 | 120 | export interface ComponentTaskNodeProps { 121 | taskSpec: TaskSpec, 122 | taskId?: string, 123 | setArguments?: (args: Record) => void; 124 | }; 125 | 126 | const ComponentTaskNode = ({ data }: NodeProps) => { 127 | const [isArgumentsEditorOpen, setIsArgumentsEditorOpen] = useState(false); 128 | 129 | const taskSpec = data.taskSpec; 130 | const componentSpec = 
taskSpec.componentRef.spec; 131 | if (componentSpec === undefined) { 132 | return (<>); 133 | } 134 | 135 | const label = componentSpec.name ?? ""; 136 | let title = "Task ID: " + data.taskId; 137 | if (componentSpec.name) { 138 | title += "\nComponent: " + componentSpec.name; 139 | } 140 | if (taskSpec.componentRef.url) { 141 | title += "\nUrl: " + taskSpec.componentRef.url; 142 | } 143 | if (taskSpec.componentRef.digest) { 144 | title += "\nDigest: " + taskSpec.componentRef.digest; 145 | } 146 | if (componentSpec.description) { 147 | title += "\nDescription: " + componentSpec.description; 148 | } 149 | const inputsWithInvalidArguments = (componentSpec.inputs ?? []) 150 | .filter( 151 | (inputSpec) => 152 | inputSpec.optional !== true && 153 | inputSpec.default === undefined && 154 | !(inputSpec.name in (taskSpec.arguments ?? {})) 155 | ) 156 | .map((inputSpec) => inputSpec.name); 157 | const inputHandles = generateInputHandles(componentSpec.inputs ?? [], inputsWithInvalidArguments); 158 | const outputHandles = generateOutputHandles(componentSpec.outputs ?? []); 159 | const handleComponents = inputHandles.concat(outputHandles); 160 | 161 | const closeArgumentsEditor = () => { 162 | setIsArgumentsEditorOpen(false); 163 | } 164 | 165 | return ( 166 |
{ 168 | setIsArgumentsEditorOpen(!isArgumentsEditorOpen); 169 | }} 170 | title={title} 171 | > 172 | {label} 173 | {handleComponents} 174 | {isArgumentsEditorOpen && ( 175 | 180 | )} 181 |
182 | ); 183 | }; 184 | 185 | export default memo(ComponentTaskNode); 186 | -------------------------------------------------------------------------------- /src/DragNDrop/DraggableComponent.tsx: -------------------------------------------------------------------------------- 1 | /** 2 | * @license 3 | * Copyright 2021 Alexey Volkov 4 | * SPDX-License-Identifier: Apache-2.0 5 | * @author Alexey Volkov 6 | * @copyright 2021 Alexey Volkov 7 | */ 8 | 9 | import { DragEvent } from "react"; 10 | 11 | import { ComponentReference, TaskSpec } from "../componentSpec"; 12 | 13 | const onDragStart = (event: DragEvent, nodeData: object) => { 14 | event.dataTransfer.setData("application/reactflow", JSON.stringify(nodeData)); 15 | event.dataTransfer.setData( 16 | "DragStart.offset", 17 | JSON.stringify({ 18 | offsetX: event.nativeEvent.offsetX, 19 | offsetY: event.nativeEvent.offsetY, 20 | }) 21 | ); 22 | event.dataTransfer.effectAllowed = "move"; 23 | }; 24 | 25 | interface DraggableComponentProps 26 | extends React.DetailedHTMLProps< 27 | React.HTMLAttributes, 28 | HTMLDivElement 29 | > { 30 | componentReference: ComponentReference; 31 | } 32 | 33 | const DraggableComponent = ({ 34 | componentReference, 35 | ...props 36 | }: DraggableComponentProps) => { 37 | let title = componentReference.spec?.name || ""; 38 | if (componentReference.url) { 39 | title += "\nUrl: " + componentReference.url; 40 | } 41 | if (componentReference.digest) { 42 | title += "\nDigest: " + componentReference.digest; 43 | } 44 | if (componentReference.spec?.description) { 45 | title += "\nDescription: " + componentReference.spec?.description; 46 | } 47 | return ( 48 |
{ 52 | const taskSpec: TaskSpec = { 53 | componentRef: componentReference, 54 | }; 55 | return onDragStart(event, { task: taskSpec }); 56 | }} 57 | title={title} 58 | {...props} 59 | > 60 | {componentReference.spec?.name ?? "Component"} 61 |
62 | ); 63 | }; 64 | 65 | export default DraggableComponent; 66 | -------------------------------------------------------------------------------- /src/DragNDrop/GoogleCloud.tsx: -------------------------------------------------------------------------------- 1 | /** 2 | * @license 3 | * Copyright 2021 Alexey Volkov 4 | * SPDX-License-Identifier: Apache-2.0 5 | * @author Alexey Volkov 6 | * @copyright 2021 Alexey Volkov 7 | */ 8 | 9 | /* global gapi */ 10 | 11 | import { useEffect, useState } from 'react'; 12 | 13 | import { ComponentSpec } from '../componentSpec'; 14 | import { buildVertexPipelineJobFromGraphComponent } from '../compilers/GoogleCloudVertexAIPipelines/vertexAiCompiler' 15 | import { PipelineJob } from '../compilers/GoogleCloudVertexAIPipelines/vertexPipelineSpec'; 16 | 17 | const LOCAL_STORAGE_GCS_OUTPUT_DIRECTORY_KEY = "GoogleCloudSubmitter/gcsOutputDirectory"; 18 | const LOCAL_STORAGE_PROJECT_ID_KEY = "GoogleCloudSubmitter/projectId"; 19 | const LOCAL_STORAGE_REGION_KEY = "GoogleCloudSubmitter/region"; 20 | const LOCAL_STORAGE_PROJECT_IDS_KEY = "GoogleCloudSubmitter/projectIds"; 21 | 22 | const VERTEX_AI_PIPELINES_REGIONS = [ 23 | 'us-central1', 24 | 'us-east1', 25 | 'us-west1', 26 | 'europe-west1', 27 | 'europe-west2', 28 | 'europe-west4', 29 | 'asia-east1', 30 | 'asia-southeast1', 31 | 'northamerica-northeast1', 32 | ]; 33 | 34 | const VERTEX_AI_PIPELINES_DEFAULT_REGION = 'us-central1'; 35 | 36 | const authorizeGoogleCloudClient = async ( 37 | clientId: string, 38 | scopes: string[], 39 | immediate = false, // Setting immediate to true prevents auth window showing every time. But it needs to be false the first time (when cookies are not set). 
) => {
  // Wraps the callback-based gapi.auth.authorize API in a Promise.
  return new Promise((resolve, reject) => {
    gapi.auth.authorize(
      {
        client_id: clientId,
        scope: scopes,
        immediate: immediate,
      },
      (authResult) => {
        if (authResult === undefined) {
          console.error("authorizeGoogleCloudClient failed");
          reject("gapi.auth.authorize result is undefined");
        } else if (authResult.error) {
          console.error(
            "authorizeGoogleCloudClient failed",
            authResult.error
          );
          reject(authResult.error);
        } else {
          // Working around the Google Auth bug: The request succeeds, but the returned token does not have the requested scopes.
          // See https://github.com/google/google-api-javascript-client/issues/743
          const receivedScopesString = (authResult as any).scope as string | undefined;
          const receivedScopes = receivedScopesString?.split(" ");
          if (
            receivedScopes === undefined ||
            !scopes.every((scope) => receivedScopes.includes(scope))
          ) {
            const errorMessage = `Authorization call succeeded, but the returned scopes are ${receivedScopesString}`;
            console.error(errorMessage);
            reject(errorMessage);
          } else {
            resolve(authResult);
          }
        }
      }
    );
  });
};

/**
 * Ensures the user has authorized the app for the requested OAuth scopes.
 * First attempts a silent ("immediate") authorization, which works when auth
 * cookies are already set; on failure, falls back to the interactive auth
 * window. Failures of the interactive attempt are swallowed by design
 * (best-effort) and reported via analytics only.
 *
 * @param googleCloudOAuthClientId OAuth client ID of this application.
 * @param scopes OAuth scopes that must be granted.
 * @returns the OAuth token object, or undefined when authorization failed.
 */
export const ensureGoogleCloudAuthorizesScopes = async (
  googleCloudOAuthClientId: string,
  scopes: string[]
) => {
  try {
    const oauthToken = await authorizeGoogleCloudClient(
      googleCloudOAuthClientId,
      scopes,
      true
    );
    (window as any).gtag?.("event", "GoogleCloud_auth", {
      result: "succeeded",
      immediate: "true",
    });
    return oauthToken;
  } catch (err) {
    // Silent auth failed (e.g. first visit) - retry interactively.
    try {
      const oauthToken = await authorizeGoogleCloudClient(
        googleCloudOAuthClientId,
        scopes,
        false
      );
      (window as any).gtag?.("event", "GoogleCloud_auth", {
        result: "succeeded",
        immediate: "false",
      });
      return oauthToken;
    } catch (err) {
      (window as any).gtag?.("event", "GoogleCloud_auth", {
        result: "failed",
        immediate: "false",
      });
    }
  }
};

/**
 * Lists the Google Cloud projects visible to the authorized user via the
 * Cloud Resource Manager API. Triggers the auth flow when needed.
 */
const cloudresourcemanagerListProjects = async (
  googleCloudOAuthClientId: string
) => {
  await ensureGoogleCloudAuthorizesScopes(googleCloudOAuthClientId, [
    "https://www.googleapis.com/auth/cloud-platform",
  ]);
  const response = await gapi.client.request({
    path: "https://cloudresourcemanager.googleapis.com/v1/projects/",
  });
  return response.result;
};

/**
 * Creates a PipelineJob in Vertex AI Pipelines.
 *
 * @param projectId Google Cloud project to run the pipeline in.
 * @param region Vertex AI region (e.g. "us-central1").
 * @param pipelineJob PipelineJob resource body to create.
 * @param googleCloudOAuthClientId OAuth client ID used for authorization.
 * @param pipelineJobId Optional caller-chosen ID for the created job.
 * @returns the created PipelineJob resource returned by the API.
 */
const aiplatformCreatePipelineJob = async (
  projectId: string,
  region = "us-central1",
  pipelineJob: Record<string, any>,
  googleCloudOAuthClientId: string,
  pipelineJobId?: string
) => {
  await ensureGoogleCloudAuthorizesScopes(googleCloudOAuthClientId, [
    "https://www.googleapis.com/auth/cloud-platform",
  ]);
  // Bug fix: previously pipelineJobId was interpolated unconditionally, so an
  // omitted ID produced the literal query string "?pipelineJobId=undefined".
  // Also URL-encode the user-derived parts of the request URL.
  const baseUrl = `https://${region}-aiplatform.googleapis.com/v1beta1/projects/${encodeURIComponent(
    projectId
  )}/locations/${encodeURIComponent(region)}/pipelineJobs`;
  const url =
    pipelineJobId === undefined
      ? baseUrl
      : `${baseUrl}?pipelineJobId=${encodeURIComponent(pipelineJobId)}`;
  const response = await gapi.client.request({
    path: url,
    method: "POST",
    body: JSON.stringify(pipelineJob),
  });
  (window as any).gtag?.("event", "GoogleCloud_submit_pipeline_job", {
    result: "succeeded",
  });
  return response.result;
};

interface GoogleCloudSubmitterProps {
  componentSpec?: ComponentSpec;
  // NOTE(review): the extraction stripped the generic arguments here; original
  // callers pass pipeline argument name/value pairs - presumably Map<string, string>.
  pipelineArguments?: Map<string, string>;
  googleCloudOAuthClientId: string;
}

161 | const GoogleCloudSubmitter = ({ 162 | componentSpec, 163 | pipelineArguments, 164 | googleCloudOAuthClientId 165 | }: GoogleCloudSubmitterProps) => { 166 | const [projects, setProjects] = useState( 167 | () => JSON.parse(window.localStorage?.getItem(LOCAL_STORAGE_PROJECT_IDS_KEY) ?? "[]") 168 | ); 169 | const [project, setProject] = useState( 170 | () => window.localStorage?.getItem(LOCAL_STORAGE_PROJECT_ID_KEY) ?? "" 171 | ); // undefined causes error: https://reactjs.org/docs/forms.html#controlled-components https://stackoverflow.com/a/47012342 172 | const [region, setRegion] = useState( 173 | () => window.localStorage?.getItem(LOCAL_STORAGE_REGION_KEY) ?? VERTEX_AI_PIPELINES_DEFAULT_REGION 174 | ); 175 | const [error, setError] = useState(""); 176 | const [gcsOutputDirectory, setGcsOutputDirectory] = useState( 177 | () => window.localStorage?.getItem(LOCAL_STORAGE_GCS_OUTPUT_DIRECTORY_KEY) ?? "" 178 | ); 179 | const [pipelineJobWebUrl, setPipelineJobWebUrl] = useState< 180 | string | undefined 181 | >(undefined); 182 | const [compilationError, setCompilationError] = useState( 183 | undefined 184 | ); 185 | const [vertexPipelineJob, setVertexPipelineJob] = useState< 186 | PipelineJob | undefined 187 | >(undefined); 188 | const [vertexPipelineJsonBlobUrl, setVertexPipelineJsonBlobUrl] = useState< 189 | string | undefined 190 | >(undefined); 191 | 192 | useEffect(() => { 193 | if (componentSpec !== undefined) { 194 | try { 195 | const vertexPipelineJob = buildVertexPipelineJobFromGraphComponent( 196 | componentSpec, 197 | gcsOutputDirectory, 198 | pipelineArguments 199 | ); 200 | setCompilationError(undefined); 201 | vertexPipelineJob.labels = { 202 | sdk: "cloud-pipelines-editor", 203 | "cloud-pipelines-editor-version": "0-0-1", 204 | }; 205 | setVertexPipelineJob(vertexPipelineJob); 206 | const vertexPipelineJobJson = JSON.stringify( 207 | vertexPipelineJob, 208 | undefined, 209 | 2 210 | ); 211 | const vertexPipelineJsonBlobUrl = URL.createObjectURL( 212 | 
new Blob([vertexPipelineJobJson], { type: "application/json" }) 213 | ); 214 | setVertexPipelineJsonBlobUrl(vertexPipelineJsonBlobUrl); 215 | } catch (err) { 216 | const errorMessage = 217 | typeof err === "object" && err instanceof Error 218 | ? err.toString() 219 | : String(err); 220 | setCompilationError(errorMessage); 221 | setVertexPipelineJob(undefined); 222 | setVertexPipelineJsonBlobUrl(undefined); 223 | } 224 | } 225 | }, [componentSpec, pipelineArguments, gcsOutputDirectory]); 226 | 227 | const readyToSubmit = 228 | project !== "" && region !== "" && vertexPipelineJob !== undefined; 229 | 230 | return ( 231 |
{ 233 | e.preventDefault(); 234 | if (vertexPipelineJob === undefined) { 235 | return; 236 | } 237 | setPipelineJobWebUrl(undefined); 238 | try { 239 | // setItem might throw exception on iOS in incognito mode 240 | try { 241 | window.localStorage?.setItem(LOCAL_STORAGE_GCS_OUTPUT_DIRECTORY_KEY, gcsOutputDirectory); 242 | window.localStorage?.setItem(LOCAL_STORAGE_PROJECT_ID_KEY, project); 243 | window.localStorage?.setItem(LOCAL_STORAGE_REGION_KEY, region); 244 | } catch(err) { 245 | console.error("GoogleCloudSubmitter: Error writing properties to the localStorage", err); 246 | } 247 | const displayName = ( 248 | (componentSpec?.name ?? "Pipeline") + 249 | " " + 250 | new Date().toISOString().replace("T", " ").replace("Z", "") 251 | ).substring(0, 127); 252 | const desiredPipelineJobId = displayName 253 | .toLowerCase() 254 | .replace(/[^-a-z0-9]/g, "-") 255 | .replace(/^-+/, ""); // No leading dashes 256 | vertexPipelineJob.displayName = displayName; 257 | const result = await aiplatformCreatePipelineJob( 258 | project, 259 | region, 260 | vertexPipelineJob, 261 | googleCloudOAuthClientId, 262 | desiredPipelineJobId 263 | ); 264 | const pipelineJobName: string = result.name; 265 | const pipelineJobId = pipelineJobName.split('/').slice(-1)[0]; 266 | const pipelineJobWebUrl = `https://console.cloud.google.com/vertex-ai/locations/${region}/pipelines/runs/${pipelineJobId}?project=${project}`; 267 | setPipelineJobWebUrl(pipelineJobWebUrl); 268 | setError(""); 269 | } catch (err: any) { 270 | console.error(err); 271 | setError(err?.result?.error?.message ?? "Error"); 272 | (window as any).gtag?.("event", "GoogleCloud_submit_pipeline_job", { 273 | result: "failed" 274 | }); 275 | } 276 | }} 277 | > 278 |
282 | 283 | setProject(e.target.value)} 291 | /> 292 | 293 | {projects.map((projectId) => ( 294 | 297 | 324 |
325 |
329 | 330 | setRegion(e.target.value)} 337 | /> 338 | 339 | {VERTEX_AI_PIPELINES_REGIONS.map((region) => ( 340 | 343 |
344 |
348 | 349 | setGcsOutputDirectory(e.target.value)} 355 | /> 356 |
357 |
361 | 366 | {pipelineJobWebUrl && Job} 367 |
368 | {vertexPipelineJsonBlobUrl !== undefined && ( 369 |
374 | Or download the{" "} 375 | 376 | pipeline_job.json 377 | {" "} 378 | file, then go to{" "} 379 | 380 | Vertex Pipelines 381 | {" "} 382 | and{" "} 383 | 384 | create a new run 385 | 386 | . 387 |
388 | )} 389 | {compilationError &&
{compilationError}
} 390 | {error &&
Error: {error}
} 391 |
392 | ); 393 | }; 394 | 395 | export default GoogleCloudSubmitter; 396 | -------------------------------------------------------------------------------- /src/DragNDrop/GraphComponentExporter.tsx: -------------------------------------------------------------------------------- 1 | /** 2 | * @license 3 | * Copyright 2021 Alexey Volkov 4 | * SPDX-License-Identifier: Apache-2.0 5 | * @author Alexey Volkov 6 | * @copyright 2021 Alexey Volkov 7 | */ 8 | 9 | import { 10 | useStoreState, 11 | } from "react-flow-renderer"; 12 | 13 | import { ComponentSpec } from "../componentSpec"; 14 | import { augmentComponentSpec } from './GraphComponentSpecFlow' 15 | import { componentSpecToYaml } from "../componentStore"; 16 | 17 | interface GraphComponentExporterProps { 18 | componentSpec: ComponentSpec, 19 | } 20 | 21 | const GraphComponentExporter = ({ 22 | componentSpec, 23 | }: GraphComponentExporterProps) => { 24 | const nodes = useStoreState((store) => store.nodes); 25 | 26 | let componentText = ""; 27 | try { 28 | const graphComponent = augmentComponentSpec(componentSpec, nodes, false, true); 29 | componentText = componentSpecToYaml(graphComponent); 30 | } catch(err) { 31 | componentText = String(err); 32 | } 33 | 34 | const componentTextBlob = new Blob([componentText], { type: "text/yaml" }); // Or application/x-yaml (which leads to downloading) 35 | const downloadLink = component.yaml 36 | 37 | return ( 38 |
39 | Graph {downloadLink} 40 |
{componentText}
41 |
42 | ); 43 | }; 44 | 45 | export default GraphComponentExporter; 46 | -------------------------------------------------------------------------------- /src/DragNDrop/GraphComponentLink.tsx: -------------------------------------------------------------------------------- 1 | /** 2 | * @license 3 | * Copyright 2021 Alexey Volkov 4 | * SPDX-License-Identifier: Apache-2.0 5 | * @author Alexey Volkov 6 | * @copyright 2021 Alexey Volkov 7 | */ 8 | 9 | import { useStoreState } from "react-flow-renderer"; 10 | 11 | import { ComponentSpec } from "../componentSpec"; 12 | import { componentSpecToYaml } from "../componentStore"; 13 | import { augmentComponentSpec } from "./GraphComponentSpecFlow"; 14 | 15 | interface GraphComponentLinkProps { 16 | componentSpec: ComponentSpec; 17 | downloadFileName?: string; 18 | linkText?: string; 19 | linkRef?: React.Ref; 20 | style?: React.CSSProperties; 21 | } 22 | 23 | const GraphComponentLink = ({ 24 | componentSpec, 25 | downloadFileName = "component.yaml", 26 | linkText = "component.yaml", 27 | linkRef, 28 | style, 29 | }: GraphComponentLinkProps) => { 30 | const nodes = useStoreState((store) => store.nodes); 31 | 32 | try { 33 | componentSpec = augmentComponentSpec(componentSpec, nodes, false, true); 34 | } catch (err: any) { 35 | if (err?.message?.startsWith("The nodes array does not") !== true) { 36 | console.error(err); 37 | return <>err.toString(); 38 | } 39 | } 40 | const componentText = componentSpecToYaml(componentSpec); 41 | const componentTextBlob = new Blob([componentText], { type: "text/yaml" }); // Or application/x-yaml (which leads to downloading) 42 | return ( 43 | 49 | {linkText} 50 | 51 | ); 52 | }; 53 | 54 | export default GraphComponentLink; 55 | -------------------------------------------------------------------------------- /src/DragNDrop/KubeflowPipelinesSubmitter.tsx: -------------------------------------------------------------------------------- 1 | /** 2 | * @license 3 | * Copyright 2022 Alexey Volkov 4 | * 
SPDX-License-Identifier: Apache-2.0 5 | * @author Alexey Volkov 6 | * @copyright 2022 Alexey Volkov 7 | */ 8 | 9 | import yaml from "js-yaml"; 10 | import { useEffect, useState } from "react"; 11 | import { 12 | buildArgoWorkflowFromGraphComponent, 13 | Workflow, 14 | } from "../compilers/Argo/argoCompiler"; 15 | import { ComponentSpec } from "../componentSpec"; 16 | import { ensureGoogleCloudAuthorizesScopes } from "./GoogleCloud"; 17 | 18 | const LOCAL_STORAGE_ENDPOINT_KEY = "KubeflowPipelinesSubmitter/endpoint"; 19 | const LOCAL_STORAGE_AUTH_TOKEN_KEY = "KubeflowPipelinesSubmitter/auth_token"; 20 | 21 | const kfpSubmitPipelineRun = async ( 22 | argoWorkflowSpec: Record, 23 | endpoint: string, 24 | authToken?: string, 25 | googleCloudOAuthClientId?: string, 26 | runName?: string 27 | ) => { 28 | // https://www.kubeflow.org/docs/components/pipelines/reference/api/kubeflow-pipeline-api-spec/#/definitions/apiRun 29 | const kfpRun = { 30 | name: runName ?? argoWorkflowSpec.name ?? "Pipeline", 31 | pipeline_spec: { 32 | workflow_manifest: JSON.stringify(argoWorkflowSpec), 33 | }, 34 | }; 35 | if (!endpoint.includes("://")) { 36 | console.warn("Endpoint URL does not specify a protocol. Using HTTPS."); 37 | endpoint = "https://" + endpoint; 38 | } 39 | if (!endpoint.endsWith("/")) { 40 | endpoint = endpoint + "/"; 41 | } 42 | const apiUrl = endpoint + "apis/v1beta1/runs"; 43 | if (!authToken) { 44 | // Auth token not specified. 
Authenticating the request using Google Cloud 45 | if (googleCloudOAuthClientId) { 46 | const oauthToken = await ensureGoogleCloudAuthorizesScopes( 47 | googleCloudOAuthClientId, 48 | ["https://www.googleapis.com/auth/cloud-platform"] 49 | ); 50 | authToken = oauthToken?.access_token; 51 | } 52 | } 53 | const response = await fetch(apiUrl, { 54 | method: "POST", 55 | body: JSON.stringify(kfpRun), 56 | headers: new Headers({ 57 | Authorization: "Bearer " + authToken, 58 | }), 59 | }); 60 | (window as any).gtag?.( 61 | "event", 62 | "KubeflowPipelinesSubmitter_submit_pipeline_run_succeeded", 63 | {} 64 | ); 65 | return response.json(); 66 | }; 67 | 68 | const generateKfpRunUrl = (endpoint: string, runId: string) => { 69 | //https://xxx-dot-us-central2.pipelines.googleusercontent.com/#/runs/details/ 70 | if (!endpoint.includes("://")) { 71 | endpoint = "https://" + endpoint; 72 | } 73 | if (!endpoint.endsWith("/")) { 74 | endpoint = endpoint + "/"; 75 | } 76 | return endpoint + "#/runs/details/" + runId; 77 | }; 78 | 79 | interface KubeflowPipelinesSubmitterProps { 80 | componentSpec?: ComponentSpec; 81 | pipelineArguments?: Map; 82 | googleCloudOAuthClientId?: string; 83 | } 84 | 85 | const KubeflowPipelinesSubmitter = ({ 86 | componentSpec, 87 | pipelineArguments, 88 | googleCloudOAuthClientId, 89 | }: KubeflowPipelinesSubmitterProps) => { 90 | const [argoWorkflow, setArgoWorkflow] = useState( 91 | undefined 92 | ); 93 | const [argoWorkflowYamlBlobUrl, setArgoWorkflowYamlBlobUrl] = useState< 94 | string | undefined 95 | >(undefined); 96 | const [compilationError, setCompilationError] = useState( 97 | undefined 98 | ); 99 | const [submissionError, setSubmissionError] = useState( 100 | undefined 101 | ); 102 | const [endpoint, setEndpoint] = useState( 103 | () => window.localStorage?.getItem(LOCAL_STORAGE_ENDPOINT_KEY) ?? "" 104 | ); 105 | const [authToken, setAuthToken] = useState( 106 | () => window.localStorage?.getItem(LOCAL_STORAGE_AUTH_TOKEN_KEY) ?? 
"" 107 | ); 108 | const [, setPipelineRunId] = useState(undefined); 109 | const [, setWorkflowResourceName] = useState(undefined); 110 | const [pipelineRunWebUrl, setPipelineRunWebUrl] = useState< 111 | string | undefined 112 | >(undefined); 113 | 114 | useEffect(() => { 115 | if (componentSpec !== undefined) { 116 | try { 117 | const argoWorkflow = buildArgoWorkflowFromGraphComponent( 118 | componentSpec, 119 | pipelineArguments ?? new Map() 120 | ); 121 | argoWorkflow.metadata.labels = { 122 | sdk: "cloud-pipelines-editor", 123 | "cloud-pipelines.net/pipeline-editor": "true", 124 | "pipelines.kubeflow.org/pipeline-sdk-type": "cloud-pipelines-editor", 125 | }; 126 | setArgoWorkflow(argoWorkflow); 127 | const argoWorkflowYaml = yaml.dump(argoWorkflow, { 128 | lineWidth: -1, // Don't fold long strings 129 | quotingType: '"', 130 | }); 131 | const newArgoWorkflowYamlBlobUrl = URL.createObjectURL( 132 | new Blob([argoWorkflowYaml], { type: "application/yaml" }) 133 | ); 134 | // Updating the workflow blob URL (revoking the old workflow blob URL first). 135 | setArgoWorkflowYamlBlobUrl((currentArgoWorkflowYamlBlobUrl) => { 136 | if (currentArgoWorkflowYamlBlobUrl !== undefined) { 137 | URL.revokeObjectURL(currentArgoWorkflowYamlBlobUrl); 138 | } 139 | return newArgoWorkflowYamlBlobUrl; 140 | }); 141 | setCompilationError(undefined); 142 | } catch (err) { 143 | const errorMessage = 144 | typeof err === "object" && err instanceof Error 145 | ? err.toString() 146 | : String(err); 147 | setCompilationError(errorMessage); 148 | } 149 | } 150 | }, [componentSpec, pipelineArguments]); 151 | 152 | const readyToSubmit = endpoint && argoWorkflow; 153 | 154 | return ( 155 |
{ 157 | e.preventDefault(); 158 | if (!endpoint || !argoWorkflow) { 159 | return; 160 | } 161 | setPipelineRunWebUrl(undefined); 162 | try { 163 | // setItem might throw exception on iOS in incognito mode 164 | try { 165 | window.localStorage?.setItem(LOCAL_STORAGE_ENDPOINT_KEY, endpoint); 166 | window.localStorage?.setItem( 167 | LOCAL_STORAGE_AUTH_TOKEN_KEY, 168 | authToken 169 | ); 170 | } catch (err) { 171 | console.error( 172 | "KubeflowPipelinesSubmitter: Error writing properties to the localStorage", 173 | err 174 | ); 175 | } 176 | const runName = 177 | (componentSpec?.name ?? "Pipeline") + 178 | " " + 179 | new Date().toISOString().replace("T", " ").replace("Z", ""); 180 | const result = await kfpSubmitPipelineRun( 181 | argoWorkflow, 182 | endpoint, 183 | authToken, 184 | googleCloudOAuthClientId, 185 | runName 186 | ); 187 | console.debug(result); 188 | const runId = result?.run?.id; 189 | if (typeof runId === "string") { 190 | setPipelineRunId(runId); 191 | const runUrl = generateKfpRunUrl(endpoint, runId); 192 | setPipelineRunWebUrl(runUrl); 193 | } 194 | const runtimeWorkflowManifestString = 195 | result?.pipeline_runtime?.workflow_manifest; 196 | if (typeof runtimeWorkflowManifestString === "string") { 197 | const runtimeWorkflowManifest = JSON.parse( 198 | runtimeWorkflowManifestString 199 | ); 200 | const resourceName = runtimeWorkflowManifest?.metadata?.name; 201 | if (resourceName) { 202 | setWorkflowResourceName(resourceName); 203 | } 204 | } 205 | setSubmissionError(undefined); 206 | } catch (err: any) { 207 | console.error(err); 208 | const errorMessage = 209 | typeof err === "object" && err instanceof Error 210 | ? err.toString() 211 | : String(err); 212 | setSubmissionError(errorMessage); 213 | (window as any).gtag?.( 214 | "event", 215 | "KubeflowPipelinesSubmitter_submit_pipeline_run_failed", 216 | {} 217 | ); 218 | } 219 | }} 220 | > 221 |
227 | 228 | setEndpoint(e.target.value)} 236 | /> 237 |
238 |
244 | 245 | setAuthToken(e.target.value)} 252 | /> 253 |
254 |
260 | 265 | {pipelineRunWebUrl && ( 266 | 272 | Run 273 | 274 | )} 275 |
276 | {argoWorkflowYamlBlobUrl && ( 277 |
282 | {/* TODO: Use pipeline name for the file name */} 283 | Or download the{" "} 284 | 285 | kubeflow_pipeline.yaml 286 | 287 |
288 | )} 289 | {compilationError &&
{compilationError}
} 290 | {submissionError &&
Error: {submissionError}
} 291 |
292 | ); 293 | }; 294 | 295 | export default KubeflowPipelinesSubmitter; 296 | -------------------------------------------------------------------------------- /src/DragNDrop/PipelineAutoSaver.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * @license 3 | * Copyright 2022 Alexey Volkov 4 | * SPDX-License-Identifier: Apache-2.0 5 | * @author Alexey Volkov 6 | * @copyright 2022 Alexey Volkov 7 | */ 8 | 9 | import { Node, useStoreState } from "react-flow-renderer"; 10 | import yaml from "js-yaml"; 11 | 12 | import { ComponentSpec } from "../componentSpec"; 13 | import { componentSpecToYaml } from "../componentStore"; 14 | import { augmentComponentSpec } from "./GraphComponentSpecFlow"; 15 | 16 | const SAVED_COMPONENT_SPEC_KEY = "autosaved.component.yaml"; 17 | 18 | export const savePipelineSpecToSessionStorage = ( 19 | componentSpec: ComponentSpec, 20 | nodes?: Node[] 21 | ) => { 22 | try { 23 | if (nodes !== undefined) { 24 | if (nodes.length === 0) { 25 | console.warn("saveComponentSpec: nodes.length === 0"); 26 | } 27 | componentSpec = augmentComponentSpec(componentSpec, nodes, true, true); 28 | } 29 | const componentText = componentSpecToYaml(componentSpec); 30 | window.sessionStorage.setItem(SAVED_COMPONENT_SPEC_KEY, componentText); 31 | } catch (err: any) { 32 | // TODO: Find a way to avoid the React/Redux race conditions causing this error. 
33 | if (err?.message?.startsWith("The nodes array does not") !== true) { 34 | console.error(err); 35 | } 36 | } 37 | }; 38 | 39 | export const loadPipelineSpecFromSessionStorage = () => { 40 | try { 41 | const componentText = window.sessionStorage.getItem( 42 | SAVED_COMPONENT_SPEC_KEY 43 | ); 44 | if (componentText !== null) { 45 | const loadedYaml = yaml.load(componentText); 46 | if (loadedYaml !== null && typeof loadedYaml === "object") { 47 | //TODO: Validate that the spec is valid 48 | const savedComponentSpec = loadedYaml as ComponentSpec; 49 | return savedComponentSpec; 50 | } 51 | } 52 | } catch (err) { 53 | console.error(err); 54 | } 55 | return undefined; 56 | }; 57 | 58 | // Auto-saver is extracted to its own child component since useStoreState in the parent causes infinite re-rendering 59 | // (each render of GraphComponentSpecFlow seems to change the Redux store). 60 | // This component seems to be triggered for every node movement, so even pure layout changes are saved. 61 | export const PipelineAutoSaver = ({ 62 | componentSpec, 63 | }: { 64 | componentSpec: ComponentSpec; 65 | }) => { 66 | const nodes = useStoreState((store) => store.nodes); 67 | // Fixing issue where a React error would cause all node positions to be recorded as undefined (`!`) 68 | // nodes should never be undefined in normal situation. 
69 | if (nodes !== undefined && nodes.length > 0) { 70 | savePipelineSpecToSessionStorage(componentSpec, nodes); 71 | } 72 | return null; 73 | }; 74 | -------------------------------------------------------------------------------- /src/DragNDrop/PipelineSubmitter.tsx: -------------------------------------------------------------------------------- 1 | /** 2 | * @license 3 | * Copyright 2022 Alexey Volkov 4 | * SPDX-License-Identifier: Apache-2.0 5 | * @author Alexey Volkov 6 | * @copyright 2022 Alexey Volkov 7 | */ 8 | 9 | import { useEffect, useState } from "react"; 10 | import { ArgumentType, ComponentSpec } from "../componentSpec"; 11 | import ArgumentsEditor from "./ArgumentsEditor"; 12 | import GoogleCloudSubmitter from "./GoogleCloud"; 13 | import KubeflowPipelinesSubmitter from "./KubeflowPipelinesSubmitter"; 14 | 15 | interface PipelineSubmitterProps { 16 | componentSpec?: ComponentSpec; 17 | googleCloudOAuthClientId: string; 18 | } 19 | 20 | const PipelineSubmitter = ({ 21 | componentSpec, 22 | googleCloudOAuthClientId, 23 | }: PipelineSubmitterProps) => { 24 | const [pipelineArguments, setPipelineArguments] = useState< 25 | Record 26 | >({}); 27 | 28 | const [stringPipelineArguments, setStringPipelineArguments] = 29 | useState>(new Map()); 30 | 31 | useEffect(() => { 32 | // This filtering is just for typing as the pipeline arguments can only be strings here. 33 | const newStringPipelineArguments = new Map( 34 | Object.entries(pipelineArguments).filter( 35 | // Type guard predicate 36 | (pair): pair is [string, string] => typeof pair[1] === "string" 37 | ) 38 | ); 39 | setStringPipelineArguments(newStringPipelineArguments) 40 | }, [pipelineArguments]); 41 | 42 | return ( 43 | <> 44 | {componentSpec === undefined || // This check is redundant, but TypeScript needs it. 45 | (componentSpec?.inputs?.length ?? 0) === 0 ? undefined : ( 46 |
53 | Arguments 54 | 60 |
61 | )} 62 |
69 | 72 | Submit to Google Cloud 73 | 74 | 79 |
80 |
87 | 90 | Submit to Kubeflow Pipelines 91 | 92 | 97 |
98 | 99 | ); 100 | }; 101 | 102 | export default PipelineSubmitter; 103 | -------------------------------------------------------------------------------- /src/DragNDrop/SamplePipelineLibrary.tsx: -------------------------------------------------------------------------------- 1 | /** 2 | * @license 3 | * Copyright 2021 Alexey Volkov 4 | * SPDX-License-Identifier: Apache-2.0 5 | * @author Alexey Volkov 6 | * @copyright 2021 Alexey Volkov 7 | */ 8 | 9 | import { useState, useEffect } from "react"; 10 | import { DownloadDataType, downloadDataWithCache, loadObjectFromYamlData } from "../cacheUtils"; 11 | import { ComponentReference, ComponentSpec } from "../componentSpec"; 12 | import { 13 | ComponentReferenceWithSpec, 14 | fullyLoadComponentRefFromUrl, 15 | } from "../componentStore"; 16 | 17 | type PipelineLibraryStruct = { 18 | annotations?: { 19 | [k: string]: unknown; 20 | }; 21 | components: ComponentReference[]; 22 | }; 23 | 24 | const isValidPipelineLibraryStruct = ( 25 | obj: object 26 | ): obj is PipelineLibraryStruct => "components" in obj; 27 | 28 | const loadPipelineLibraryStructFromData = async ( 29 | data: ArrayBuffer, 30 | ) => { 31 | const pipelineLibrary = loadObjectFromYamlData(data); 32 | if (!isValidPipelineLibraryStruct(pipelineLibrary)) { 33 | throw Error(`Invalid Component library data structure: ${pipelineLibrary}`); 34 | } 35 | return pipelineLibrary; 36 | }; 37 | 38 | const loadPipelineLibraryStructFromUrl = async ( 39 | url: string, 40 | downloadData: DownloadDataType = downloadDataWithCache, 41 | ) => { 42 | const pipelineLibrary = await downloadData(url, loadPipelineLibraryStructFromData); 43 | return pipelineLibrary; 44 | }; 45 | 46 | function notUndefined(x: T | undefined): x is T { 47 | return x !== undefined; 48 | } 49 | 50 | interface PipelineLibraryProps { 51 | pipelineLibraryUrl: string; 52 | setComponentSpec?: (componentSpec: ComponentSpec) => void; 53 | downloadData: DownloadDataType; 54 | } 55 | 56 | const SamplePipelineLibrary 
= ({ 57 | pipelineLibraryUrl, 58 | setComponentSpec, 59 | downloadData = downloadDataWithCache 60 | }: PipelineLibraryProps) => { 61 | const [componentRefs, setComponentRefs] = useState< 62 | ComponentReferenceWithSpec[] 63 | >([]); 64 | 65 | useEffect(() => { 66 | (async () => { 67 | if (componentRefs.length === 0) { 68 | try { 69 | const loadedComponentLibrary = await loadPipelineLibraryStructFromUrl( 70 | pipelineLibraryUrl, 71 | downloadData 72 | ); 73 | const pipelineUrls = loadedComponentLibrary.components 74 | .map((componentRef) => componentRef.url) 75 | .filter(notUndefined); 76 | const loadedComponentRefs = await Promise.all( 77 | pipelineUrls.map((url) => 78 | fullyLoadComponentRefFromUrl(url, downloadData) 79 | ) 80 | ); 81 | setComponentRefs(loadedComponentRefs); 82 | } catch (err) { 83 | console.error(err); 84 | } 85 | } 86 | })(); 87 | }, [pipelineLibraryUrl, downloadData, componentRefs.length]); 88 | 89 | return ( 90 |
97 |
98 | {componentRefs.map((componentRef) => ( 99 |
100 | ⋮ {/* ⋮ ≡ ⋅ */} 101 | 109 |
110 | ))} 111 |
112 |
113 | ); 114 | }; 115 | 116 | export default SamplePipelineLibrary; 117 | -------------------------------------------------------------------------------- /src/DragNDrop/Sidebar.tsx: -------------------------------------------------------------------------------- 1 | /** 2 | * @license 3 | * Copyright 2021 Alexey Volkov 4 | * SPDX-License-Identifier: Apache-2.0 5 | * @author Alexey Volkov 6 | * @copyright 2021 Alexey Volkov 7 | */ 8 | 9 | import { DragEvent, useState } from 'react'; 10 | 11 | import ComponentLibrary from './ComponentLibrary' 12 | import ComponentSearch from './ComponentSearch' 13 | import GraphComponentExporter from './GraphComponentExporter' 14 | import VertexAiExporter from './VertexAiExporter' 15 | import { ComponentSpec } from '../componentSpec'; 16 | import UserComponentLibrary from "./UserComponentLibrary"; 17 | import PipelineLibrary from "./PipelineLibrary"; 18 | import { AppSettings } from '../appSettings'; 19 | import PipelineSubmitter from "./PipelineSubmitter"; 20 | import AppSettingsDialog from './AppSettingsDialog'; 21 | import { DownloadDataType, downloadDataWithCache } from '../cacheUtils'; 22 | 23 | const onDragStart = (event: DragEvent, nodeData: object) => { 24 | event.dataTransfer.setData('application/reactflow', JSON.stringify(nodeData)); 25 | event.dataTransfer.setData( 26 | "DragStart.offset", 27 | JSON.stringify({ 28 | offsetX: event.nativeEvent.offsetX, 29 | offsetY: event.nativeEvent.offsetY, 30 | }) 31 | ); 32 | event.dataTransfer.effectAllowed = 'move'; 33 | }; 34 | 35 | interface SidebarProps { 36 | componentSpec?: ComponentSpec, 37 | setComponentSpec?: (componentSpec: ComponentSpec) => void, 38 | appSettings: AppSettings; 39 | downloadData: DownloadDataType; 40 | } 41 | 42 | const Sidebar = ({ 43 | componentSpec, 44 | setComponentSpec, 45 | appSettings, 46 | downloadData = downloadDataWithCache 47 | }: SidebarProps) => { 48 | const [isSettingsDialogOpen, setIsSettingsDialogOpen] = useState(false); 49 | 50 | // Do not 
include the DebugScratch in the production build 51 | let DebugScratchElement = () => null; 52 | if (process?.env?.NODE_ENV === "development") { 53 | try { 54 | const DebugScratch = require("./DebugScratch").default; 55 | DebugScratchElement = () => 56 | DebugScratch({ 57 | componentSpec: componentSpec, 58 | setComponentSpec: setComponentSpec, 59 | downloadData: downloadData, 60 | }); 61 | } catch (e) {} 62 | } 63 | 64 | return ( 65 | 138 | ); 139 | }; 140 | 141 | export default Sidebar; 142 | -------------------------------------------------------------------------------- /src/DragNDrop/UserComponentLibrary.tsx: -------------------------------------------------------------------------------- 1 | /** 2 | * @license 3 | * Copyright 2021 Alexey Volkov 4 | * SPDX-License-Identifier: Apache-2.0 5 | * @author Alexey Volkov 6 | * @copyright 2021 Alexey Volkov 7 | */ 8 | 9 | import { 10 | Button, 11 | Dialog, 12 | DialogActions, 13 | DialogContent, 14 | DialogTitle, 15 | Menu, 16 | MenuItem, 17 | TextField, 18 | } from "@material-ui/core"; 19 | import { useCallback, useState, useEffect, useRef } from "react"; 20 | import { useDropzone } from "react-dropzone"; 21 | import { 22 | addComponentToListByText, 23 | deleteComponentFileFromList, 24 | ComponentFileEntry, 25 | getAllComponentFilesFromList, 26 | addComponentToListByUrl, 27 | } from "../componentStore"; 28 | import DraggableComponent from "./DraggableComponent"; 29 | 30 | const USER_COMPONENTS_LIST_NAME = "user_components"; 31 | 32 | const UserComponentLibrary = () => { 33 | const [errorMessage, setErrorMessage] = useState(""); 34 | const [componentFiles, setComponentFiles] = useState( 35 | new Map() 36 | ); 37 | const [contextMenuFileName, setContextMenuFileName] = useState(); 38 | const [contextMenuAnchor, setContextMenuAnchor] = useState(); 39 | const [isImportComponentDialogOpen, setIsImportComponentDialogOpen] = 40 | useState(false); 41 | 42 | const refreshComponents = useCallback(() => { 43 | 
getAllComponentFilesFromList(USER_COMPONENTS_LIST_NAME).then(
      setComponentFiles
    );
  }, [setComponentFiles]);

  useEffect(refreshComponents, [refreshComponents]);

  // Reads each dropped file and tries to import it as a component.yaml.
  const onDrop = useCallback(
    (acceptedFiles: File[]) => {
      for (const file of acceptedFiles) {
        const reader = new FileReader();
        reader.onabort = () => console.log("file reading was aborted");
        reader.onerror = () => console.log("file reading has failed");
        reader.onload = async () => {
          const binaryStr = reader.result;
          if (binaryStr === null || binaryStr === undefined) {
            console.error(`Dropped file reader result was ${binaryStr}`);
            return;
          }
          try {
            const componentRefPlusData = await addComponentToListByText(
              USER_COMPONENTS_LIST_NAME,
              binaryStr
            );
            console.debug(
              "storeComponentText succeeded",
              componentRefPlusData.componentRef
            );
            (window as any).gtag?.("event", "UserComponents_component_import", {
              result: "succeeded",
            });
            setErrorMessage("");
            refreshComponents();
          } catch (err) {
            const errorMessage =
              typeof err === "object" && err ? err.toString() : String(err);
            setErrorMessage(
              `Error parsing the dropped file as component: ${errorMessage}.`
            );
            console.error("Error parsing the dropped file as component", err);
            (window as any).gtag?.("event", "UserComponents_component_import", {
              result: "failed",
            });
          }
        };
        reader.readAsArrayBuffer(file);
      }
    },
    [refreshComponents]
  );

  // Fetches a component.yaml from a URL and adds it to the user library.
  const onImportFromUrl = useCallback(
    async (url: string) => {
      try {
        const componentFileEntry = await addComponentToListByUrl(
          USER_COMPONENTS_LIST_NAME,
          url
        );
        console.debug(
          "addComponentToListByUrl succeeded",
          componentFileEntry.componentRef
        );
        (window as any).gtag?.(
          "event",
          "UserComponents_component_import_from_url_succeeded"
        );
        setErrorMessage("");
        refreshComponents();
        setIsImportComponentDialogOpen(false);
      } catch (err) {
        const errorMessage =
          typeof err === "object" && err ? err.toString() : String(err);
        setErrorMessage(
          `Error parsing the file as component: ${errorMessage}.`
        );
        console.error("Error importing component from the URL", err);
        (window as any).gtag?.(
          "event",
          "UserComponents_component_import_from_url_failed"
        );
      }
    },
    [refreshComponents]
  );

  // Deletes the component file chosen via the context menu, then refreshes the list.
  const handleContextMenuDelete = async () => {
    if (contextMenuFileName) {
      setContextMenuFileName(undefined);
      await deleteComponentFileFromList(
        USER_COMPONENTS_LIST_NAME,
        contextMenuFileName
      );
      refreshComponents();
    }
  };

  const { getRootProps, getInputProps, isDragActive } = useDropzone({
    onDrop,
    accept: ".yaml",
  });

  return (
139 | 145 |
146 | 147 |
154 | {isDragActive 155 | ? "Drop the files here ..." 156 | : errorMessage || 157 | "Drag and drop component.yaml files or click to select files"} 158 | {Array.from(componentFiles.entries()).map(([fileName, fileEntry]) => ( 159 | { 163 | e.preventDefault(); 164 | setContextMenuAnchor(e.currentTarget); 165 | setContextMenuFileName(fileName); 166 | }} 167 | /> 168 | ))} 169 |
170 |
171 | { 175 | setContextMenuFileName(undefined); 176 | }} 177 | > 178 | 179 | Delete 180 | 181 | 182 | setIsImportComponentDialogOpen(false)} 185 | initialValue={"https://raw.githubusercontent.com/.../component.yaml"} 186 | onImport={onImportFromUrl} 187 | /> 188 |
189 | ); 190 | }; 191 | 192 | export default UserComponentLibrary; 193 | 194 | interface SaveAsDialogProps { 195 | isOpen: boolean; 196 | onImport: (name: string) => void; 197 | onCancel: () => void; 198 | initialValue: string | undefined; 199 | } 200 | 201 | const ImportComponentFromUrlDialog = ({ 202 | isOpen, 203 | onImport, 204 | onCancel, 205 | initialValue, 206 | }: SaveAsDialogProps) => { 207 | const urlInputRef = useRef(); 208 | return ( 209 | 210 | {"Import component"} 211 |
{ 213 | if (urlInputRef.current) { 214 | onImport(urlInputRef.current.value); 215 | } 216 | e.preventDefault(); 217 | }} 218 | > 219 | 220 | 230 | 231 | 232 | 233 | 236 | 237 |
238 |
239 | ); 240 | }; 241 | -------------------------------------------------------------------------------- /src/DragNDrop/VertexAiExporter.tsx: -------------------------------------------------------------------------------- 1 | /** 2 | * @license 3 | * Copyright 2021 Alexey Volkov 4 | * SPDX-License-Identifier: Apache-2.0 5 | * @author Alexey Volkov 6 | * @copyright 2021 Alexey Volkov 7 | */ 8 | 9 | import { 10 | useStoreState, 11 | } from "react-flow-renderer"; 12 | 13 | import { ComponentSpec } from "../componentSpec"; 14 | import { augmentComponentSpec } from "./GraphComponentSpecFlow"; 15 | import { buildVertexPipelineSpecFromGraphComponentSpec } from '../compilers/GoogleCloudVertexAIPipelines/vertexAiCompiler' 16 | 17 | interface VertexAiExporterProps { 18 | componentSpec: ComponentSpec; 19 | } 20 | 21 | const VertexAiExporter = ({componentSpec}: VertexAiExporterProps) => { 22 | const nodes = useStoreState((store) => store.nodes); 23 | 24 | let vertexPipelineSpecText = ""; 25 | try { 26 | // Augmenting the componentSpec might be useless right now, but it can stabilize the output (e.g. ordering). 27 | // Also, in the future, the original spec might be included in the vertexPipelineSpec 28 | componentSpec = augmentComponentSpec(componentSpec, nodes, true, true); 29 | const vertexPipelineSpec = buildVertexPipelineSpecFromGraphComponentSpec(componentSpec); 30 | vertexPipelineSpecText = JSON.stringify(vertexPipelineSpec, undefined, 2); 31 | } catch(err) { 32 | vertexPipelineSpecText = String(err); 33 | } 34 | 35 | const vertexPipelineSpecTextBlob = new Blob([vertexPipelineSpecText], { type: "application/json" }); // Or application/x-yaml (which leads to downloading) 36 | // TODO: Call vertexPipelineSpecTextBlobUrl.revokeObjectURL in the future 37 | const vertexPipelineSpecTextBlobUrl = URL.createObjectURL(vertexPipelineSpecTextBlob); 38 | 39 | return ( 40 |
41 | 42 | Cloud IR 46 | vertex_pipeline_spec.json 47 | 48 | 49 |
{vertexPipelineSpecText}
50 |
51 | ); 52 | }; 53 | 54 | export default VertexAiExporter; 55 | -------------------------------------------------------------------------------- /src/DragNDrop/dnd.css: -------------------------------------------------------------------------------- 1 | /* Use the proper box layout model by default, but allow elements to override */ 2 | html { 3 | box-sizing: border-box; 4 | } 5 | *, *:before, *:after { 6 | box-sizing: inherit; 7 | } 8 | details > * { 9 | box-sizing: border-box; 10 | } 11 | 12 | .dndflow { 13 | flex-direction: row; 14 | display: flex; 15 | height: 100%; 16 | } 17 | 18 | .dndflow aside { 19 | width: 300px; 20 | border-right: 1px solid #eee; 21 | padding: 15px 10px; 22 | font-size: 12px; 23 | background: #fcfcfc; 24 | } 25 | 26 | .dndflow aside .description { 27 | margin-bottom: 10px; 28 | } 29 | 30 | .dndflow .nodeList { 31 | overflow: auto; 32 | } 33 | 34 | .dndflow .sidebar-node { 35 | /* Resets position: absolute */ 36 | position: relative; 37 | margin: 5px; 38 | } 39 | 40 | /* Node styles */ 41 | 42 | .react-flow__node { 43 | padding: 10px; 44 | border-radius: 3px; 45 | width: 180px; 46 | min-height: 40px; 47 | font-size: 12px; 48 | color: #222; 49 | text-align: center; 50 | border-width: 1px; 51 | border-style: solid; 52 | background: #fff; 53 | border-color: #1a192b; 54 | --border-color: #1a192b; 55 | } 56 | 57 | .react-flow__node.selectable:hover { 58 | box-shadow: 0 2px 4px 2px rgba(0, 0, 0, 0.08); 59 | } 60 | 61 | .react-flow__node.selected, 62 | .react-flow__node.selected:hover 63 | { 64 | box-shadow: 0 0 0 0.5px var(--border-color); 65 | } 66 | 67 | /* Connecting, but the connection is not valid by default */ 68 | .react-flow__handle-connecting { 69 | background: #ff6060; 70 | } 71 | 72 | /* Connecting, but the connection is valid */ 73 | .react-flow__handle-valid { 74 | background: #55dd99; 75 | } 76 | 77 | .react-flow__handle.missing-argument { 78 | background: #ff6060; 79 | } 80 | 81 | .dndflow .reactflow-wrapper { 82 | flex-grow: 1; 
83 | height: 100%; 84 | min-height: 50%; 85 | } 86 | 87 | .dndflow .react-flow__handle .label { 88 | position: absolute; 89 | left: 50%; 90 | top: 50%; 91 | 92 | text-overflow: ellipsis; 93 | overflow: hidden; 94 | white-space: nowrap; 95 | /* max-width: 50px; */ 96 | } 97 | 98 | .dndflow .react-flow__handle.react-flow__handle-top .label { 99 | transform: translate(-50%, -50%) translateY(calc(-50% - 4px)); 100 | } 101 | 102 | .dndflow .react-flow__handle.react-flow__handle-bottom .label { 103 | transform: translate(-50%, -50%) translateY(calc(50% + 4px)); 104 | } 105 | 106 | .dndflow .react-flow__handle.react-flow__handle-left .label { 107 | transform: translate(-50%, -50%) translateX(calc(-50% - 4px)); 108 | } 109 | 110 | .dndflow .react-flow__handle.react-flow__handle-right .label { 111 | transform: translate(-50%, -50%) translateX(calc(50% + 4px)); 112 | } 113 | 114 | .dndflow .react-flow__handle.react-flow__handle-bottom .label-angled { 115 | --angle: -45deg; 116 | transform: translate(-50%, -50%) rotate(var(--angle)) translateX(calc(-50% - 4px)); 117 | } 118 | 119 | .dndflow .react-flow__handle.react-flow__handle-top .label-angled { 120 | --angle: -45deg; 121 | transform: translate(-50%, -50%) rotate(var(--angle)) translateX(calc(50% + 4px)); 122 | } 123 | 124 | @media (max-width: 640px) { 125 | .dndflow { 126 | flex-direction: column; 127 | } 128 | 129 | .dndflow aside { 130 | width: 100%; 131 | } 132 | 133 | .react-flow__minimap { 134 | display: none; 135 | } 136 | } 137 | 138 | .highlight-invalid-inputs input:required:invalid { 139 | /* box-shadow: 0 0 0 0.5px #FF0000; */ 140 | /* border-color: #c00000; */ 141 | /* border: 2px dashed red; */ 142 | border: 1px solid red; 143 | } 144 | 145 | .link-button { 146 | background-color: transparent; 147 | border: none; 148 | cursor: pointer; 149 | /* text-decoration: underline; */ 150 | display: inline; 151 | margin: 0; 152 | padding: 0; 153 | } 154 | 
-------------------------------------------------------------------------------- /src/DragNDrop/index.tsx: -------------------------------------------------------------------------------- 1 | /** 2 | * @license 3 | * Copyright 2021 Alexey Volkov 4 | * SPDX-License-Identifier: Apache-2.0 5 | * @author Alexey Volkov 6 | * @copyright 2021 Alexey Volkov 7 | */ 8 | 9 | import { useEffect, useState } from 'react'; 10 | import { 11 | ReactFlowProvider, 12 | Controls, 13 | Background, 14 | MiniMap, 15 | } from 'react-flow-renderer'; 16 | 17 | import { downloadDataWithCache } from '../cacheUtils'; 18 | import { ComponentSpec } from '../componentSpec'; 19 | import GraphComponentSpecFlow, { 20 | EMPTY_GRAPH_COMPONENT_SPEC, 21 | } from "./GraphComponentSpecFlow"; 22 | import Sidebar from './Sidebar'; 23 | import { getAppSettings } from '../appSettings'; 24 | import { fullyLoadComponentRefFromUrl } from "../componentStore"; 25 | import { 26 | loadPipelineSpecFromSessionStorage, 27 | PipelineAutoSaver, 28 | } from "./PipelineAutoSaver"; 29 | 30 | import './dnd.css'; 31 | 32 | const GRID_SIZE = 10; 33 | 34 | const DnDFlow = () => { 35 | const [componentSpec, setComponentSpec] = useState(); 36 | const [appSettings] = useState(getAppSettings()); 37 | 38 | const downloadData = downloadDataWithCache; 39 | 40 | useEffect(() => { 41 | (async () => { 42 | const restoredComponentSpec = loadPipelineSpecFromSessionStorage(); 43 | if (restoredComponentSpec !== undefined) { 44 | setComponentSpec(restoredComponentSpec); 45 | return; 46 | } 47 | const defaultPipelineUrl = appSettings.defaultPipelineUrl; 48 | try { 49 | const defaultPipelineRef = await fullyLoadComponentRefFromUrl( 50 | defaultPipelineUrl, 51 | downloadData 52 | ); 53 | setComponentSpec(defaultPipelineRef.spec); 54 | } catch (err) { 55 | console.error( 56 | `Failed to load the default pipeline from ${defaultPipelineUrl}` 57 | ); 58 | console.error(err); 59 | setComponentSpec(EMPTY_GRAPH_COMPONENT_SPEC); 60 | } 61 | })(); 62 | 
}, [appSettings.defaultPipelineUrl, downloadData]); 63 | 64 | if (componentSpec === undefined) { 65 | return <>; 66 | } 67 | 68 | return ( 69 |
70 | 71 |
72 | 78 | 79 | 80 | 81 | 82 |
83 | 89 | 90 |
91 |
92 | ); 93 | }; 94 | 95 | export default DnDFlow; 96 | -------------------------------------------------------------------------------- /src/DragNDrop/testData/name_collision_test.pipeline.component.yaml: -------------------------------------------------------------------------------- 1 | name: Name collision test pipeline 2 | metadata: 3 | annotations: 4 | sdk: 'https://cloud-pipelines.github.io/pipeline-editor/' 5 | inputs: 6 | - name: Foo 7 | annotations: 8 | editor.position: '{"x":40,"y":40,"width":180,"height":40}' 9 | outputs: 10 | - name: Foo 11 | annotations: 12 | editor.position: '{"x":40,"y":200,"width":180,"height":40}' 13 | implementation: 14 | graph: 15 | tasks: 16 | Foo: 17 | componentRef: 18 | spec: 19 | name: Foo 20 | inputs: 21 | - name: Foo 22 | outputs: 23 | - name: Foo 24 | implementation: 25 | container: 26 | image: alpine 27 | command: 28 | - echo 29 | - Hello 30 | arguments: 31 | Foo: 32 | graphInput: 33 | inputName: Foo 34 | annotations: 35 | editor.position: '{"x":40,"y":120,"width":180,"height":40}' 36 | outputValues: 37 | Foo: 38 | taskOutput: 39 | taskId: Foo 40 | outputName: Foo 41 | -------------------------------------------------------------------------------- /src/appSettings.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * @license 3 | * Copyright 2022 Alexey Volkov 4 | * SPDX-License-Identifier: Apache-2.0 5 | * @author Alexey Volkov 6 | * @copyright 2022 Alexey Volkov 7 | */ 8 | 9 | // Settings: Default values and local storage configuration keys 10 | const COMPONENT_LIBRARY_URL_DEFAULT_VALUE = 11 | process.env.PUBLIC_URL + "/component_library.yaml"; 12 | const COMPONENT_LIBRARY_URL_LOCAL_STORAGE_KEY = 13 | "ComponentLibrary/component_library_url"; 14 | 15 | const PIPELINE_LIBRARY_URL_DEFAULT_VALUE = 16 | process.env.PUBLIC_URL + "/pipeline_library.yaml"; 17 | const PIPELINE_LIBRARY_URL_LOCAL_STORAGE_KEY = 18 | "PipelineLibrary/pipeline_library_url"; 19 | 20 | // TODO: Remove this 
setting in favor of taking the first pipeline from the pipeline library 21 | const DEFAULT_PIPELINE_URL_DEFAULT_VALUE = 22 | "https://raw.githubusercontent.com/Ark-kun/pipelines/2edfd25b5ee3a4aa149c24a225a50041fbd3662d/components/XGBoost/_samples/sample_pipeline.pipeline.component.yaml"; 23 | const DEFAULT_PIPELINE_URL_LOCAL_STORAGE_KEY = "App/default_pipeline_url"; 24 | 25 | const COMPONENT_FEED_URLS_DEFAULT_VALUE = [ 26 | "https://raw.githubusercontent.com/Ark-kun/pipeline_components/pipeline_component_feed/pipeline_component_feed.yaml", 27 | ]; 28 | const COMPONENT_FEED_URLS_LOCAL_STORAGE_KEY = 29 | "ComponentSearch/component_feed_urls"; 30 | 31 | const GITHUB_SEARCH_LOCATIONS_DEFAULT_VALUE = [ 32 | "repo:Ark-kun/pipeline_components path:components", 33 | ]; 34 | const GITHUB_SEARCH_LOCATIONS_LOCAL_STORAGE_KEY = 35 | "ComponentSearch/github_search_locations"; 36 | 37 | const GOOGLE_CLOUD_OAUTH_CLIENT_ID_DEFAULT_VALUE = 38 | "640001104961-2m8hs192tmd9f9nssbr5thr5o3uhmita.apps.googleusercontent.com"; 39 | const GOOGLE_CLOUD_OAUTH_CLIENT_ID_LOCAL_STORAGE_KEY = 40 | "GoogleCloud/google_cloud_oauth_client_id"; 41 | 42 | // Settings interfaces and classes 43 | interface Setting { 44 | get value(): T; 45 | set value(value: T); 46 | resetToDefault(): T; 47 | isOverridden(): boolean; 48 | } 49 | 50 | abstract class SettingBackedByLocalStorage implements Setting { 51 | _defaultValue: T; 52 | _storageKey: string; 53 | 54 | constructor(storageKey: string, defaultValue: T) { 55 | this._defaultValue = defaultValue; 56 | this._storageKey = storageKey; 57 | } 58 | 59 | get value() { 60 | // Defensive programming. 61 | // The window.localStorage should never be missing, null or undefined. 62 | // And localStorage.getItem should never fail. 63 | // However in practice there are reports of failures on the Internet for one reason or another. 64 | // So I'm being extra cautious here. 
65 | try { 66 | const stringValue = window.localStorage.getItem(this._storageKey); 67 | if (stringValue !== null) { 68 | return this.deserialize(stringValue); 69 | } 70 | } catch (err) { 71 | console.error( 72 | "window.localStorage.getItem was unavailable or threw an exception. This should not happen." 73 | ); 74 | console.error(err); 75 | } 76 | 77 | return this._defaultValue; 78 | } 79 | 80 | set value(value: T) { 81 | const valueString = this.serialize(value); 82 | const defaultValueString = this.serialize(this._defaultValue); 83 | if (valueString === defaultValueString) { 84 | window.localStorage.removeItem(this._storageKey); 85 | } else { 86 | window.localStorage.setItem(this._storageKey, valueString); 87 | } 88 | } 89 | 90 | abstract serialize(value: T): string; 91 | abstract deserialize(stringValue: string): T; 92 | 93 | resetToDefault() { 94 | window.localStorage.removeItem(this._storageKey); 95 | return this._defaultValue; 96 | } 97 | 98 | isOverridden() { 99 | return window.localStorage.getItem(this._storageKey) !== null; 100 | } 101 | } 102 | 103 | class StringSettingBackedByLocalStorage extends SettingBackedByLocalStorage { 104 | serialize(value: string): string { 105 | return value; 106 | } 107 | deserialize(stringValue: string): string { 108 | return stringValue; 109 | } 110 | } 111 | 112 | class StringArraySettingBackedByLocalStorage extends SettingBackedByLocalStorage< 113 | string[] 114 | > { 115 | serialize(value: string[]): string { 116 | return JSON.stringify(value); 117 | } 118 | deserialize(stringValue: string): string[] { 119 | return JSON.parse(stringValue); 120 | } 121 | } 122 | 123 | export interface MutableAppSettings { 124 | componentLibraryUrl: Setting; 125 | pipelineLibraryUrl: Setting; 126 | defaultPipelineUrl: Setting; 127 | componentFeedUrls: Setting; 128 | gitHubSearchLocations: Setting; 129 | googleCloudOAuthClientId: Setting; 130 | } 131 | 132 | class AppSettingsBackedByLocalStorage implements MutableAppSettings { 133 | 
componentLibraryUrl = new StringSettingBackedByLocalStorage( 134 | COMPONENT_LIBRARY_URL_LOCAL_STORAGE_KEY, 135 | COMPONENT_LIBRARY_URL_DEFAULT_VALUE 136 | ); 137 | pipelineLibraryUrl = new StringSettingBackedByLocalStorage( 138 | PIPELINE_LIBRARY_URL_LOCAL_STORAGE_KEY, 139 | PIPELINE_LIBRARY_URL_DEFAULT_VALUE 140 | ); 141 | defaultPipelineUrl = new StringSettingBackedByLocalStorage( 142 | DEFAULT_PIPELINE_URL_LOCAL_STORAGE_KEY, 143 | DEFAULT_PIPELINE_URL_DEFAULT_VALUE 144 | ); 145 | componentFeedUrls = new StringArraySettingBackedByLocalStorage( 146 | COMPONENT_FEED_URLS_LOCAL_STORAGE_KEY, 147 | COMPONENT_FEED_URLS_DEFAULT_VALUE 148 | ); 149 | gitHubSearchLocations = new StringArraySettingBackedByLocalStorage( 150 | GITHUB_SEARCH_LOCATIONS_LOCAL_STORAGE_KEY, 151 | GITHUB_SEARCH_LOCATIONS_DEFAULT_VALUE 152 | ); 153 | googleCloudOAuthClientId = new StringSettingBackedByLocalStorage( 154 | GOOGLE_CLOUD_OAUTH_CLIENT_ID_LOCAL_STORAGE_KEY, 155 | GOOGLE_CLOUD_OAUTH_CLIENT_ID_DEFAULT_VALUE 156 | ); 157 | } 158 | 159 | export interface AppSettings { 160 | componentLibraryUrl: string; 161 | pipelineLibraryUrl: string; 162 | defaultPipelineUrl: string; 163 | componentFeedUrls: string[]; 164 | gitHubSearchLocations: string[]; 165 | googleCloudOAuthClientId: string; 166 | } 167 | 168 | export const getMutableAppSettings = (): MutableAppSettings => 169 | new AppSettingsBackedByLocalStorage(); 170 | 171 | export const getAppSettings = (): AppSettings => { 172 | const mutableAppSettings = getMutableAppSettings(); 173 | return { 174 | componentLibraryUrl: mutableAppSettings.componentLibraryUrl.value, 175 | pipelineLibraryUrl: mutableAppSettings.pipelineLibraryUrl.value, 176 | defaultPipelineUrl: mutableAppSettings.defaultPipelineUrl.value, 177 | componentFeedUrls: mutableAppSettings.componentFeedUrls.value, 178 | gitHubSearchLocations: mutableAppSettings.gitHubSearchLocations.value, 179 | googleCloudOAuthClientId: mutableAppSettings.googleCloudOAuthClientId.value, 180 | }; 181 | 
}; 182 | -------------------------------------------------------------------------------- /src/cacheUtils.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * @license 3 | * Copyright 2021 Alexey Volkov 4 | * SPDX-License-Identifier: Apache-2.0 5 | * @author Alexey Volkov 6 | * @copyright 2021 Alexey Volkov 7 | */ 8 | 9 | import yaml from "js-yaml"; 10 | 11 | const httpGetDataWithCache = async ( 12 | url: string, 13 | transformer: (buffer: ArrayBuffer) => T, 14 | cacheName: string = "cache", 15 | updateIfInCache: boolean = false 16 | ): Promise => { 17 | const cache = await caches.open(cacheName); 18 | let response = await cache.match(url); 19 | let needToUpdateCache = false; 20 | if (response === undefined || updateIfInCache) { 21 | try { 22 | const newResponse = await fetch(url); 23 | if (!newResponse.ok) { 24 | throw new Error( 25 | `Network response was not OK: ${newResponse.status}: ${newResponse.statusText}` 26 | ); 27 | } 28 | response = newResponse; 29 | needToUpdateCache = true; 30 | } catch (err) { 31 | if (response === undefined) { 32 | throw err; 33 | } 34 | } 35 | } 36 | // Preventing TypeError: Failed to execute 'put' on 'Cache': Response body is already used 37 | const responseForCaching = response.clone(); 38 | // Need to verify that the transformer executes with error before putting data in cache. 
39 | const result = transformer(await response.arrayBuffer()); 40 | if (needToUpdateCache) { 41 | await cache.put(url, responseForCaching); 42 | } 43 | return result; 44 | }; 45 | 46 | export type DownloadDataType = ( 47 | url: string, 48 | transformer: (buffer: ArrayBuffer) => T 49 | ) => Promise; 50 | 51 | export async function downloadData( 52 | url: string, 53 | transformer: (buffer: ArrayBuffer) => T 54 | ): Promise { 55 | const response = await fetch(url); 56 | if (!response.ok) { 57 | throw new Error( 58 | `Network response was not OK: ${response.status}: ${response.statusText}` 59 | ); 60 | } 61 | const result = transformer(await response.arrayBuffer()); 62 | return result; 63 | } 64 | 65 | const IMMUTABLE_URL_REGEXPS = [ 66 | /^https:\/\/raw.githubusercontent.com\/[-A-Za-z_]+\/[-A-Za-z_]+\/[0-9a-fA-f]{40}\/.*/, 67 | /^https:\/\/gitlab.com\/([-A-Za-z_]+\/){2,}-\/raw\/[0-9a-fA-f]{40}\/.*/, 68 | ]; 69 | 70 | export async function downloadDataWithCache( 71 | url: string, 72 | transformer: (buffer: ArrayBuffer) => T 73 | ): Promise { 74 | const isImmutable = IMMUTABLE_URL_REGEXPS.some((regexp) => url.match(regexp)); 75 | return httpGetDataWithCache(url, transformer, "cache", !isImmutable); 76 | } 77 | 78 | // Data transformer functions 79 | 80 | function loadTextFromData(buffer: ArrayBuffer): string { 81 | return new TextDecoder().decode(buffer); 82 | } 83 | 84 | export function loadObjectFromJsonData(buffer: ArrayBuffer): object { 85 | const obj = JSON.parse(loadTextFromData(buffer)); 86 | if (typeof obj === "object" && obj !== undefined && obj !== null) { 87 | return obj; 88 | } 89 | throw Error(`Expected a JSON-encoded object, but got "${typeof obj}"`); 90 | } 91 | 92 | export function loadObjectFromYamlData(buffer: ArrayBuffer): object { 93 | const obj = yaml.load(loadTextFromData(buffer)); 94 | if (typeof obj === "object" && obj !== undefined && obj !== null) { 95 | return obj; 96 | } 97 | throw Error(`Expected a YAML-encoded object, but got "${typeof 
obj}"`); 98 | } 99 | -------------------------------------------------------------------------------- /src/compilers/Argo/argo-ui/src/models/kubernetes.ts: -------------------------------------------------------------------------------- 1 | // File taken from https://github.com/argoproj/argo-ui/blob/b7a057c465b2659ae3a0d2f9574caa59f15c2ae6/src/models/kubernetes.ts 2 | 3 | export type Time = string; 4 | export type VolumeDevice = any; 5 | export type Volume = any; 6 | export type EnvFromSource = any; 7 | export type EnvVarSource = any; 8 | export type ResourceRequirements = any; 9 | export type VolumeMount = any; 10 | export type Probe = any; 11 | export type Lifecycle = any; 12 | export type TerminationMessagePolicy = any; 13 | export type PullPolicy = any; 14 | export type SecurityContext = any; 15 | export type PersistentVolumeClaim = any; 16 | export type Affinity = any; 17 | 18 | export interface ListMeta { 19 | continue?: string; 20 | resourceVersion?: string; 21 | selfLink?: string; 22 | } 23 | 24 | export interface ObjectMeta { 25 | name?: string; 26 | generateName?: string; 27 | namespace?: string; 28 | selfLink?: string; 29 | uid?: string; 30 | resourceVersion?: string; 31 | generation?: number; 32 | creationTimestamp?: Time; 33 | deletionTimestamp?: Time; 34 | deletionGracePeriodSeconds?: number; 35 | labels?: {[name: string]: string}; 36 | annotations?: {[name: string]: string}; 37 | ownerReferences?: any[]; 38 | initializers?: any; 39 | finalizers?: string[]; 40 | clusterName?: string; 41 | } 42 | 43 | export interface TypeMeta { 44 | kind?: string; 45 | apiVersion?: string; 46 | } 47 | 48 | export interface LocalObjectReference { 49 | name?: string; 50 | } 51 | 52 | export interface ObjectReference { 53 | kind?: string; 54 | namespace?: string; 55 | name?: string; 56 | uid?: string; 57 | apiVersion?: string; 58 | resourceVersion?: string; 59 | fieldPath?: string; 60 | } 61 | 62 | export interface SecretKeySelector extends LocalObjectReference { 63 | 
key: string; 64 | optional: boolean; 65 | } 66 | 67 | export interface ContainerPort { 68 | name?: string; 69 | hostPort?: number; 70 | containerPort: number; 71 | protocol?: string; 72 | hostIP?: string; 73 | } 74 | 75 | export interface EnvVar { 76 | name: string; 77 | value?: string; 78 | valueFrom?: EnvVarSource; 79 | } 80 | 81 | export interface Container { 82 | name: string; 83 | image?: string; 84 | command?: string[]; 85 | args?: string[]; 86 | workingDir?: string; 87 | ports?: ContainerPort[]; 88 | envFrom?: EnvFromSource[]; 89 | env?: EnvVar[]; 90 | resources?: ResourceRequirements; 91 | volumeMounts?: VolumeMount[]; 92 | livenessProbe?: Probe; 93 | readinessProbe?: Probe; 94 | lifecycle?: Lifecycle; 95 | terminationMessagePath?: string; 96 | terminationMessagePolicy?: TerminationMessagePolicy; 97 | imagePullPolicy?: PullPolicy; 98 | securityContext?: SecurityContext; 99 | stdin?: boolean; 100 | stdinOnce?: boolean; 101 | tty?: boolean; 102 | } 103 | 104 | export interface WatchEvent { 105 | object: T; 106 | type: 'ADDED' | 'MODIFIED' | 'DELETED' | 'ERROR'; 107 | } 108 | -------------------------------------------------------------------------------- /src/compilers/Argo/argoCompiler.test.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * @license 3 | * Copyright 2021 Alexey Volkov 4 | * SPDX-License-Identifier: Apache-2.0 5 | * @author Alexey Volkov 6 | * @copyright 2021 Alexey Volkov 7 | */ 8 | 9 | import fs from "fs"; 10 | import yaml from "js-yaml"; 11 | import path from "path"; 12 | import { ComponentSpec } from "../../componentSpec"; 13 | import { buildArgoWorkflowFromGraphComponent } from "./argoCompiler"; 14 | 15 | test("buildArgoWorkflowFromGraphComponent compiles Data_passing_pipeline", () => { 16 | const sourcePath = path.resolve( 17 | __dirname, 18 | "../testData/Data_passing_pipeline/pipeline.component.yaml" 19 | ); 20 | const expectedPath = path.resolve( 21 | __dirname, 22 | 
"./testData/Data_passing_pipeline/argo_workflow.yaml" 23 | ); 24 | const pipelineText = fs.readFileSync(sourcePath).toString(); 25 | const pipelineSpec = yaml.load(pipelineText) as ComponentSpec; 26 | const actualResult = buildArgoWorkflowFromGraphComponent( 27 | pipelineSpec, 28 | new Map( 29 | Object.entries({ 30 | anything_param: "anything_param", 31 | something_param: "something_param", 32 | string_param: "string_param_override", 33 | }) 34 | ) 35 | ); 36 | if (fs.existsSync(expectedPath)) { 37 | const expectedResultText = fs 38 | .readFileSync(expectedPath) 39 | .toString(); 40 | const expectedResult = yaml.load(expectedResultText); 41 | expect(actualResult).toEqual(expectedResult); 42 | } else { 43 | fs.writeFileSync( 44 | expectedPath, 45 | yaml.dump(actualResult, { 46 | lineWidth: -1, // Don't fold long strings 47 | quotingType: "\"", 48 | }) 49 | ); 50 | } 51 | }); 52 | 53 | test("buildArgoWorkflowFromGraphComponent compiles XGBoost_pipeline", () => { 54 | const sourcePath = path.resolve( 55 | __dirname, 56 | "../testData/XGBoost_pipeline/pipeline.component.yaml" 57 | ); 58 | const expectedPath = path.resolve( 59 | __dirname, 60 | "./testData/XGBoost_pipeline/argo_workflow.yaml" 61 | ); 62 | const pipelineText = fs.readFileSync(sourcePath).toString(); 63 | const pipelineSpec = yaml.load(pipelineText) as ComponentSpec; 64 | const actualResult = buildArgoWorkflowFromGraphComponent( 65 | pipelineSpec, 66 | new Map() 67 | ); 68 | if (fs.existsSync(expectedPath)) { 69 | const expectedResultText = fs 70 | .readFileSync(expectedPath) 71 | .toString(); 72 | const expectedResult = yaml.load(expectedResultText); 73 | expect(actualResult).toEqual(expectedResult); 74 | } else { 75 | fs.writeFileSync( 76 | expectedPath, 77 | yaml.dump(actualResult, { 78 | lineWidth: -1, // Don't fold long strings 79 | quotingType: "\"", 80 | }) 81 | ); 82 | } 83 | }); 84 | 85 | test("buildArgoWorkflowFromGraphComponent compiles Name_collision_pipeline", () => { 86 | const sourcePath = 
path.resolve( 87 | __dirname, 88 | "../testData/Name_collision_pipeline/pipeline.component.yaml" 89 | ); 90 | const expectedPath = path.resolve( 91 | __dirname, 92 | "testData/Name_collision_pipeline/argo_workflow.yaml" 93 | ); 94 | const pipelineText = fs.readFileSync(sourcePath).toString(); 95 | const pipelineSpec = yaml.load(pipelineText) as ComponentSpec; 96 | const actualResult = buildArgoWorkflowFromGraphComponent( 97 | pipelineSpec, 98 | new Map() 99 | ); 100 | if (fs.existsSync(expectedPath)) { 101 | const expectedResultText = fs.readFileSync(expectedPath).toString(); 102 | const expectedResult = yaml.load(expectedResultText); 103 | expect(actualResult).toEqual(expectedResult); 104 | } else { 105 | fs.writeFileSync( 106 | expectedPath, 107 | yaml.dump(actualResult, { 108 | lineWidth: -1, // Don't fold long strings 109 | quotingType: '"', 110 | }) 111 | ); 112 | } 113 | }); 114 | -------------------------------------------------------------------------------- /src/compilers/Argo/testData/Name_collision_pipeline/argo_workflow.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: argoproj.io/v1alpha1 2 | kind: Workflow 3 | metadata: 4 | generateName: name-collision-test-pipeline 5 | annotations: 6 | cloud-pipelines.net/pipeline-editor: "true" 7 | spec: 8 | entrypoint: Name-collision-test-pipeline 9 | templates: 10 | - name: Foo-bar 11 | inputs: 12 | parameters: 13 | - name: Foo-bar 14 | artifacts: 15 | - name: Foo-bar 16 | path: /tmp/inputs/Foo_bar/data 17 | outputs: 18 | parameters: [] 19 | artifacts: 20 | - name: Foo-bar 21 | path: /tmp/outputs/Foo_bar/data 22 | container: 23 | name: main 24 | image: alpine 25 | command: 26 | - sh 27 | - "-exc" 28 | - | 29 | input_value=$0 30 | input_path=$1 31 | output_path=$2 32 | mkdir -p "$(dirname "$output_path")" 33 | 34 | result="Hello Foo bar ${input_value} ${input_path} ${output_path}" 35 | echo "$result" 36 | echo "$result" > "$output_path" 37 | - 
"{{inputs.parameters.Foo-bar}}" 38 | - "{{inputs.artifacts.Foo-bar.path}}" 39 | - "{{outputs.artifacts.Foo-bar.path}}" 40 | - name: Foo-bar-2 41 | inputs: 42 | parameters: 43 | - name: Foo-bar-2 44 | artifacts: 45 | - name: Foo-bar-2 46 | path: /tmp/inputs/Foo_bar_2/data 47 | outputs: 48 | parameters: [] 49 | artifacts: 50 | - name: Foo-bar-2 51 | path: /tmp/outputs/Foo_bar_2/data 52 | container: 53 | name: main 54 | image: alpine 55 | command: 56 | - sh 57 | - "-exc" 58 | - | 59 | input_value=$0 60 | input_path=$1 61 | output_path=$2 62 | mkdir -p "$(dirname "$output_path")" 63 | 64 | result="Hello Foo bar ${input_value} ${input_path} ${output_path}" 65 | echo "$result" 66 | echo "$result" > "$output_path" 67 | - "{{inputs.parameters.Foo-bar-2}}" 68 | - "{{inputs.artifacts.Foo-bar-2.path}}" 69 | - "{{outputs.artifacts.Foo-bar-2.path}}" 70 | - name: Name-collision-test-pipeline 71 | inputs: 72 | parameters: 73 | - name: Foo-bar 74 | artifacts: 75 | - name: Foo-bar 76 | outputs: 77 | artifacts: 78 | - name: Foo-bar 79 | from: "{{tasks.Foo-bar.outputs.artifacts.Foo-bar}}" 80 | dag: 81 | tasks: 82 | - name: Foo-bar 83 | template: Foo-bar 84 | arguments: 85 | parameters: 86 | - name: Foo-bar 87 | value: "{{inputs.parameters.Foo-bar}}" 88 | artifacts: 89 | - name: Foo-bar 90 | from: "{{inputs.artifacts.Foo-bar}}" 91 | dependencies: [] 92 | - name: Foo-bar-2 93 | template: Foo-bar-2 94 | arguments: 95 | parameters: 96 | - name: Foo-bar-2 97 | value: "{{inputs.parameters.Foo-bar}}" 98 | artifacts: 99 | - name: Foo-bar-2 100 | from: "{{inputs.artifacts.Foo-bar}}" 101 | dependencies: [] 102 | arguments: 103 | parameters: [] 104 | artifacts: [] 105 | -------------------------------------------------------------------------------- /src/compilers/Argo/testData/XGBoost_pipeline/argo_workflow.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: argoproj.io/v1alpha1 2 | kind: Workflow 3 | metadata: 4 | generateName: 
xgboost-pipeline 5 | annotations: 6 | cloud-pipelines.net/pipeline-editor: "true" 7 | spec: 8 | entrypoint: XGBoost-pipeline 9 | templates: 10 | - name: Chicago-Taxi-Trips-dataset 11 | inputs: 12 | parameters: 13 | - name: Select 14 | - name: Where 15 | - name: Limit 16 | - name: Format 17 | artifacts: [] 18 | outputs: 19 | parameters: [] 20 | artifacts: 21 | - name: Table 22 | path: /tmp/outputs/Table/data 23 | container: 24 | name: main 25 | image: byrnedo/alpine-curl@sha256:548379d0a4a0c08b9e55d9d87a592b7d35d9ab3037f4936f5ccd09d0b625a342 26 | command: 27 | - sh 28 | - "-c" 29 | - | 30 | set -e -x -o pipefail 31 | output_path="$0" 32 | select="$1" 33 | where="$2" 34 | limit="$3" 35 | format="$4" 36 | mkdir -p "$(dirname "$output_path")" 37 | curl --get 'https://data.cityofchicago.org/resource/wrvz-psew.'"${format}" \ 38 | --data-urlencode '$limit='"${limit}" \ 39 | --data-urlencode '$where='"${where}" \ 40 | --data-urlencode '$select='"${select}" \ 41 | | tr -d '"' > "$output_path" # Removing unneeded quotes around all numbers 42 | - "{{outputs.artifacts.Table.path}}" 43 | - "{{inputs.parameters.Select}}" 44 | - "{{inputs.parameters.Where}}" 45 | - "{{inputs.parameters.Limit}}" 46 | - "{{inputs.parameters.Format}}" 47 | - name: Xgboost-train 48 | inputs: 49 | parameters: [] 50 | artifacts: 51 | - name: training_data 52 | path: /tmp/inputs/training_data/data 53 | outputs: 54 | parameters: [] 55 | artifacts: 56 | - name: model 57 | path: /tmp/outputs/model/data 58 | - name: model_config 59 | path: /tmp/outputs/model_config/data 60 | container: 61 | name: main 62 | image: python:3.7 63 | command: 64 | - sh 65 | - "-c" 66 | - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'xgboost==1.1.1' 'pandas==1.0.5' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'xgboost==1.1.1' 'pandas==1.0.5' --user) && "$0" "$@" 67 | - python3 68 | - "-u" 69 | - "-c" 70 | - | 71 | def 
_make_parent_dirs_and_return_path(file_path: str): 72 | import os 73 | os.makedirs(os.path.dirname(file_path), exist_ok=True) 74 | return file_path 75 | 76 | def xgboost_train( 77 | training_data_path, # Also supports LibSVM 78 | model_path, 79 | model_config_path, 80 | starting_model_path = None, 81 | 82 | label_column = 0, 83 | num_iterations = 10, 84 | booster_params = None, 85 | 86 | # Booster parameters 87 | objective = 'reg:squarederror', 88 | booster = 'gbtree', 89 | learning_rate = 0.3, 90 | min_split_loss = 0, 91 | max_depth = 6, 92 | ): 93 | '''Train an XGBoost model. 94 | 95 | Args: 96 | training_data_path: Path for the training data in CSV format. 97 | model_path: Output path for the trained model in binary XGBoost format. 98 | model_config_path: Output path for the internal parameter configuration of Booster as a JSON string. 99 | starting_model_path: Path for the existing trained model to start from. 100 | label_column: Column containing the label data. 101 | num_boost_rounds: Number of boosting iterations. 102 | booster_params: Parameters for the booster. See https://xgboost.readthedocs.io/en/latest/parameter.html 103 | objective: The learning task and the corresponding learning objective. 104 | See https://xgboost.readthedocs.io/en/latest/parameter.html#learning-task-parameters 105 | The most common values are: 106 | "reg:squarederror" - Regression with squared loss (default). 107 | "reg:logistic" - Logistic regression. 108 | "binary:logistic" - Logistic regression for binary classification, output probability. 
109 | "binary:logitraw" - Logistic regression for binary classification, output score before logistic transformation 110 | "rank:pairwise" - Use LambdaMART to perform pairwise ranking where the pairwise loss is minimized 111 | "rank:ndcg" - Use LambdaMART to perform list-wise ranking where Normalized Discounted Cumulative Gain (NDCG) is maximized 112 | 113 | Annotations: 114 | author: Alexey Volkov 115 | ''' 116 | import pandas 117 | import xgboost 118 | 119 | df = pandas.read_csv( 120 | training_data_path, 121 | ) 122 | 123 | training_data = xgboost.DMatrix( 124 | data=df.drop(columns=[df.columns[label_column]]), 125 | label=df[df.columns[label_column]], 126 | ) 127 | 128 | booster_params = booster_params or {} 129 | booster_params.setdefault('objective', objective) 130 | booster_params.setdefault('booster', booster) 131 | booster_params.setdefault('learning_rate', learning_rate) 132 | booster_params.setdefault('min_split_loss', min_split_loss) 133 | booster_params.setdefault('max_depth', max_depth) 134 | 135 | starting_model = None 136 | if starting_model_path: 137 | starting_model = xgboost.Booster(model_file=starting_model_path) 138 | 139 | model = xgboost.train( 140 | params=booster_params, 141 | dtrain=training_data, 142 | num_boost_round=num_iterations, 143 | xgb_model=starting_model 144 | ) 145 | 146 | # Saving the model in binary format 147 | model.save_model(model_path) 148 | 149 | model_config_str = model.save_config() 150 | with open(model_config_path, 'w') as model_config_file: 151 | model_config_file.write(model_config_str) 152 | 153 | import json 154 | import argparse 155 | _parser = argparse.ArgumentParser(prog='Xgboost train', description='Train an XGBoost model.\n\n Args:\n training_data_path: Path for the training data in CSV format.\n model_path: Output path for the trained model in binary XGBoost format.\n model_config_path: Output path for the internal parameter configuration of Booster as a JSON string.\n starting_model_path: Path for the 
existing trained model to start from.\n label_column: Column containing the label data.\n num_boost_rounds: Number of boosting iterations.\n booster_params: Parameters for the booster. See https://xgboost.readthedocs.io/en/latest/parameter.html\n objective: The learning task and the corresponding learning objective.\n See https://xgboost.readthedocs.io/en/latest/parameter.html#learning-task-parameters\n The most common values are:\n "reg:squarederror" - Regression with squared loss (default).\n "reg:logistic" - Logistic regression.\n "binary:logistic" - Logistic regression for binary classification, output probability.\n "binary:logitraw" - Logistic regression for binary classification, output score before logistic transformation\n "rank:pairwise" - Use LambdaMART to perform pairwise ranking where the pairwise loss is minimized\n "rank:ndcg" - Use LambdaMART to perform list-wise ranking where Normalized Discounted Cumulative Gain (NDCG) is maximized\n\n Annotations:\n author: Alexey Volkov ') 156 | _parser.add_argument("--training-data", dest="training_data_path", type=str, required=True, default=argparse.SUPPRESS) 157 | _parser.add_argument("--starting-model", dest="starting_model_path", type=str, required=False, default=argparse.SUPPRESS) 158 | _parser.add_argument("--label-column", dest="label_column", type=int, required=False, default=argparse.SUPPRESS) 159 | _parser.add_argument("--num-iterations", dest="num_iterations", type=int, required=False, default=argparse.SUPPRESS) 160 | _parser.add_argument("--booster-params", dest="booster_params", type=json.loads, required=False, default=argparse.SUPPRESS) 161 | _parser.add_argument("--objective", dest="objective", type=str, required=False, default=argparse.SUPPRESS) 162 | _parser.add_argument("--booster", dest="booster", type=str, required=False, default=argparse.SUPPRESS) 163 | _parser.add_argument("--learning-rate", dest="learning_rate", type=float, required=False, default=argparse.SUPPRESS) 164 | 
_parser.add_argument("--min-split-loss", dest="min_split_loss", type=float, required=False, default=argparse.SUPPRESS) 165 | _parser.add_argument("--max-depth", dest="max_depth", type=int, required=False, default=argparse.SUPPRESS) 166 | _parser.add_argument("--model", dest="model_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS) 167 | _parser.add_argument("--model-config", dest="model_config_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS) 168 | _parsed_args = vars(_parser.parse_args()) 169 | 170 | _outputs = xgboost_train(**_parsed_args) 171 | args: 172 | - "--training-data" 173 | - "{{inputs.artifacts.training_data.path}}" 174 | - "--model" 175 | - "{{outputs.artifacts.model.path}}" 176 | - "--model-config" 177 | - "{{outputs.artifacts.model_config.path}}" 178 | - name: Xgboost-predict 179 | inputs: 180 | parameters: 181 | - name: label_column 182 | artifacts: 183 | - name: data 184 | path: /tmp/inputs/data/data 185 | - name: model 186 | path: /tmp/inputs/model/data 187 | outputs: 188 | parameters: [] 189 | artifacts: 190 | - name: predictions 191 | path: /tmp/outputs/predictions/data 192 | container: 193 | name: main 194 | image: python:3.7 195 | command: 196 | - sh 197 | - "-c" 198 | - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'xgboost==1.1.1' 'pandas==1.0.5' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'xgboost==1.1.1' 'pandas==1.0.5' --user) && "$0" "$@" 199 | - python3 200 | - "-u" 201 | - "-c" 202 | - | 203 | def _make_parent_dirs_and_return_path(file_path: str): 204 | import os 205 | os.makedirs(os.path.dirname(file_path), exist_ok=True) 206 | return file_path 207 | 208 | def xgboost_predict( 209 | data_path, # Also supports LibSVM 210 | model_path, 211 | predictions_path, 212 | label_column = None, 213 | ): 214 | '''Make predictions using a trained XGBoost model. 
215 | 216 | Args: 217 | data_path: Path for the feature data in CSV format. 218 | model_path: Path for the trained model in binary XGBoost format. 219 | predictions_path: Output path for the predictions. 220 | label_column: Column containing the label data. 221 | 222 | Annotations: 223 | author: Alexey Volkov 224 | ''' 225 | from pathlib import Path 226 | 227 | import numpy 228 | import pandas 229 | import xgboost 230 | 231 | df = pandas.read_csv( 232 | data_path, 233 | ) 234 | 235 | if label_column is not None: 236 | df = df.drop(columns=[df.columns[label_column]]) 237 | 238 | testing_data = xgboost.DMatrix( 239 | data=df, 240 | ) 241 | 242 | model = xgboost.Booster(model_file=model_path) 243 | 244 | predictions = model.predict(testing_data) 245 | 246 | Path(predictions_path).parent.mkdir(parents=True, exist_ok=True) 247 | numpy.savetxt(predictions_path, predictions) 248 | 249 | import argparse 250 | _parser = argparse.ArgumentParser(prog='Xgboost predict', description='Make predictions using a trained XGBoost model.\n\n Args:\n data_path: Path for the feature data in CSV format.\n model_path: Path for the trained model in binary XGBoost format.\n predictions_path: Output path for the predictions.\n label_column: Column containing the label data.\n\n Annotations:\n author: Alexey Volkov ') 251 | _parser.add_argument("--data", dest="data_path", type=str, required=True, default=argparse.SUPPRESS) 252 | _parser.add_argument("--model", dest="model_path", type=str, required=True, default=argparse.SUPPRESS) 253 | _parser.add_argument("--label-column", dest="label_column", type=int, required=False, default=argparse.SUPPRESS) 254 | _parser.add_argument("--predictions", dest="predictions_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS) 255 | _parsed_args = vars(_parser.parse_args()) 256 | 257 | _outputs = xgboost_predict(**_parsed_args) 258 | args: 259 | - "--data" 260 | - "{{inputs.artifacts.data.path}}" 261 | - "--model" 262 | - 
"{{inputs.artifacts.model.path}}" 263 | - "--label-column" 264 | - "{{inputs.parameters.label_column}}" 265 | - "--predictions" 266 | - "{{outputs.artifacts.predictions.path}}" 267 | - name: XGBoost-pipeline 268 | inputs: 269 | parameters: [] 270 | artifacts: [] 271 | outputs: 272 | artifacts: [] 273 | dag: 274 | tasks: 275 | - name: dataset 276 | template: Chicago-Taxi-Trips-dataset 277 | arguments: 278 | parameters: 279 | - name: Select 280 | value: tips,trip_seconds,trip_miles,pickup_community_area,dropoff_community_area,fare,tolls,extras,trip_total 281 | - name: Where 282 | value: trip_start_timestamp >= "2019-01-01" AND trip_start_timestamp < "2019-02-01" 283 | - name: Limit 284 | value: "1000" 285 | - name: Format 286 | value: csv 287 | artifacts: [] 288 | dependencies: [] 289 | - name: train 290 | template: Xgboost-train 291 | arguments: 292 | parameters: [] 293 | artifacts: 294 | - name: training_data 295 | from: "{{tasks.dataset.outputs.artifacts.Table}}" 296 | dependencies: 297 | - dataset 298 | - name: predict 299 | template: Xgboost-predict 300 | arguments: 301 | parameters: 302 | - name: label_column 303 | value: "0" 304 | artifacts: 305 | - name: data 306 | from: "{{tasks.dataset.outputs.artifacts.Table}}" 307 | - name: model 308 | from: "{{tasks.train.outputs.artifacts.model}}" 309 | dependencies: 310 | - dataset 311 | - train 312 | arguments: 313 | parameters: [] 314 | artifacts: [] 315 | -------------------------------------------------------------------------------- /src/compilers/GoogleCloudVertexAIPipelines/testData/Name_collision_pipeline/google_cloud_vertex_pipeline.json: -------------------------------------------------------------------------------- 1 | { 2 | "displayName": "Name collision test pipeline", 3 | "runtimeConfig": { 4 | "parameters": {}, 5 | "gcsOutputDirectory": "gs://some-bucket/" 6 | }, 7 | "pipelineSpec": { 8 | "pipelineInfo": { 9 | "name": "pipeline" 10 | }, 11 | "sdkVersion": "Cloud-Pipelines", 12 | "schemaVersion": 
"2.0.0", 13 | "deploymentSpec": { 14 | "executors": { 15 | "Foo bar": { 16 | "container": { 17 | "image": "alpine", 18 | "command": [ 19 | "sh", 20 | "-exc", 21 | "input_value=$0\ninput_path=$1\noutput_path=$2\nmkdir -p \"$(dirname \"$output_path\")\"\n\nresult=\"Hello Foo bar ${input_value} ${input_path} ${output_path}\"\necho \"$result\"\necho \"$result\" > \"$output_path\"\n", 22 | "{{$.inputs.parameters['Foo bar']}}", 23 | "{{$.inputs.artifacts['Foo bar'].path}}", 24 | "{{$.outputs.artifacts['Foo bar'].path}}" 25 | ] 26 | } 27 | }, 28 | "_make_artifact": { 29 | "container": { 30 | "image": "alpine", 31 | "command": [ 32 | "sh", 33 | "-ec", 34 | "mkdir -p \"$(dirname \"$1\")\"; printf \"%s\" \"$0\" > \"$1\"", 35 | "{{$.inputs.parameters['parameter']}}", 36 | "{{$.outputs.artifacts['artifact'].path}}" 37 | ] 38 | } 39 | }, 40 | "Foo bar 2": { 41 | "container": { 42 | "image": "alpine", 43 | "command": [ 44 | "sh", 45 | "-exc", 46 | "input_value=$0\ninput_path=$1\noutput_path=$2\nmkdir -p \"$(dirname \"$output_path\")\"\n\nresult=\"Hello Foo bar ${input_value} ${input_path} ${output_path}\"\necho \"$result\"\necho \"$result\" > \"$output_path\"\n", 47 | "{{$.inputs.parameters['Foo bar 2']}}", 48 | "{{$.inputs.artifacts['Foo bar 2'].path}}", 49 | "{{$.outputs.artifacts['Foo bar 2'].path}}" 50 | ] 51 | } 52 | } 53 | } 54 | }, 55 | "components": { 56 | "Foo bar": { 57 | "inputDefinitions": { 58 | "parameters": { 59 | "Foo bar": { 60 | "type": "STRING" 61 | } 62 | }, 63 | "artifacts": { 64 | "Foo bar": { 65 | "artifactType": { 66 | "schemaTitle": "system.Artifact" 67 | } 68 | } 69 | } 70 | }, 71 | "outputDefinitions": { 72 | "parameters": {}, 73 | "artifacts": { 74 | "Foo bar": { 75 | "artifactType": { 76 | "schemaTitle": "system.Artifact" 77 | } 78 | } 79 | } 80 | }, 81 | "executorLabel": "Foo bar" 82 | }, 83 | "_make_artifact": { 84 | "executorLabel": "_make_artifact", 85 | "inputDefinitions": { 86 | "parameters": { 87 | "parameter": { 88 | "type": "STRING" 89 | } 
90 | } 91 | }, 92 | "outputDefinitions": { 93 | "artifacts": { 94 | "artifact": { 95 | "artifactType": { 96 | "schemaTitle": "system.Artifact" 97 | } 98 | } 99 | } 100 | } 101 | }, 102 | "Foo bar 2": { 103 | "inputDefinitions": { 104 | "parameters": { 105 | "Foo bar 2": { 106 | "type": "STRING" 107 | } 108 | }, 109 | "artifacts": { 110 | "Foo bar 2": { 111 | "artifactType": { 112 | "schemaTitle": "system.Artifact" 113 | } 114 | } 115 | } 116 | }, 117 | "outputDefinitions": { 118 | "parameters": {}, 119 | "artifacts": { 120 | "Foo bar 2": { 121 | "artifactType": { 122 | "schemaTitle": "system.Artifact" 123 | } 124 | } 125 | } 126 | }, 127 | "executorLabel": "Foo bar 2" 128 | } 129 | }, 130 | "root": { 131 | "inputDefinitions": { 132 | "parameters": { 133 | "Foo bar": { 134 | "type": "STRING" 135 | } 136 | }, 137 | "artifacts": {} 138 | }, 139 | "outputDefinitions": { 140 | "artifacts": { 141 | "Foo bar": { 142 | "artifactType": { 143 | "schemaTitle": "system.Artifact" 144 | } 145 | } 146 | } 147 | }, 148 | "dag": { 149 | "tasks": { 150 | "Make artifact for Foo bar": { 151 | "componentRef": { 152 | "name": "_make_artifact" 153 | }, 154 | "taskInfo": { 155 | "name": "Make artifact" 156 | }, 157 | "inputs": { 158 | "parameters": { 159 | "parameter": { 160 | "componentInputParameter": "Foo bar" 161 | } 162 | } 163 | }, 164 | "cachingOptions": { 165 | "enableCache": true 166 | } 167 | }, 168 | "Foo bar": { 169 | "taskInfo": { 170 | "name": "Foo bar" 171 | }, 172 | "inputs": { 173 | "parameters": { 174 | "Foo bar": { 175 | "componentInputParameter": "Foo bar" 176 | } 177 | }, 178 | "artifacts": { 179 | "Foo bar": { 180 | "taskOutputArtifact": { 181 | "producerTask": "Make artifact for Foo bar", 182 | "outputArtifactKey": "artifact" 183 | } 184 | } 185 | } 186 | }, 187 | "cachingOptions": { 188 | "enableCache": true 189 | }, 190 | "componentRef": { 191 | "name": "Foo bar" 192 | } 193 | }, 194 | "Foo bar 2": { 195 | "taskInfo": { 196 | "name": "Foo bar" 197 | }, 198 | 
"inputs": { 199 | "parameters": { 200 | "Foo bar 2": { 201 | "componentInputParameter": "Foo bar" 202 | } 203 | }, 204 | "artifacts": { 205 | "Foo bar 2": { 206 | "taskOutputArtifact": { 207 | "producerTask": "Make artifact for Foo bar", 208 | "outputArtifactKey": "artifact" 209 | } 210 | } 211 | } 212 | }, 213 | "cachingOptions": { 214 | "enableCache": true 215 | }, 216 | "componentRef": { 217 | "name": "Foo bar 2" 218 | } 219 | } 220 | }, 221 | "outputs": { 222 | "artifacts": { 223 | "Foo bar": { 224 | "artifactSelectors": [ 225 | { 226 | "producerSubtask": "Foo bar", 227 | "outputArtifactKey": "Foo bar" 228 | } 229 | ] 230 | } 231 | } 232 | } 233 | } 234 | } 235 | } 236 | } -------------------------------------------------------------------------------- /src/compilers/GoogleCloudVertexAIPipelines/vertexAiCompiler.test.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * @license 3 | * Copyright 2021 Alexey Volkov 4 | * SPDX-License-Identifier: Apache-2.0 5 | * @author Alexey Volkov 6 | * @copyright 2021 Alexey Volkov 7 | */ 8 | 9 | import fs from "fs"; 10 | import yaml from "js-yaml"; 11 | import path from "path"; 12 | import { ComponentSpec } from "../../componentSpec"; 13 | import { buildVertexPipelineJobFromGraphComponent } from "./vertexAiCompiler"; 14 | 15 | test("buildVertexPipelineJobFromGraphComponent compiles Data_passing_pipeline", () => { 16 | const sourcePath = path.resolve( 17 | __dirname, 18 | "../testData/Data_passing_pipeline/pipeline.component.yaml" 19 | ); 20 | const expectedPath = path.resolve( 21 | __dirname, 22 | "./testData/Data_passing_pipeline/google_cloud_vertex_pipeline.json" 23 | ); 24 | const pipelineText = fs.readFileSync(sourcePath).toString(); 25 | const pipelineSpec = yaml.load(pipelineText) as ComponentSpec; 26 | const actualResult = buildVertexPipelineJobFromGraphComponent( 27 | pipelineSpec, 28 | "gs://some-bucket/", 29 | new Map( 30 | Object.entries({ 31 | anything_param: 
"anything_param", 32 | something_param: "something_param", 33 | string_param: "string_param_override", 34 | }) 35 | ) 36 | ); 37 | if (fs.existsSync(expectedPath)) { 38 | const expectedResultText = fs.readFileSync(expectedPath).toString(); 39 | const expectedResult = JSON.parse(expectedResultText); 40 | expect(actualResult).toEqual(expectedResult); 41 | } else { 42 | fs.writeFileSync(expectedPath, JSON.stringify(actualResult, undefined, 2)); 43 | fail(); 44 | } 45 | }); 46 | 47 | test("buildVertexPipelineJobFromGraphComponent compiles XGBoost_pipeline", () => { 48 | const sourcePath = path.resolve( 49 | __dirname, 50 | "../testData/XGBoost_pipeline/pipeline.component.yaml" 51 | ); 52 | const expectedPath = path.resolve( 53 | __dirname, 54 | "./testData/XGBoost_pipeline/google_cloud_vertex_pipeline.json" 55 | ); 56 | const pipelineText = fs.readFileSync(sourcePath).toString(); 57 | const pipelineSpec = yaml.load(pipelineText) as ComponentSpec; 58 | const actualResult = buildVertexPipelineJobFromGraphComponent( 59 | pipelineSpec, 60 | "gs://some-bucket/", 61 | new Map() 62 | ); 63 | if (fs.existsSync(expectedPath)) { 64 | const expectedResultText = fs.readFileSync(expectedPath).toString(); 65 | const expectedResult = JSON.parse(expectedResultText); 66 | expect(actualResult).toEqual(expectedResult); 67 | } else { 68 | fs.writeFileSync(expectedPath, JSON.stringify(actualResult, undefined, 2)); 69 | fail(); 70 | } 71 | }); 72 | 73 | test("buildVertexPipelineJobFromGraphComponent compiles Name_collision_pipeline", () => { 74 | const sourcePath = path.resolve( 75 | __dirname, 76 | "../testData/Name_collision_pipeline/pipeline.component.yaml" 77 | ); 78 | const expectedPath = path.resolve( 79 | __dirname, 80 | "./testData/Name_collision_pipeline/google_cloud_vertex_pipeline.json" 81 | ); 82 | const pipelineText = fs.readFileSync(sourcePath).toString(); 83 | const pipelineSpec = yaml.load(pipelineText) as ComponentSpec; 84 | const actualResult = 
buildVertexPipelineJobFromGraphComponent( 85 | pipelineSpec, 86 | "gs://some-bucket/", 87 | new Map() 88 | ); 89 | if (fs.existsSync(expectedPath)) { 90 | const expectedResultText = fs.readFileSync(expectedPath).toString(); 91 | const expectedResult = JSON.parse(expectedResultText); 92 | expect(actualResult).toEqual(expectedResult); 93 | } else { 94 | fs.writeFileSync(expectedPath, JSON.stringify(actualResult, undefined, 2)); 95 | fail(); 96 | } 97 | }); 98 | -------------------------------------------------------------------------------- /src/compilers/testData/Name_collision_pipeline/pipeline.component.yaml: -------------------------------------------------------------------------------- 1 | name: Name collision test pipeline 2 | metadata: 3 | annotations: 4 | sdk: https://cloud-pipelines.net/pipeline-editor/ 5 | inputs: 6 | - name: Foo bar 7 | annotations: 8 | editor.position: '{"x":40,"y":40,"width":150,"height":40}' 9 | outputs: 10 | - name: Foo bar 11 | annotations: 12 | editor.position: '{"x":40,"y":200,"width":150,"height":40}' 13 | implementation: 14 | graph: 15 | tasks: 16 | Foo bar: 17 | componentRef: 18 | spec: 19 | name: Foo bar 20 | inputs: 21 | - name: Foo bar 22 | outputs: 23 | - name: Foo bar 24 | implementation: 25 | container: 26 | image: alpine 27 | command: 28 | - sh 29 | - '-exc' 30 | - | 31 | input_value=$0 32 | input_path=$1 33 | output_path=$2 34 | mkdir -p "$(dirname "$output_path")" 35 | 36 | result="Hello Foo bar ${input_value} ${input_path} ${output_path}" 37 | echo "$result" 38 | echo "$result" > "$output_path" 39 | - inputValue: Foo bar 40 | - inputPath: Foo bar 41 | - outputPath: Foo bar 42 | arguments: 43 | Foo bar: 44 | graphInput: 45 | inputName: Foo bar 46 | annotations: 47 | editor.position: '{"x":40,"y":120,"width":180,"height":40}' 48 | Foo bar 2: 49 | componentRef: 50 | spec: 51 | name: Foo bar 52 | inputs: 53 | - name: Foo bar 2 54 | outputs: 55 | - name: Foo bar 2 56 | implementation: 57 | container: 58 | image: alpine 59 | 
command: 60 | - sh 61 | - '-exc' 62 | - | 63 | input_value=$0 64 | input_path=$1 65 | output_path=$2 66 | mkdir -p "$(dirname "$output_path")" 67 | 68 | result="Hello Foo bar ${input_value} ${input_path} ${output_path}" 69 | echo "$result" 70 | echo "$result" > "$output_path" 71 | - inputValue: Foo bar 2 72 | - inputPath: Foo bar 2 73 | - outputPath: Foo bar 2 74 | arguments: 75 | Foo bar 2: 76 | graphInput: 77 | inputName: Foo bar 78 | annotations: 79 | editor.position: '{"x":240,"y":120,"width":180,"height":40}' 80 | outputValues: 81 | Foo bar: 82 | taskOutput: 83 | taskId: Foo bar 84 | outputName: Foo bar 85 | -------------------------------------------------------------------------------- /src/componentSpec.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * @license 3 | * Copyright 2021 Alexey Volkov 4 | * SPDX-License-Identifier: Apache-2.0 5 | * @author Alexey Volkov 6 | * @copyright 2021 Alexey Volkov 7 | */ 8 | 9 | export type MySchema = ComponentSpec; 10 | export type TypeSpecType = 11 | | string 12 | | { 13 | [k: string]: TypeSpecType; 14 | }; 15 | export interface InputOutputSpec { 16 | name: string; 17 | type?: TypeSpecType; 18 | description?: string; 19 | annotations?: { 20 | [k: string]: unknown; 21 | }; 22 | } 23 | /** 24 | * Describes the component input specification 25 | */ 26 | export interface InputSpec extends InputOutputSpec { 27 | name: string; 28 | type?: TypeSpecType; 29 | description?: string; 30 | default?: string; 31 | optional?: boolean; 32 | annotations?: { 33 | [k: string]: unknown; 34 | }; 35 | } 36 | /** 37 | * Describes the component output specification 38 | */ 39 | export interface OutputSpec extends InputOutputSpec { 40 | name: string; 41 | type?: TypeSpecType; 42 | description?: string; 43 | annotations?: { 44 | [k: string]: unknown; 45 | }; 46 | } 47 | /** 48 | * Represents the command-line argument placeholder that will be replaced at run-time by the input argument value. 
49 | */ 50 | export interface InputValuePlaceholder { 51 | /** 52 | * Name of the input. 53 | */ 54 | inputValue: string; 55 | } 56 | /** 57 | * Represents the command-line argument placeholder that will be replaced at run-time by a local file path pointing to a file containing the input argument value. 58 | */ 59 | export interface InputPathPlaceholder { 60 | /** 61 | * Name of the input. 62 | */ 63 | inputPath: string; 64 | } 65 | /** 66 | * Represents the command-line argument placeholder that will be replaced at run-time by a local file path pointing to a file where the program should write its output data. 67 | */ 68 | export interface OutputPathPlaceholder { 69 | /** 70 | * Name of the output. 71 | */ 72 | outputPath: string; 73 | } 74 | export type StringOrPlaceholder = 75 | | string 76 | | InputValuePlaceholder 77 | | InputPathPlaceholder 78 | | OutputPathPlaceholder 79 | | ConcatPlaceholder 80 | | IfPlaceholder; 81 | /** 82 | * Represents the command-line argument placeholder that will be replaced at run-time by the concatenated values of its items. 83 | */ 84 | export interface ConcatPlaceholder { 85 | /** 86 | * Items to concatenate 87 | */ 88 | concat: StringOrPlaceholder[]; 89 | } 90 | /** 91 | * Represents the command-line argument placeholder that will be replaced at run-time by a boolean value specifying whether the caller has passed an argument for the specified optional input. 92 | */ 93 | export interface IsPresentPlaceholder { 94 | /** 95 | * Name of the input. 96 | */ 97 | isPresent: string; 98 | } 99 | export type IfConditionArgumentType = 100 | | IsPresentPlaceholder 101 | | boolean 102 | | string 103 | | InputValuePlaceholder; 104 | export type ListOfStringsOrPlaceholders = StringOrPlaceholder[]; 105 | /** 106 | * Represents the command-line argument placeholder that will be replaced at run-time by a boolean value specifying whether the caller has passed an argument for the specified optional input. 
107 | */ 108 | export interface IfPlaceholder { 109 | if: { 110 | cond: IfConditionArgumentType; 111 | then: ListOfStringsOrPlaceholders; 112 | else?: ListOfStringsOrPlaceholders; 113 | }; 114 | } 115 | export interface ContainerSpec { 116 | /** 117 | * Docker image name. 118 | */ 119 | image: string; 120 | /** 121 | * Entrypoint array. Not executed within a shell. The docker image's ENTRYPOINT is used if this is not provided. 122 | */ 123 | command?: StringOrPlaceholder[]; 124 | /** 125 | * Arguments to the entrypoint. The docker image's CMD is used if this is not provided. 126 | */ 127 | args?: StringOrPlaceholder[]; 128 | /** 129 | * List of environment variables to set in the container. 130 | */ 131 | env?: { 132 | [k: string]: StringOrPlaceholder; 133 | }; 134 | } 135 | /** 136 | * Represents the container component implementation. 137 | */ 138 | export interface ContainerImplementation { 139 | container: ContainerSpec; 140 | } 141 | export type ImplementationType = ContainerImplementation | GraphImplementation; 142 | export interface MetadataSpec { 143 | annotations?: { 144 | [k: string]: unknown; 145 | }; 146 | } 147 | /** 148 | * Component specification. Describes the metadata (name, description, source), the interface (inputs and outputs) and the implementation of the component. 149 | */ 150 | export interface ComponentSpec { 151 | name?: string; 152 | description?: string; 153 | inputs?: InputSpec[]; 154 | outputs?: OutputSpec[]; 155 | implementation: ImplementationType; 156 | metadata?: MetadataSpec; 157 | } 158 | /** 159 | * Component reference. Contains information that can be used to locate and load a component by name, digest or URL 160 | */ 161 | export interface ComponentReference { 162 | name?: string; 163 | digest?: string; 164 | tag?: string; 165 | url?: string; 166 | spec?: ComponentSpec; 167 | // Holds unparsed component text. An alternative to spec. 
168 | // url -> data -> text -> spec 169 | // This simplifies code due to ability to preserve the original component data corresponding to the hash digest. 170 | // I debated whether to use data (binary) or text here and decided on text. 171 | // ComponentSpec is usually serialized to YAML or JSON formats that are text based 172 | // and have better support for text compared to binary data. 173 | // Not yet in the standard. 174 | text?: string; 175 | } 176 | /** 177 | * Represents the component argument value that comes from the graph component input. 178 | */ 179 | export interface GraphInputArgument { 180 | /** 181 | * References the input of the graph/pipeline. 182 | */ 183 | graphInput: { 184 | inputName: string; 185 | type?: TypeSpecType; 186 | }; 187 | } 188 | /** 189 | * Represents the component argument value that comes from the output of a sibling task. 190 | */ 191 | export interface TaskOutputArgument { 192 | /** 193 | * References the output of a sibling task. 194 | */ 195 | taskOutput: { 196 | taskId: string; 197 | outputName: string; 198 | type?: TypeSpecType; 199 | }; 200 | } 201 | export type ArgumentType = string | GraphInputArgument | TaskOutputArgument; 202 | /** 203 | * Pair of operands for a binary operation. 204 | */ 205 | export interface TwoArgumentOperands { 206 | op1: ArgumentType; 207 | op2: ArgumentType; 208 | } 209 | /** 210 | * Pair of operands for a binary logical operation. 211 | */ 212 | export interface TwoLogicalOperands { 213 | op1: PredicateType; 214 | op2: PredicateType; 215 | } 216 | /** 217 | * Optional configuration that specifies how the task should be executed. Can be used to set some platform-specific options. 
218 | */ 219 | export type PredicateType = 220 | | { 221 | "==": TwoArgumentOperands; 222 | } 223 | | { 224 | "!=": TwoArgumentOperands; 225 | } 226 | | { 227 | ">": TwoArgumentOperands; 228 | } 229 | | { 230 | ">=": TwoArgumentOperands; 231 | } 232 | | { 233 | "<": TwoArgumentOperands; 234 | } 235 | | { 236 | "<=": TwoArgumentOperands; 237 | } 238 | | { 239 | and: TwoLogicalOperands; 240 | } 241 | | { 242 | or: TwoLogicalOperands; 243 | } 244 | | { 245 | not: PredicateType; 246 | }; 247 | 248 | /** 249 | * Optional configuration that specifies how the task should be retried if it fails. 250 | */ 251 | export interface RetryStrategySpec { 252 | maxRetries?: number; 253 | } 254 | /** 255 | * Optional configuration that specifies how the task execution may be skipped if the output data exist in cache. 256 | */ 257 | export interface CachingStrategySpec { 258 | maxCacheStaleness?: string; 259 | } 260 | 261 | export interface ExecutionOptionsSpec { 262 | retryStrategy?: RetryStrategySpec; 263 | cachingStrategy?: CachingStrategySpec; 264 | } 265 | /** 266 | * 'Task specification. Task is a configured component - a component supplied with arguments and other applied configuration changes. 267 | */ 268 | export interface TaskSpec { 269 | componentRef: ComponentReference; 270 | arguments?: { 271 | [k: string]: ArgumentType; 272 | }; 273 | isEnabled?: PredicateType; 274 | executionOptions?: ExecutionOptionsSpec; 275 | annotations?: { 276 | [k: string]: unknown; 277 | }; 278 | } 279 | /** 280 | * Describes the graph component implementation. It represents a graph of component tasks connected to the upstream sources of data using the argument specifications. It also describes the sources of graph output values. 281 | */ 282 | export interface GraphSpec { 283 | tasks: { 284 | [k: string]: TaskSpec; 285 | }; 286 | outputValues?: { 287 | [k: string]: TaskOutputArgument; 288 | }; 289 | } 290 | /** 291 | * Represents the graph component implementation. 
292 | */ 293 | export interface GraphImplementation { 294 | graph: GraphSpec; 295 | } 296 | 297 | // Type guards 298 | export const isValidComponentSpec = (obj: any): obj is ComponentSpec => 299 | typeof obj === "object" && "implementation" in obj; 300 | 301 | export const isContainerImplementation = ( 302 | implementation: ImplementationType 303 | ): implementation is ContainerImplementation => "container" in implementation; 304 | 305 | export const isGraphImplementation = ( 306 | implementation: ImplementationType 307 | ): implementation is GraphImplementation => "graph" in implementation; 308 | -------------------------------------------------------------------------------- /src/index.css: -------------------------------------------------------------------------------- 1 | body { 2 | margin: 0; 3 | font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', 'Roboto', 'Oxygen', 4 | 'Ubuntu', 'Cantarell', 'Fira Sans', 'Droid Sans', 'Helvetica Neue', 5 | sans-serif; 6 | -webkit-font-smoothing: antialiased; 7 | -moz-osx-font-smoothing: grayscale; 8 | } 9 | 10 | code { 11 | font-family: source-code-pro, Menlo, Monaco, Consolas, 'Courier New', 12 | monospace; 13 | } 14 | 15 | html, 16 | body, 17 | #root { 18 | margin: 0; 19 | height: 100%; 20 | } 21 | -------------------------------------------------------------------------------- /src/index.tsx: -------------------------------------------------------------------------------- 1 | /** 2 | * @license 3 | * Copyright 2021 Alexey Volkov 4 | * SPDX-License-Identifier: Apache-2.0 5 | * @author Alexey Volkov 6 | * @copyright 2021 Alexey Volkov 7 | */ 8 | 9 | import React from 'react'; 10 | import ReactDOM from 'react-dom'; 11 | import './index.css'; 12 | import App from './App'; 13 | import * as serviceWorkerRegistration from './serviceWorkerRegistration'; 14 | import reportWebVitals from './reportWebVitals'; 15 | //import { migrateUserData } from "./userDataMigration" 16 | 17 | // Migration is now disabled. 
18 | // After 2 months of auto-migration, the redirect from cloud-pipelines.github.io 19 | // to cloud-pipelines.net was changed to hard redirect. 20 | // Accessing the data stored for cloud-pipelines.github.io is now impossible. 21 | // try { 22 | // migrateUserData(); 23 | // } catch (err) { 24 | // console.error(err); 25 | // } 26 | 27 | ReactDOM.render( 28 | 29 | 30 | , 31 | document.getElementById('root') 32 | ); 33 | 34 | // If you want your app to work offline and load faster, you can change 35 | // unregister() to register() below. Note this comes with some pitfalls. 36 | // Learn more about service workers: https://cra.link/PWA 37 | serviceWorkerRegistration.register(); 38 | 39 | // If you want to start measuring performance in your app, pass a function 40 | // to log results (for example: reportWebVitals(console.log)) 41 | // or send to an analytics endpoint. Learn more: https://bit.ly/CRA-vitals 42 | reportWebVitals(); 43 | -------------------------------------------------------------------------------- /src/logo.svg: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /src/react-app-env.d.ts: -------------------------------------------------------------------------------- 1 | /// 2 | -------------------------------------------------------------------------------- /src/reportWebVitals.ts: -------------------------------------------------------------------------------- 1 | import { ReportHandler } from 'web-vitals'; 2 | 3 | const reportWebVitals = (onPerfEntry?: ReportHandler) => { 4 | if (onPerfEntry && onPerfEntry instanceof Function) { 5 | import('web-vitals').then(({ getCLS, getFID, getFCP, getLCP, getTTFB }) => { 6 | getCLS(onPerfEntry); 7 | getFID(onPerfEntry); 8 | getFCP(onPerfEntry); 9 | getLCP(onPerfEntry); 10 | getTTFB(onPerfEntry); 11 | }); 12 | } 13 | }; 14 | 15 | export default reportWebVitals; 16 | 
-------------------------------------------------------------------------------- /src/service-worker.ts: -------------------------------------------------------------------------------- 1 | /// 2 | /* eslint-disable no-restricted-globals */ 3 | 4 | // This service worker can be customized! 5 | // See https://developers.google.com/web/tools/workbox/modules 6 | // for the list of available Workbox modules, or add any other 7 | // code you'd like. 8 | // You can also remove this file if you'd prefer not to use a 9 | // service worker, and the Workbox build step will be skipped. 10 | 11 | import { clientsClaim } from 'workbox-core'; 12 | import { ExpirationPlugin } from 'workbox-expiration'; 13 | import { precacheAndRoute, createHandlerBoundToURL } from 'workbox-precaching'; 14 | import { registerRoute } from 'workbox-routing'; 15 | import { StaleWhileRevalidate } from 'workbox-strategies'; 16 | 17 | declare const self: ServiceWorkerGlobalScope; 18 | 19 | clientsClaim(); 20 | 21 | // Precache all of the assets generated by your build process. 22 | // Their URLs are injected into the manifest variable below. 23 | // This variable must be present somewhere in your service worker file, 24 | // even if you decide not to use precaching. See https://cra.link/PWA 25 | precacheAndRoute(self.__WB_MANIFEST); 26 | 27 | // Set up App Shell-style routing, so that all navigation requests 28 | // are fulfilled with your index.html shell. Learn more at 29 | // https://developers.google.com/web/fundamentals/architecture/app-shell 30 | const fileExtensionRegexp = new RegExp('/[^/?]+\\.[^/]+$'); 31 | registerRoute( 32 | // Return false to exempt requests from being fulfilled by index.html. 33 | ({ request, url }: { request: Request; url: URL }) => { 34 | // If this isn't a navigation, skip. 35 | if (request.mode !== 'navigate') { 36 | return false; 37 | } 38 | 39 | // If this is a URL that starts with /_, skip. 
40 | if (url.pathname.startsWith('/_')) { 41 | return false; 42 | } 43 | 44 | // If this looks like a URL for a resource, because it contains 45 | // a file extension, skip. 46 | if (url.pathname.match(fileExtensionRegexp)) { 47 | return false; 48 | } 49 | 50 | // Return true to signal that we want to use the handler. 51 | return true; 52 | }, 53 | createHandlerBoundToURL(process.env.PUBLIC_URL + '/index.html') 54 | ); 55 | 56 | // An example runtime caching route for requests that aren't handled by the 57 | // precache, in this case same-origin .png requests like those from in public/ 58 | registerRoute( 59 | // Add in any other file extensions or routing criteria as needed. 60 | ({ url }) => url.origin === self.location.origin && url.pathname.endsWith('.png'), 61 | // Customize this strategy as needed, e.g., by changing to CacheFirst. 62 | new StaleWhileRevalidate({ 63 | cacheName: 'images', 64 | plugins: [ 65 | // Ensure that once this runtime cache reaches a maximum size the 66 | // least-recently used images are removed. 67 | new ExpirationPlugin({ maxEntries: 50 }), 68 | ], 69 | }) 70 | ); 71 | 72 | // This allows the web app to trigger skipWaiting via 73 | // registration.waiting.postMessage({type: 'SKIP_WAITING'}) 74 | self.addEventListener('message', (event) => { 75 | if (event.data && event.data.type === 'SKIP_WAITING') { 76 | self.skipWaiting(); 77 | } 78 | }); 79 | 80 | // Any other custom service worker logic can go here. 81 | -------------------------------------------------------------------------------- /src/serviceWorkerRegistration.ts: -------------------------------------------------------------------------------- 1 | // This optional code is used to register a service worker. 2 | // register() is not called by default. 3 | 4 | // This lets the app load faster on subsequent visits in production, and gives 5 | // it offline capabilities. 
However, it also means that developers (and users) 6 | // will only see deployed updates on subsequent visits to a page, after all the 7 | // existing tabs open on the page have been closed, since previously cached 8 | // resources are updated in the background. 9 | 10 | // To learn more about the benefits of this model and instructions on how to 11 | // opt-in, read https://cra.link/PWA 12 | 13 | const isLocalhost = Boolean( 14 | window.location.hostname === 'localhost' || 15 | // [::1] is the IPv6 localhost address. 16 | window.location.hostname === '[::1]' || 17 | // 127.0.0.0/8 are considered localhost for IPv4. 18 | window.location.hostname.match(/^127(?:\.(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)){3}$/) 19 | ); 20 | 21 | type Config = { 22 | onSuccess?: (registration: ServiceWorkerRegistration) => void; 23 | onUpdate?: (registration: ServiceWorkerRegistration) => void; 24 | }; 25 | 26 | export function register(config?: Config) { 27 | if (process.env.NODE_ENV === 'production' && 'serviceWorker' in navigator) { 28 | // The URL constructor is available in all browsers that support SW. 29 | const publicUrl = new URL(process.env.PUBLIC_URL, window.location.href); 30 | if (publicUrl.origin !== window.location.origin) { 31 | // Our service worker won't work if PUBLIC_URL is on a different origin 32 | // from what our page is served on. This might happen if a CDN is used to 33 | // serve assets; see https://github.com/facebook/create-react-app/issues/2374 34 | return; 35 | } 36 | 37 | window.addEventListener('load', () => { 38 | const swUrl = `${process.env.PUBLIC_URL}/service-worker.js`; 39 | if (isLocalhost) { 40 | // This is running on localhost. Let's check if a service worker still exists or not. 41 | checkValidServiceWorker(swUrl, config); 42 | 43 | // Add some additional logging to localhost, pointing developers to the 44 | // service worker/PWA documentation. 
45 | navigator.serviceWorker.ready.then(() => { 46 | console.log( 47 | 'This web app is being served cache-first by a service ' + 48 | 'worker. To learn more, visit https://cra.link/PWA' 49 | ); 50 | }); 51 | } else { 52 | // Is not localhost. Just register service worker 53 | registerValidSW(swUrl, config); 54 | } 55 | }); 56 | } 57 | } 58 | 59 | function registerValidSW(swUrl: string, config?: Config) { 60 | navigator.serviceWorker 61 | .register(swUrl) 62 | .then((registration) => { 63 | registration.onupdatefound = () => { 64 | const installingWorker = registration.installing; 65 | if (installingWorker == null) { 66 | return; 67 | } 68 | installingWorker.onstatechange = () => { 69 | if (installingWorker.state === 'installed') { 70 | if (navigator.serviceWorker.controller) { 71 | // At this point, the updated precached content has been fetched, 72 | // but the previous service worker will still serve the older 73 | // content until all client tabs are closed. 74 | console.log( 75 | 'New content is available and will be used when all ' + 76 | 'tabs for this page are closed. See https://cra.link/PWA.' 77 | ); 78 | 79 | // Execute callback 80 | if (config && config.onUpdate) { 81 | config.onUpdate(registration); 82 | } 83 | } else { 84 | // At this point, everything has been precached. 85 | // It's the perfect time to display a 86 | // "Content is cached for offline use." message. 87 | console.log('Content is cached for offline use.'); 88 | 89 | // Execute callback 90 | if (config && config.onSuccess) { 91 | config.onSuccess(registration); 92 | } 93 | } 94 | } 95 | }; 96 | }; 97 | }) 98 | .catch((error) => { 99 | console.error('Error during service worker registration:', error); 100 | }); 101 | } 102 | 103 | function checkValidServiceWorker(swUrl: string, config?: Config) { 104 | // Check if the service worker can be found. If it can't reload the page. 
105 | fetch(swUrl, { 106 | headers: { 'Service-Worker': 'script' }, 107 | }) 108 | .then((response) => { 109 | // Ensure service worker exists, and that we really are getting a JS file. 110 | const contentType = response.headers.get('content-type'); 111 | if ( 112 | response.status === 404 || 113 | (contentType != null && contentType.indexOf('javascript') === -1) 114 | ) { 115 | // No service worker found. Probably a different app. Reload the page. 116 | navigator.serviceWorker.ready.then((registration) => { 117 | registration.unregister().then(() => { 118 | window.location.reload(); 119 | }); 120 | }); 121 | } else { 122 | // Service worker found. Proceed as normal. 123 | registerValidSW(swUrl, config); 124 | } 125 | }) 126 | .catch(() => { 127 | console.log('No internet connection found. App is running in offline mode.'); 128 | }); 129 | } 130 | 131 | export function unregister() { 132 | if ('serviceWorker' in navigator) { 133 | navigator.serviceWorker.ready 134 | .then((registration) => { 135 | registration.unregister(); 136 | }) 137 | .catch((error) => { 138 | console.error(error.message); 139 | }); 140 | } 141 | } 142 | -------------------------------------------------------------------------------- /src/setupTests.ts: -------------------------------------------------------------------------------- 1 | // jest-dom adds custom jest matchers for asserting on DOM nodes. 
2 | // allows you to do things like: 3 | // expect(element).toHaveTextContent(/react/i) 4 | // learn more: https://github.com/testing-library/jest-dom 5 | import '@testing-library/jest-dom'; 6 | -------------------------------------------------------------------------------- /src/userDataMigration.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * @license 3 | * Copyright 2021 Alexey Volkov 4 | * SPDX-License-Identifier: Apache-2.0 5 | * @author Alexey Volkov 6 | * @copyright 2021 Alexey Volkov 7 | */ 8 | 9 | import localForage from "localforage"; 10 | import { 11 | ComponentFileEntry, 12 | getAllComponentFilesFromList, 13 | unsafeWriteFilesToList, 14 | } from "./componentStore"; 15 | 16 | const OLD_SITE_ORIGIN = "https://cloud-pipelines.github.io"; 17 | const NEW_SITE_ORIGIN = "https://cloud-pipelines.net"; 18 | const NEW_SITE_URL = new URL("pipeline-editor", NEW_SITE_ORIGIN).toString(); 19 | const VALID_MIGRATION_TARGET_ORIGINS = [ 20 | OLD_SITE_ORIGIN, 21 | NEW_SITE_ORIGIN, 22 | "http://localhost:3000", 23 | ]; 24 | const SEND_MIGRATION_DATA_URL_PARAM = "send_migration_data"; 25 | 26 | const MIGRATION_SOURCE_ORIGIN = OLD_SITE_ORIGIN; 27 | 28 | const DB_NAME = "components"; 29 | const COMPONENT_STORE_SETTINGS_DB_TABLE_NAME = "component_store_settings"; 30 | const MIGRATED_DATA_FROM_OLD_DOMAIN_SETTING_KEY = 31 | "Migrated components and pipelines from " + MIGRATION_SOURCE_ORIGIN; 32 | 33 | interface Message { 34 | messageType: string; 35 | } 36 | 37 | interface UserFilesMessage extends Message { 38 | messageType: "FileMigrationMessage"; 39 | pipelineFiles: ComponentFileEntry[]; 40 | componentFiles: ComponentFileEntry[]; 41 | } 42 | 43 | const isMessage = (obj: any): obj is Message => 44 | typeof obj === "object" && "messageType" in obj; 45 | 46 | const isFileMigrationMessage = (obj: any): obj is UserFilesMessage => 47 | isMessage(obj) && obj.messageType === "FileMigrationMessage"; 48 | 49 | export const 
migrateUserData = async () => { 50 | const urlParams = new URL(document.location.href).searchParams; 51 | 52 | // Send the user data (pipelines and components) to the new website if requested. 53 | // Note: window.parent.location.origin or window.parent.origin cannot be accessed due to CORS. 54 | // DOMException: Blocked a frame with origin