├── .gitignore ├── LICENSE ├── README.md ├── ds2ai ├── ClickAI.py ├── DS2dataset.py ├── LabelingAI.py ├── SkyhubAI.py ├── __init__.py └── util.py ├── setup.cfg └── setup.py /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | pip-wheel-metadata/ 24 | share/python-wheels/ 25 | *.egg-info/ 26 | .installed.cfg 27 | *.egg 28 | MANIFEST 29 | 30 | # PyInstaller 31 | # Usually these files are written by a python script from a template 32 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 33 | *.manifest 34 | *.spec 35 | 36 | # Installer logs 37 | pip-log.txt 38 | pip-delete-this-directory.txt 39 | 40 | # Unit test / coverage reports 41 | htmlcov/ 42 | .tox/ 43 | .nox/ 44 | .coverage 45 | .coverage.* 46 | .cache 47 | nosetests.xml 48 | coverage.xml 49 | *.cover 50 | *.py,cover 51 | .hypothesis/ 52 | .pytest_cache/ 53 | 54 | # Translations 55 | *.mo 56 | *.pot 57 | 58 | # Django stuff: 59 | *.log 60 | local_settings.py 61 | db.sqlite3 62 | db.sqlite3-journal 63 | 64 | # Flask stuff: 65 | instance/ 66 | .webassets-cache 67 | 68 | # Scrapy stuff: 69 | .scrapy 70 | 71 | # Sphinx documentation 72 | docs/_build/ 73 | 74 | # PyBuilder 75 | target/ 76 | 77 | # Jupyter Notebook 78 | .ipynb_checkpoints 79 | 80 | # IPython 81 | profile_default/ 82 | ipython_config.py 83 | 84 | # pyenv 85 | .python-version 86 | 87 | # pipenv 88 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 89 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 90 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 91 | # install all needed dependencies. 92 | #Pipfile.lock 93 | 94 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow 95 | __pypackages__/ 96 | 97 | # Celery stuff 98 | celerybeat-schedule 99 | celerybeat.pid 100 | 101 | # SageMath parsed files 102 | *.sage.py 103 | 104 | # Environments 105 | .env 106 | .venv 107 | env/ 108 | venv/ 109 | ENV/ 110 | env.bak/ 111 | venv.bak/ 112 | 113 | # Spyder project settings 114 | .spyderproject 115 | .spyproject 116 | 117 | # Rope project settings 118 | .ropeproject 119 | 120 | # mkdocs documentation 121 | /site 122 | 123 | # mypy 124 | .mypy_cache/ 125 | .dmypy.json 126 | dmypy.json 127 | 128 | # Pyre type checker 129 | .pyre/ 130 | 131 | .idea/ 132 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. 
For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. 
Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 
134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 
193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | 2 | The official [DS2.ai](https://github.com/DS2BRAIN/ds2) SDK for Python. 3 | Documentation can be found on [SDK guide](https://docs.ds2.ai/) 4 | -------------------------------------------------------------------------------- /ds2ai/ClickAI.py: -------------------------------------------------------------------------------- 1 | import time 2 | 3 | from .DS2dataset import Dataconnector 4 | from .SkyhubAI import Opsproject 5 | from .util import Util 6 | import requests as req 7 | import json 8 | 9 | class Project(object): 10 | 11 | def __init__(self, info, user, url=None): 12 | if not isinstance(info, dict): 13 | raise Exception(str(info)) 14 | if info.get('error'): 15 | raise Exception(info['message_en']) 16 | self.url = url if url else Util().url 17 | self.__dict__.update(info) 18 | self.user = user 19 | self.user_token = self.user.token 20 | self.id = info['id'] 21 | self.status = info['status'] 22 | self.dataconnectorsList = info['dataconnectorsList'] 23 | if isinstance(self.dataconnectorsList[0], int): 24 | main_dataconnector = self.get_dataconnector(self.dataconnectorsList[0]) 25 | else: 26 | main_dataconnector = Dataconnector(self.dataconnectorsList[0], self.user, url=self.url) 27 | models = [] 28 | for model in info.get("models", []): 29 | models.append(Model(model, user, project=self, main_dataconnector=main_dataconnector)) 30 | self.models = models 31 | self.jupyter_servers = [] 32 | for jupyter_server in info.get('jupyterServers', []): 33 | self.jupyter_servers.append(Jupyterserver(jupyter_server, user)) 34 | 35 | def __repr__(self): 36 | return str(self.id) 37 | 38 | def get_dataconnector(self, dataconnector_id): 39 | return Dataconnector(req.get(f"{self.url}/dataconnector/{dataconnector_id}/", 40 | params={"token": self.user_token}).json(), self.user, url=self.url) 41 | 42 | def refresh(self): 43 | return Project(req.get(f"{self.url}/projects/{self.id}/", 44 | params={"token": self.user_token}).json(), self.user, url=self.url) 45 | 46 | def delete(self): 47 | req.delete(f"{self.url}/projects/{self.id}/",params={"token": self.user_token}) 48 | 49 | def train(self, training_method, value_for_predict, option="speed", algorithm=None, hyper_params={}, **kwargs): 50 | 51 | if self.status != 0: 52 | raise("The training is already started.") 53 | data = { 54 | 'trainingMethod': training_method, 55 | 'valueForPredict': value_for_predict, 56 | 'algorithm': algorithm, 57 | 'hyper_params': hyper_params, 58 | 'option': option, 59 | } 60 | data.update(kwargs) 61 | 62 | return Project(req.post(f"{self.url}/train/{self.id}/", 63 | params={"token": self.user_token}, 64 | data=json.dumps(data)).json(), self.user, url=self.url) 65 | 66 | def stop(self): 67 | 68 | if self.status == 0: 69 | raise("The training is not started.") 70 | 71 | return Project(req.put(f"{self.url}/projects/{self.id}/",params={"token": self.user_token}, 72 | data=json.dumps({ 73 | "status": 0, 74 | 
"statusText": "stopped", 75 | })).json(), self.user, url=self.url) 76 | 77 | def get_magic_code(self, training_method, value_for_predict, file_path="output.ipynb"): 78 | 79 | response = req.post(f"{self.url}/get-magic-code/", 80 | params={"token": self.user_token}, 81 | data=json.dumps({ 82 | 'project': self.id, 83 | 'trainingMethod': training_method, 84 | 'valueForPredict': value_for_predict, 85 | })) 86 | 87 | if file_path: 88 | with open(file_path, 'w') as output: 89 | text = response.json() 90 | if isinstance(text, dict): 91 | text = json.dumps(text) 92 | output.write(text) 93 | 94 | print(response.json()) 95 | 96 | class Model(object): 97 | utilClass = Util() 98 | 99 | def __init__(self, info, user, project=None, main_dataconnector=None, url=None): 100 | if not isinstance(info, dict): 101 | raise Exception(str(info)) 102 | if info.get('error'): 103 | raise Exception(info['message_en']) 104 | self.__dict__.update(info) 105 | self.id = info['id'] 106 | self.token = info['token'] 107 | self.url = url if url else Util().url 108 | self.user = user 109 | self.user_token = self.user.token 110 | if info.get('trainingMethod'): 111 | self.app_url = f"https://ds2.ai/instant_use.html/?modeltoken={self.token}&modelid={self.id}" 112 | else: 113 | self.app_url = None 114 | 115 | if not project: 116 | self.project = self.get_project(info['project']) 117 | if not main_dataconnector: 118 | self.main_dataconnector = Dataconnector(self.project.dataconnectorsList[0], self.user, url=self.url) 119 | 120 | def __repr__(self): 121 | return str(self.id) 122 | 123 | def get_project(self, project_id): 124 | return Project(req.get(f"{self.url}/projects/{project_id}/", params={"token": self.user_token}).json(), self.user, url=self.url) 125 | 126 | def get_dataconnector(self, dataconnector_id): 127 | return Dataconnector(req.get(f"{self.url}/dataconnector/{dataconnector_id}/", 128 | params={"token": self.user_token}).json(), self.user, url=self.url) 129 | 130 | def predict(self, data, return_type="info"): 131 | 132 | if self.status != 100: 133 | raise("This model is not ready yet.") 134 | 135 | request_data = { 136 | "apptoken": self.user.appTokenCode, 137 | "modelid": self.id, 138 | "modeltoken": self.token, 139 | "userId": self.user.id, 140 | } 141 | predict_url = { 142 | "predict": f"{self.url}/predict/{self.user.id}/", 143 | "predictimagebyurl": f"{self.url}/predictimagebyurl/{self.user.id}/", 144 | "predictimagebyurlinfo": f"{self.url}/predictimagebyurlinfo/{self.user.id}/", 145 | "predictall": f"{self.url}/predictall/{self.user.id}/", 146 | "predictimage": f"{self.url}/predictimage/{self.user.id}/", 147 | "predictimageinfo": f"{self.url}/predictimageinfo/{self.user.id}/", 148 | } 149 | 150 | if type(data) == dict: 151 | data_processed = {} 152 | for key, item in data.items(): 153 | if f"__{self.main_dataconnector.dataconnectorName}" not in key: 154 | data_processed[f"{key}__{self.main_dataconnector.dataconnectorName}"] = item 155 | else: 156 | data_processed[key] = item 157 | data = data_processed 158 | 159 | 160 | return self.utilClass.predict(request_data, predict_url, data, return_type=return_type) 161 | 162 | 163 | def delete(self): 164 | req.delete(f"{self.url}/models/{self.id}/",params={"token": self.user_token}) 165 | 166 | def get_app_url(self): 167 | if self.app_url: 168 | return self.app_url 169 | else: 170 | print("Currently we don't support the app url for the loaded model.") 171 | 172 | def deploy(self, cloud_type="AWS", region="us-west-1", server_type="g4dn.xlarge", name=None): 173 | if not 
name: 174 | name = f"Ops project {str(round(time.time() * 10000000))}" 175 | 176 | if "AWS" != cloud_type: 177 | raise("Currently we support only AWS cloud for this SDK.") 178 | 179 | return Opsproject(req.post(f"{self.url}/opsprojects/", 180 | params={"token": self.user_token}, 181 | data=json.dumps({ 182 | 'projectName': name, 183 | 'region': region, 184 | 'serverType': server_type, 185 | 'modelId': self.id, 186 | })).json(), self.user, url=self.url) 187 | 188 | 189 | class Jupyterproject(object): 190 | def __init__(self, info, user, url=None): 191 | if not isinstance(info, dict): 192 | raise Exception(str(info)) 193 | if info.get('error'): 194 | raise Exception(info['message_en']) 195 | self.__dict__.update(info) 196 | self.id = info['id'] 197 | self.url = url if url else Util().url 198 | self.user = user 199 | self.user_token = self.user.token 200 | jupyterservers = [] 201 | if info.get('jupyterServers'): 202 | for jupyterserver_raw in info['jupyterServers']: 203 | jupyterservers.append(Jupyterserver(jupyterserver_raw, user)) 204 | self.jupyterservers = jupyterservers 205 | 206 | def __repr__(self): 207 | return str(self.id) 208 | serverType: str = None 209 | jupyterProjectId: int = None 210 | region: str = None 211 | 212 | def add_server(self, cloud_type="AWS", region="us-west-1", server_type="g4dn.xlarge"): 213 | 214 | if "AWS" != cloud_type: 215 | raise("Currently we support only AWS cloud for this SDK.") 216 | 217 | return Jupyterserver(req.post(f"{self.url}/jupyterservers/", 218 | params={"token": self.user_token}, 219 | data=json.dumps({ 220 | 'jupyterProjectId': self.id, 221 | 'serverType': server_type, 222 | 'region': region, 223 | })).json(), self.user, url=self.url) 224 | 225 | def get_server_status(self): 226 | return req.get(f"{self.url}/jupyter-servers-status/", 227 | params={"token": self.user_token, "jupyterProjectId": self.id}).json() 228 | 229 | def get_jupyterservers(self): 230 | return self.jupyterservers 231 | 232 | def refresh(self): 233 | return Jupyterproject(req.get(f"{self.url}/jupyterprojects/{self.id}/", 234 | params={"token": self.user_token}).json(), self.user, url=self.url) 235 | 236 | def delete(self): 237 | req.delete(f"{self.url}/jupyterprojects/{self.id}/",params={"token": self.user_token}) 238 | 239 | 240 | class Jupyterserver(object): 241 | def __init__(self, info, user, url=None): 242 | if not isinstance(info, dict): 243 | raise Exception(str(info)) 244 | if info.get('error'): 245 | raise Exception(info['message_en']) 246 | self.__dict__.update(info) 247 | self.instanceId = info['instanceId'] 248 | self.url = url if url else Util().url 249 | self.user = user 250 | self.user_token = self.user.token 251 | 252 | def __repr__(self): 253 | return str(self.instanceId) 254 | 255 | def stop(self): 256 | print(f"{self.instanceId} is stopped.") 257 | req.post(f"{self.url}/jupyterservers/{self.instanceId}/stop/", 258 | params={"token": self.user_token}) 259 | return 260 | 261 | def resume(self): 262 | print(f"{self.instanceId} is resumed.") 263 | req.post(f"{self.url}/jupyterservers/{self.instanceId}/resume/", 264 | params={"token": self.user_token}) 265 | return 266 | 267 | def delete(self): 268 | print(f"{self.instanceId} is deleted. 
It will take a time to turn off the server.") 269 | req.delete(f"{self.url}/jupyterservers/{self.instanceId}/",params={"token": self.user_token}) 270 | return 271 | -------------------------------------------------------------------------------- /ds2ai/DS2dataset.py: -------------------------------------------------------------------------------- 1 | import json 2 | from .util import Util 3 | import requests as req 4 | 5 | 6 | class Dataconnector(object): 7 | def __init__(self, info, user, url): 8 | if not isinstance(info, dict): 9 | raise Exception(str(info)) 10 | if info.get('error'): 11 | raise Exception(info['message_en']) 12 | self.__dict__.update(info) 13 | self.id = info['id'] 14 | self.name = info['dataconnectorName'] 15 | self.url = url if url else Util().url 16 | self.user = user 17 | self.status = info['status'] 18 | self.user_token = self.user.token 19 | 20 | def __repr__(self): 21 | return f"{str(self.id)}: {str(self.name)}" 22 | 23 | def delete(self): 24 | req.delete(f"{self.url}/dataconnector/{self.id}/",params={"token": self.user_token}) 25 | 26 | def get_magic_code(self, training_method, value_for_predict, file_path="output.ipynb"): 27 | 28 | if self.status != 100: 29 | raise ("The training data is being processed now. Please retry with dataconnector.get_magic_code() when the data is ready. When it is ready, dataconnector.status will return 100.") 30 | 31 | 32 | response = req.post(f"{self.url}/get-magic-code/", 33 | params={"token": self.user_token}, 34 | data=json.dumps({ 35 | 'dataconnector': self.id, 36 | 'trainingMethod': training_method, 37 | 'valueForPredict': value_for_predict, 38 | })) 39 | 40 | if file_path: 41 | with open(file_path, 'w') as output: 42 | text = response.json() 43 | if isinstance(text, dict): 44 | text = json.dumps(text) 45 | output.write(text) 46 | 47 | print(response.json()) 48 | -------------------------------------------------------------------------------- /ds2ai/LabelingAI.py: -------------------------------------------------------------------------------- 1 | from .util import Util, Asynctask, Instance 2 | import requests as req 3 | import json 4 | 5 | 6 | class Labelproject(object): 7 | def __init__(self, info, user, url=None): 8 | if not isinstance(info, dict): 9 | raise Exception(str(info)) 10 | if info.get('error'): 11 | raise Exception(info['message_en']) 12 | self.__dict__.update(info) 13 | self.id = info['id'] 14 | self.workapp = info['workapp'] 15 | self.url = url if url else Util().url 16 | self.user = user 17 | self.user_token = self.user.token 18 | labelclasses = [] 19 | if info.get("labelclasses"): 20 | for labelclass_raw in info.get("labelclasses"): 21 | labelclasses.append(Labelclass(labelclass_raw, user)) 22 | self.labelclasses = labelclasses 23 | 24 | def __repr__(self): 25 | return str(self.id) 26 | 27 | def delete(self): 28 | req.delete(f"{self.url}/labelprojects/{self.id}/",params={"token": self.user_token}) 29 | 30 | def get_labels(self): 31 | labels_raw = req.get(f"{self.url}/labels-by-labelproject/{self.id}/", params={"token": self.user_token}).json() 32 | labels = [] 33 | for label_raw in labels_raw: 34 | labels.append(Label(label_raw, self.user, url=self.url)) 35 | 36 | return labels 37 | 38 | def get_labelclasses(self): 39 | return self.labelclasses 40 | 41 | def get_labelfiles(self, sorting="created_at", tab="all", count=10, desc=False, searching="", workAssignee=None): 42 | labelfiles_raw = req.get(f"{self.url}/listobjects/", params={ 43 | "token": self.user_token, 44 | "labelprojectId": self.id, 45 | "sorting": 
sorting, 46 | "tab": tab, 47 | "count": count, 48 | "desc": desc, 49 | "searching": searching, 50 | "workAssignee": workAssignee, 51 | }).json() 52 | labelfiles = [] 53 | for label_raw in labelfiles_raw.get('file',[]): 54 | labelfiles.append(Labelfile(label_raw, self.user, self.labelclasses, url=self.url)) 55 | 56 | return labelfiles 57 | 58 | def create_labelclass(self, name, color="#000000"): 59 | for labelclass in self.labelclasses: 60 | if name == labelclass.name: 61 | raise("You can not create same label class name.") 62 | 63 | return Labelclass(req.post(f"{self.url}/labelclasses/", 64 | params={"token": self.user_token}, 65 | data=json.dumps({ 66 | 'name': name, 67 | 'labelproject': self.id, 68 | 'color': color 69 | })).json(), self.user, url=self.url) 70 | 71 | def create_labelfile(self, data_file): 72 | return req.post(f"{self.url}/add-object/", files={'files': open(data_file, "rb")}, data={ 73 | 'token': self.user_token, 74 | 'labelprojectId': self.id, 75 | 'frame_value': 0 76 | }, stream=True).json() 77 | 78 | def create_custom_ai(self, custom_ai_type="box", use_class_info={}, 79 | valueForPredictColumnId=None, trainingColumnInfo={}): 80 | """ 81 | 82 | `Create Custom Ai` 83 | 84 | :param item 85 | - **token**: str = user token 86 | 87 | :json item 88 | - **custom_ai_type**: str = labeling type. Example, "polygon" or "box" 89 | - **use_class_info**: dict = labelClass to use for Autolabeling 90 | - **labelproject_id**: int = target label project id 91 | 92 | \f 93 | """ 94 | 95 | return req.post(f"{self.url}/customai/", 96 | params={"token": self.user_token}, 97 | data=json.dumps({ 98 | 'custom_ai_type': custom_ai_type, 99 | 'use_class_info': use_class_info, 100 | 'valueForPredictColumnId': valueForPredictColumnId, 101 | 'trainingColumnInfo': trainingColumnInfo, 102 | 'labelproject_id': self.id, 103 | })).json() 104 | pass 105 | 106 | 107 | def autolabeling(self, amount, ai_type="general", autolabeling_type="box", general_ai_type="person", 108 | model_id=None, custom_ai_stage=0, preprocessing_ai_type={}, labeling_class=None ): 109 | """ 110 | `Create AutoLabeling` 111 | 112 | - **ai_type**: int = autolabelingAiType (custom or general or inference) 113 | - **autolabeling_type**: str = autolabeling type (For example, box or polygon) 114 | - **custom_ai_stage**: int = CustomAi Count 115 | - **general_ai_type**: str = None or generalAiType (For example person or road or animal or fire) 116 | - **preprocessing_ai_type**: dict = autolabeling preprocessingType. 
For example, {"faceblur": true} 117 | - **autolabeling_amount**: int = Number of images to autolabeling 118 | - **labeling_class**: List[str] = List of label classes 119 | 120 | \f 121 | """ 122 | 123 | return req.post(f"{self.url}/autolabeling/", 124 | params={"token": self.user_token}, 125 | data=json.dumps({ 126 | 'autolabeling_amount': amount, 127 | 'autolabeling_ai_type': ai_type, 128 | 'autolabeling_type': autolabeling_type, 129 | 'custom_ai_stage': custom_ai_stage, 130 | 'general_ai_type': general_ai_type, 131 | 'preprocessing_ai_type': preprocessing_ai_type, 132 | 'labeling_class': labeling_class, 133 | 'model_id': model_id, 134 | 'labelproject_id': self.id, 135 | })).json() 136 | pass 137 | 138 | def export(self, is_get_image=False): 139 | if self.workapp in ['object_detection', 'image']: 140 | return Asynctask(req.post(f"{self.url}/export-coco/{self.id}/", 141 | params={"token": self.user_token, 'is_get_image': is_get_image}, 142 | ).json(), self.user, url=self.url) 143 | else: 144 | return Asynctask(req.post(f"{self.url}/export-data/{self.id}/", 145 | params={"token": self.user_token}, 146 | ).json(), self.user, url=self.url) 147 | 148 | 149 | class Labelfile(object): 150 | def __init__(self, info, user, label_classes, url=None): 151 | if not isinstance(info, dict): 152 | raise Exception(str(info)) 153 | if info.get('error'): 154 | raise Exception(info['message_en']) 155 | self.__dict__.update(info) 156 | self.id = info['id'] 157 | self.file_name = info['originalFileName'] 158 | self.status = info['status'] 159 | self.file_url = info['s3key'] 160 | self.width = info['width'] 161 | self.height = info['height'] 162 | self.labelproject = info['labelproject'] 163 | self.label_classes = label_classes 164 | self.url = url if url else Util().url 165 | self.user = user 166 | self.user_token = self.user.token 167 | 168 | def __repr__(self): 169 | return str(self.file_name) 170 | 171 | def download(self, file_path=""): 172 | 173 | if not file_path: 174 | file_path = self.file_name 175 | 176 | response = req.get(self.file_url) 177 | with open(file_path, 'wb') as output: 178 | output.write(response.content) 179 | 180 | return file_path 181 | 182 | def set_done(self, workAssignee=None): 183 | self.status = "done" 184 | return self.set_status("done", workAssignee=workAssignee) 185 | 186 | def set_status(self, status, workAssignee=None): 187 | self.status = status 188 | return req.put(f"{self.url}/sthreefiles/{self.id}/", 189 | params={"token": self.user_token}, 190 | data=json.dumps({ 191 | 'status': status, 192 | 'workAssignee': workAssignee, 193 | })).json() 194 | 195 | def create_label(self, label, class_name=None, color=None, box=[], polygon=[], structuredData=None): 196 | 197 | if isinstance(label, Instance): 198 | return self.create_labels([label]) 199 | return self.create_labels([{ 200 | 'labeltype': label, 201 | 'class_name': class_name, 202 | 'sthreefile': self.id, 203 | 'color': color, 204 | 'box': box, 205 | 'polygon': polygon, 206 | 'structuredData': structuredData, 207 | 'labelproject': self.id, 208 | 'highlighted': False, 209 | 'locked': True, 210 | 'status': "done", 211 | }])[0] 212 | 213 | def create_labels(self, labels_raw): 214 | labels = [] 215 | for label_raw in labels_raw: 216 | if isinstance(label_raw, Instance): 217 | label_raw = label_raw.__dict__ 218 | 219 | label_class_id = None 220 | label_class_name = label_raw.get("class_name") 221 | for label_class in self.label_classes: 222 | if label_class.name == label_class_name: 223 | label_class_id = label_class.id 224 | 
225 | if not label_class_id: 226 | raise Exception("You need to create a label class first.") 227 | 228 | processed_points = [] 229 | for point in label_raw.get("polygon", []): 230 | label_raw['labeltype'] = "polygon" 231 | processed_points.append([point[0] / self.width, point[1] / self.height]) 232 | 233 | if label_raw.get("box", []): 234 | label_raw['labeltype'] = "box" 235 | label_raw['x'] = round(label_raw['box'][0] / self.width, 8) 236 | label_raw['w'] = round((label_raw['box'][2] - label_raw['box'][0]) / self.width, 8) 237 | label_raw['y'] = round(label_raw['box'][1] / self.height, 8) 238 | label_raw['h'] = round((label_raw['box'][3] - label_raw['box'][1]) / self.height, 8) 239 | 240 | label_raw.update({ 241 | 'labelproject': self.labelproject, 242 | 'labelclass': label_class_id, 243 | 'sthreefile': self.id, 244 | 'points': processed_points, 245 | 'highlighted': False, 246 | 'locked': True, 247 | 'status': "done" 248 | }) 249 | labels.append(label_raw) 250 | 251 | response = req.post(f"{self.url}/labels/", 252 | params={"token": self.user_token, "info": True}, 253 | data=json.dumps(labels)).json() 254 | 255 | if response.get("result", "") == "success": 256 | self.set_done() 257 | 258 | return response 259 | 260 | def delete(self): 261 | return req.delete(f"{self.url}/sthreefiles/", 262 | params={"token": self.user_token}, 263 | data={"sthreefilesId": [str(self.id)]}) 264 | 265 | class Label(object): 266 | def __init__(self, info, user, url=None): 267 | if not isinstance(info, dict): 268 | raise Exception(str(info)) 269 | if info.get('error'): 270 | raise Exception(info['message_en']) 271 | self.__dict__.update(info) 272 | self.id = info['id'] 273 | self.labeltype = info['labeltype'] 274 | self.url = url if url else Util().url 275 | self.user = user 276 | self.user_token = self.user.token 277 | 278 | def __repr__(self): 279 | return str(f"{self.labeltype}: {self.id}") 280 | 281 | def delete(self): 282 | req.delete(f"{self.url}/labels/{self.id}/",params={"token": self.user_token}) 283 | 284 | 285 | class Labelclass(object): 286 | def __init__(self, info, user, url=None): 287 | if not isinstance(info, dict): 288 | raise Exception(str(info)) 289 | if info.get('error'): 290 | raise Exception(info['message_en']) 291 | self.__dict__.update(info) 292 | self.id = info['id'] 293 | self.labelproject = info['labelproject'] 294 | self.url = url if url else Util().url 295 | self.user = user 296 | self.name = info['name'] 297 | self.user_token = self.user.token 298 | 299 | def __repr__(self): 300 | return str(self.name) 301 | 302 | def modify(self, name=None, color=None): 303 | return Labelclass(req.put(f"{self.url}/labelclasses/{self.id}/", 304 | params={"token": self.user_token}, 305 | data=json.dumps({ 306 | 'name': name, 307 | 'color': color, 308 | })).json(), self.user, url=self.url) 309 | 310 | def delete(self): 311 | req.delete(f"{self.url}/labelclasses/{self.id}/",params={"token": self.user_token}) 312 | -------------------------------------------------------------------------------- /ds2ai/SkyhubAI.py: -------------------------------------------------------------------------------- 1 | from .DS2dataset import Dataconnector 2 | from .util import Util 3 | import requests as req 4 | import json 5 | 6 | class Opsproject(object): 7 | utilClass = Util() 8 | 9 | def __init__(self, info, user, url=None): 10 | if not isinstance(info, dict): 11 | raise Exception(str(info)) 12 | if info.get('error'): 13 | raise Exception(info['message_en']) 14 | self.__dict__.update(info) 15 | self.id = info['id'] 16 
| self.url = url if url else Util().url 17 | if info.get('model'): 18 | self.opsModel = Opsmodel(info['model'], user) 19 | self.token = self.opsModel.token 20 | if info.get('trainingMethod'): 21 | self.app_url = f"https://ds2.ai/service_app.html/?modeltoken={self.token}&modelid={self.id}" 22 | else: 23 | self.app_url = None 24 | self.ops_server_groups = [] 25 | if info.get('opsServerGroupsInfo'): 26 | for ops_server_group in info.get('opsServerGroupsInfo', []): 27 | self.ops_server_groups.append(Opsservergroup(ops_server_group, user)) 28 | self.user = user 29 | self.status = info['status'] 30 | self.dataconnectorsList = info['dataconnectorsList'] 31 | self.user_token = self.user.token 32 | if isinstance(self.dataconnectorsList[0], int): 33 | self.main_dataconnector = self.get_dataconnector(self.dataconnectorsList[0]) 34 | else: 35 | self.main_dataconnector = Dataconnector(self.dataconnectorsList[0], self.user, url=self.url) 36 | 37 | def __repr__(self): 38 | return str(self.id) 39 | 40 | def get_app_url(self): 41 | if self.app_url: 42 | return self.app_url 43 | else: 44 | print("Currently we don't support the app url for the loaded model.") 45 | 46 | def delete(self): 47 | return req.delete(f"{self.url}/opsprojects/{self.id}/",params={"token": self.user_token}) 48 | 49 | def get_server_status(self): 50 | return req.get(f"{self.url}/ops-servers-status/", 51 | params={"token": self.user_token, "opsProjectId": self.id}).json() 52 | 53 | def refresh(self): 54 | return Opsproject(req.get(f"{self.url}/opsprojects/{self.id}/", 55 | params={"token": self.user_token}).json(), self.user, url=self.url) 56 | 57 | def predict(self, data, return_type="info"): 58 | 59 | if self.status != 100: 60 | raise("This model is not ready yet.") 61 | 62 | request_data = { 63 | "apptoken": self.user.appTokenCode, 64 | "modelid": self.model['id'], 65 | "modeltoken": self.token, 66 | "userId": self.user.id, 67 | } 68 | predict_url = { 69 | "predict": f"{self.url}/inference/inferenceops{self.id}/", 70 | "predictimagebyurl": f"{self.url}/inferenceimagebyurl/inferenceops{self.id}/", 71 | "predictimagebyurlxai": f"{self.url}/inferenceimagebyurlxai/inferenceops{self.id}/", 72 | "predictimagebyurlinfo": f"{self.url}/inferenceimagebyurlinfo/inferenceops{self.id}/", 73 | "predictall": f"{self.url}/inferenceall/inferenceops{self.id}/", 74 | "predictimage": f"{self.url}/inferenceimage/inferenceops{self.id}/", 75 | "predictimagexai": f"{self.url}/inferenceimagexai/inferenceops{self.id}/", 76 | "predictimageinfo": f"{self.url}/inferenceimageinfo/inferenceops{self.id}/", 77 | } 78 | 79 | if type(data) == dict: 80 | data_processed = {} 81 | for key, item in data.items(): 82 | if f"__{self.main_dataconnector.dataconnectorName}" not in key: 83 | data_processed[f"{key}__{self.main_dataconnector.dataconnectorName}"] = item 84 | else: 85 | data_processed[key] = item 86 | data = data_processed 87 | 88 | return self.utilClass.predict(request_data, predict_url, data, return_type=return_type) 89 | 90 | def get_dataconnector(self, dataconnector_id): 91 | return Dataconnector(req.get(f"{self.url}/dataconnector/{dataconnector_id}/", 92 | params={"token": self.user_token}).json(), self.user, url=self.url) 93 | 94 | class Opsmodel(object): 95 | utilClass = Util() 96 | 97 | def __init__(self, info, user, url=None): 98 | if not isinstance(info, dict): 99 | raise Exception(str(info)) 100 | if info.get('error'): 101 | raise Exception(info['message_en']) 102 | self.__dict__.update(info) 103 | self.id = info['id'] 104 | self.token = info['token'] 105 
| self.url = url if url else Util().url 106 | self.user = user 107 | self.user_token = self.user.token 108 | 109 | def __repr__(self): 110 | return str(self.id) 111 | 112 | class Opsservergroup(object): 113 | def __init__(self, info, user, url=None): 114 | if not isinstance(info, dict): 115 | raise Exception(str(info)) 116 | if info.get('error'): 117 | raise Exception(info['message_en']) 118 | self.__dict__.update(info) 119 | self.id = info['id'] 120 | self.url = url if url else Util().url 121 | self.user = user 122 | self.user_token = self.user.token 123 | 124 | def __repr__(self): 125 | return str(self.id) 126 | 127 | def edit_autoscaling(self, min_server_size=None, max_server_size=None, start_server_size=None): 128 | data = {} 129 | if min_server_size: 130 | data['minServerSize'] = min_server_size 131 | if max_server_size: 132 | data['maxServerSize'] = max_server_size 133 | if start_server_size: 134 | data['startServerSize'] = start_server_size 135 | 136 | return req.put(f"{self.url}/opsservergroups/{self.id}/", 137 | params={"token": self.user_token}, 138 | data = json.dumps(data) 139 | ).json() 140 | 141 | def stop(self): 142 | return req.put(f"{self.url}/opsservergroups/{self.id}/", 143 | params={"token": self.user_token}, 144 | data=json.dumps({ 145 | 'minServerSize': 0, 146 | 'maxServerSize': 0, 147 | 'startServerSize': 0, 148 | }) 149 | ).json() 150 | 151 | def resume(self): 152 | return req.put(f"{self.url}/opsservergroups/{self.id}/", 153 | params={"token": self.user_token}, 154 | data=json.dumps({ 155 | 'minServerSize': 1, 156 | 'maxServerSize': 1, 157 | 'startServerSize': 1, 158 | }) 159 | ).json() 160 | 161 | def delete(self): 162 | return req.delete(f"{self.url}/opsservergroups/{self.id}/",params={"token": self.user_token}) 163 | -------------------------------------------------------------------------------- /ds2ai/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright 2020 The DS2AI Authors. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 
14 | 15 | __version__ = "1.0.0" 16 | 17 | import time 18 | from .util import Util, Asynctask, User, MarketModel 19 | from .LabelingAI import Labelproject, Labelfile 20 | from .ClickAI import Project, Model, Jupyterproject 21 | from .SkyhubAI import Opsproject 22 | from .DS2dataset import Dataconnector 23 | import requests as req 24 | import json 25 | 26 | class DS2(): 27 | 28 | def __init__(self, apptoken, url=None): 29 | self.utilClass = Util() 30 | self.url = url if url else self.utilClass.url 31 | self.apptoken = apptoken 32 | self.user = self.get_user_info() 33 | self.user_token = self.user.token 34 | 35 | def get_user_info(self): 36 | return User(req.get(f"{self.url}/auth/", params={"apptoken": self.apptoken}).json()) 37 | 38 | def create_project(self, data_file, predict_column_name=None, frame=None, training_method=None): 39 | 40 | dataconnector = self.create_dataconnector(data_file, predict_column_name=predict_column_name, frame=frame) 41 | return self.create_project_by_dataconnector(dataconnector, training_method) 42 | 43 | def create_project_by_dataconnector(self, dataconnector, training_method): 44 | 45 | if isinstance(dataconnector, int): 46 | dataconnector_id = dataconnector 47 | else: 48 | dataconnector_id = dataconnector.id 49 | 50 | return Project(req.post(f"{self.url}/projectfromdataconnectors/", 51 | params={"token": self.user_token}, 52 | data=json.dumps({ 53 | 'dataconnectors': [dataconnector_id], 54 | 'trainingMethod': training_method 55 | })).json(), self.user, url=self.url) 56 | 57 | def load_model(self, model_file): 58 | 59 | with open(model_file, "rb") as f: 60 | file_content = f.read() 61 | return Project(req.post(f"{self.url}/projectswithmodelfile/", 62 | params={"token": self.user_token}, files={'file': file_content}, 63 | data={'filename': model_file.split("/")[-1] if "/" in model_file else model_file}, 64 | stream=True).json(), self.user, url=self.url) 65 | 66 | def get_projects(self, count=25, start=1, desc=True): 67 | items = [] 68 | items_raw = req.get(f"{self.url}/projects/", 69 | params={"token": self.user_token, "start": start, "page": count, "desc": desc}).json()['projects'] 70 | for item_raw in items_raw: 71 | items.append(Project(item_raw, self.user, url=self.url)) 72 | return items 73 | 74 | def get_project(self, project_id): 75 | return Project(req.get(f"{self.url}/projects/{project_id}/", params={"token": self.user_token}).json(), self.user, url=self.url) 76 | 77 | def get_model(self, model_id): 78 | return Model(req.get(f"{self.url}/models/{model_id}/", params={"token": self.user_token}).json(), self.user, url=self.url) 79 | 80 | def get_quick_models(self, count=25, start=1, desc=True): 81 | items = [] 82 | items_raw = req.get(f"{self.url}/market-models/", 83 | params={"token": self.user_token, "start": start, "page": count, "desc": desc, "is_quick_model":True} 84 | ).json()['market_models'] 85 | for item_raw in items_raw: 86 | items.append(MarketModel(item_raw, self.user)) 87 | return items 88 | 89 | def get_quick_model_by_slug_name(self, slug_name): 90 | return MarketModel(req.get(f"{self.url}/marketmodels/slug/{slug_name}/", 91 | params={"token": self.user_token}).json(), self.user, url=self.url) 92 | 93 | def create_dataconnector(self, data_file, has_label_data=False, predict_column_name=None, frame=60): 94 | with open(data_file, "rb") as f: 95 | file_content = f.read() 96 | return Dataconnector(req.post(f"{self.url}/dataconnectorswithfile/", 97 | files={'file': file_content}, 98 | data={'token': self.user_token, 99 | 'filename': 
data_file.split("/")[-1] if "/" in data_file else data_file, 100 | 'dataconnectorName': data_file.split("/")[-1] if "/" in data_file else data_file, 101 | 'hasLabelData': has_label_data, 102 | 'predictColumnName': predict_column_name, 103 | 'frameValue': frame, 104 | }, stream=True).json(), self.user, url=self.url) 105 | 106 | def get_dataconnectors(self, count=25, start=1, desc=True): 107 | items = [] 108 | items_raw = req.get(f"{self.url}/dataconnector/", 109 | params={"token": self.user_token, "start": start, "page": count, "desc": desc}).json()['dataconnectors'] 110 | for item_raw in items_raw: 111 | items.append(Dataconnector(item_raw, self.user, url=self.url)) 112 | return items 113 | 114 | def get_dataconnector(self, dataconnector_id): 115 | return Dataconnector(req.get(f"{self.url}/dataconnector/{dataconnector_id}/", 116 | params={"token": self.user_token}).json(), self.user, url=self.url) 117 | 118 | def create_labelproject(self, data_file=None, dataconnector=None, dataconnectors=None, 119 | training_method=None, name=None, frame=60): 120 | 121 | if dataconnector: 122 | if isinstance(dataconnector, int): 123 | dataconnector = self.get_dataconnector(dataconnector) 124 | dataconnectors = [dataconnector.id] 125 | 126 | if not training_method: 127 | if dataconnector.dataconnectorName.endswith('.csv'): 128 | training_method = "normal_regression" 129 | else: 130 | training_method = "object_detection" 131 | 132 | if dataconnectors: 133 | 134 | if not name: 135 | name = f"Label Project from dataconnectors : {str(dataconnectors)}" 136 | 137 | return Labelproject(req.post(f"{self.url}/labelproject-from-dataconnectors/", 138 | params={"token": self.user_token}, 139 | data=json.dumps({'dataconnectors': dataconnectors, 140 | 'workapp': training_method, 141 | 'name': name, 142 | 'frame_value': frame, 143 | })).json(), self.user, url=self.url) 144 | 145 | elif data_file: 146 | if not name: 147 | name = f"Label Project from file : {str(data_file.split('/')[-1] if '/' in data_file else data_file)}" 148 | 149 | file_content = open(data_file, "r") 150 | return Labelproject(req.post(f"{self.url}/labelprojects/", 151 | params={"token": self.user_token}, 152 | files={'files': file_content}, 153 | data={ 154 | 'workapp': training_method, 155 | 'frame_value': frame, 156 | 'name': name, 157 | }, stream=True).json()['labelproject'], self.user, url=self.url) 158 | 159 | 160 | else: 161 | raise("You need to choose dataconnectors or files.") 162 | 163 | def get_labelprojects(self, count=25, start=1, desc=True): 164 | items = [] 165 | items_raw = req.get(f"{self.url}/labelprojects/", 166 | params={"token": self.user_token, "start": start, "page": count, "desc": desc}).json()['projects'] 167 | for item_raw in items_raw: 168 | items.append(Labelproject(item_raw, self.user, url=self.url)) 169 | return items 170 | 171 | def get_labelproject(self, labelproject_id): 172 | return Labelproject(req.get(f"{self.url}/labelprojects/{labelproject_id}/", 173 | params={"token": self.user_token}).json(), self.user, url=self.url) 174 | 175 | def get_opsprojects(self, count=25, start=1, desc=True): 176 | items = [] 177 | items_raw = req.get(f"{self.url}/opsprojects/", 178 | params={"token": self.user_token, "start": start, "page": count, "desc": desc}).json()['projects'] 179 | for item_raw in items_raw: 180 | items.append(Opsproject(item_raw, self.user, url=self.url)) 181 | return items 182 | 183 | def get_opsproject(self, opsproject_id): 184 | return Opsproject(req.get(f"{self.url}/opsprojects/{opsproject_id}/", 185 | 
params={"token": self.user_token}).json(), self.user, url=self.url) 186 | 187 | def get_jupyterprojects(self, count=25, start=1, desc=True): 188 | items = [] 189 | items_raw = req.get(f"{self.url}/jupyterprojects/", 190 | params={"token": self.user_token, "start": start, "page": count, "desc": desc}).json()['projects'] 191 | for item_raw in items_raw: 192 | items.append(Jupyterproject(item_raw, self.user, url=self.url)) 193 | return items 194 | 195 | def get_jupyterproject(self, jupyterproject_id): 196 | return Jupyterproject(req.get(f"{self.url}/jupyterprojects/{jupyterproject_id}/", 197 | params={"token": self.user_token}).json(), self.user, url=self.url) 198 | 199 | def get_asynctasks(self, count=25, start=1, desc=True, tasktype="all"): 200 | items = [] 201 | items_raw = req.get(f"{self.url}/asynctaskall/", 202 | params={"token": self.user_token, "start": start, "page": count, "desc": desc, 203 | "tasktype": tasktype}).json() 204 | for item_raw in items_raw['asynctasks']: 205 | items.append(Asynctask(item_raw, self.user, url=self.url)) 206 | return items 207 | 208 | def get_asynctask(self, asynctask_id): 209 | return Asynctask(req.get(f"{self.url}/asynctasks/{asynctask_id}/", 210 | params={"token": self.user_token}).json(), self.user, url=self.url) 211 | 212 | def start_auto_labeling(self, data_file, amount, has_label_data=False, predict_column_name=None, frame=60, 213 | ai_type="general", autolabeling_type="box", general_ai_type="person", 214 | model_id=None, custom_ai_stage=0, preprocessing_ai_type={}, labeling_class=[], 215 | training_method="object_detection", name='', description='' 216 | ): 217 | 218 | dataconnector = self.create_dataconnector(data_file, has_label_data=has_label_data, predict_column_name=predict_column_name) 219 | print("The data is being processed now. It will take a while. (Mostly less than 5 minutes.)") 220 | is_uploaded = False 221 | for i in range(100): 222 | time.sleep(5) 223 | dataconnector = self.get_dataconnector(dataconnector.id) 224 | if dataconnector.status == 100: 225 | is_uploaded = True 226 | break 227 | 228 | if not is_uploaded: 229 | raise ("The training data is being processed now. Please retry with ds2.train() when the data is ready. When it is ready, dataconnector.status will return 100.") 230 | 231 | if not name: 232 | name = f"label project from dataconnector {dataconnector.id}" 233 | 234 | return Asynctask(req.post(f"{self.url}/start-auto-labeling/", 235 | params={"token": self.user_token}, 236 | data=json.dumps({ 237 | 'dataconnectors': [dataconnector.id], 238 | 'predictColumnName': predict_column_name, 239 | 'frameValue': frame, 240 | 'workapp': training_method, 241 | 'autolabeling_amount': amount, 242 | 'autolabeling_ai_type': ai_type, 243 | 'autolabeling_type': autolabeling_type, 244 | 'general_ai_type': general_ai_type, 245 | 'model_id': model_id, 246 | 'custom_ai_stage': custom_ai_stage, 247 | 'preprocessing_ai_type': preprocessing_ai_type, 248 | 'labeling_class': labeling_class, 249 | 'name': name, 250 | 'description': description, 251 | })).json(), self.user, url=self.url) 252 | 253 | 254 | def train(self, data_file, training_method, value_for_predict, option="accuracy", frame=60, 255 | hyper_params={}, algorithm=None, **kwargs): 256 | 257 | dataconnector = self.create_dataconnector(data_file, has_label_data=True, predict_column_name=value_for_predict) 258 | print("The data is being processed now. It will take a while. 
(Mostly less than 5 minutes.)") 259 | is_uploaded = False 260 | for i in range(100): 261 | time.sleep(5) 262 | dataconnector = self.get_dataconnector(dataconnector.id) 263 | if dataconnector.status == 100: 264 | is_uploaded = True 265 | break 266 | 267 | if not is_uploaded: 268 | raise ("The training data is being processed now. Please retry with ds2.train() when the data is ready. When it is ready, dataconnector.status will return 100.") 269 | data = { 270 | 'trainingMethod': training_method, 271 | 'valueForPredict': value_for_predict, 272 | 'dataconnector': dataconnector.id, 273 | 'option': option, 274 | 'frameValue': frame, 275 | 'hyper_params': hyper_params, 276 | 'algorithm': algorithm, 277 | } 278 | data.update(kwargs) 279 | return Project(req.post(f"{self.url}/train-from-data/", 280 | params={"token": self.user_token}, 281 | data=json.dumps(data)).json(), self.user, url=self.url) 282 | 283 | def deploy(self, model_file, name=None, cloud_type="AWS", region="us-west-1", server_type="g4dn.xlarge"): 284 | 285 | if "AWS" != cloud_type: 286 | raise("Currently we support only AWS cloud for this SDK.") 287 | 288 | if not name: 289 | name = f"Ops project {str(round(time.time() * 10000000))}" 290 | 291 | with open(model_file, "rb") as f: 292 | file_content = f.read() 293 | return Opsproject(req.post(f"{self.url}/deploy-model-file/", 294 | params={"token": self.user_token}, 295 | files={'file': file_content}, 296 | data={'token': self.user_token, 297 | 'filename': model_file.split("/")[-1] if "/" in model_file else model_file, 298 | 'projectName': name, 299 | 'serverType': server_type, 300 | 'region': region, 301 | }, stream=True).json(), self.user, url=self.url) 302 | 303 | def get_magic_code(self, training_method, data_file, value_for_predict): 304 | 305 | dataconnector = self.create_dataconnector(data_file, has_label_data=True, predict_column_name=value_for_predict) 306 | print("The data is being processed now. It will take a while. (Mostly less than 5 minutes.)") 307 | is_uploaded = False 308 | for i in range(100): 309 | time.sleep(5) 310 | dataconnector = self.get_dataconnector(dataconnector.id) 311 | if dataconnector.status == 100: 312 | is_uploaded = True 313 | break 314 | 315 | if not is_uploaded: 316 | raise ("The training data is being processed now. Please retry with ds2.get_magic_code() when the data is ready. 
When it is ready, dataconnector.status will return 100.") 317 | 318 | return dataconnector.get_magic_code(training_method, value_for_predict) 319 | 320 | 321 | 322 | 323 | def rent_custom_training_server(self, cloud_type="AWS", region="us-west-1", server_type="g4dn.xlarge", 324 | name=None): 325 | if not name: 326 | name = f"Jupyter project {str(round(time.time() * 10000000))}" 327 | 328 | if "AWS" != cloud_type: 329 | raise("Currently we support only AWS cloud for this SDK.") 330 | 331 | return Jupyterproject(req.post(f"{self.url}/jupyterprojects/", 332 | params={"token": self.user_token}, 333 | data=json.dumps({ 334 | 'projectName': name, 335 | 'region': region, 336 | 'serverType': server_type, 337 | })).json(), self.user, url=self.url) 338 | 339 | def predict(self, data, model_id=None, quick_model_name="", return_type="info"): 340 | 341 | if isinstance(data, Labelfile): 342 | data = data.download() 343 | 344 | if not model_id and not quick_model_name: 345 | raise("You need to choose model id or quick model name.") 346 | 347 | if model_id: 348 | model = self.get_model(model_id) 349 | 350 | if model.status != 100: 351 | raise("This model is not ready yet.") 352 | else: 353 | model = self.get_quick_model_by_slug_name(quick_model_name) 354 | 355 | return model.predict(data, return_type=return_type) 356 | 357 | def get_server_lists(self): 358 | return req.get(f"{self.url}/server-pricing/", params={"token": self.user_token}).json() 359 | 360 | -------------------------------------------------------------------------------- /ds2ai/util.py: -------------------------------------------------------------------------------- 1 | import json 2 | import requests as req 3 | 4 | class Util(): 5 | url = "http://0.0.0.0" 6 | 7 | def predict(self, request_data, predict_url, data, return_type="info"): 8 | 9 | if type(data) == dict: 10 | request_data["parameter"] = data 11 | response = {} 12 | response_raw = json.loads(self._predict(predict_url['predict'], request_data).json()) 13 | if response_raw.get('이상값칼럼'): 14 | response['outliar'] = response_raw['이상값칼럼'] 15 | for key, item in response_raw.items(): 16 | if key.endswith('__예측값'): 17 | response[key.replace('__예측값', '__predicted')] = item 18 | return response 19 | else: 20 | if data.startswith(('http://', 'https://')): 21 | request_data['url'] = data 22 | if return_type == "info": 23 | inference_results_raw = self._predict(predict_url['predictimagebyurlinfo'], request_data).json() 24 | inference_results = [] 25 | boxes = inference_results_raw.get("boxes", []) 26 | masks = inference_results_raw.get("masks", []) 27 | for index, inference_result_raw in enumerate(inference_results_raw.get('prediction', [])): 28 | inference_result = { 29 | "class_name": inference_result_raw, 30 | "score": inference_result_raw, 31 | } 32 | if boxes: 33 | inference_result['box'] = boxes[index] 34 | if masks: 35 | inference_result['mask'] = masks[index] 36 | inference_results.append(Instance(inference_result)) 37 | 38 | return inference_results 39 | elif return_type == "xai": 40 | return self._predict(predict_url['predictimagebyurlxai'], request_data).content 41 | else: 42 | return self._predict(predict_url['predictimagebyurl'], request_data).content 43 | else: 44 | with open(data, "rb") as f: 45 | file_content = f.read() 46 | request_data["filename"] = data.split('/')[-1] if '/' in data else data 47 | if data.endswith('.csv'): 48 | return self._predict(predict_url['predictall'], request_data, 49 | file_content=file_content) 50 | if data.endswith((".jpg", ".jpeg", ".png", "gif", 
".mp4", ".mov")): 51 | if return_type == "info": 52 | inference_results_raw = self._predict(predict_url['predictimageinfo'], request_data, 53 | file_content=file_content).json() 54 | inference_results = [] 55 | boxes = inference_results_raw.get("boxes", []) 56 | polygons = inference_results_raw.get("polygons", []) 57 | for index, inference_result_raw in enumerate(inference_results_raw.get('prediction', [])): 58 | inference_result = { 59 | "class_name": inference_result_raw, 60 | "score": inference_results_raw['scores'][index], 61 | } 62 | if boxes: 63 | inference_result['box'] = boxes[index] 64 | if polygons: 65 | inference_result['polygon'] = polygons[index] 66 | inference_results.append(Instance(inference_result)) 67 | 68 | return inference_results 69 | 70 | elif return_type == "xai": 71 | return self._predict(predict_url['predictimagexai'], request_data, 72 | file_content=file_content).content 73 | else: 74 | return self._predict(predict_url['predictimage'], request_data, 75 | file_content=file_content).content 76 | 77 | def _predict(self, target_url, request_data, file_content=None): 78 | if file_content: 79 | return req.post(target_url, files={'file': file_content}, data=request_data, stream=True) 80 | else: 81 | return req.post(target_url, data=json.dumps(request_data)) 82 | 83 | class Asynctask(object): 84 | def __init__(self, info, user, url=None): 85 | if not isinstance(info, dict): 86 | raise Exception(str(info)) 87 | if info.get('error'): 88 | raise Exception(info['message_en']) 89 | self.__dict__.update(info) 90 | self.id = info['id'] 91 | self.url = url if url else Util().url 92 | self.user = user 93 | self.user_token = self.user.token 94 | 95 | def __repr__(self): 96 | return str(self.id) 97 | 98 | def refresh(self): 99 | return Asynctask(req.get(f"{self.url}/asynctasks/{self.id}/", 100 | params={"token": self.user_token}).json(), self.user, url=self.url) 101 | 102 | 103 | class MarketModel(object): 104 | utilClass = Util() 105 | 106 | def __init__(self, info, user, url=None): 107 | if not isinstance(info, dict): 108 | raise Exception(str(info)) 109 | if info.get('error'): 110 | raise Exception(info['message_en']) 111 | self.__dict__.update(info) 112 | self.id = info['id'] 113 | self.name = info['name_en'] 114 | self.url = url if url else Util().url 115 | self.user = user 116 | self.user_token = self.user.token 117 | 118 | def __repr__(self): 119 | return str(self.name) 120 | 121 | def predict(self, data, return_type="info"): 122 | request_data = { 123 | "apptoken": self.user.appTokenCode, 124 | "modelid": self.id, 125 | "userId": self.user.id, 126 | } 127 | 128 | predict_url = { 129 | "predict": f"{self.url}/market/predict/", 130 | "predictimagebyurl": f"{self.url}/market/predictimagebyurl/", 131 | "predictimagebyurlxai": f"{self.url}/market/predictimagebyurlxai/", 132 | "predictimagebyurlinfo": f"{self.url}/market/predictimagebyurlinfo/", 133 | "predictall": f"{self.url}/market/predictall/", 134 | "predictimage": f"{self.url}/market/predictimage/", 135 | "predictimagexai": f"{self.url}/market/predictimagexai/", 136 | "predictimageinfo": f"{self.url}/market/predictimageinfo/", 137 | } 138 | 139 | return self.utilClass.predict(request_data, predict_url, data, return_type=return_type) 140 | 141 | class User(object): 142 | def __init__(self, info): 143 | if not isinstance(info, dict): 144 | raise Exception(str(info)) 145 | if info.get('error'): 146 | raise Exception(info['message_en']) 147 | self.token = info['token'] 148 | self.__dict__.update(info) 149 | 150 | class 
150 | class Instance(object):
151 | 
152 |     def __init__(self, info):
153 |         if not isinstance(info, dict):
154 |             raise Exception(str(info))
155 |         if info.get('error'):
156 |             raise Exception(info['message_en'])
157 |         self.__dict__.update(info)
158 |         self.class_name = info['class_name']
159 | 
160 |     def __repr__(self):
161 |         return str(self.class_name)
162 | 
--------------------------------------------------------------------------------
/setup.cfg:
--------------------------------------------------------------------------------
1 | [metadata]
2 | description-file = README.md
3 | 
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | 
3 | #
4 | # Licensed to the Apache Software Foundation (ASF) under one
5 | # or more contributor license agreements. See the NOTICE file
6 | # distributed with this work for additional information
7 | # regarding copyright ownership. The ASF licenses this file
8 | # to you under the Apache License, Version 2.0 (the
9 | # "License"); you may not use this file except in compliance
10 | # with the License. You may obtain a copy of the License at
11 | #
12 | #   http://www.apache.org/licenses/LICENSE-2.0
13 | #
14 | # Unless required by applicable law or agreed to in writing,
15 | # software distributed under the License is distributed on an
16 | # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
17 | # KIND, either express or implied. See the License for the
18 | # specific language governing permissions and limitations
19 | # under the License.
20 | #
21 | 
22 | from setuptools import setup
23 | setup(name="ds2ai",
24 |       version='1.1.0',
25 |       description="MLOps Platform - DS2.AI Python SDK",
26 |       packages=["ds2ai"],
27 |       author="DSLAB GLOBAL, INC",
28 |       author_email="contact@dslab.global",
29 |       license='Apache License 2.0',
30 |       zip_safe=False)
31 | 
32 | 
--------------------------------------------------------------------------------