├── requirements-test.txt ├── requirements.txt ├── ext └── NVIDIA_horo_white.png ├── .gitignore ├── .gitattributes ├── nvidia_clara ├── grpc │ ├── __init__.py │ ├── metrics_pb2_grpc.py │ ├── clara_pb2_grpc.py │ ├── pipelines_pb2_grpc.py │ ├── jobs_pb2_grpc.py │ ├── payloads_pb2_grpc.py │ ├── common_pb2.py │ ├── common_pb2_grpc.py │ ├── metrics_pb2.py │ └── models_pb2_grpc.py ├── constants.py ├── __init__.py ├── base_client.py ├── payload_types.py ├── pipeline_types.py ├── clara_client.py ├── clara_types.py ├── model_types.py └── job_types.py ├── .github └── workflows │ └── build.yml ├── examples ├── clara_client_example.py ├── pipelines_client_example.py ├── jobs_client_example.py ├── payloads_client_example.py └── combined_example.py ├── setup.py ├── README.md ├── CLA.md ├── tests ├── test_client_tools.py ├── test_pipelines_client.py ├── test_payloads_client.py └── test_jobs_client.py └── LICENSE /requirements-test.txt: -------------------------------------------------------------------------------- 1 | grpcio-testing 2 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | grpcio 2 | protobuf 3 | -------------------------------------------------------------------------------- /ext/NVIDIA_horo_white.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NVIDIA/clara-platform-python-client/main/ext/NVIDIA_horo_white.png -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .idea/ 2 | .vs/ 3 | .vscode/ 4 | .pytest_cache/ 5 | **/__pycache__/ 6 | build/ 7 | dist/ 8 | *.egg-info/ 9 | -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | * -text 2 | .gitattributes eol=lf 3 | .gitignore eol=lf 4 | *.[Pp][Yy] eol=lf 5 | *.[Mm][Dd] eol=lf 6 | -------------------------------------------------------------------------------- /nvidia_clara/grpc/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 
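# Package marker for the protobuf/gRPC modules generated from the Clara Platform .proto definitions.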
14 | -------------------------------------------------------------------------------- /.github/workflows/build.yml: -------------------------------------------------------------------------------- 1 | # This workflow will install Python dependencies, run tests and lint with a single version of Python 2 | # For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions 3 | 4 | name: build 5 | 6 | on: 7 | push: 8 | branches: [ main ] 9 | pull_request: 10 | branches: [ main ] 11 | 12 | jobs: 13 | build: 14 | runs-on: ubuntu-latest 15 | steps: 16 | - uses: actions/checkout@v2 17 | - name: Set up Python 3.8 18 | uses: actions/setup-python@v2 19 | with: 20 | python-version: 3.8 21 | - name: Install dependencies 22 | run: | 23 | python -m pip install -r requirements.txt 24 | - name: Install Pytest and other Packages 25 | run: | 26 | python -m pip install pytest 27 | python -m pip install -r requirements-test.txt 28 | - name: Run Pytests 29 | run: | 30 | export PYTHONPATH=$(pwd):$PYTHONPATH 31 | python -m pytest -vv . 32 | -------------------------------------------------------------------------------- /nvidia_clara/constants.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | 16 | ClaraVersionMajor = 0 17 | ClaraVersionMinor = 6 18 | ClaraVersionPatch = 0 19 | GrpcChunkSizeDefault = 1024 * 1024 20 | GrpcChunkSizeMaximum = 4 * 1024 * 1024 - 512 21 | GrpcChunkSizeMinimum = 1024 22 | GrpcChunkSizeName = "GRPC_CHUNK_SIZE" 23 | GrpcParallelStreamsDefault = 8 24 | GrpcParallelStreamsMaximum = 64 25 | GrpcParallelStreamsMinimum = 1 26 | GrpcParallelStreamsName = "GRPC_PARALLEL_STREAMS" 27 | GrpcChannelProviderUnavailable = "GRPC Channel provider is unavailable." 28 | GrpcClientProviderUnavailable = "GRPC client provider is unavailable." 29 | -------------------------------------------------------------------------------- /nvidia_clara/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 
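# Package root: exposes the service clients (Jobs, Pipelines, Payloads, Models, Clara) and aliases their companion 'types' modules for convenient access via `import nvidia_clara`.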
14 | 15 | from nvidia_clara.jobs_client import JobsClient 16 | from nvidia_clara.pipelines_client import PipelinesClient 17 | from nvidia_clara.payloads_client import PayloadsClient 18 | from nvidia_clara.models_client import ModelsClient 19 | from nvidia_clara.base_client import BaseClient 20 | from nvidia_clara.clara_client import ClaraClient 21 | import nvidia_clara.pipeline_types as PipelineTypes 22 | import nvidia_clara.job_types as JobTypes 23 | import nvidia_clara.payload_types as PayloadTypes 24 | import nvidia_clara.model_types as ModelTypes 25 | import nvidia_clara.clara_types as ClaraTypes 26 | -------------------------------------------------------------------------------- /examples/clara_client_example.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | from nvidia_clara.clara_client import ClaraClient 16 | import nvidia_clara.clara_types as clara_types 17 | 18 | clara_ip_address = "10.0.0.1" 19 | clara_port = "30031" 20 | 21 | clara_client = ClaraClient(target=clara_ip_address, port=clara_port) 22 | 23 | # Get Clara Version 24 | version = clara_client.version() 25 | 26 | # Getting GPU Utilization 27 | # Option 1: Get a list with a snapshot of current GPU utilization 28 | utilization_list = clara_client.list_utilization() 29 | # Option 2: Obtain a generator which provides a stream of GPU utilization 30 | utilization_stream = clara_client.stream_utilization() 31 | 32 | # Stop Pipeline Services and Triton 33 | clara_client.stop() 34 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 
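# Packaging configuration for the nvidia-clara-client distribution published to PyPI.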
14 | 15 | import setuptools 16 | 17 | with open("README.md", "r") as fh: 18 | long_description = fh.read() 19 | 20 | setuptools.setup( 21 | name="nvidia-clara-client", 22 | version="0.8.1.7", 23 | author="NVIDIA Clara Deploy", 24 | description="Python package to interact with Clara Platform Server API", 25 | license='Apache Software License (http://www.apache.org/licenses/LICENSE-2.0)', 26 | long_description=long_description, 27 | long_description_content_type="text/markdown", 28 | url="https://github.com/NVIDIA/clara-platform-python-client", 29 | install_requires=['grpcio', 'protobuf'], 30 | packages=setuptools.find_packages('.'), 31 | classifiers=[ 32 | "Programming Language :: Python :: 3", 33 | "Operating System :: OS Independent", 34 | 'License :: OSI Approved :: Apache Software License' 35 | ], 36 | python_requires='>=3.6', 37 | ) 38 | -------------------------------------------------------------------------------- /examples/pipelines_client_example.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | from pathlib import Path 16 | from nvidia_clara.pipelines_client import PipelinesClient 17 | import nvidia_clara.pipeline_types as pipeline_types 18 | 19 | # Client Creation with IP and Port of running instance of Clara 20 | 21 | clara_ip_address = "10.0.0.1" 22 | clara_port = "30031" 23 | 24 | pipeline_client = PipelinesClient(target=clara_ip_address, port=clara_port) 25 | 26 | # Create list of pipeline_types.PipelineDefinition with local path to pipeline .yaml 27 | file_path = "./liver-tumor-pipeline.yaml" 28 | definitions = [pipeline_types.PipelineDefinition(name=file_path, content=Path(file_path).read_text())] 29 | 30 | # Create Pipeline with definition list created 31 | pipeline_id = pipeline_client.create_pipeline(definition=definitions) 32 | print(pipeline_id) 33 | 34 | # Get List of Created Pipelines PipelinesClient.list_pipelines() 35 | pipelines = [(pipe_info.pipeline_id.value, pipe_info.name) for pipe_info in pipeline_client.list_pipelines()] 36 | print(pipelines) 37 | 38 | # Get Details of Pipeline with PipelinesClient.pipeline_details() 39 | pipeline_details = pipeline_client.pipeline_details(pipeline_id=pipeline_id) 40 | 41 | # Remove Pipeline 42 | pipeline_client.remove_pipeline(pipeline_id=pipeline_id) 43 | 44 | -------------------------------------------------------------------------------- /examples/jobs_client_example.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 
5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | from nvidia_clara.jobs_client import JobsClient 16 | import nvidia_clara.job_types as job_types 17 | import nvidia_clara.pipeline_types as pipeline_types 18 | 19 | # Client Creation with IP and Port of running instance of Clara 20 | clara_ip_address = "10.0.0.1" 21 | clara_port = "30031" 22 | 23 | jobs_client = JobsClient(target=clara_ip_address, port=clara_port) 24 | 25 | # Creates Filter of Healthy Jobs - Additionally could filter by Pipeline Id, State, Completion Time, and Creation Time 26 | job_filter = job_types.JobFilter(has_job_status=[job_types.JobStatus.Healthy]) 27 | 28 | # List Current Jobs with Optional Filter 29 | job_list = jobs_client.list_jobs(job_filter=job_filter) 30 | print(job_list) 31 | 32 | # Identifier of created pipeline (ex. colon tumor segmentation) 33 | colon_tumor_pipeline_id = "f9a843935e654a30beb9d1b8352bfaac" 34 | 35 | # Create Job 36 | job_info = jobs_client.create_job(job_name="colontumor",pipeline_id=pipeline_types.PipelineId(colon_tumor_pipeline_id)) 37 | print(job_info.job_id.value) 38 | 39 | # Start Job 40 | job_token = jobs_client.start_job(job_id=job_info.job_id) 41 | print(job_token.job_state) 42 | print(job_token.job_status) 43 | 44 | # Get Status of Job from Identifier 45 | job_details = jobs_client.get_status(job_id=job_token.job_id) 46 | 47 | print(job_details.job_state) 48 | print(job_details.job_status) 49 | 50 | # Gets List of Operators 51 | print(job_details.operator_details.keys()) 52 | 53 | # Try Canceling Job (if still running) 54 | try: 55 | job_details = jobs_client.cancel_job(job_id=job_token.job_id) 56 | except: 57 | print("Scheduler Rejected Request") 58 | 59 | 60 | -------------------------------------------------------------------------------- /examples/payloads_client_example.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 
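# Example: create and delete a re-usable payload, download a blob from an existing payload, upload a new blob to it, and fetch the payload's details.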
14 | 15 | from nvidia_clara.payloads_client import PayloadsClient 16 | import nvidia_clara.payload_types as payload_types 17 | 18 | # Client Creation with IP and Port of running instance of Clara 19 | 20 | clara_ip_address = "10.0.0.1" 21 | clara_port = "30031" 22 | 23 | payload_client = PayloadsClient(target=clara_ip_address, port=clara_port) 24 | 25 | # Create static re-usable Payload 26 | payload_details = payload_client.create_payload() 27 | 28 | # Delete Payload 29 | payload_client.delete_payload(payload_id=payload_details.payload_id) 30 | 31 | # Download from existing Payload ex. Payload with identifier '61a477bf-6bcc-4fdd-abad-ccb8886eb52f' with blob/file name ./input/I114.dcm 32 | example_payload_identifier = '61a477bf-6bcc-4fdd-abad-ccb8886eb52f' 33 | 34 | # Create BinaryIO stream object with write permissions and download from payload identifier: example_payload_identifier 35 | with open('output.dcm', 'wb') as wb: 36 | payload_client.download_from(payload_id=payload_types.PayloadId(example_payload_identifier), 37 | blob_name='./input/I114.dcm', 38 | dest_obj=wb) 39 | 40 | # Uploading BinaryIO stream to a new blob 41 | # Create BinaryIO stream with read permissions (for sake of example: reading previous output stream) 42 | with open('output.dcm', 'rb') as rb: 43 | payload_client.upload(payload_id=payload_types.PayloadId(example_payload_identifier), 44 | blob_name='./test/new_blob.dcm', file_object=rb) 45 | 46 | # Get Details of a Payload 47 | confirming_details = payload_client.get_details( 48 | payload_id=payload_types.PayloadId(example_payload_identifier)) 49 | -------------------------------------------------------------------------------- /nvidia_clara/base_client.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 
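# Shared plumbing for all Clara clients: response-header validation, construction of the versioned request header, and an iterator wrapper for client-streaming requests.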
14 | 15 | 16 | from enum import Enum 17 | from nvidia_clara.grpc import common_pb2, jobs_pb2 18 | import nvidia_clara.constants as constants 19 | 20 | 21 | class BaseClient: 22 | 23 | @staticmethod 24 | def check_response_header(header): 25 | 26 | if not isinstance(header, common_pb2.ResponseHeader): 27 | raise TypeError("Header arguement must be of type ResponseHeader") 28 | 29 | if header.code < 0: 30 | 31 | if header.messages is not None: 32 | 33 | if len(header.messages) > 0: 34 | message_string_list = [header.messages[i] for i in range(len(header.messages))] 35 | 36 | raise Exception('\n'.join(message_string_list)) 37 | 38 | else: 39 | raise Exception("Internal Server Error " + str(header.code)) 40 | 41 | else: 42 | raise Exception("Internal Server Error " + str(header.code)) 43 | 44 | @staticmethod 45 | def get_request_header() -> common_pb2.RequestHeader: 46 | 47 | header = common_pb2.RequestHeader(api_version=common_pb2.Version( 48 | major=constants.ClaraVersionMajor, 49 | minor=constants.ClaraVersionMinor, 50 | patch=constants.ClaraVersionPatch), 51 | user_agent="Nvidia.Clara.Platform") 52 | 53 | return header 54 | 55 | 56 | class RequestIterator(object): 57 | 58 | def __init__(self, requests): 59 | self._requests_iter = iter(requests) 60 | 61 | def __call__(self, handler=None): 62 | while True: 63 | try: 64 | request = next(self._requests_iter) 65 | except StopIteration: 66 | return 67 | yield request 68 | -------------------------------------------------------------------------------- /nvidia_clara/grpc/metrics_pb2_grpc.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | # -*- coding: utf-8 -*- 16 | # Generated by the protocol buffer compiler. DO NOT EDIT! 17 | # source: nvidia/clara/platform/clara.proto 18 | 19 | # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 20 | import grpc 21 | 22 | from nvidia_clara.grpc import metrics_pb2 as nvidia_dot_clara_dot_platform_dot_node__monitor_dot_metrics__pb2 23 | 24 | 25 | class MonitorStub(object): 26 | # missing associated documentation comment in .proto file 27 | pass 28 | 29 | def __init__(self, channel): 30 | """Constructor. 31 | 32 | Args: 33 | channel: A grpc.Channel. 
34 | """ 35 | self.GpuMetrics = channel.unary_stream( 36 | '/nvidia.clara.platform.node_monitor.Monitor/GpuMetrics', 37 | request_serializer=nvidia_dot_clara_dot_platform_dot_node__monitor_dot_metrics__pb2.MonitorGpuMetricsRequest.SerializeToString, 38 | response_deserializer=nvidia_dot_clara_dot_platform_dot_node__monitor_dot_metrics__pb2.MonitorGpuMetricsResponse.FromString, 39 | ) 40 | 41 | 42 | class MonitorServicer(object): 43 | # missing associated documentation comment in .proto file 44 | pass 45 | 46 | def GpuMetrics(self, request, context): 47 | """Request GPU metrics 48 | """ 49 | context.set_code(grpc.StatusCode.UNIMPLEMENTED) 50 | context.set_details('Method not implemented!') 51 | raise NotImplementedError('Method not implemented!') 52 | 53 | 54 | def add_MonitorServicer_to_server(servicer, server): 55 | rpc_method_handlers = { 56 | 'GpuMetrics': grpc.unary_stream_rpc_method_handler( 57 | servicer.GpuMetrics, 58 | request_deserializer=nvidia_dot_clara_dot_platform_dot_node__monitor_dot_metrics__pb2.MonitorGpuMetricsRequest.FromString, 59 | response_serializer=nvidia_dot_clara_dot_platform_dot_node__monitor_dot_metrics__pb2.MonitorGpuMetricsResponse.SerializeToString, 60 | ), 61 | } 62 | generic_handler = grpc.method_handlers_generic_handler( 63 | 'nvidia.clara.platform.node_monitor.Monitor', rpc_method_handlers) 64 | server.add_generic_rpc_handlers((generic_handler,)) 65 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | [![License](https://img.shields.io/badge/License-Apache_2.0-lightgrey.svg)](https://opensource.org/licenses/Apache-2.0) 2 | [![Clara Deploy Platform](https://img.shields.io/badge/Clara_Deploy_Platform-0.7.1-brightgreen.svg)](https://ngc.nvidia.com/catalog/containers/nvidia:clara:platformapiserver) 3 | 4 | [![NVIDIA](https://github.com/NVIDIA/clara-platform-python-client/blob/main/ext/NVIDIA_horo_white.png?raw=true)](https://docs.nvidia.com/clara/deploy/index.html) 5 | 6 | # Clara Deploy Python Client 7 | An intuitive python 3 package to develop applications with NVIDIA Clara Deploy. Utilize the clients within the **nvidia_clara** package to manage jobs, pipelines, payloads, and models. Each client has an associated set of objects which are defined in seperate 'types' modules (also can be found in nvidia_clara). Look at the examples below to learn more on each client to get started! 8 | 9 | ### Additional Resources to Learn More on Clara Deploy 10 | * [NVIDIA Clara Overview Homepage](https://developer.nvidia.com/clara) 11 | * [NVIDIA Clara Deploy SDK User Guide](https://docs.nvidia.com/clara/deploy/index.html) 12 | 13 | 14 | ### Client Prerequisites 15 | * Python 3.6 or higher 16 | * Clara Deploy 0.7.0 or higher 17 | 18 | ### Pypi Packages Needed 19 | * [Grpcio](https://pypi.org/project/grpcio) 20 | * [Protobuf](https://pypi.org/project/protobuf) 21 | * [Grpcio-Testing (For Running Pytests)](https://pypi.org/project/grpcio-testing) 22 | * [Pytest (For Running Pytests)](https://pypi.org/project/pytest) 23 | 24 | 25 | ## Getting Started 26 | 27 | ### Package Installation 28 | #### Installing from [source repository](https://github.com/NVIDIA/clara-platform-python-client) 29 | ``` 30 | $ git clone --recursive git@github.com:NVIDIA/clara-platform-python-client.git 31 | $ cd ./clara-platform-python-client 32 | $ python3 -m pip install requirements.txt 33 | $ python3 -m pip install . 
34 | ``` 35 | 36 | #### Installing directly from [PyPI](https://pypi.org/project/nvidia-clara-client/) 37 | ``` 38 | $ python3 -m pip install nvidia-clara-client 39 | ``` 40 | 41 | ### Clara Client Guides 42 | * [Jobs](https://github.com/NVIDIA/clara-platform-python-client/wiki/Jobs-Client) 43 | : Learn to start and manage Clara jobs 44 | * [Pipelines](https://github.com/NVIDIA/clara-platform-python-client/wiki/Pipelines-Client) 45 | : Learn to create and manage Clara pipelines 46 | * [Payloads](https://github.com/NVIDIA/clara-platform-python-client/wiki/Payloads-Client) 47 | : Learn to create, upload, download, and manage Clara payloads 48 | 49 | ### Full Example(s) of Running a Pipeline 50 | * [Spleen Segmentation Pipeline](https://github.com/NVIDIA/clara-platform-python-client/wiki/Spleen-Segmentation-Example) 51 | 52 | 53 | ## Running Pytests 54 | *Only for developing with the source repository* 55 | ``` 56 | $ pip3 install grpcio-testing 57 | $ pip3 install pytest 58 | $ export PYTHONPATH="${PYTHONPATH}:$(pwd)" 59 | $ pytest 60 | ``` -------------------------------------------------------------------------------- /examples/combined_example.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 
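# End-to-end example: create a pipeline, create and start a job, upload the input files to the job's payload, poll until the job completes, download the operator outputs, and fetch operator logs.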
14 | 15 | from nvidia_clara.jobs_client import JobsClient 16 | from nvidia_clara.pipelines_client import PipelinesClient 17 | from nvidia_clara.payloads_client import PayloadsClient 18 | import nvidia_clara.pipeline_types as pipeline_types 19 | import os 20 | from pathlib import Path 21 | 22 | # Clients creation 23 | clara_ip_address = "10.0.0.1" 24 | clara_port = "30031" 25 | 26 | jobs_client = JobsClient(target=clara_ip_address, port=clara_port) 27 | payloads_client = PayloadsClient(target=clara_ip_address, port=clara_port) 28 | pipeline_client = PipelinesClient(target=clara_ip_address, port=clara_port) 29 | 30 | # Create list of pipeline_types.PipelineDefinition with local path to pipeline .yaml 31 | file_path = "../spleen_pipeline.yaml" 32 | definitions = [pipeline_types.PipelineDefinition(name=file_path, content=Path(file_path).read_text())] 33 | 34 | # Create Pipeline with definition list created 35 | pipeline_id = pipeline_client.create_pipeline(definition=definitions) 36 | 37 | # Create Job with newly created Pipeline 38 | job_info = jobs_client.create_job(job_name="spleenjob", pipeline_id=pipeline_types.PipelineId(pipeline_id.value)) 39 | job_id = job_info.job_id 40 | payload_id = job_info.payload_id 41 | 42 | # Local path to directory of files to upload to the job's payload on the Server 43 | input_path = "../app_spleen-input_v1/dcm" 44 | 45 | # Go through files in directory and upload to the job using the payload identifier 46 | for file in os.listdir(input_path): 47 | file_path = input_path + "/" + str(file) 48 | with open(file_path, 'rb') as fp: 49 | payloads_client.upload(payload_id=payload_id, blob_name=file, file_object=fp) 50 | 51 | # Get a List of the jobs 52 | job_list = jobs_client.list_jobs() 53 | 54 | # Start Job 55 | job_token = jobs_client.start_job(job_id=job_id) 56 | 57 | # Loop until job completes 58 | job_status = jobs_client.get_status(job_id=job_id) 59 | while job_status.job_state != 3: 60 | job_status = jobs_client.get_status(job_id=job_id) 61 | 62 | # Get Payload Details - Used to get list of payload files 63 | payload_details = payloads_client.get_details(payload_id=payload_id) 64 | 65 | # Download files from payload if pertaining to output payload directory (ex. "/operators) 66 | for file in payload_details.file_details: 67 | 68 | # Get file path on Server (ex. /operators/dicom-reader/example_file.raw") 69 | file_name = file.name 70 | 71 | # Split file path name (ex. ['','operators','dicom-reader','example_file.raw'] 72 | name = file_name.split('/') 73 | 74 | # Check if file pertains to output directory (ex. "/operators) 75 | if name[1] == 'operators': 76 | 77 | # Download file to a local results directory to a file with same name on server (ex. example_file.raw) 78 | with open("./results/"+name[-1], 'wb+') as wb: 79 | payloads_client.download_from(payload_id=payload_id, blob_name="."+file_name, dest_obj=wb) 80 | 81 | # Gets list of operator logs from job 82 | jobs_logs = jobs_client.job_logs(job_id=job_id, operator_name="dicom-reader") 83 | -------------------------------------------------------------------------------- /CLA.md: -------------------------------------------------------------------------------- 1 | ## Individual Contributor License Agreement (CLA) 2 | 3 | **Thank you for submitting your contributions to this project.** 4 | 5 | By signing this CLA, you agree that the following terms apply to all of your past, present and future contributions 6 | to the project. 7 | 8 | ### License. 
9 | 10 | You hereby represent that all present, past and future contributions are governed by the 11 | [Apache 2.0 License](http://www.apache.org/licenses/LICENSE-2.0) 12 | copyright statement. 13 | 14 | This entails that to the extent possible under law, you transfer all copyright and related or neighboring rights 15 | of the code or documents you contribute to the project itself or its maintainers. 16 | Furthermore you also represent that you have the authority to perform the above waiver 17 | with respect to the entirety of you contributions. 18 | 19 | ### Moral Rights. 20 | 21 | To the fullest extent permitted under applicable law, you hereby waive, and agree not to 22 | assert, all of your “moral rights” in or relating to your contributions for the benefit of the project. 23 | 24 | ### Third Party Content. 25 | 26 | If your Contribution includes or is based on any source code, object code, bug fixes, configuration changes, tools, 27 | specifications, documentation, data, materials, feedback, information or other works of authorship that were not 28 | authored by you (“Third Party Content”) or if you are aware of any third party intellectual property or proprietary 29 | rights associated with your Contribution (“Third Party Rights”), 30 | then you agree to include with the submission of your Contribution full details respecting such Third Party 31 | Content and Third Party Rights, including, without limitation, identification of which aspects of your 32 | Contribution contain Third Party Content or are associated with Third Party Rights, the owner/author of the 33 | Third Party Content and Third Party Rights, where you obtained the Third Party Content, and any applicable 34 | third party license terms or restrictions respecting the Third Party Content and Third Party Rights. For greater 35 | certainty, the foregoing obligations respecting the identification of Third Party Content and Third Party Rights 36 | do not apply to any portion of a Project that is incorporated into your Contribution to that same Project. 37 | 38 | ### Representations. 39 | 40 | You represent that, other than the Third Party Content and Third Party Rights identified by 41 | you in accordance with this Agreement, you are the sole author of your Contributions and are legally entitled 42 | to grant the foregoing licenses and waivers in respect of your Contributions. If your Contributions were 43 | created in the course of your employment with your past or present employer(s), you represent that such 44 | employer(s) has authorized you to make your Contributions on behalf of such employer(s) or such employer 45 | (s) has waived all of their right, title or interest in or to your Contributions. 46 | 47 | ### Disclaimer. 48 | 49 | To the fullest extent permitted under applicable law, your Contributions are provided on an "as is" 50 | basis, without any warranties or conditions, express or implied, including, without limitation, any implied 51 | warranties or conditions of non-infringement, merchantability or fitness for a particular purpose. You are not 52 | required to provide support for your Contributions, except to the extent you desire to provide support. 53 | 54 | ### No Obligation. 55 | 56 | You acknowledge that the maintainers of this project are under no obligation to use or incorporate your contributions 57 | into the project. The decision to use or incorporate your contributions into the project will be made at the 58 | sole discretion of the maintainers or their authorized delegates. 
-------------------------------------------------------------------------------- /nvidia_clara/grpc/clara_pb2_grpc.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 16 | import grpc 17 | 18 | from nvidia_clara.grpc import clara_pb2 as nvidia_dot_clara_dot_platform_dot_clara__pb2 19 | 20 | 21 | class ClaraStub(object): 22 | # missing associated documentation comment in .proto file 23 | pass 24 | 25 | def __init__(self, channel): 26 | """Constructor. 27 | 28 | Args: 29 | channel: A grpc.Channel. 30 | """ 31 | self.Stop = channel.unary_unary( 32 | '/nvidia.clara.platform.Clara/Stop', 33 | request_serializer=nvidia_dot_clara_dot_platform_dot_clara__pb2.ClaraStopRequest.SerializeToString, 34 | response_deserializer=nvidia_dot_clara_dot_platform_dot_clara__pb2.ClaraStopResponse.FromString, 35 | ) 36 | self.Utilization = channel.unary_stream( 37 | '/nvidia.clara.platform.Clara/Utilization', 38 | request_serializer=nvidia_dot_clara_dot_platform_dot_clara__pb2.ClaraUtilizationRequest.SerializeToString, 39 | response_deserializer=nvidia_dot_clara_dot_platform_dot_clara__pb2.ClaraUtilizationResponse.FromString, 40 | ) 41 | self.Version = channel.unary_unary( 42 | '/nvidia.clara.platform.Clara/Version', 43 | request_serializer=nvidia_dot_clara_dot_platform_dot_clara__pb2.ClaraVersionRequest.SerializeToString, 44 | response_deserializer=nvidia_dot_clara_dot_platform_dot_clara__pb2.ClaraVersionResponse.FromString, 45 | ) 46 | 47 | 48 | class ClaraServicer(object): 49 | # missing associated documentation comment in .proto file 50 | pass 51 | 52 | def Stop(self, request, context): 53 | """Requests the termination of Clara Platform Server and associated resource cleanup. 54 | """ 55 | context.set_code(grpc.StatusCode.UNIMPLEMENTED) 56 | context.set_details('Method not implemented!') 57 | raise NotImplementedError('Method not implemented!') 58 | 59 | def Utilization(self, request, context): 60 | """Requests utilization data for all Clara Platform managed GPUs. 61 | """ 62 | context.set_code(grpc.StatusCode.UNIMPLEMENTED) 63 | context.set_details('Method not implemented!') 64 | raise NotImplementedError('Method not implemented!') 65 | 66 | def Version(self, request, context): 67 | """Requests version information from Clara Platform Server. 
68 | """ 69 | context.set_code(grpc.StatusCode.UNIMPLEMENTED) 70 | context.set_details('Method not implemented!') 71 | raise NotImplementedError('Method not implemented!') 72 | 73 | 74 | def add_ClaraServicer_to_server(servicer, server): 75 | rpc_method_handlers = { 76 | 'Stop': grpc.unary_unary_rpc_method_handler( 77 | servicer.Stop, 78 | request_deserializer=nvidia_dot_clara_dot_platform_dot_clara__pb2.ClaraStopRequest.FromString, 79 | response_serializer=nvidia_dot_clara_dot_platform_dot_clara__pb2.ClaraStopResponse.SerializeToString, 80 | ), 81 | 'Utilization': grpc.unary_stream_rpc_method_handler( 82 | servicer.Utilization, 83 | request_deserializer=nvidia_dot_clara_dot_platform_dot_clara__pb2.ClaraUtilizationRequest.FromString, 84 | response_serializer=nvidia_dot_clara_dot_platform_dot_clara__pb2.ClaraUtilizationResponse.SerializeToString, 85 | ), 86 | 'Version': grpc.unary_unary_rpc_method_handler( 87 | servicer.Version, 88 | request_deserializer=nvidia_dot_clara_dot_platform_dot_clara__pb2.ClaraVersionRequest.FromString, 89 | response_serializer=nvidia_dot_clara_dot_platform_dot_clara__pb2.ClaraVersionResponse.SerializeToString, 90 | ), 91 | } 92 | generic_handler = grpc.method_handlers_generic_handler( 93 | 'nvidia.clara.platform.Clara', rpc_method_handlers) 94 | server.add_generic_rpc_handlers((generic_handler,)) 95 | -------------------------------------------------------------------------------- /tests/test_client_tools.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 
14 | 15 | import signal 16 | import time 17 | 18 | import grpc 19 | import grpc_testing 20 | from grpc.framework.foundation import logging_pool 21 | 22 | import nvidia_clara.grpc.common_pb2 as common_pb2 23 | import nvidia_clara.grpc.jobs_pb2 as jobs_pb2 24 | import nvidia_clara.grpc.jobs_pb2_grpc as jobs_pb2_grpc 25 | import nvidia_clara.grpc.payloads_pb2 as payloads_pb2 26 | import nvidia_clara.grpc.payloads_pb2_grpc as payloads_pb2_grpc 27 | import nvidia_clara.grpc.pipelines_pb2 as pipelines_pb2 28 | import nvidia_clara.grpc.pipelines_pb2_grpc as pipelines_pb2_grpc 29 | 30 | SERVICES = { 31 | 'Pipelines': pipelines_pb2.DESCRIPTOR.services_by_name, 32 | 'Jobs': jobs_pb2.DESCRIPTOR.services_by_name, 33 | 'Payloads': payloads_pb2.DESCRIPTOR.services_by_name 34 | } 35 | 36 | 37 | def get_stubs(service, channel): 38 | if service == 'Jobs': 39 | return jobs_pb2_grpc.JobsStub(channel) 40 | elif service == 'Payloads': 41 | return payloads_pb2_grpc.PayloadsStub(channel) 42 | elif service == 'Pipelines': 43 | return pipelines_pb2_grpc.PipelinesStub(channel) 44 | 45 | 46 | class Timeout(Exception): 47 | pass 48 | 49 | 50 | # Reference: https://github.com/grpc/grpc/blob/master/src/python/grpcio_tests/tests/testing/_client_test.py 51 | def verify_request(channel, stub_method, call_sig, expected_requests, responses, timeout=1): 52 | def timeout_handler(signum, frame): 53 | raise Timeout('Timeout while taking requests') 54 | 55 | try: 56 | # setting up timeout handler because grpc_testing module doesn't support timeout for take_xxx_xxx methods 57 | signal.signal(signal.SIGALRM, timeout_handler) 58 | signal.alarm(timeout) 59 | if call_sig == 'stream_unary': 60 | invocation_metadata, rpc = channel.take_stream_unary(stub_method) 61 | rpc.send_initial_metadata(()) 62 | for expected_request in expected_requests: 63 | request = rpc.take_request() 64 | assert expected_request == request 65 | rpc.requests_closed() 66 | rpc.terminate(next(iter(responses)), (), grpc.StatusCode.OK, '') 67 | elif call_sig == 'unary_stream': 68 | invocation_metadata, request, rpc = channel.take_unary_stream(stub_method) 69 | assert next(iter(expected_requests)) == request 70 | rpc.send_initial_metadata(()) 71 | for response in responses: 72 | rpc.send_response(response) 73 | rpc.terminate((), grpc.StatusCode.OK, '') 74 | elif call_sig == 'unary_unary': 75 | invocation_metadata, request, rpc = channel.take_unary_unary(stub_method) 76 | assert next(iter(expected_requests)) == request 77 | rpc.send_initial_metadata(()) 78 | rpc.terminate(next(iter(responses)), (), grpc.StatusCode.OK, '') 79 | except Timeout: 80 | raise 81 | finally: 82 | signal.alarm(0) 83 | 84 | 85 | def run_client_test(service_name, method_name, test_method, stub_method_handlers, *args, **kwargs): 86 | fake_time = grpc_testing.strict_fake_time( 87 | time.time()) 88 | channel = grpc_testing.channel(SERVICES[service_name].values(), 89 | fake_time) 90 | stub = get_stubs(service_name, channel) 91 | service = SERVICES[service_name][service_name] 92 | 93 | client_execution_thread_pool = logging_pool.pool(1) 94 | try: 95 | test_client_only = kwargs.pop('_test_client_only', None) 96 | application_future = client_execution_thread_pool.submit( 97 | test_method, 98 | stub, method_name, *args, **kwargs) 99 | 100 | # if the client method call is expected to raise exception before grpc call 101 | if test_client_only: 102 | pass # do not simulate grpc response 103 | else: 104 | for stub_method_name, call_sig, handlers in stub_method_handlers: 105 | expected_requests, responses = 
handlers 106 | stub_method = service.methods_by_name[stub_method_name] 107 | verify_request(channel, stub_method, call_sig, expected_requests, responses) 108 | 109 | application_return_value = application_future.result() 110 | application_exception = application_future.exception() 111 | if application_exception: 112 | raise application_exception 113 | return application_return_value 114 | except Timeout: 115 | raise 116 | finally: 117 | client_execution_thread_pool.shutdown(False) 118 | del channel 119 | -------------------------------------------------------------------------------- /nvidia_clara/payload_types.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | from enum import Enum 16 | from typing import List, Mapping 17 | from nvidia_clara.grpc import common_pb2, payloads_pb2 18 | 19 | 20 | class PayloadType(Enum): 21 | Unknown = payloads_pb2.PAYLOAD_TYPE_UNKNOWN 22 | 23 | Pipeline = payloads_pb2.PAYLOAD_TYPE_PIPELINE 24 | 25 | Reusable = payloads_pb2.PAYLOAD_TYPE_REUSABLE 26 | 27 | Minimum = Pipeline 28 | 29 | Maximum = Reusable 30 | 31 | 32 | class PayloadFileDetails: 33 | 34 | def __init__(self, other: payloads_pb2.PayloadFileDetails = None, mode: int = None, name: str = None, 35 | size: int = None): 36 | """ 37 | Args: 38 | mode(int): Permissions 39 | name(str): File Path Location 40 | size(int): Size of File 41 | other(payloads_pb2.PayloadFileDetails): If specified, object information replicated 42 | """ 43 | if other is None: 44 | self._mode = mode 45 | self._name = name 46 | self._size = size 47 | else: 48 | self._mode = other.mode 49 | self._name = other.name 50 | self._size = other.size 51 | 52 | @property 53 | def mode(self): 54 | """ 55 | Mode of the file. 56 | 57 | See [https://en.wikipedia.org/wiki/Chmod] for additional information. 58 | """ 59 | return self._mode 60 | 61 | @mode.setter 62 | def mode(self, mode: int): 63 | """ 64 | Mode of the file. 65 | 66 | See [https://en.wikipedia.org/wiki/Chmod] for additional information. 67 | """ 68 | self._mode = mode 69 | 70 | @property 71 | def name(self): 72 | """ 73 | Unique (withing a payload) name of the file; in path format. 74 | 75 | File names are relative to the root of the payload, and should not be rooted paths (prefixed with a '/' character). 76 | """ 77 | return self._name 78 | 79 | @name.setter 80 | def name(self, name: str): 81 | """ 82 | Unique (withing a payload) name of the file; in path format. 83 | 84 | File names are relative to the root of the payload, and should not be rooted paths (prefixed with a '/' character). 
85 | """ 86 | self._name = name 87 | 88 | @property 89 | def size(self): 90 | """Size, in bytes, of the file.""" 91 | return self._size 92 | 93 | @size.setter 94 | def size(self, size: int): 95 | """Size, in bytes, of the file.""" 96 | self._size = size 97 | 98 | def __eq__(self, other): 99 | return (self._mode == other.getMode()) and (self._name == other.getName()) and ( 100 | self._size == other.getSize()) 101 | 102 | def __ne__(self, other): 103 | return not (self == other) 104 | 105 | def __hash__(self): 106 | return hash((self._mode, self._name, self._size)) 107 | 108 | 109 | class PayloadId: 110 | 111 | def __init__(self, value: str = None): 112 | if value == None: 113 | raise Exception("Arguement 'Value' must be initialized to non-null or empty string") 114 | 115 | self._value = value 116 | 117 | @property 118 | def value(self): 119 | return self._value 120 | 121 | def __eq__(self, other): 122 | return self._value == other._value 123 | 124 | def __ne__(self, other): 125 | return not (self == other) 126 | 127 | def __repr__(self): 128 | return "%s" % (self._value) 129 | 130 | def __str__(self): 131 | return "%s" % (self._value) 132 | 133 | def __hash__(self): 134 | return hash(self._value) 135 | 136 | def to_grpc_value(self): 137 | id = common_pb2.Identifier() 138 | id.value = self._value 139 | return id 140 | 141 | 142 | class PayloadDetails: 143 | 144 | def __init__(self, payload_id: PayloadId = None, file_details: List[PayloadFileDetails] = None, 145 | payload_type: payloads_pb2.PayloadType = None, metadata: Mapping[str, str] = None): 146 | if file_details is None: 147 | file_details = [] 148 | if metadata is None: 149 | metadata = dict() 150 | 151 | self._payload_id = payload_id 152 | self._file_details = file_details 153 | self._payload_type = payload_type 154 | self._metadata = metadata 155 | 156 | @property 157 | def payload_id(self): 158 | """Gets the unique identifier of the payload.""" 159 | return self._payload_id 160 | 161 | @payload_id.setter 162 | def payload_id(self, payload_id: PayloadId): 163 | """Sets the unique identifier of the payload.""" 164 | self._payload_id = payload_id 165 | 166 | @property 167 | def file_details(self): 168 | """Gets list of files contained in the payload.""" 169 | return self._file_details 170 | 171 | @file_details.setter 172 | def file_details(self, file_details: List[PayloadFileDetails]): 173 | """Sets a list of files contained in the payload.""" 174 | self._file_details = file_details 175 | 176 | @property 177 | def payload_type(self): 178 | """Gets a list of files contained in the payload.""" 179 | return self._payload_type 180 | 181 | @payload_type.setter 182 | def payload_type(self, payload_type: payloads_pb2.PayloadType): 183 | """Sets a list of files contained in the payload.""" 184 | self._payload_type = payload_type 185 | 186 | @property 187 | def metadata(self) -> Mapping[str, str]: 188 | """ 189 | Metadata (set of key/value pairs) associated with the payload 190 | """ 191 | return self._metadata 192 | 193 | @metadata.setter 194 | def metadata(self, metadata: Mapping[str, str]): 195 | """ 196 | Metadata (set of key/value pairs) associated with the payload 197 | """ 198 | self._metadata = metadata 199 | -------------------------------------------------------------------------------- /nvidia_clara/pipeline_types.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved. 
2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | from typing import List, Mapping 16 | from nvidia_clara.grpc import common_pb2 17 | 18 | 19 | class PipelineDefinition: 20 | 21 | def __init__(self, content: str = None, name: str = None): 22 | self._content = content 23 | self._name = name 24 | 25 | @property 26 | def content(self) -> str: 27 | """Text content of a pipeline definition. 28 | 29 | Content is typed as a "System.String" to avoid encoding related issues.""" 30 | return self._content 31 | 32 | @content.setter 33 | def content(self, content: str): 34 | """Text content of a pipeline definition. 35 | 36 | Content is typed as a "System.String" to avoid encoding related issues.""" 37 | self._content = content 38 | 39 | @property 40 | def name(self) -> str: 41 | """The name of the pipeline definition. 42 | 43 | Not the name of the pipeline as defined by the definition. 44 | 45 | Example: clara/examples/my-pipeline.yml 46 | """ 47 | return self._name 48 | 49 | @name.setter 50 | def name(self, name: str): 51 | """The name of the pipeline definition. 52 | 53 | Not the name of the pipeline as defined by the definition. 54 | 55 | Example: clara/examples/my-pipeline.yml 56 | """ 57 | self._name = name 58 | 59 | 60 | class PipelineId: 61 | 62 | def __init__(self, value: str): 63 | if (value == "") or (value is None): 64 | raise Exception("Value arguement must be initialized to non-null value") 65 | 66 | self._value = value 67 | 68 | @property 69 | def value(self): 70 | return self._value 71 | 72 | def __eq__(self, other): 73 | return self._value == other._value 74 | 75 | def __ne__(self, other): 76 | return not (self == other) 77 | 78 | def __repr__(self): 79 | return "%s" % (self._value) 80 | 81 | def __str__(self): 82 | return "%s" % (self._value) 83 | 84 | def __hash__(self): 85 | return hash(self._value) 86 | 87 | def to_grpc_value(self): 88 | id = common_pb2.Identifier() 89 | id.value = self._value 90 | return id 91 | 92 | 93 | class PipelineDetails: 94 | 95 | def __init__(self, pipeline_id: PipelineId = None, name: str = None, definition: List[PipelineDefinition] = None, 96 | metadata: Mapping[str, str] = None): 97 | if definition is None: 98 | definition = [] 99 | if metadata is None: 100 | metadata = dict() 101 | self._pipeline_id = pipeline_id 102 | self._name = name 103 | self._definition = definition 104 | self._metadata = metadata 105 | 106 | @property 107 | def pipeline_id(self) -> PipelineId: 108 | """Unique identifier of the pipeline.""" 109 | return self._pipeline_id 110 | 111 | @pipeline_id.setter 112 | def pipeline_id(self, pipeline_id: PipelineId): 113 | """Unique identifier of the pipeline.""" 114 | self._pipeline_id = pipeline_id 115 | 116 | @property 117 | def name(self) -> str: 118 | """ 119 | Human readable name of the pipeline. 120 | 121 | Not guaranteed to be unique. 122 | """ 123 | return self._name 124 | 125 | @name.setter 126 | def name(self, name: str): 127 | """ 128 | Human readable name of the pipeline. 
129 | 130 | Not guaranteed to be unique. 131 | """ 132 | self._name = name 133 | 134 | @property 135 | def definition(self) -> List[PipelineDefinition]: 136 | """ 137 | The definition of the pipeline. 138 | 139 | Clara pipeline definitions can be multi-file. 140 | """ 141 | return self._definition 142 | 143 | @definition.setter 144 | def definition(self, definition: List[PipelineDefinition]): 145 | """ 146 | The definition of the pipeline. 147 | 148 | Clara pipeline definitions can be multi-file. 149 | """ 150 | self._definition = definition 151 | 152 | @property 153 | def metadata(self) -> Mapping[str, str]: 154 | """ 155 | Metadata (set of key/value pairs) associated with the pipeline 156 | """ 157 | return self._metadata 158 | 159 | @metadata.setter 160 | def metadata(self, metadata: Mapping[str, str]): 161 | """ 162 | Metadata (set of key/value pairs) associated with the pipeline 163 | """ 164 | self._metadata = metadata 165 | 166 | 167 | class PipelineInfo: 168 | 169 | def __init__(self, pipeline_id: PipelineId = None, name: str = None, metadata: Mapping[str, str] = None): 170 | if metadata is None: 171 | metadata = dict() 172 | self._pipeline_id = pipeline_id 173 | self._name = name 174 | self._metadata = metadata 175 | 176 | @property 177 | def pipeline_id(self) -> PipelineId: 178 | """Unique identifier of the pipeline.""" 179 | return self._pipeline_id 180 | 181 | @pipeline_id.setter 182 | def pipeline_id(self, pipeline_id: PipelineId): 183 | """Unique identifier of the pipeline.""" 184 | self._pipeline_id = pipeline_id 185 | 186 | @property 187 | def name(self) -> str: 188 | """ 189 | Human readable name of the pipeline. 190 | 191 | Not guaranteed to be unique. 192 | """ 193 | return self._name 194 | 195 | @name.setter 196 | def name(self, name: str): 197 | """ 198 | Human readable name of the pipeline. 199 | 200 | Not guaranteed to be unique. 201 | """ 202 | self._name = name 203 | 204 | @property 205 | def metadata(self) -> Mapping[str, str]: 206 | """ 207 | Metadata (set of key/value pairs) associated with the pipeline 208 | """ 209 | return self._metadata 210 | 211 | @metadata.setter 212 | def metadata(self, metadata: Mapping[str, str]): 213 | """ 214 | Metadata (set of key/value pairs) associated with the pipeline 215 | """ 216 | self._metadata = metadata 217 | -------------------------------------------------------------------------------- /tests/test_pipelines_client.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 
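# PipelinesClient tests built on the grpc_testing helpers in tests/test_client_tools.py: pipeline creation (with and without an explicit pipeline id) and pipeline listing against scripted request/response handlers.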
14 | 15 | import nvidia_clara.grpc.common_pb2 as common_pb2 16 | import nvidia_clara.grpc.pipelines_pb2 as pipelines_pb2 17 | 18 | from nvidia_clara.base_client import BaseClient 19 | from nvidia_clara.pipelines_client import PipelinesClient 20 | import nvidia_clara.pipeline_types as pipeline_types 21 | 22 | from tests.test_client_tools import run_client_test 23 | 24 | 25 | def run_pipeline_client(stub, method_name, *args, **kwargs): 26 | with PipelinesClient(target='10.0.0.1:50051', stub=stub) as client: 27 | response = getattr(client, method_name)(*args, **kwargs) 28 | return response 29 | 30 | 31 | class MockClaraPipelineServiceClient: 32 | stub_method_handlers = [] 33 | 34 | def __init__(self, channel, stub=None, request_header=None, logger=None): 35 | pass 36 | 37 | def __enter__(self): 38 | return self 39 | 40 | def __exit__(self, exc_type, exc_val, exc_tb): 41 | return False 42 | 43 | def create_pipeline(self, *args, **kwargs): 44 | return run_client_test( 45 | 'Pipelines', 46 | 'create_pipeline', 47 | run_pipeline_client, 48 | stub_method_handlers=MockClaraPipelineServiceClient.stub_method_handlers, 49 | *args, **kwargs) 50 | 51 | def list_pipelines(self, *args, **kwargs): 52 | return run_client_test( 53 | 'Pipelines', 54 | 'list_pipelines', 55 | run_pipeline_client, 56 | stub_method_handlers=MockClaraPipelineServiceClient.stub_method_handlers, 57 | *args, **kwargs) 58 | 59 | def close(self): 60 | pass 61 | 62 | 63 | PIPELINE_TEXT = '''api-version: 0.2.0 64 | name: sample-pipeline 65 | operators: 66 | - name: producer 67 | import: 68 | path: producer.yaml 69 | - name: consumer 70 | import: 71 | path: consumer.yaml 72 | args: 73 | input-from: producer 74 | ''' 75 | 76 | 77 | def test_create_pipeline(): 78 | pipeline_yaml = 'pipeline.yaml' 79 | 80 | requests = [ 81 | pipelines_pb2.PipelinesCreateRequest( 82 | header=BaseClient.get_request_header(), 83 | definition=pipelines_pb2.PipelineDefinitionFile( 84 | path='pipeline.yaml', 85 | content=PIPELINE_TEXT) 86 | ) 87 | ] 88 | 89 | responses = [ 90 | pipelines_pb2.PipelinesCreateResponse( 91 | header=common_pb2.ResponseHeader( 92 | code=0, 93 | messages=[]), 94 | pipeline_id=common_pb2.Identifier( 95 | value='92656d79fa414db6b294069c0e9e6df5' 96 | ) 97 | ) 98 | ] 99 | 100 | stub_method_handlers = [( 101 | 'Create', 102 | 'stream_unary', 103 | ( 104 | requests, 105 | responses 106 | ) 107 | )] 108 | 109 | # set handlers 110 | MockClaraPipelineServiceClient.stub_method_handlers = stub_method_handlers 111 | 112 | def_list = [ 113 | pipeline_types.PipelineDefinition(name=pipeline_yaml, content=PIPELINE_TEXT) 114 | ] 115 | 116 | with MockClaraPipelineServiceClient('localhost:50051') as client: 117 | pipeline_id = client.create_pipeline(definition=def_list) 118 | print(pipeline_id) 119 | assert pipeline_id.value == '92656d79fa414db6b294069c0e9e6df5' 120 | 121 | 122 | def test_create_pipeline_with_id(): 123 | pipeline_yaml = 'pipeline.yaml' 124 | 125 | requests = [ 126 | pipelines_pb2.PipelinesCreateRequest( 127 | header=BaseClient.get_request_header(), 128 | pipeline_id=common_pb2.Identifier( 129 | value='92656d79fa414db6b294069c0e9e6df5' 130 | ), 131 | definition=pipelines_pb2.PipelineDefinitionFile( 132 | path='pipeline.yaml', 133 | content=PIPELINE_TEXT) 134 | ) 135 | ] 136 | 137 | responses = [ 138 | pipelines_pb2.PipelinesCreateResponse( 139 | header=common_pb2.ResponseHeader( 140 | code=0, 141 | messages=[]), 142 | pipeline_id=common_pb2.Identifier( 143 | value='92656d79fa414db6b294069c0e9e6df5' 144 | ) 145 | ) 146 | ] 147 | 148 | 
stub_method_handlers = [( 149 | 'Create', 150 | 'stream_unary', 151 | ( 152 | requests, 153 | responses 154 | ) 155 | )] 156 | 157 | # set handlers 158 | MockClaraPipelineServiceClient.stub_method_handlers = stub_method_handlers 159 | 160 | def_list = [ 161 | pipeline_types.PipelineDefinition(name=pipeline_yaml, content=PIPELINE_TEXT) 162 | ] 163 | 164 | pipeline_id = pipeline_types.PipelineId('92656d79fa414db6b294069c0e9e6df5') 165 | 166 | with MockClaraPipelineServiceClient('localhost:50051') as client: 167 | pipeline_id = client.create_pipeline(definition=def_list, pipeline_id=pipeline_id) 168 | print(pipeline_id) 169 | assert pipeline_id.value == '92656d79fa414db6b294069c0e9e6df5' 170 | 171 | 172 | def test_list_pipeline(): 173 | requests = [ 174 | pipelines_pb2.PipelinesListRequest( 175 | header=BaseClient.get_request_header() 176 | ) 177 | ] 178 | 179 | responses = [ 180 | pipelines_pb2.PipelinesListResponse( 181 | header=common_pb2.ResponseHeader( 182 | code=0, 183 | messages=[]), 184 | details=pipelines_pb2.PipelinesListResponse.PipelineDetails( 185 | name='Pipeline_1', 186 | pipeline_id=common_pb2.Identifier( 187 | value='92656d79fa414db6b294069c0e9e6df5' 188 | ) 189 | ) 190 | ), 191 | pipelines_pb2.PipelinesListResponse( 192 | header=common_pb2.ResponseHeader( 193 | code=0, 194 | messages=[]), 195 | details=pipelines_pb2.PipelinesListResponse.PipelineDetails( 196 | name='Pipeline_2', 197 | pipeline_id=common_pb2.Identifier( 198 | value='21656d79fa414db6b294069c0e9e6r23' 199 | ) 200 | ) 201 | ) 202 | ] 203 | 204 | stub_method_handlers = [( 205 | 'List', 206 | 'unary_stream', 207 | ( 208 | requests, 209 | responses 210 | ) 211 | )] 212 | 213 | # set handlers 214 | MockClaraPipelineServiceClient.stub_method_handlers = stub_method_handlers 215 | 216 | with MockClaraPipelineServiceClient('localhost:50051') as client: 217 | pipeline_list = client.list_pipelines() 218 | 219 | print(pipeline_list) 220 | 221 | assert len(pipeline_list) == 2 222 | assert pipeline_list[0].pipeline_id.value == '92656d79fa414db6b294069c0e9e6df5' 223 | assert pipeline_list[1].pipeline_id.value == '21656d79fa414db6b294069c0e9e6r23' 224 | -------------------------------------------------------------------------------- /nvidia_clara/clara_client.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 
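# [Editorial sketch -- not part of the original source file.] ClaraClient, defined below,
# wraps the platform-level RPCs (Stop, Utilization, Version). A minimal, hedged usage sketch,
# assuming a Clara Platform gRPC endpoint at a placeholder address; it is commented out so it
# does not alter the module:
#
#   from nvidia_clara.clara_client import ClaraClient
#
#   client = ClaraClient(target='10.0.0.1', port='50051')   # placeholder address
#
#   version = client.version()
#   print(version.major, version.minor, version.patch, version.label)
#
#   # One-shot snapshot of GPU utilization (sends the request with watch=False):
#   for details in client.list_utilization():
#       print(len(details.gpu_metrics), 'GPU metric entries')
#
#   # Continuous stream of utilization updates (watch=True); iterate until done:
#   # for details in client.stream_utilization():
#   #     ...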
14 | 
15 | import datetime 
16 | from typing import List, Mapping, Iterator 
17 | import grpc 
18 | from nvidia_clara.grpc import common_pb2, clara_pb2, clara_pb2_grpc 
19 | from nvidia_clara.base_client import BaseClient 
20 | import nvidia_clara.clara_types as clara_types 
21 | import nvidia_clara.job_types as job_types 
22 | 
23 | 
24 | class ClaraClient(BaseClient): 
25 | 
26 | def __init__(self, target: str, port: str = None, stub=None): 
27 | """ 
28 | Clara Client Creation 
29 | 
30 | Args: 
31 | target (str): IPv4 address of the Clara instance 
32 | port (str): if specified, port will be appended to the target with a ":" 
33 | """ 
34 | if target is None: 
35 | raise Exception("Target must be initialized to a non-null value") 
36 | 
37 | self._connection = target 
38 | 
39 | if port is not None: 
40 | self._connection += ":" + port 
41 | 
42 | self._channel = grpc.insecure_channel(self._connection) 
43 | 
44 | if stub is None: 
45 | self._stub = clara_pb2_grpc.ClaraStub(self._channel) 
46 | else: 
47 | self._stub = stub 
48 | 
49 | @staticmethod 
50 | def get_timestamp(seconds_since_year_one: str) -> datetime.datetime: 
51 | """ 
52 | Create datetime.datetime object from a string date 
53 | 
54 | Args: 
55 | seconds_since_year_one(str): date to parse 
56 | 
57 | Returns: 
58 | datetime.datetime object 
59 | """ 
60 | if (seconds_since_year_one is None) or (seconds_since_year_one == ""): 
61 | return None 
62 | 
63 | try: 
64 | # Check to see if in form of seconds since year one 
65 | seconds_int = float(seconds_since_year_one.value) - 62167219200 
66 | except: 
67 | # Otherwise parse timestamp 
68 | return datetime.datetime.strptime(seconds_since_year_one, "%Y-%m-%d %H:%M:%SZ") 
69 | 
70 | if seconds_int < 0: 
71 | return None 
72 | 
73 | result_date = datetime.datetime.fromtimestamp(seconds_int) 
74 | 
75 | return result_date 
76 | 
77 | def stop(self, timeout=None): 
78 | """Sends stop request to instance of Pipeline Services and Triton""" 
79 | 
80 | if (self._channel is None) or (self._stub is None): 
81 | raise Exception("Connection is currently closed. Please run reconnect() to reopen connection") 
82 | 
83 | request = clara_pb2.ClaraStopRequest(header=self.get_request_header()) 
84 | 
85 | response = self._stub.Stop(request, timeout=timeout) 
86 | 
87 | self.check_response_header(header=response.header) 
88 | 
89 | def list_utilization(self, timeout=None) -> List[clara_types.ClaraUtilizationDetails]: 
90 | """ 
91 | Method for acquiring a snapshot of GPU utilization information for Clara, returned as a list 
92 | 
93 | Returns: 
94 | List[clara_types.ClaraUtilizationDetails] with snapshot of GPU Utilization details for Clara GPUs 
95 | """ 
96 | 
97 | if (self._channel is None) or (self._stub is None): 
98 | raise Exception("Connection is currently closed.
Please run reconnect() to reopen connection") 
99 | 
100 | request = clara_pb2.ClaraUtilizationRequest(header=self.get_request_header(), watch=False) 
101 | 
102 | response = self._stub.Utilization(request, timeout=timeout) 
103 | 
104 | utilization_list = [] 
105 | 
106 | header_check = False 
107 | 
108 | for resp in response: 
109 | 
110 | if not header_check: 
111 | self.check_response_header(header=resp.header) 
112 | header_check = True 
113 | 
114 | metrics = resp.gpu_metrics 
115 | clara_utilization_details = clara_types.ClaraUtilizationDetails() 
116 | for item in metrics: 
117 | gpu_utilization = clara_types.ClaraGpuUtilization( 
118 | node_id=item.node_id, 
119 | pcie_id=item.pcie_id, 
120 | compute_utilization=item.compute_utilization, 
121 | memory_free=item.memory_free, 
122 | memory_used=item.memory_used, 
123 | memory_utilization=item.memory_utilization, 
124 | timestamp=self.get_timestamp(item.timestamp), 
125 | ) 
126 | 
127 | for proc_info in item.process_details: 
128 | process_details = clara_types.ClaraProcessDetails( 
129 | name=proc_info.name, 
130 | ) 
131 | 
132 | if proc_info.job_id.value: 
133 | process_details.job_id = job_types.JobId(proc_info.job_id.value) 
134 | 
135 | gpu_utilization.process_details.append((process_details)) 
136 | 
137 | clara_utilization_details.gpu_metrics.append((gpu_utilization)) 
138 | 
139 | utilization_list.append(clara_utilization_details) 
140 | 
141 | return utilization_list 
142 | 
143 | def stream_utilization(self, timeout=None) -> Iterator[clara_types.ClaraUtilizationDetails]: 
144 | """ 
145 | Method for acquiring a stream of GPU utilization information for Clara 
146 | 
147 | Returns: 
148 | Iterator[clara_types.ClaraUtilizationDetails] with stream of GPU Utilization details for Clara GPUs 
149 | """ 
150 | 
151 | if (self._channel is None) or (self._stub is None): 
152 | raise Exception("Connection is currently closed.
Please run reconnect() to reopen connection") 153 | 154 | request = clara_pb2.ClaraUtilizationRequest(header=self.get_request_header(), watch=True) 155 | 156 | response = self._stub.Utilization(request, timeout=timeout) 157 | 158 | header_check = False 159 | 160 | for resp in response: 161 | 162 | if not header_check: 163 | self.check_response_header(header=resp.header) 164 | header_check = True 165 | 166 | metrics = resp.gpu_metrics 167 | clara_utilization_details = clara_types.ClaraUtilizationDetails() 168 | for item in metrics: 169 | gpu_utilization = clara_types.ClaraGpuUtilization( 170 | node_id=item.node_id, 171 | pcie_id=item.pcie_id, 172 | compute_utilization=item.compute_utilization, 173 | memory_free=item.memory_free, 174 | memory_used=item.memory_used, 175 | memory_utilization=item.memory_utilization, 176 | timestamp=self.get_timestamp(item.timestamp), 177 | ) 178 | 179 | for proc_info in item.process_details: 180 | process_details = clara_types.ClaraProcessDetails( 181 | name=proc_info.name, 182 | job_id=job_types.JobId(proc_info.job_id.value) 183 | ) 184 | gpu_utilization.process_details.append((process_details)) 185 | 186 | clara_utilization_details.gpu_metrics.append((gpu_utilization)) 187 | 188 | yield clara_utilization_details 189 | 190 | def version(self, timeout=None): 191 | """Get Clara Version""" 192 | 193 | request = clara_pb2.ClaraVersionRequest(header=self.get_request_header()) 194 | 195 | response = self._stub.Version(request, timeout=timeout) 196 | 197 | self.check_response_header(header=response.header) 198 | 199 | result = clara_types.ClaraVersionInfo( 200 | major=response.version.major, 201 | minor=response.version.minor, 202 | patch=response.version.patch, 203 | label=response.version.label 204 | ) 205 | 206 | return result 207 | -------------------------------------------------------------------------------- /nvidia_clara/clara_types.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 
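# [Editorial sketch -- not part of the original source file.] The types defined below are the
# value objects returned by ClaraClient.list_utilization() and stream_utilization(). A short,
# hedged sketch of walking the nested structure (client construction as in clara_client.py;
# the address is a placeholder), commented out so it does not alter the module:
#
#   from nvidia_clara.clara_client import ClaraClient
#
#   client = ClaraClient(target='10.0.0.1', port='50051')    # placeholder address
#
#   for utilization in client.list_utilization():            # List[ClaraUtilizationDetails]
#       for gpu in utilization.gpu_metrics:                   # one ClaraGpuUtilization per GPU
#           print(gpu.node_id, gpu.pcie_id, gpu.compute_utilization, gpu.memory_utilization)
#           for proc in gpu.process_details:                  # ClaraProcessDetails per process
#               # job_id is only set when the process belongs to a pipeline-job
#               print(' ', proc.name, proc.job_id.value if proc.job_id else None)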
14 | 15 | from datetime import datetime 16 | from typing import List 17 | 18 | from nvidia_clara.job_types import JobId 19 | 20 | 21 | class ClaraVersionInfo: 22 | 23 | def __init__(self, major: int = None, minor: int = None, patch: int = None, label: str = None): 24 | """Clara version information.""" 25 | self._major = major 26 | self._minor = minor 27 | self._patch = patch 28 | self._label = label 29 | 30 | @property 31 | def major(self) -> int: 32 | """Version Major""" 33 | return self._major 34 | 35 | @major.setter 36 | def major(self, major: int): 37 | """Version Major""" 38 | self._major = major 39 | 40 | @property 41 | def minor(self) -> int: 42 | """Version Minor""" 43 | return self._minor 44 | 45 | @minor.setter 46 | def minor(self, minor: int): 47 | """Version Minor""" 48 | self._minor = minor 49 | 50 | @property 51 | def patch(self) -> int: 52 | """Version Patch""" 53 | return self._patch 54 | 55 | @patch.setter 56 | def patch(self, patch: int): 57 | """Version Patch""" 58 | self._patch = patch 59 | 60 | @property 61 | def label(self) -> str: 62 | """Version Label""" 63 | return self._label 64 | 65 | @label.setter 66 | def label(self, label: str): 67 | """Version Label""" 68 | self._label = label 69 | 70 | 71 | class ClaraProcessDetails: 72 | def __init__(self, name: str = None, job_id: JobId = None): 73 | self._name = name 74 | self._job_id = job_id 75 | 76 | @property 77 | def name(self) -> str: 78 | """ 79 | Name of the process utilizing the GPU. 80 | - When job_id is provided, is the unique (to the pipeline-job) name of the pipeline-job operator utilizing the GPU. 81 | - When job_id is not provided, is the name of the Clara Platform managed, non-pipeline process utilizing the GPU. 82 | """ 83 | return self._name 84 | 85 | @name.setter 86 | def name(self, name: str): 87 | """ 88 | Name of the process utilizing the GPU. 89 | - When job_id is provided, is the unique (to the pipeline-job) name of the pipeline-job operator utilizing the GPU. 90 | - When job_id is not provided, is the name of the Clara Platform managed, non-pipeline process utilizing the GPU. 91 | """ 92 | self._name = name 93 | 94 | @property 95 | def job_id(self) -> JobId: 96 | """Unique identifier of the pipeline-job utilizing the GPU. 97 | Only provided when the process utilizing the GPU is a pipeline-job. 98 | """ 99 | return self._job_id 100 | 101 | @job_id.setter 102 | def job_id(self, job_id: JobId): 103 | """Unique identifier of the pipeline-job utilizing the GPU. 104 | Only provided when the process utilizing the GPU is a pipeline-job. 
105 | """ 106 | self._job_id = job_id 107 | 108 | 109 | class ClaraGpuUtilization: 110 | 111 | def __init__(self, node_id: int = None, pcie_id: int = None, compute_utilization: float = None, 112 | memory_free: int = None, memory_used: int = None, memory_utilization: float = None, 113 | process_details: List[ClaraProcessDetails] = None, timestamp: datetime = None): 114 | """GPU Utilization details for a Clara process.""" 115 | if process_details is None: 116 | process_details = [] 117 | self._node_id = node_id 118 | self._pcie_id = pcie_id 119 | self._compute_utilization = compute_utilization 120 | self._memory_free = memory_free 121 | self._memory_used = memory_used 122 | self._memory_utilization = memory_utilization 123 | self._process_details = process_details 124 | self._timestamp = timestamp 125 | 126 | @property 127 | def node_id(self) -> int: 128 | """Unique (to the cluster) name of the node which contains the GPU.""" 129 | return self._node_id 130 | 131 | @node_id.setter 132 | def node_id(self, node_id: int): 133 | """Unique (to the cluster) name of the node which contains the GPU.""" 134 | self._node_id = node_id 135 | 136 | @property 137 | def pcie_id(self) -> int: 138 | """PCIE device identifier of the GPU.""" 139 | return self._pcie_id 140 | 141 | @pcie_id.setter 142 | def pcie_id(self, pcie_id: int): 143 | """PCIE device identifier of the GPU.""" 144 | self._pcie_id = pcie_id 145 | 146 | @property 147 | def compute_utilization(self) -> float: 148 | """GPU compute utilization; in the range of zero to one, inclusive [0, 1].""" 149 | return self._compute_utilization 150 | 151 | @compute_utilization.setter 152 | def compute_utilization(self, compute_utilization: float): 153 | """GPU compute utilization; in the range of zero to one, inclusive [0, 1].""" 154 | self._compute_utilization = compute_utilization 155 | 156 | @property 157 | def memory_free(self) -> int: 158 | """Free GPU memory, measured in megabytes.""" 159 | return self._memory_free 160 | 161 | @memory_free.setter 162 | def memory_free(self, memory_free: int): 163 | """Free GPU memory, measured in megabytes.""" 164 | self._memory_free = memory_free 165 | 166 | @property 167 | def memory_used(self) -> int: 168 | """Used GPU memory, measured in megabytes.""" 169 | return self._memory_used 170 | 171 | @memory_used.setter 172 | def memory_used(self, memory_used: int): 173 | """Used GPU memory, measured in megabytes.""" 174 | self._memory_used = memory_used 175 | 176 | @property 177 | def memory_utilization(self) -> float: 178 | """GPU memory utilization; in the range of zero to one, inclusive [0, 1].""" 179 | return self._memory_utilization 180 | 181 | @memory_utilization.setter 182 | def memory_utilization(self, memory_utilization: float): 183 | """GPU memory utilization; in the range of zero to one, inclusive [0, 1].""" 184 | self._memory_utilization = memory_utilization 185 | 186 | @property 187 | def process_details(self) -> List[ClaraProcessDetails]: 188 | """List of pipeline-job operators and/or Clara Platform managed process utilizing the GPU.""" 189 | return self._process_details 190 | 191 | @process_details.setter 192 | def process_details(self, process_details: List[ClaraProcessDetails]): 193 | """List of pipeline-job operators and/or Clara Platform managed process utilizing the GPU.""" 194 | self._process_details = process_details 195 | 196 | @property 197 | def timestamp(self) -> datetime: 198 | """Timestamp when the associated metrics data was collected.""" 199 | return self._timestamp 200 | 201 | 
@timestamp.setter 202 | def timestamp(self, timestamp: datetime): 203 | """Timestamp when the associated metrics data was collected.""" 204 | self._timestamp = timestamp 205 | 206 | 207 | class ClaraUtilizationDetails: 208 | """Utilization details for a Clara process.""" 209 | 210 | def __init__(self, gpu_metrics: List[ClaraGpuUtilization] = None): 211 | if gpu_metrics is None: 212 | gpu_metrics = [] 213 | self._gpu_metrics = gpu_metrics 214 | 215 | @property 216 | def gpu_metrics(self) -> List[ClaraGpuUtilization]: 217 | """List of Utilization Details of each GPU""" 218 | return self._gpu_metrics 219 | 220 | @gpu_metrics.setter 221 | def gpu_metrics(self, gpu_metrics: List[ClaraGpuUtilization]): 222 | """List of Utilization Details of each GPU""" 223 | self._gpu_metrics = gpu_metrics 224 | -------------------------------------------------------------------------------- /tests/test_payloads_client.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | import os 16 | 17 | import nvidia_clara.grpc.common_pb2 as common_pb2 18 | import nvidia_clara.grpc.payloads_pb2 as payloads_pb2 19 | 20 | from nvidia_clara.base_client import BaseClient 21 | from nvidia_clara.payloads_client import PayloadsClient 22 | import nvidia_clara.payload_types as payload_types 23 | 24 | from tests.test_jobs_client import run_client_test 25 | 26 | 27 | def run_payload_client(stub, method_name, *args, **kwargs): 28 | with PayloadsClient(target='10.0.0.1:50051', stub=stub) as client: 29 | response = getattr(client, method_name)(*args, **kwargs) 30 | return response 31 | 32 | 33 | class MockClaraPayloadServiceClient: 34 | stub_method_handlers = [] 35 | 36 | def __init__(self, channel, stub=None, request_header=None, logger=None): 37 | pass 38 | 39 | def __enter__(self): 40 | return self 41 | 42 | def __exit__(self, exc_type, exc_val, exc_tb): 43 | return False 44 | 45 | def create_payload(self, *args, **kwargs): 46 | return run_client_test( 47 | 'Payloads', 48 | 'create_payload', 49 | run_payload_client, 50 | stub_method_handlers=MockClaraPayloadServiceClient.stub_method_handlers, 51 | *args, **kwargs) 52 | 53 | def download_from(self, *args, **kwargs): 54 | return run_client_test( 55 | 'Payloads', 56 | 'download_from', 57 | run_payload_client, 58 | stub_method_handlers=MockClaraPayloadServiceClient.stub_method_handlers, 59 | *args, **kwargs) 60 | 61 | def upload(self, *args, **kwargs): 62 | return run_client_test( 63 | 'Payloads', 64 | 'upload', 65 | run_payload_client, 66 | stub_method_handlers=MockClaraPayloadServiceClient.stub_method_handlers, 67 | *args, **kwargs) 68 | 69 | def close(self): 70 | pass 71 | 72 | 73 | def test_create_payload(): 74 | requests = [ 75 | payloads_pb2.PayloadsCreateRequest( 76 | header=BaseClient.get_request_header() 77 | ) 78 | ] 79 | 80 | responses = [ 81 | payloads_pb2.PayloadsCreateResponse( 82 | 
header=common_pb2.ResponseHeader( 83 | code=0, 84 | messages=[]), 85 | payload_id=common_pb2.Identifier( 86 | value='92656d79fa414db6b294069c0e9e6df5' 87 | ), 88 | type=payloads_pb2.PAYLOAD_TYPE_REUSABLE 89 | ) 90 | ] 91 | 92 | stub_method_handlers = [( 93 | 'Create', 94 | 'unary_unary', 95 | ( 96 | requests, 97 | responses 98 | ) 99 | )] 100 | 101 | # set handlers 102 | MockClaraPayloadServiceClient.stub_method_handlers = stub_method_handlers 103 | 104 | with MockClaraPayloadServiceClient('localhost:50051') as client: 105 | payload_details = client.create_payload() 106 | print(payload_details.payload_id) 107 | print(payload_details.payload_type) 108 | assert payload_details.payload_id.value == '92656d79fa414db6b294069c0e9e6df5' 109 | assert payload_details.payload_type == 2 110 | 111 | 112 | MHD_TEXT = '''ObjectType = Image 113 | NDims = 3 114 | BinaryData = True 115 | BinaryDataByteOrderMSB = False 116 | CompressedData = False 117 | TransformMatrix = -1 0 0 0 1 0 0 0 1 118 | Offset = 0 0 0 119 | CenterOfRotation = 0 0 0 120 | AnatomicalOrientation = RAI 121 | ElementSpacing = 0.98 0.98 1.5 122 | DimSize = 460 286 1182 123 | ElementType = MET_SHORT 124 | ElementDataFile = highResCT.raw 125 | ''' 126 | 127 | 128 | def test_download_file(): 129 | fake_payload_id = '7ac5c691e13d4f45894a3a70d9925936' 130 | fake_request_file_name = '/input/highResCT.mhd' 131 | 132 | requests = [ 133 | payloads_pb2.PayloadsDownloadRequest( 134 | header=BaseClient.get_request_header(), 135 | payload_id=common_pb2.Identifier(value=fake_payload_id), 136 | name=fake_request_file_name) 137 | ] 138 | 139 | responses = [ 140 | payloads_pb2.PayloadsDownloadResponse( 141 | header=common_pb2.ResponseHeader( 142 | code=0, 143 | messages=[]), 144 | details=payloads_pb2.PayloadFileDetails(mode=0, name=fake_request_file_name, 145 | size=len(MHD_TEXT)), 146 | data=MHD_TEXT.encode('utf-8') 147 | ) 148 | ] 149 | 150 | stub_method_handlers = [( 151 | 'Download', 152 | 'unary_stream', 153 | ( 154 | requests, 155 | responses 156 | ) 157 | )] 158 | 159 | MockClaraPayloadServiceClient.stub_method_handlers = stub_method_handlers 160 | 161 | with MockClaraPayloadServiceClient('localhost:50051') as client: 162 | if os.path.exists('./highResCT.mhd'): 163 | os.remove('./highResCT.mhd') 164 | 165 | with open('./highResCT.mhd', 'wb+') as wb: 166 | file_details = client.download_from(payload_id=payload_types.PayloadId(fake_payload_id), 167 | blob_name=fake_request_file_name, 168 | dest_obj=wb) 169 | assert file_details.mode == 0 170 | assert file_details.name == fake_request_file_name 171 | assert file_details.size == len(MHD_TEXT) 172 | 173 | data = '' 174 | 175 | with open('./highResCT.mhd', 'r') as file: 176 | data = file.read() 177 | 178 | os.remove('./highResCT.mhd') 179 | 180 | print("Data Returned: ") 181 | print(data) 182 | 183 | assert data == MHD_TEXT 184 | 185 | 186 | def test_upload(tmp_path): 187 | fake_payload_id = '7ac5c691e13d4f45894a3a70d9925936' 188 | fake_file_name = './image.mhd' 189 | fake_response_file_name = './input/image.mhd' 190 | 191 | requests = [ 192 | payloads_pb2.PayloadsUploadRequest( 193 | header=BaseClient.get_request_header(), 194 | payload_id=common_pb2.Identifier(value=fake_payload_id), 195 | details=payloads_pb2.PayloadFileDetails(mode=0, name=fake_response_file_name, size=len(MHD_TEXT)), 196 | data=MHD_TEXT.encode('utf-8') 197 | ) 198 | ] 199 | responses = [ 200 | payloads_pb2.PayloadsUploadResponse( 201 | header=common_pb2.ResponseHeader( 202 | code=0, 203 | messages=[]), 204 | 
details=payloads_pb2.PayloadFileDetails(mode=0, name=fake_response_file_name, 205 | size=len(MHD_TEXT)) 206 | ) 207 | ] 208 | 209 | stub_method_handlers = [( 210 | 'Upload', 211 | 'stream_unary', 212 | ( 213 | requests, 214 | responses 215 | ) 216 | )] 217 | 218 | MockClaraPayloadServiceClient.stub_method_handlers = stub_method_handlers 219 | 220 | with MockClaraPayloadServiceClient('localhost:50051') as client: 221 | if os.path.exists(fake_file_name): 222 | os.remove(fake_file_name) 223 | 224 | with open(fake_file_name, 'w') as wb: 225 | wb.write(MHD_TEXT) 226 | 227 | file_details = None 228 | 229 | with open(fake_file_name, 'rb+') as fp: 230 | file_details = client.upload(payload_id=payload_types.PayloadId(fake_payload_id), 231 | blob_name=fake_response_file_name, file_object=fp) 232 | 233 | os.remove(fake_file_name) 234 | 235 | print(file_details.mode, file_details.name, file_details.size) 236 | 237 | assert file_details.mode == 0 238 | assert file_details.name == fake_response_file_name 239 | assert file_details.size == len(MHD_TEXT) 240 | -------------------------------------------------------------------------------- /nvidia_clara/grpc/pipelines_pb2_grpc.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | # -*- coding: utf-8 -*- 16 | # Generated by the protocol buffer compiler. DO NOT EDIT! 17 | # source: nvidia/clara/platform/clara.proto 18 | 19 | # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 20 | import grpc 21 | 22 | from nvidia_clara.grpc import pipelines_pb2 as nvidia_dot_clara_dot_platform_dot_pipelines__pb2 23 | 24 | 25 | class PipelinesStub(object): 26 | # missing associated documentation comment in .proto file 27 | pass 28 | 29 | def __init__(self, channel): 30 | """Constructor. 31 | 32 | Args: 33 | channel: A grpc.Channel. 
34 | """ 35 | self.AddMetadata = channel.unary_unary( 36 | '/nvidia.clara.platform.Pipelines/AddMetadata', 37 | request_serializer=nvidia_dot_clara_dot_platform_dot_pipelines__pb2.PipelinesAddMetadataRequest.SerializeToString, 38 | response_deserializer=nvidia_dot_clara_dot_platform_dot_pipelines__pb2.PipelinesAddMetadataResponse.FromString, 39 | ) 40 | self.Create = channel.stream_unary( 41 | '/nvidia.clara.platform.Pipelines/Create', 42 | request_serializer=nvidia_dot_clara_dot_platform_dot_pipelines__pb2.PipelinesCreateRequest.SerializeToString, 43 | response_deserializer=nvidia_dot_clara_dot_platform_dot_pipelines__pb2.PipelinesCreateResponse.FromString, 44 | ) 45 | self.Details = channel.unary_stream( 46 | '/nvidia.clara.platform.Pipelines/Details', 47 | request_serializer=nvidia_dot_clara_dot_platform_dot_pipelines__pb2.PipelinesDetailsRequest.SerializeToString, 48 | response_deserializer=nvidia_dot_clara_dot_platform_dot_pipelines__pb2.PipelinesDetailsResponse.FromString, 49 | ) 50 | self.List = channel.unary_stream( 51 | '/nvidia.clara.platform.Pipelines/List', 52 | request_serializer=nvidia_dot_clara_dot_platform_dot_pipelines__pb2.PipelinesListRequest.SerializeToString, 53 | response_deserializer=nvidia_dot_clara_dot_platform_dot_pipelines__pb2.PipelinesListResponse.FromString, 54 | ) 55 | self.Remove = channel.unary_unary( 56 | '/nvidia.clara.platform.Pipelines/Remove', 57 | request_serializer=nvidia_dot_clara_dot_platform_dot_pipelines__pb2.PipelinesRemoveRequest.SerializeToString, 58 | response_deserializer=nvidia_dot_clara_dot_platform_dot_pipelines__pb2.PipelinesRemoveResponse.FromString, 59 | ) 60 | self.RemoveMetadata = channel.unary_unary( 61 | '/nvidia.clara.platform.Pipelines/RemoveMetadata', 62 | request_serializer=nvidia_dot_clara_dot_platform_dot_pipelines__pb2.PipelinesRemoveMetadataRequest.SerializeToString, 63 | response_deserializer=nvidia_dot_clara_dot_platform_dot_pipelines__pb2.PipelinesRemoveMetadataResponse.FromString, 64 | ) 65 | self.Update = channel.stream_unary( 66 | '/nvidia.clara.platform.Pipelines/Update', 67 | request_serializer=nvidia_dot_clara_dot_platform_dot_pipelines__pb2.PipelinesUpdateRequest.SerializeToString, 68 | response_deserializer=nvidia_dot_clara_dot_platform_dot_pipelines__pb2.PipelinesUpdateResponse.FromString, 69 | ) 70 | 71 | 72 | class PipelinesServicer(object): 73 | # missing associated documentation comment in .proto file 74 | pass 75 | 76 | def AddMetadata(self, request, context): 77 | """Requests the addition of metadata to a pipeline. 78 | """ 79 | context.set_code(grpc.StatusCode.UNIMPLEMENTED) 80 | context.set_details('Method not implemented!') 81 | raise NotImplementedError('Method not implemented!') 82 | 83 | def Create(self, request_iterator, context): 84 | """Requests the creation of a new pipeline. 85 | """ 86 | context.set_code(grpc.StatusCode.UNIMPLEMENTED) 87 | context.set_details('Method not implemented!') 88 | raise NotImplementedError('Method not implemented!') 89 | 90 | def Details(self, request, context): 91 | """Requests details of a pipeline. 92 | """ 93 | context.set_code(grpc.StatusCode.UNIMPLEMENTED) 94 | context.set_details('Method not implemented!') 95 | raise NotImplementedError('Method not implemented!') 96 | 97 | def List(self, request, context): 98 | """Requests a listing of all pipelines known by the service. 
99 | """ 100 | context.set_code(grpc.StatusCode.UNIMPLEMENTED) 101 | context.set_details('Method not implemented!') 102 | raise NotImplementedError('Method not implemented!') 103 | 104 | def Remove(self, request, context): 105 | """Requests the removal of a pipeline definition from the service. 106 | """ 107 | context.set_code(grpc.StatusCode.UNIMPLEMENTED) 108 | context.set_details('Method not implemented!') 109 | raise NotImplementedError('Method not implemented!') 110 | 111 | def RemoveMetadata(self, request, context): 112 | """Requests the removal of specified metadata of a pipeline. 113 | """ 114 | context.set_code(grpc.StatusCode.UNIMPLEMENTED) 115 | context.set_details('Method not implemented!') 116 | raise NotImplementedError('Method not implemented!') 117 | 118 | def Update(self, request_iterator, context): 119 | """Requests an update to a known pipeline definition. 120 | """ 121 | context.set_code(grpc.StatusCode.UNIMPLEMENTED) 122 | context.set_details('Method not implemented!') 123 | raise NotImplementedError('Method not implemented!') 124 | 125 | 126 | def add_PipelinesServicer_to_server(servicer, server): 127 | rpc_method_handlers = { 128 | 'AddMetadata': grpc.unary_unary_rpc_method_handler( 129 | servicer.AddMetadata, 130 | request_deserializer=nvidia_dot_clara_dot_platform_dot_pipelines__pb2.PipelinesAddMetadataRequest.FromString, 131 | response_serializer=nvidia_dot_clara_dot_platform_dot_pipelines__pb2.PipelinesAddMetadataResponse.SerializeToString, 132 | ), 133 | 'Create': grpc.stream_unary_rpc_method_handler( 134 | servicer.Create, 135 | request_deserializer=nvidia_dot_clara_dot_platform_dot_pipelines__pb2.PipelinesCreateRequest.FromString, 136 | response_serializer=nvidia_dot_clara_dot_platform_dot_pipelines__pb2.PipelinesCreateResponse.SerializeToString, 137 | ), 138 | 'Details': grpc.unary_stream_rpc_method_handler( 139 | servicer.Details, 140 | request_deserializer=nvidia_dot_clara_dot_platform_dot_pipelines__pb2.PipelinesDetailsRequest.FromString, 141 | response_serializer=nvidia_dot_clara_dot_platform_dot_pipelines__pb2.PipelinesDetailsResponse.SerializeToString, 142 | ), 143 | 'List': grpc.unary_stream_rpc_method_handler( 144 | servicer.List, 145 | request_deserializer=nvidia_dot_clara_dot_platform_dot_pipelines__pb2.PipelinesListRequest.FromString, 146 | response_serializer=nvidia_dot_clara_dot_platform_dot_pipelines__pb2.PipelinesListResponse.SerializeToString, 147 | ), 148 | 'Remove': grpc.unary_unary_rpc_method_handler( 149 | servicer.Remove, 150 | request_deserializer=nvidia_dot_clara_dot_platform_dot_pipelines__pb2.PipelinesRemoveRequest.FromString, 151 | response_serializer=nvidia_dot_clara_dot_platform_dot_pipelines__pb2.PipelinesRemoveResponse.SerializeToString, 152 | ), 153 | 'RemoveMetadata': grpc.unary_unary_rpc_method_handler( 154 | servicer.RemoveMetadata, 155 | request_deserializer=nvidia_dot_clara_dot_platform_dot_pipelines__pb2.PipelinesRemoveMetadataRequest.FromString, 156 | response_serializer=nvidia_dot_clara_dot_platform_dot_pipelines__pb2.PipelinesRemoveMetadataResponse.SerializeToString, 157 | ), 158 | 'Update': grpc.stream_unary_rpc_method_handler( 159 | servicer.Update, 160 | request_deserializer=nvidia_dot_clara_dot_platform_dot_pipelines__pb2.PipelinesUpdateRequest.FromString, 161 | response_serializer=nvidia_dot_clara_dot_platform_dot_pipelines__pb2.PipelinesUpdateResponse.SerializeToString, 162 | ), 163 | } 164 | generic_handler = grpc.method_handlers_generic_handler( 165 | 'nvidia.clara.platform.Pipelines', rpc_method_handlers) 166 | 
server.add_generic_rpc_handlers((generic_handler,)) 167 | -------------------------------------------------------------------------------- /nvidia_clara/grpc/jobs_pb2_grpc.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | # -*- coding: utf-8 -*- 16 | # Generated by the protocol buffer compiler. DO NOT EDIT! 17 | # source: nvidia/clara/platform/clara.proto 18 | 19 | # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 20 | import grpc 21 | 22 | from nvidia_clara.grpc import jobs_pb2 as nvidia_dot_clara_dot_platform_dot_jobs__pb2 23 | 24 | 25 | class JobsStub(object): 26 | # missing associated documentation comment in .proto file 27 | pass 28 | 29 | def __init__(self, channel): 30 | """Constructor. 31 | 32 | Args: 33 | channel: A grpc.Channel. 34 | """ 35 | self.AddMetadata = channel.unary_unary( 36 | '/nvidia.clara.platform.Jobs/AddMetadata', 37 | request_serializer=nvidia_dot_clara_dot_platform_dot_jobs__pb2.JobsAddMetadataRequest.SerializeToString, 38 | response_deserializer=nvidia_dot_clara_dot_platform_dot_jobs__pb2.JobsAddMetadataResponse.FromString, 39 | ) 40 | self.Cancel = channel.unary_unary( 41 | '/nvidia.clara.platform.Jobs/Cancel', 42 | request_serializer=nvidia_dot_clara_dot_platform_dot_jobs__pb2.JobsCancelRequest.SerializeToString, 43 | response_deserializer=nvidia_dot_clara_dot_platform_dot_jobs__pb2.JobsCancelResponse.FromString, 44 | ) 45 | self.Create = channel.unary_unary( 46 | '/nvidia.clara.platform.Jobs/Create', 47 | request_serializer=nvidia_dot_clara_dot_platform_dot_jobs__pb2.JobsCreateRequest.SerializeToString, 48 | response_deserializer=nvidia_dot_clara_dot_platform_dot_jobs__pb2.JobsCreateResponse.FromString, 49 | ) 50 | self.List = channel.unary_stream( 51 | '/nvidia.clara.platform.Jobs/List', 52 | request_serializer=nvidia_dot_clara_dot_platform_dot_jobs__pb2.JobsListRequest.SerializeToString, 53 | response_deserializer=nvidia_dot_clara_dot_platform_dot_jobs__pb2.JobsListResponse.FromString, 54 | ) 55 | self.ReadLogs = channel.unary_stream( 56 | '/nvidia.clara.platform.Jobs/ReadLogs', 57 | request_serializer=nvidia_dot_clara_dot_platform_dot_jobs__pb2.JobsReadLogsRequest.SerializeToString, 58 | response_deserializer=nvidia_dot_clara_dot_platform_dot_jobs__pb2.JobsReadLogsResponse.FromString, 59 | ) 60 | self.RemoveMetadata = channel.unary_unary( 61 | '/nvidia.clara.platform.Jobs/RemoveMetadata', 62 | request_serializer=nvidia_dot_clara_dot_platform_dot_jobs__pb2.JobsRemoveMetadataRequest.SerializeToString, 63 | response_deserializer=nvidia_dot_clara_dot_platform_dot_jobs__pb2.JobsRemoveMetadataResponse.FromString, 64 | ) 65 | self.Start = channel.unary_unary( 66 | '/nvidia.clara.platform.Jobs/Start', 67 | request_serializer=nvidia_dot_clara_dot_platform_dot_jobs__pb2.JobsStartRequest.SerializeToString, 68 | 
response_deserializer=nvidia_dot_clara_dot_platform_dot_jobs__pb2.JobsStartResponse.FromString, 69 | ) 70 | self.Status = channel.unary_unary( 71 | '/nvidia.clara.platform.Jobs/Status', 72 | request_serializer=nvidia_dot_clara_dot_platform_dot_jobs__pb2.JobsStatusRequest.SerializeToString, 73 | response_deserializer=nvidia_dot_clara_dot_platform_dot_jobs__pb2.JobsStatusResponse.FromString, 74 | ) 75 | 76 | 77 | class JobsServicer(object): 78 | # missing associated documentation comment in .proto file 79 | pass 80 | 81 | def AddMetadata(self, request, context): 82 | """Requests the addition of metadata to a job. 83 | """ 84 | context.set_code(grpc.StatusCode.UNIMPLEMENTED) 85 | context.set_details('Method not implemented!') 86 | raise NotImplementedError('Method not implemented!') 87 | 88 | def Cancel(self, request, context): 89 | """Request cancellation of a running job. 90 | """ 91 | context.set_code(grpc.StatusCode.UNIMPLEMENTED) 92 | context.set_details('Method not implemented!') 93 | raise NotImplementedError('Method not implemented!') 94 | 95 | def Create(self, request, context): 96 | """Requests creation of a new job based on a known pipeline. 97 | """ 98 | context.set_code(grpc.StatusCode.UNIMPLEMENTED) 99 | context.set_details('Method not implemented!') 100 | raise NotImplementedError('Method not implemented!') 101 | 102 | def List(self, request, context): 103 | """Requests a filtered list of all known jobs, or a list of all running jobs if no filter is provided. 104 | """ 105 | context.set_code(grpc.StatusCode.UNIMPLEMENTED) 106 | context.set_details('Method not implemented!') 107 | raise NotImplementedError('Method not implemented!') 108 | 109 | def ReadLogs(self, request, context): 110 | """Requests the download of logs for an operator of a job. 111 | """ 112 | context.set_code(grpc.StatusCode.UNIMPLEMENTED) 113 | context.set_details('Method not implemented!') 114 | raise NotImplementedError('Method not implemented!') 115 | 116 | def RemoveMetadata(self, request, context): 117 | """Requests the removal of metadata from a job. 118 | """ 119 | context.set_code(grpc.StatusCode.UNIMPLEMENTED) 120 | context.set_details('Method not implemented!') 121 | raise NotImplementedError('Method not implemented!') 122 | 123 | def Start(self, request, context): 124 | """Request starting of a job created by the Create RPC. 125 | """ 126 | context.set_code(grpc.StatusCode.UNIMPLEMENTED) 127 | context.set_details('Method not implemented!') 128 | raise NotImplementedError('Method not implemented!') 129 | 130 | def Status(self, request, context): 131 | """Requests the status of a known job. 
132 | """ 133 | context.set_code(grpc.StatusCode.UNIMPLEMENTED) 134 | context.set_details('Method not implemented!') 135 | raise NotImplementedError('Method not implemented!') 136 | 137 | 138 | def add_JobsServicer_to_server(servicer, server): 139 | rpc_method_handlers = { 140 | 'AddMetadata': grpc.unary_unary_rpc_method_handler( 141 | servicer.AddMetadata, 142 | request_deserializer=nvidia_dot_clara_dot_platform_dot_jobs__pb2.JobsAddMetadataRequest.FromString, 143 | response_serializer=nvidia_dot_clara_dot_platform_dot_jobs__pb2.JobsAddMetadataResponse.SerializeToString, 144 | ), 145 | 'Cancel': grpc.unary_unary_rpc_method_handler( 146 | servicer.Cancel, 147 | request_deserializer=nvidia_dot_clara_dot_platform_dot_jobs__pb2.JobsCancelRequest.FromString, 148 | response_serializer=nvidia_dot_clara_dot_platform_dot_jobs__pb2.JobsCancelResponse.SerializeToString, 149 | ), 150 | 'Create': grpc.unary_unary_rpc_method_handler( 151 | servicer.Create, 152 | request_deserializer=nvidia_dot_clara_dot_platform_dot_jobs__pb2.JobsCreateRequest.FromString, 153 | response_serializer=nvidia_dot_clara_dot_platform_dot_jobs__pb2.JobsCreateResponse.SerializeToString, 154 | ), 155 | 'List': grpc.unary_stream_rpc_method_handler( 156 | servicer.List, 157 | request_deserializer=nvidia_dot_clara_dot_platform_dot_jobs__pb2.JobsListRequest.FromString, 158 | response_serializer=nvidia_dot_clara_dot_platform_dot_jobs__pb2.JobsListResponse.SerializeToString, 159 | ), 160 | 'ReadLogs': grpc.unary_stream_rpc_method_handler( 161 | servicer.ReadLogs, 162 | request_deserializer=nvidia_dot_clara_dot_platform_dot_jobs__pb2.JobsReadLogsRequest.FromString, 163 | response_serializer=nvidia_dot_clara_dot_platform_dot_jobs__pb2.JobsReadLogsResponse.SerializeToString, 164 | ), 165 | 'RemoveMetadata': grpc.unary_unary_rpc_method_handler( 166 | servicer.RemoveMetadata, 167 | request_deserializer=nvidia_dot_clara_dot_platform_dot_jobs__pb2.JobsRemoveMetadataRequest.FromString, 168 | response_serializer=nvidia_dot_clara_dot_platform_dot_jobs__pb2.JobsRemoveMetadataResponse.SerializeToString, 169 | ), 170 | 'Start': grpc.unary_unary_rpc_method_handler( 171 | servicer.Start, 172 | request_deserializer=nvidia_dot_clara_dot_platform_dot_jobs__pb2.JobsStartRequest.FromString, 173 | response_serializer=nvidia_dot_clara_dot_platform_dot_jobs__pb2.JobsStartResponse.SerializeToString, 174 | ), 175 | 'Status': grpc.unary_unary_rpc_method_handler( 176 | servicer.Status, 177 | request_deserializer=nvidia_dot_clara_dot_platform_dot_jobs__pb2.JobsStatusRequest.FromString, 178 | response_serializer=nvidia_dot_clara_dot_platform_dot_jobs__pb2.JobsStatusResponse.SerializeToString, 179 | ), 180 | } 181 | generic_handler = grpc.method_handlers_generic_handler( 182 | 'nvidia.clara.platform.Jobs', rpc_method_handlers) 183 | server.add_generic_rpc_handlers((generic_handler,)) 184 | -------------------------------------------------------------------------------- /nvidia_clara/grpc/payloads_pb2_grpc.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 
5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | # -*- coding: utf-8 -*- 16 | # Generated by the protocol buffer compiler. DO NOT EDIT! 17 | # source: nvidia/clara/platform/clara.proto 18 | 19 | # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 20 | import grpc 21 | 22 | from nvidia_clara.grpc import payloads_pb2 as nvidia_dot_clara_dot_platform_dot_payloads__pb2 23 | 24 | 25 | class PayloadsStub(object): 26 | # missing associated documentation comment in .proto file 27 | pass 28 | 29 | def __init__(self, channel): 30 | """Constructor. 31 | 32 | Args: 33 | channel: A grpc.Channel. 34 | """ 35 | self.AddMetadata = channel.unary_unary( 36 | '/nvidia.clara.platform.Payloads/AddMetadata', 37 | request_serializer=nvidia_dot_clara_dot_platform_dot_payloads__pb2.PayloadsAddMetadataRequest.SerializeToString, 38 | response_deserializer=nvidia_dot_clara_dot_platform_dot_payloads__pb2.PayloadsAddMetadataResponse.FromString, 39 | ) 40 | self.Create = channel.unary_unary( 41 | '/nvidia.clara.platform.Payloads/Create', 42 | request_serializer=nvidia_dot_clara_dot_platform_dot_payloads__pb2.PayloadsCreateRequest.SerializeToString, 43 | response_deserializer=nvidia_dot_clara_dot_platform_dot_payloads__pb2.PayloadsCreateResponse.FromString, 44 | ) 45 | self.Delete = channel.unary_unary( 46 | '/nvidia.clara.platform.Payloads/Delete', 47 | request_serializer=nvidia_dot_clara_dot_platform_dot_payloads__pb2.PayloadsDeleteRequest.SerializeToString, 48 | response_deserializer=nvidia_dot_clara_dot_platform_dot_payloads__pb2.PayloadsDeleteResponse.FromString, 49 | ) 50 | self.Details = channel.unary_stream( 51 | '/nvidia.clara.platform.Payloads/Details', 52 | request_serializer=nvidia_dot_clara_dot_platform_dot_payloads__pb2.PayloadsDetailsRequest.SerializeToString, 53 | response_deserializer=nvidia_dot_clara_dot_platform_dot_payloads__pb2.PayloadsDetailsResponse.FromString, 54 | ) 55 | self.Download = channel.unary_stream( 56 | '/nvidia.clara.platform.Payloads/Download', 57 | request_serializer=nvidia_dot_clara_dot_platform_dot_payloads__pb2.PayloadsDownloadRequest.SerializeToString, 58 | response_deserializer=nvidia_dot_clara_dot_platform_dot_payloads__pb2.PayloadsDownloadResponse.FromString, 59 | ) 60 | self.Remove = channel.unary_unary( 61 | '/nvidia.clara.platform.Payloads/Remove', 62 | request_serializer=nvidia_dot_clara_dot_platform_dot_payloads__pb2.PayloadsRemoveRequest.SerializeToString, 63 | response_deserializer=nvidia_dot_clara_dot_platform_dot_payloads__pb2.PayloadsRemoveResponse.FromString, 64 | ) 65 | self.RemoveMetadata = channel.unary_unary( 66 | '/nvidia.clara.platform.Payloads/RemoveMetadata', 67 | request_serializer=nvidia_dot_clara_dot_platform_dot_payloads__pb2.PayloadsRemoveMetadataRequest.SerializeToString, 68 | response_deserializer=nvidia_dot_clara_dot_platform_dot_payloads__pb2.PayloadsRemoveMetadataResponse.FromString, 69 | ) 70 | self.Upload = channel.stream_unary( 71 | '/nvidia.clara.platform.Payloads/Upload', 72 | request_serializer=nvidia_dot_clara_dot_platform_dot_payloads__pb2.PayloadsUploadRequest.SerializeToString, 73 | 
response_deserializer=nvidia_dot_clara_dot_platform_dot_payloads__pb2.PayloadsUploadResponse.FromString, 74 | ) 75 | 76 | 77 | class PayloadsServicer(object): 78 | # missing associated documentation comment in .proto file 79 | pass 80 | 81 | def AddMetadata(self, request, context): 82 | """Requests the addition of metadata to a payload. 83 | """ 84 | context.set_code(grpc.StatusCode.UNIMPLEMENTED) 85 | context.set_details('Method not implemented!') 86 | raise NotImplementedError('Method not implemented!') 87 | 88 | def Create(self, request, context): 89 | """Requests the creation of a new payload. 90 | """ 91 | context.set_code(grpc.StatusCode.UNIMPLEMENTED) 92 | context.set_details('Method not implemented!') 93 | raise NotImplementedError('Method not implemented!') 94 | 95 | def Delete(self, request, context): 96 | """Requests the deletion of a known payload. 97 | """ 98 | context.set_code(grpc.StatusCode.UNIMPLEMENTED) 99 | context.set_details('Method not implemented!') 100 | raise NotImplementedError('Method not implemented!') 101 | 102 | def Details(self, request, context): 103 | """Requests the details (file listing) of a known payload. 104 | """ 105 | context.set_code(grpc.StatusCode.UNIMPLEMENTED) 106 | context.set_details('Method not implemented!') 107 | raise NotImplementedError('Method not implemented!') 108 | 109 | def Download(self, request, context): 110 | """Requests the download of a blob (file) from a known payload. 111 | """ 112 | context.set_code(grpc.StatusCode.UNIMPLEMENTED) 113 | context.set_details('Method not implemented!') 114 | raise NotImplementedError('Method not implemented!') 115 | 116 | def Remove(self, request, context): 117 | """Requests the removal, or deletion, of a blob from a known payload. 118 | """ 119 | context.set_code(grpc.StatusCode.UNIMPLEMENTED) 120 | context.set_details('Method not implemented!') 121 | raise NotImplementedError('Method not implemented!') 122 | 123 | def RemoveMetadata(self, request, context): 124 | """Requests the removal of metadata from a payload. 125 | """ 126 | context.set_code(grpc.StatusCode.UNIMPLEMENTED) 127 | context.set_details('Method not implemented!') 128 | raise NotImplementedError('Method not implemented!') 129 | 130 | def Upload(self, request_iterator, context): 131 | """Requests the upload of a blob (file) to a known payload. 132 | When payload type is PAYLOAD_TYPE_PIPELINE, uploads are written to the ~/input/ folder of the payload. 
133 | """ 134 | context.set_code(grpc.StatusCode.UNIMPLEMENTED) 135 | context.set_details('Method not implemented!') 136 | raise NotImplementedError('Method not implemented!') 137 | 138 | 139 | def add_PayloadsServicer_to_server(servicer, server): 140 | rpc_method_handlers = { 141 | 'AddMetadata': grpc.unary_unary_rpc_method_handler( 142 | servicer.AddMetadata, 143 | request_deserializer=nvidia_dot_clara_dot_platform_dot_payloads__pb2.PayloadsAddMetadataRequest.FromString, 144 | response_serializer=nvidia_dot_clara_dot_platform_dot_payloads__pb2.PayloadsAddMetadataResponse.SerializeToString, 145 | ), 146 | 'Create': grpc.unary_unary_rpc_method_handler( 147 | servicer.Create, 148 | request_deserializer=nvidia_dot_clara_dot_platform_dot_payloads__pb2.PayloadsCreateRequest.FromString, 149 | response_serializer=nvidia_dot_clara_dot_platform_dot_payloads__pb2.PayloadsCreateResponse.SerializeToString, 150 | ), 151 | 'Delete': grpc.unary_unary_rpc_method_handler( 152 | servicer.Delete, 153 | request_deserializer=nvidia_dot_clara_dot_platform_dot_payloads__pb2.PayloadsDeleteRequest.FromString, 154 | response_serializer=nvidia_dot_clara_dot_platform_dot_payloads__pb2.PayloadsDeleteResponse.SerializeToString, 155 | ), 156 | 'Details': grpc.unary_stream_rpc_method_handler( 157 | servicer.Details, 158 | request_deserializer=nvidia_dot_clara_dot_platform_dot_payloads__pb2.PayloadsDetailsRequest.FromString, 159 | response_serializer=nvidia_dot_clara_dot_platform_dot_payloads__pb2.PayloadsDetailsResponse.SerializeToString, 160 | ), 161 | 'Download': grpc.unary_stream_rpc_method_handler( 162 | servicer.Download, 163 | request_deserializer=nvidia_dot_clara_dot_platform_dot_payloads__pb2.PayloadsDownloadRequest.FromString, 164 | response_serializer=nvidia_dot_clara_dot_platform_dot_payloads__pb2.PayloadsDownloadResponse.SerializeToString, 165 | ), 166 | 'Remove': grpc.unary_unary_rpc_method_handler( 167 | servicer.Remove, 168 | request_deserializer=nvidia_dot_clara_dot_platform_dot_payloads__pb2.PayloadsRemoveRequest.FromString, 169 | response_serializer=nvidia_dot_clara_dot_platform_dot_payloads__pb2.PayloadsRemoveResponse.SerializeToString, 170 | ), 171 | 'RemoveMetadata': grpc.unary_unary_rpc_method_handler( 172 | servicer.RemoveMetadata, 173 | request_deserializer=nvidia_dot_clara_dot_platform_dot_payloads__pb2.PayloadsRemoveMetadataRequest.FromString, 174 | response_serializer=nvidia_dot_clara_dot_platform_dot_payloads__pb2.PayloadsRemoveMetadataResponse.SerializeToString, 175 | ), 176 | 'Upload': grpc.stream_unary_rpc_method_handler( 177 | servicer.Upload, 178 | request_deserializer=nvidia_dot_clara_dot_platform_dot_payloads__pb2.PayloadsUploadRequest.FromString, 179 | response_serializer=nvidia_dot_clara_dot_platform_dot_payloads__pb2.PayloadsUploadResponse.SerializeToString, 180 | ), 181 | } 182 | generic_handler = grpc.method_handlers_generic_handler( 183 | 'nvidia.clara.platform.Payloads', rpc_method_handlers) 184 | server.add_generic_rpc_handlers((generic_handler,)) 185 | -------------------------------------------------------------------------------- /nvidia_clara/model_types.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 
5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | from enum import Enum 16 | from typing import Mapping as HashMap, List, Mapping 17 | from nvidia_clara.grpc import models_pb2, common_pb2 18 | 19 | 20 | class CatalogId: 21 | 22 | def __init__(self, value: str): 23 | """ 24 | Unique identifier for an Inference Model Catalog. 25 | """ 26 | 27 | if (value is None) or (value == ""): 28 | raise Exception("Catalog identifier value must be intialized.") 29 | 30 | self._value = value 31 | 32 | @property 33 | def value(self): 34 | return self._value 35 | 36 | def __eq__(self, other) -> bool: 37 | return self._value == other.value 38 | 39 | def __ne__(self, other) -> bool: 40 | return not (self == other) 41 | 42 | def __repr__(self): 43 | return "%s" % self._value 44 | 45 | def __str__(self): 46 | return "%s" % self._value 47 | 48 | def __hash__(self): 49 | return hash(self._value) 50 | 51 | def to_grpc_value(self): 52 | id = common_pb2.Identifier() 53 | id.value = self._value 54 | return id 55 | 56 | 57 | class ModelType(Enum): 58 | Unknown = 0 59 | 60 | TensorFlow = 1 61 | 62 | TensorRT = 2 63 | 64 | PyTorch = 3 65 | 66 | Minimum = TensorFlow 67 | 68 | Maximum = PyTorch 69 | 70 | 71 | class ModelId: 72 | def __init__(self, value: str): 73 | if (value is None) or (value == ""): 74 | raise Exception("Model identifier value must be intialized.") 75 | 76 | self._value = value 77 | 78 | @property 79 | def value(self): 80 | return self._value 81 | 82 | def __eq__(self, other) -> bool: 83 | return self._value == other.value 84 | 85 | def __ne__(self, other) -> bool: 86 | return not (self == other) 87 | 88 | def __repr__(self): 89 | return "%s" % self._value 90 | 91 | def __str__(self): 92 | return "%s" % self._value 93 | 94 | def __hash__(self): 95 | return hash(self._value) 96 | 97 | def to_grpc_value(self): 98 | id = common_pb2.Identifier() 99 | id.value = self._value 100 | return id 101 | 102 | 103 | class ModelDetails: 104 | 105 | def __init__(self, other: models_pb2.ModelDetails = None, model_id: ModelId = None, name: str = None, 106 | tags: Mapping[str, str] = None, 107 | model_type: ModelType = None, metadata: Mapping[str, str] = None): 108 | if other is None: 109 | if tags is None: 110 | tags = dict() 111 | if metadata is None: 112 | metadata = dict() 113 | self._model_id = model_id 114 | self._name = name 115 | self._tags = tags 116 | self._model_type = model_type 117 | self._metadata = metadata 118 | else: 119 | self._model_id = other.model_id 120 | self._name = other.name 121 | self._tags = other.tags 122 | self._model_type = other.type 123 | self._metadata = other.metadata 124 | 125 | @property 126 | def model_id(self) -> ModelId: 127 | """Unique identifier of this inference model.""" 128 | return self._model_id 129 | 130 | @model_id.setter 131 | def model_id(self, model_id: ModelId = None): 132 | """Unique identifier of this inference model.""" 133 | self._model_id = model_id 134 | 135 | @property 136 | def name(self) -> str: 137 | """The name of this inference model.""" 138 | return self._name 139 | 140 | @name.setter 141 | def name(self, name: str = None): 142 | """The name of this 
inference model.""" 143 | self._name = name 144 | 145 | @property 146 | def tags(self) -> Mapping[str, str]: 147 | """The set of tags / meta-data associated with this infrence model.""" 148 | return self._tags 149 | 150 | @tags.setter 151 | def tags(self, tags: Mapping[str, str] = None): 152 | """The set of tags / meta-data associated with this infrence model.""" 153 | self._tags = tags 154 | 155 | @property 156 | def model_type(self) -> ModelId: 157 | """The type (inference toolset) of this inference model.""" 158 | return self._model_type 159 | 160 | @model_type.setter 161 | def model_type(self, model_type: ModelType = None): 162 | """The type (inference toolset) of this inference model.""" 163 | self._model_type = model_type 164 | 165 | @property 166 | def metadata(self) -> Mapping[str, str]: 167 | """ 168 | Metadata (set of key/value pairs) associated with the model 169 | """ 170 | return self._metadata 171 | 172 | @metadata.setter 173 | def metadata(self, metadata: Mapping[str, str]): 174 | """ 175 | Metadata (set of key/value pairs) associated with the model 176 | """ 177 | self._metadata = metadata 178 | 179 | 180 | class CatalogDetails: 181 | 182 | def __init__(self, other: models_pb2.ModelCatalogDetails = None, catalog_id: CatalogId = None, 183 | models: List[ModelDetails] = None): 184 | if other is None: 185 | if catalog_id is None: 186 | raise Exception("Catalog identifier can not be None and must be initializes") 187 | 188 | self._catalog_id = catalog_id 189 | 190 | if models is None: 191 | self._models = [] 192 | else: 193 | self._models = models 194 | else: 195 | self._catalog_id = None 196 | 197 | if (other.catalog_id.value is not None) or (other.catalog_id.value != ""): 198 | self._catalog_id = CatalogId(value=other.catalog_id.value) 199 | 200 | self._models = [] 201 | 202 | if len(other.models) > 0: 203 | 204 | for model in other.models: 205 | new_model = ModelDetails(other=model) 206 | self._models.append(new_model) 207 | 208 | @property 209 | def catalog_id(self) -> CatalogId: 210 | """Unique identifier of this inference model catalog.""" 211 | return self._catalog_id 212 | 213 | @catalog_id.setter 214 | def catalog_id(self, catalog_id: CatalogId = None): 215 | """Unique identifier of this inference model catalog.""" 216 | self._catalog_id = catalog_id 217 | 218 | @property 219 | def models(self) -> List[ModelDetails]: 220 | """List of inference models associated with this inference model catalog.""" 221 | return self._models 222 | 223 | @models.setter 224 | def models(self, models: List[ModelDetails] = None): 225 | """List of inference models associated with this inference model catalog.""" 226 | self._models = models 227 | 228 | 229 | class InstanceId: 230 | def __init__(self, value: str): 231 | """Unique identifier for an Model Catalog Instance.""" 232 | if (value is None) or (value == ""): 233 | raise Exception("InstanceId identifier value must be intialized.") 234 | 235 | self._value = value 236 | 237 | @property 238 | def value(self): 239 | return self._value 240 | 241 | def __eq__(self, other) -> bool: 242 | return self._value == other.value 243 | 244 | def __ne__(self, other) -> bool: 245 | return not (self == other) 246 | 247 | def __repr__(self): 248 | return "%s" % self._value 249 | 250 | def __str__(self): 251 | return "%s" % self._value 252 | 253 | def __hash__(self): 254 | return hash(self._value) 255 | 256 | def to_grpc_value(self): 257 | id = common_pb2.Identifier() 258 | id.value = self._value 259 | return id 260 | 261 | 262 | class InstanceDetails: 263 
| def __init__(self, other: models_pb2.ModelCatalogDetails = None, instance_id: InstanceId = None, 264 | models: List[ModelDetails] = None): 265 | if other is None: 266 | if instance_id is None: 267 | raise Exception("Instance identifier cannot be None and must be initialized") 268 | 269 | self._instance_id = instance_id 270 | 271 | if models is None: 272 | self._models = [] 273 | else: 274 | self._models = models 275 | else: 276 | self._instance_id = None 277 | 278 | if (other.catalog_id.value is not None) and (other.catalog_id.value != ""): 279 | self._instance_id = InstanceId(value=other.catalog_id.value) 280 | 281 | self._models = [] 282 | 283 | if len(other.models) > 0: 284 | 285 | for model in other.models: 286 | new_model = ModelDetails(other=model) 287 | self._models.append(new_model) 288 | 289 | @property 290 | def instance_id(self) -> InstanceId: 291 | """Unique identifier of this inference model catalog instance.""" 292 | return self._instance_id 293 | 294 | @instance_id.setter 295 | def instance_id(self, instance_id: InstanceId = None): 296 | """Unique identifier of this inference model catalog instance.""" 297 | self._instance_id = instance_id 298 | 299 | @property 300 | def models(self) -> List[ModelDetails]: 301 | """List of inference models associated with this inference model catalog instance.""" 302 | return self._models 303 | 304 | @models.setter 305 | def models(self, models: List[ModelDetails] = None): 306 | """List of inference models associated with this inference model catalog instance.""" 307 | self._models = models 308 | -------------------------------------------------------------------------------- /nvidia_clara/grpc/common_pb2.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | # -*- coding: utf-8 -*- 16 | # Generated by the protocol buffer compiler. DO NOT EDIT!
17 | # source: nvidia/clara/platform/common.proto 18 | 19 | import sys 20 | _b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) 21 | from google.protobuf import descriptor as _descriptor 22 | from google.protobuf import message as _message 23 | from google.protobuf import reflection as _reflection 24 | from google.protobuf import symbol_database as _symbol_database 25 | # @@protoc_insertion_point(imports) 26 | 27 | _sym_db = _symbol_database.Default() 28 | 29 | 30 | 31 | 32 | DESCRIPTOR = _descriptor.FileDescriptor( 33 | name='nvidia/clara/platform/common.proto', 34 | package='nvidia.clara.platform', 35 | syntax='proto3', 36 | serialized_options=_b('\n\031com.nvidia.clara.platformZ\004apis\252\002\032Nvidia.Clara.Platform.Grpc'), 37 | serialized_pb=_b('\n\"nvidia/clara/platform/common.proto\x12\x15nvidia.clara.platform\"\x1b\n\nIdentifier\x12\r\n\x05value\x18\x01 \x01(\t\"E\n\x07Version\x12\r\n\x05major\x18\x01 \x01(\x05\x12\r\n\x05minor\x18\x02 \x01(\x05\x12\r\n\x05patch\x18\x03 \x01(\x05\x12\r\n\x05label\x18\x04 \x01(\t\"X\n\rRequestHeader\x12\x33\n\x0b\x61pi_version\x18\x01 \x01(\x0b\x32\x1e.nvidia.clara.platform.Version\x12\x12\n\nuser_agent\x18\x02 \x01(\t\"0\n\x0eResponseHeader\x12\x0c\n\x04\x63ode\x18\x01 \x01(\x11\x12\x10\n\x08messages\x18\x02 \x03(\t\"\x1a\n\tTimestamp\x12\r\n\x05value\x18\x01 \x01(\x12\x42>\n\x19\x63om.nvidia.clara.platformZ\x04\x61pis\xaa\x02\x1aNvidia.Clara.Platform.Grpcb\x06proto3') 38 | ) 39 | 40 | 41 | 42 | 43 | _IDENTIFIER = _descriptor.Descriptor( 44 | name='Identifier', 45 | full_name='nvidia.clara.platform.Identifier', 46 | filename=None, 47 | file=DESCRIPTOR, 48 | containing_type=None, 49 | fields=[ 50 | _descriptor.FieldDescriptor( 51 | name='value', full_name='nvidia.clara.platform.Identifier.value', index=0, 52 | number=1, type=9, cpp_type=9, label=1, 53 | has_default_value=False, default_value=_b("").decode('utf-8'), 54 | message_type=None, enum_type=None, containing_type=None, 55 | is_extension=False, extension_scope=None, 56 | serialized_options=None, file=DESCRIPTOR), 57 | ], 58 | extensions=[ 59 | ], 60 | nested_types=[], 61 | enum_types=[ 62 | ], 63 | serialized_options=None, 64 | is_extendable=False, 65 | syntax='proto3', 66 | extension_ranges=[], 67 | oneofs=[ 68 | ], 69 | serialized_start=61, 70 | serialized_end=88, 71 | ) 72 | 73 | 74 | _VERSION = _descriptor.Descriptor( 75 | name='Version', 76 | full_name='nvidia.clara.platform.Version', 77 | filename=None, 78 | file=DESCRIPTOR, 79 | containing_type=None, 80 | fields=[ 81 | _descriptor.FieldDescriptor( 82 | name='major', full_name='nvidia.clara.platform.Version.major', index=0, 83 | number=1, type=5, cpp_type=1, label=1, 84 | has_default_value=False, default_value=0, 85 | message_type=None, enum_type=None, containing_type=None, 86 | is_extension=False, extension_scope=None, 87 | serialized_options=None, file=DESCRIPTOR), 88 | _descriptor.FieldDescriptor( 89 | name='minor', full_name='nvidia.clara.platform.Version.minor', index=1, 90 | number=2, type=5, cpp_type=1, label=1, 91 | has_default_value=False, default_value=0, 92 | message_type=None, enum_type=None, containing_type=None, 93 | is_extension=False, extension_scope=None, 94 | serialized_options=None, file=DESCRIPTOR), 95 | _descriptor.FieldDescriptor( 96 | name='patch', full_name='nvidia.clara.platform.Version.patch', index=2, 97 | number=3, type=5, cpp_type=1, label=1, 98 | has_default_value=False, default_value=0, 99 | message_type=None, enum_type=None, containing_type=None, 100 | is_extension=False, 
extension_scope=None, 101 | serialized_options=None, file=DESCRIPTOR), 102 | _descriptor.FieldDescriptor( 103 | name='label', full_name='nvidia.clara.platform.Version.label', index=3, 104 | number=4, type=9, cpp_type=9, label=1, 105 | has_default_value=False, default_value=_b("").decode('utf-8'), 106 | message_type=None, enum_type=None, containing_type=None, 107 | is_extension=False, extension_scope=None, 108 | serialized_options=None, file=DESCRIPTOR), 109 | ], 110 | extensions=[ 111 | ], 112 | nested_types=[], 113 | enum_types=[ 114 | ], 115 | serialized_options=None, 116 | is_extendable=False, 117 | syntax='proto3', 118 | extension_ranges=[], 119 | oneofs=[ 120 | ], 121 | serialized_start=90, 122 | serialized_end=159, 123 | ) 124 | 125 | 126 | _REQUESTHEADER = _descriptor.Descriptor( 127 | name='RequestHeader', 128 | full_name='nvidia.clara.platform.RequestHeader', 129 | filename=None, 130 | file=DESCRIPTOR, 131 | containing_type=None, 132 | fields=[ 133 | _descriptor.FieldDescriptor( 134 | name='api_version', full_name='nvidia.clara.platform.RequestHeader.api_version', index=0, 135 | number=1, type=11, cpp_type=10, label=1, 136 | has_default_value=False, default_value=None, 137 | message_type=None, enum_type=None, containing_type=None, 138 | is_extension=False, extension_scope=None, 139 | serialized_options=None, file=DESCRIPTOR), 140 | _descriptor.FieldDescriptor( 141 | name='user_agent', full_name='nvidia.clara.platform.RequestHeader.user_agent', index=1, 142 | number=2, type=9, cpp_type=9, label=1, 143 | has_default_value=False, default_value=_b("").decode('utf-8'), 144 | message_type=None, enum_type=None, containing_type=None, 145 | is_extension=False, extension_scope=None, 146 | serialized_options=None, file=DESCRIPTOR), 147 | ], 148 | extensions=[ 149 | ], 150 | nested_types=[], 151 | enum_types=[ 152 | ], 153 | serialized_options=None, 154 | is_extendable=False, 155 | syntax='proto3', 156 | extension_ranges=[], 157 | oneofs=[ 158 | ], 159 | serialized_start=161, 160 | serialized_end=249, 161 | ) 162 | 163 | 164 | _RESPONSEHEADER = _descriptor.Descriptor( 165 | name='ResponseHeader', 166 | full_name='nvidia.clara.platform.ResponseHeader', 167 | filename=None, 168 | file=DESCRIPTOR, 169 | containing_type=None, 170 | fields=[ 171 | _descriptor.FieldDescriptor( 172 | name='code', full_name='nvidia.clara.platform.ResponseHeader.code', index=0, 173 | number=1, type=17, cpp_type=1, label=1, 174 | has_default_value=False, default_value=0, 175 | message_type=None, enum_type=None, containing_type=None, 176 | is_extension=False, extension_scope=None, 177 | serialized_options=None, file=DESCRIPTOR), 178 | _descriptor.FieldDescriptor( 179 | name='messages', full_name='nvidia.clara.platform.ResponseHeader.messages', index=1, 180 | number=2, type=9, cpp_type=9, label=3, 181 | has_default_value=False, default_value=[], 182 | message_type=None, enum_type=None, containing_type=None, 183 | is_extension=False, extension_scope=None, 184 | serialized_options=None, file=DESCRIPTOR), 185 | ], 186 | extensions=[ 187 | ], 188 | nested_types=[], 189 | enum_types=[ 190 | ], 191 | serialized_options=None, 192 | is_extendable=False, 193 | syntax='proto3', 194 | extension_ranges=[], 195 | oneofs=[ 196 | ], 197 | serialized_start=251, 198 | serialized_end=299, 199 | ) 200 | 201 | 202 | _TIMESTAMP = _descriptor.Descriptor( 203 | name='Timestamp', 204 | full_name='nvidia.clara.platform.Timestamp', 205 | filename=None, 206 | file=DESCRIPTOR, 207 | containing_type=None, 208 | fields=[ 209 | 
_descriptor.FieldDescriptor( 210 | name='value', full_name='nvidia.clara.platform.Timestamp.value', index=0, 211 | number=1, type=18, cpp_type=2, label=1, 212 | has_default_value=False, default_value=0, 213 | message_type=None, enum_type=None, containing_type=None, 214 | is_extension=False, extension_scope=None, 215 | serialized_options=None, file=DESCRIPTOR), 216 | ], 217 | extensions=[ 218 | ], 219 | nested_types=[], 220 | enum_types=[ 221 | ], 222 | serialized_options=None, 223 | is_extendable=False, 224 | syntax='proto3', 225 | extension_ranges=[], 226 | oneofs=[ 227 | ], 228 | serialized_start=301, 229 | serialized_end=327, 230 | ) 231 | 232 | _REQUESTHEADER.fields_by_name['api_version'].message_type = _VERSION 233 | DESCRIPTOR.message_types_by_name['Identifier'] = _IDENTIFIER 234 | DESCRIPTOR.message_types_by_name['Version'] = _VERSION 235 | DESCRIPTOR.message_types_by_name['RequestHeader'] = _REQUESTHEADER 236 | DESCRIPTOR.message_types_by_name['ResponseHeader'] = _RESPONSEHEADER 237 | DESCRIPTOR.message_types_by_name['Timestamp'] = _TIMESTAMP 238 | _sym_db.RegisterFileDescriptor(DESCRIPTOR) 239 | 240 | Identifier = _reflection.GeneratedProtocolMessageType('Identifier', (_message.Message,), dict( 241 | DESCRIPTOR = _IDENTIFIER, 242 | __module__ = 'nvidia.clara.platform.common_pb2' 243 | # @@protoc_insertion_point(class_scope:nvidia.clara.platform.Identifier) 244 | )) 245 | _sym_db.RegisterMessage(Identifier) 246 | 247 | Version = _reflection.GeneratedProtocolMessageType('Version', (_message.Message,), dict( 248 | DESCRIPTOR = _VERSION, 249 | __module__ = 'nvidia.clara.platform.common_pb2' 250 | # @@protoc_insertion_point(class_scope:nvidia.clara.platform.Version) 251 | )) 252 | _sym_db.RegisterMessage(Version) 253 | 254 | RequestHeader = _reflection.GeneratedProtocolMessageType('RequestHeader', (_message.Message,), dict( 255 | DESCRIPTOR = _REQUESTHEADER, 256 | __module__ = 'nvidia.clara.platform.common_pb2' 257 | # @@protoc_insertion_point(class_scope:nvidia.clara.platform.RequestHeader) 258 | )) 259 | _sym_db.RegisterMessage(RequestHeader) 260 | 261 | ResponseHeader = _reflection.GeneratedProtocolMessageType('ResponseHeader', (_message.Message,), dict( 262 | DESCRIPTOR = _RESPONSEHEADER, 263 | __module__ = 'nvidia.clara.platform.common_pb2' 264 | # @@protoc_insertion_point(class_scope:nvidia.clara.platform.ResponseHeader) 265 | )) 266 | _sym_db.RegisterMessage(ResponseHeader) 267 | 268 | Timestamp = _reflection.GeneratedProtocolMessageType('Timestamp', (_message.Message,), dict( 269 | DESCRIPTOR = _TIMESTAMP, 270 | __module__ = 'nvidia.clara.platform.common_pb2' 271 | # @@protoc_insertion_point(class_scope:nvidia.clara.platform.Timestamp) 272 | )) 273 | _sym_db.RegisterMessage(Timestamp) 274 | 275 | 276 | DESCRIPTOR._options = None 277 | # @@protoc_insertion_point(module_scope) 278 | -------------------------------------------------------------------------------- /nvidia_clara/grpc/common_pb2_grpc.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 
5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | # -*- coding: utf-8 -*- 16 | # Generated by the protocol buffer compiler. DO NOT EDIT! 17 | # source: nvidia/clara/platform/common.proto 18 | 19 | import sys 20 | _b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) 21 | from google.protobuf import descriptor as _descriptor 22 | from google.protobuf import message as _message 23 | from google.protobuf import reflection as _reflection 24 | from google.protobuf import symbol_database as _symbol_database 25 | # @@protoc_insertion_point(imports) 26 | 27 | _sym_db = _symbol_database.Default() 28 | 29 | 30 | 31 | 32 | DESCRIPTOR = _descriptor.FileDescriptor( 33 | name='nvidia/clara/platform/common.proto', 34 | package='nvidia.clara.platform', 35 | syntax='proto3', 36 | serialized_options=_b('\n\031com.nvidia.clara.platformZ\004apis\252\002\032Nvidia.Clara.Platform.Grpc'), 37 | serialized_pb=_b('\n\"nvidia/clara/platform/common.proto\x12\x15nvidia.clara.platform\"\x1b\n\nIdentifier\x12\r\n\x05value\x18\x01 \x01(\t\"E\n\x07Version\x12\r\n\x05major\x18\x01 \x01(\x05\x12\r\n\x05minor\x18\x02 \x01(\x05\x12\r\n\x05patch\x18\x03 \x01(\x05\x12\r\n\x05label\x18\x04 \x01(\t\"X\n\rRequestHeader\x12\x33\n\x0b\x61pi_version\x18\x01 \x01(\x0b\x32\x1e.nvidia.clara.platform.Version\x12\x12\n\nuser_agent\x18\x02 \x01(\t\"0\n\x0eResponseHeader\x12\x0c\n\x04\x63ode\x18\x01 \x01(\x11\x12\x10\n\x08messages\x18\x02 \x03(\t\"\x1a\n\tTimestamp\x12\r\n\x05value\x18\x01 \x01(\x12\x42>\n\x19\x63om.nvidia.clara.platformZ\x04\x61pis\xaa\x02\x1aNvidia.Clara.Platform.Grpcb\x06proto3') 38 | ) 39 | 40 | 41 | 42 | 43 | _IDENTIFIER = _descriptor.Descriptor( 44 | name='Identifier', 45 | full_name='nvidia.clara.platform.Identifier', 46 | filename=None, 47 | file=DESCRIPTOR, 48 | containing_type=None, 49 | fields=[ 50 | _descriptor.FieldDescriptor( 51 | name='value', full_name='nvidia.clara.platform.Identifier.value', index=0, 52 | number=1, type=9, cpp_type=9, label=1, 53 | has_default_value=False, default_value=_b("").decode('utf-8'), 54 | message_type=None, enum_type=None, containing_type=None, 55 | is_extension=False, extension_scope=None, 56 | serialized_options=None, file=DESCRIPTOR), 57 | ], 58 | extensions=[ 59 | ], 60 | nested_types=[], 61 | enum_types=[ 62 | ], 63 | serialized_options=None, 64 | is_extendable=False, 65 | syntax='proto3', 66 | extension_ranges=[], 67 | oneofs=[ 68 | ], 69 | serialized_start=61, 70 | serialized_end=88, 71 | ) 72 | 73 | 74 | _VERSION = _descriptor.Descriptor( 75 | name='Version', 76 | full_name='nvidia.clara.platform.Version', 77 | filename=None, 78 | file=DESCRIPTOR, 79 | containing_type=None, 80 | fields=[ 81 | _descriptor.FieldDescriptor( 82 | name='major', full_name='nvidia.clara.platform.Version.major', index=0, 83 | number=1, type=5, cpp_type=1, label=1, 84 | has_default_value=False, default_value=0, 85 | message_type=None, enum_type=None, containing_type=None, 86 | is_extension=False, extension_scope=None, 87 | serialized_options=None, file=DESCRIPTOR), 88 | _descriptor.FieldDescriptor( 89 | name='minor', full_name='nvidia.clara.platform.Version.minor', index=1, 90 
| number=2, type=5, cpp_type=1, label=1, 91 | has_default_value=False, default_value=0, 92 | message_type=None, enum_type=None, containing_type=None, 93 | is_extension=False, extension_scope=None, 94 | serialized_options=None, file=DESCRIPTOR), 95 | _descriptor.FieldDescriptor( 96 | name='patch', full_name='nvidia.clara.platform.Version.patch', index=2, 97 | number=3, type=5, cpp_type=1, label=1, 98 | has_default_value=False, default_value=0, 99 | message_type=None, enum_type=None, containing_type=None, 100 | is_extension=False, extension_scope=None, 101 | serialized_options=None, file=DESCRIPTOR), 102 | _descriptor.FieldDescriptor( 103 | name='label', full_name='nvidia.clara.platform.Version.label', index=3, 104 | number=4, type=9, cpp_type=9, label=1, 105 | has_default_value=False, default_value=_b("").decode('utf-8'), 106 | message_type=None, enum_type=None, containing_type=None, 107 | is_extension=False, extension_scope=None, 108 | serialized_options=None, file=DESCRIPTOR), 109 | ], 110 | extensions=[ 111 | ], 112 | nested_types=[], 113 | enum_types=[ 114 | ], 115 | serialized_options=None, 116 | is_extendable=False, 117 | syntax='proto3', 118 | extension_ranges=[], 119 | oneofs=[ 120 | ], 121 | serialized_start=90, 122 | serialized_end=159, 123 | ) 124 | 125 | 126 | _REQUESTHEADER = _descriptor.Descriptor( 127 | name='RequestHeader', 128 | full_name='nvidia.clara.platform.RequestHeader', 129 | filename=None, 130 | file=DESCRIPTOR, 131 | containing_type=None, 132 | fields=[ 133 | _descriptor.FieldDescriptor( 134 | name='api_version', full_name='nvidia.clara.platform.RequestHeader.api_version', index=0, 135 | number=1, type=11, cpp_type=10, label=1, 136 | has_default_value=False, default_value=None, 137 | message_type=None, enum_type=None, containing_type=None, 138 | is_extension=False, extension_scope=None, 139 | serialized_options=None, file=DESCRIPTOR), 140 | _descriptor.FieldDescriptor( 141 | name='user_agent', full_name='nvidia.clara.platform.RequestHeader.user_agent', index=1, 142 | number=2, type=9, cpp_type=9, label=1, 143 | has_default_value=False, default_value=_b("").decode('utf-8'), 144 | message_type=None, enum_type=None, containing_type=None, 145 | is_extension=False, extension_scope=None, 146 | serialized_options=None, file=DESCRIPTOR), 147 | ], 148 | extensions=[ 149 | ], 150 | nested_types=[], 151 | enum_types=[ 152 | ], 153 | serialized_options=None, 154 | is_extendable=False, 155 | syntax='proto3', 156 | extension_ranges=[], 157 | oneofs=[ 158 | ], 159 | serialized_start=161, 160 | serialized_end=249, 161 | ) 162 | 163 | 164 | _RESPONSEHEADER = _descriptor.Descriptor( 165 | name='ResponseHeader', 166 | full_name='nvidia.clara.platform.ResponseHeader', 167 | filename=None, 168 | file=DESCRIPTOR, 169 | containing_type=None, 170 | fields=[ 171 | _descriptor.FieldDescriptor( 172 | name='code', full_name='nvidia.clara.platform.ResponseHeader.code', index=0, 173 | number=1, type=17, cpp_type=1, label=1, 174 | has_default_value=False, default_value=0, 175 | message_type=None, enum_type=None, containing_type=None, 176 | is_extension=False, extension_scope=None, 177 | serialized_options=None, file=DESCRIPTOR), 178 | _descriptor.FieldDescriptor( 179 | name='messages', full_name='nvidia.clara.platform.ResponseHeader.messages', index=1, 180 | number=2, type=9, cpp_type=9, label=3, 181 | has_default_value=False, default_value=[], 182 | message_type=None, enum_type=None, containing_type=None, 183 | is_extension=False, extension_scope=None, 184 | serialized_options=None, 
file=DESCRIPTOR), 185 | ], 186 | extensions=[ 187 | ], 188 | nested_types=[], 189 | enum_types=[ 190 | ], 191 | serialized_options=None, 192 | is_extendable=False, 193 | syntax='proto3', 194 | extension_ranges=[], 195 | oneofs=[ 196 | ], 197 | serialized_start=251, 198 | serialized_end=299, 199 | ) 200 | 201 | 202 | _TIMESTAMP = _descriptor.Descriptor( 203 | name='Timestamp', 204 | full_name='nvidia.clara.platform.Timestamp', 205 | filename=None, 206 | file=DESCRIPTOR, 207 | containing_type=None, 208 | fields=[ 209 | _descriptor.FieldDescriptor( 210 | name='value', full_name='nvidia.clara.platform.Timestamp.value', index=0, 211 | number=1, type=18, cpp_type=2, label=1, 212 | has_default_value=False, default_value=0, 213 | message_type=None, enum_type=None, containing_type=None, 214 | is_extension=False, extension_scope=None, 215 | serialized_options=None, file=DESCRIPTOR), 216 | ], 217 | extensions=[ 218 | ], 219 | nested_types=[], 220 | enum_types=[ 221 | ], 222 | serialized_options=None, 223 | is_extendable=False, 224 | syntax='proto3', 225 | extension_ranges=[], 226 | oneofs=[ 227 | ], 228 | serialized_start=301, 229 | serialized_end=327, 230 | ) 231 | 232 | _REQUESTHEADER.fields_by_name['api_version'].message_type = _VERSION 233 | DESCRIPTOR.message_types_by_name['Identifier'] = _IDENTIFIER 234 | DESCRIPTOR.message_types_by_name['Version'] = _VERSION 235 | DESCRIPTOR.message_types_by_name['RequestHeader'] = _REQUESTHEADER 236 | DESCRIPTOR.message_types_by_name['ResponseHeader'] = _RESPONSEHEADER 237 | DESCRIPTOR.message_types_by_name['Timestamp'] = _TIMESTAMP 238 | _sym_db.RegisterFileDescriptor(DESCRIPTOR) 239 | 240 | Identifier = _reflection.GeneratedProtocolMessageType('Identifier', (_message.Message,), dict( 241 | DESCRIPTOR = _IDENTIFIER, 242 | __module__ = 'nvidia.clara.platform.common_pb2' 243 | # @@protoc_insertion_point(class_scope:nvidia.clara.platform.Identifier) 244 | )) 245 | _sym_db.RegisterMessage(Identifier) 246 | 247 | Version = _reflection.GeneratedProtocolMessageType('Version', (_message.Message,), dict( 248 | DESCRIPTOR = _VERSION, 249 | __module__ = 'nvidia.clara.platform.common_pb2' 250 | # @@protoc_insertion_point(class_scope:nvidia.clara.platform.Version) 251 | )) 252 | _sym_db.RegisterMessage(Version) 253 | 254 | RequestHeader = _reflection.GeneratedProtocolMessageType('RequestHeader', (_message.Message,), dict( 255 | DESCRIPTOR = _REQUESTHEADER, 256 | __module__ = 'nvidia.clara.platform.common_pb2' 257 | # @@protoc_insertion_point(class_scope:nvidia.clara.platform.RequestHeader) 258 | )) 259 | _sym_db.RegisterMessage(RequestHeader) 260 | 261 | ResponseHeader = _reflection.GeneratedProtocolMessageType('ResponseHeader', (_message.Message,), dict( 262 | DESCRIPTOR = _RESPONSEHEADER, 263 | __module__ = 'nvidia.clara.platform.common_pb2' 264 | # @@protoc_insertion_point(class_scope:nvidia.clara.platform.ResponseHeader) 265 | )) 266 | _sym_db.RegisterMessage(ResponseHeader) 267 | 268 | Timestamp = _reflection.GeneratedProtocolMessageType('Timestamp', (_message.Message,), dict( 269 | DESCRIPTOR = _TIMESTAMP, 270 | __module__ = 'nvidia.clara.platform.common_pb2' 271 | # @@protoc_insertion_point(class_scope:nvidia.clara.platform.Timestamp) 272 | )) 273 | _sym_db.RegisterMessage(Timestamp) 274 | 275 | 276 | DESCRIPTOR._options = None 277 | # @@protoc_insertion_point(module_scope) 278 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | 2 | Apache 
License 3 | Version 2.0, January 2004 4 | http://www.apache.org/licenses/ 5 | 6 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 7 | 8 | 1. Definitions. 9 | 10 | "License" shall mean the terms and conditions for use, reproduction, 11 | and distribution as defined by Sections 1 through 9 of this document. 12 | 13 | "Licensor" shall mean the copyright owner or entity authorized by 14 | the copyright owner that is granting the License. 15 | 16 | "Legal Entity" shall mean the union of the acting entity and all 17 | other entities that control, are controlled by, or are under common 18 | control with that entity. For the purposes of this definition, 19 | "control" means (i) the power, direct or indirect, to cause the 20 | direction or management of such entity, whether by contract or 21 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 22 | outstanding shares, or (iii) beneficial ownership of such entity. 23 | 24 | "You" (or "Your") shall mean an individual or Legal Entity 25 | exercising permissions granted by this License. 26 | 27 | "Source" form shall mean the preferred form for making modifications, 28 | including but not limited to software source code, documentation 29 | source, and configuration files. 30 | 31 | "Object" form shall mean any form resulting from mechanical 32 | transformation or translation of a Source form, including but 33 | not limited to compiled object code, generated documentation, 34 | and conversions to other media types. 35 | 36 | "Work" shall mean the work of authorship, whether in Source or 37 | Object form, made available under the License, as indicated by a 38 | copyright notice that is included in or attached to the work 39 | (an example is provided in the Appendix below). 40 | 41 | "Derivative Works" shall mean any work, whether in Source or Object 42 | form, that is based on (or derived from) the Work and for which the 43 | editorial revisions, annotations, elaborations, or other modifications 44 | represent, as a whole, an original work of authorship. For the purposes 45 | of this License, Derivative Works shall not include works that remain 46 | separable from, or merely link (or bind by name) to the interfaces of, 47 | the Work and Derivative Works thereof. 48 | 49 | "Contribution" shall mean any work of authorship, including 50 | the original version of the Work and any modifications or additions 51 | to that Work or Derivative Works thereof, that is intentionally 52 | submitted to Licensor for inclusion in the Work by the copyright owner 53 | or by an individual or Legal Entity authorized to submit on behalf of 54 | the copyright owner. For the purposes of this definition, "submitted" 55 | means any form of electronic, verbal, or written communication sent 56 | to the Licensor or its representatives, including but not limited to 57 | communication on electronic mailing lists, source code control systems, 58 | and issue tracking systems that are managed by, or on behalf of, the 59 | Licensor for the purpose of discussing and improving the Work, but 60 | excluding communication that is conspicuously marked or otherwise 61 | designated in writing by the copyright owner as "Not a Contribution." 62 | 63 | "Contributor" shall mean Licensor and any individual or Legal Entity 64 | on behalf of whom a Contribution has been received by Licensor and 65 | subsequently incorporated within the Work. 66 | 67 | 2. Grant of Copyright License. 
Subject to the terms and conditions of 68 | this License, each Contributor hereby grants to You a perpetual, 69 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 70 | copyright license to reproduce, prepare Derivative Works of, 71 | publicly display, publicly perform, sublicense, and distribute the 72 | Work and such Derivative Works in Source or Object form. 73 | 74 | 3. Grant of Patent License. Subject to the terms and conditions of 75 | this License, each Contributor hereby grants to You a perpetual, 76 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 77 | (except as stated in this section) patent license to make, have made, 78 | use, offer to sell, sell, import, and otherwise transfer the Work, 79 | where such license applies only to those patent claims licensable 80 | by such Contributor that are necessarily infringed by their 81 | Contribution(s) alone or by combination of their Contribution(s) 82 | with the Work to which such Contribution(s) was submitted. If You 83 | institute patent litigation against any entity (including a 84 | cross-claim or counterclaim in a lawsuit) alleging that the Work 85 | or a Contribution incorporated within the Work constitutes direct 86 | or contributory patent infringement, then any patent licenses 87 | granted to You under this License for that Work shall terminate 88 | as of the date such litigation is filed. 89 | 90 | 4. Redistribution. You may reproduce and distribute copies of the 91 | Work or Derivative Works thereof in any medium, with or without 92 | modifications, and in Source or Object form, provided that You 93 | meet the following conditions: 94 | 95 | (a) You must give any other recipients of the Work or 96 | Derivative Works a copy of this License; and 97 | 98 | (b) You must cause any modified files to carry prominent notices 99 | stating that You changed the files; and 100 | 101 | (c) You must retain, in the Source form of any Derivative Works 102 | that You distribute, all copyright, patent, trademark, and 103 | attribution notices from the Source form of the Work, 104 | excluding those notices that do not pertain to any part of 105 | the Derivative Works; and 106 | 107 | (d) If the Work includes a "NOTICE" text file as part of its 108 | distribution, then any Derivative Works that You distribute must 109 | include a readable copy of the attribution notices contained 110 | within such NOTICE file, excluding those notices that do not 111 | pertain to any part of the Derivative Works, in at least one 112 | of the following places: within a NOTICE text file distributed 113 | as part of the Derivative Works; within the Source form or 114 | documentation, if provided along with the Derivative Works; or, 115 | within a display generated by the Derivative Works, if and 116 | wherever such third-party notices normally appear. The contents 117 | of the NOTICE file are for informational purposes only and 118 | do not modify the License. You may add Your own attribution 119 | notices within Derivative Works that You distribute, alongside 120 | or as an addendum to the NOTICE text from the Work, provided 121 | that such additional attribution notices cannot be construed 122 | as modifying the License. 
123 | 124 | You may add Your own copyright statement to Your modifications and 125 | may provide additional or different license terms and conditions 126 | for use, reproduction, or distribution of Your modifications, or 127 | for any such Derivative Works as a whole, provided Your use, 128 | reproduction, and distribution of the Work otherwise complies with 129 | the conditions stated in this License. 130 | 131 | 5. Submission of Contributions. Unless You explicitly state otherwise, 132 | any Contribution intentionally submitted for inclusion in the Work 133 | by You to the Licensor shall be under the terms and conditions of 134 | this License, without any additional terms or conditions. 135 | Notwithstanding the above, nothing herein shall supersede or modify 136 | the terms of any separate license agreement you may have executed 137 | with Licensor regarding such Contributions. 138 | 139 | 6. Trademarks. This License does not grant permission to use the trade 140 | names, trademarks, service marks, or product names of the Licensor, 141 | except as required for reasonable and customary use in describing the 142 | origin of the Work and reproducing the content of the NOTICE file. 143 | 144 | 7. Disclaimer of Warranty. Unless required by applicable law or 145 | agreed to in writing, Licensor provides the Work (and each 146 | Contributor provides its Contributions) on an "AS IS" BASIS, 147 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 148 | implied, including, without limitation, any warranties or conditions 149 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 150 | PARTICULAR PURPOSE. You are solely responsible for determining the 151 | appropriateness of using or redistributing the Work and assume any 152 | risks associated with Your exercise of permissions under this License. 153 | 154 | 8. Limitation of Liability. In no event and under no legal theory, 155 | whether in tort (including negligence), contract, or otherwise, 156 | unless required by applicable law (such as deliberate and grossly 157 | negligent acts) or agreed to in writing, shall any Contributor be 158 | liable to You for damages, including any direct, indirect, special, 159 | incidental, or consequential damages of any character arising as a 160 | result of this License or out of the use or inability to use the 161 | Work (including but not limited to damages for loss of goodwill, 162 | work stoppage, computer failure or malfunction, or any and all 163 | other commercial damages or losses), even if such Contributor 164 | has been advised of the possibility of such damages. 165 | 166 | 9. Accepting Warranty or Additional Liability. While redistributing 167 | the Work or Derivative Works thereof, You may choose to offer, 168 | and charge a fee for, acceptance of support, warranty, indemnity, 169 | or other liability obligations and/or rights consistent with this 170 | License. However, in accepting such obligations, You may act only 171 | on Your own behalf and on Your sole responsibility, not on behalf 172 | of any other Contributor, and only if You agree to indemnify, 173 | defend, and hold each Contributor harmless for any liability 174 | incurred by, or claims asserted against, such Contributor by reason 175 | of your accepting any such warranty or additional liability. 176 | 177 | END OF TERMS AND CONDITIONS 178 | 179 | APPENDIX: How to apply the Apache License to your work. 
180 | 181 | To apply the Apache License to your work, attach the following 182 | boilerplate notice, with the fields enclosed by brackets "[]" 183 | replaced with your own identifying information. (Don't include 184 | the brackets!) The text should be enclosed in the appropriate 185 | comment syntax for the file format. We also recommend that a 186 | file or class name and description of purpose be included on the 187 | same "printed page" as the copyright notice for easier 188 | identification within third-party archives. 189 | 190 | Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved. 191 | 192 | Licensed under the Apache License, Version 2.0 (the "License"); 193 | you may not use this file except in compliance with the License. 194 | You may obtain a copy of the License at 195 | 196 | http://www.apache.org/licenses/LICENSE-2.0 197 | 198 | Unless required by applicable law or agreed to in writing, software 199 | distributed under the License is distributed on an "AS IS" BASIS, 200 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 201 | See the License for the specific language governing permissions and 202 | limitations under the License. -------------------------------------------------------------------------------- /nvidia_clara/grpc/metrics_pb2.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | # -*- coding: utf-8 -*- 16 | # Generated by the protocol buffer compiler. DO NOT EDIT! 
17 | # source: nvidia/clara/platform/node-monitor/metrics.proto 18 | 19 | import sys 20 | _b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) 21 | from google.protobuf import descriptor as _descriptor 22 | from google.protobuf import message as _message 23 | from google.protobuf import reflection as _reflection 24 | from google.protobuf import symbol_database as _symbol_database 25 | # @@protoc_insertion_point(imports) 26 | 27 | _sym_db = _symbol_database.Default() 28 | 29 | 30 | from nvidia_clara.grpc import common_pb2 as nvidia_dot_clara_dot_platform_dot_common__pb2 31 | 32 | from nvidia_clara.grpc.common_pb2 import * 33 | 34 | DESCRIPTOR = _descriptor.FileDescriptor( 35 | name='nvidia/clara/platform/node-monitor/metrics.proto', 36 | package='nvidia.clara.platform.node_monitor', 37 | syntax='proto3', 38 | serialized_options=_b('\n%com.nvidia.clara.platform.nodemonitorZ\004apis\252\002&Nvidia.Clara.Platform.NodeMonitor.Grpc'), 39 | serialized_pb=_b('\n0nvidia/clara/platform/node-monitor/metrics.proto\x12\"nvidia.clara.platform.node_monitor\x1a\"nvidia/clara/platform/common.proto\"\xbb\x02\n\nGpuDetails\x12\x11\n\tdevice_id\x18\x01 \x01(\x05\x12G\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x39.nvidia.clara.platform.node_monitor.GpuDetails.GpuMetrics\x12\x33\n\ttimestamp\x18\x03 \x01(\x0b\x32 .nvidia.clara.platform.Timestamp\x1a\x9b\x01\n\nGpuMetrics\x12\x1a\n\x12memory_utilization\x18\x01 \x01(\x02\x12\x17\n\x0fgpu_utilization\x18\x02 \x01(\x02\x12\x12\n\nfree_bar_1\x18\x03 \x01(\x03\x12\x12\n\nused_bar_1\x18\x04 \x01(\x03\x12\x17\n\x0f\x66ree_gpu_memory\x18\x05 \x01(\x03\x12\x17\n\x0fused_gpu_memory\x18\x06 \x01(\x03\"P\n\x18MonitorGpuMetricsRequest\x12\x34\n\x06header\x18\x01 \x01(\x0b\x32$.nvidia.clara.platform.RequestHeader\"\x97\x01\n\x19MonitorGpuMetricsResponse\x12\x35\n\x06header\x18\x01 \x01(\x0b\x32%.nvidia.clara.platform.ResponseHeader\x12\x43\n\x0bgpu_details\x18\x02 \x03(\x0b\x32..nvidia.clara.platform.node_monitor.GpuDetails2\x97\x01\n\x07Monitor\x12\x8b\x01\n\nGpuMetrics\x12<.nvidia.clara.platform.node_monitor.MonitorGpuMetricsRequest\x1a=.nvidia.clara.platform.node_monitor.MonitorGpuMetricsResponse0\x01\x42V\n%com.nvidia.clara.platform.nodemonitorZ\x04\x61pis\xaa\x02&Nvidia.Clara.Platform.NodeMonitor.GrpcP\x00\x62\x06proto3') 40 | , 41 | dependencies=[nvidia_dot_clara_dot_platform_dot_common__pb2.DESCRIPTOR,], 42 | public_dependencies=[nvidia_dot_clara_dot_platform_dot_common__pb2.DESCRIPTOR,]) 43 | 44 | 45 | 46 | 47 | _GPUDETAILS_GPUMETRICS = _descriptor.Descriptor( 48 | name='GpuMetrics', 49 | full_name='nvidia.clara.platform.node_monitor.GpuDetails.GpuMetrics', 50 | filename=None, 51 | file=DESCRIPTOR, 52 | containing_type=None, 53 | fields=[ 54 | _descriptor.FieldDescriptor( 55 | name='memory_utilization', full_name='nvidia.clara.platform.node_monitor.GpuDetails.GpuMetrics.memory_utilization', index=0, 56 | number=1, type=2, cpp_type=6, label=1, 57 | has_default_value=False, default_value=float(0), 58 | message_type=None, enum_type=None, containing_type=None, 59 | is_extension=False, extension_scope=None, 60 | serialized_options=None, file=DESCRIPTOR), 61 | _descriptor.FieldDescriptor( 62 | name='gpu_utilization', full_name='nvidia.clara.platform.node_monitor.GpuDetails.GpuMetrics.gpu_utilization', index=1, 63 | number=2, type=2, cpp_type=6, label=1, 64 | has_default_value=False, default_value=float(0), 65 | message_type=None, enum_type=None, containing_type=None, 66 | is_extension=False, extension_scope=None, 67 | serialized_options=None, 
file=DESCRIPTOR), 68 | _descriptor.FieldDescriptor( 69 | name='free_bar_1', full_name='nvidia.clara.platform.node_monitor.GpuDetails.GpuMetrics.free_bar_1', index=2, 70 | number=3, type=3, cpp_type=2, label=1, 71 | has_default_value=False, default_value=0, 72 | message_type=None, enum_type=None, containing_type=None, 73 | is_extension=False, extension_scope=None, 74 | serialized_options=None, file=DESCRIPTOR), 75 | _descriptor.FieldDescriptor( 76 | name='used_bar_1', full_name='nvidia.clara.platform.node_monitor.GpuDetails.GpuMetrics.used_bar_1', index=3, 77 | number=4, type=3, cpp_type=2, label=1, 78 | has_default_value=False, default_value=0, 79 | message_type=None, enum_type=None, containing_type=None, 80 | is_extension=False, extension_scope=None, 81 | serialized_options=None, file=DESCRIPTOR), 82 | _descriptor.FieldDescriptor( 83 | name='free_gpu_memory', full_name='nvidia.clara.platform.node_monitor.GpuDetails.GpuMetrics.free_gpu_memory', index=4, 84 | number=5, type=3, cpp_type=2, label=1, 85 | has_default_value=False, default_value=0, 86 | message_type=None, enum_type=None, containing_type=None, 87 | is_extension=False, extension_scope=None, 88 | serialized_options=None, file=DESCRIPTOR), 89 | _descriptor.FieldDescriptor( 90 | name='used_gpu_memory', full_name='nvidia.clara.platform.node_monitor.GpuDetails.GpuMetrics.used_gpu_memory', index=5, 91 | number=6, type=3, cpp_type=2, label=1, 92 | has_default_value=False, default_value=0, 93 | message_type=None, enum_type=None, containing_type=None, 94 | is_extension=False, extension_scope=None, 95 | serialized_options=None, file=DESCRIPTOR), 96 | ], 97 | extensions=[ 98 | ], 99 | nested_types=[], 100 | enum_types=[ 101 | ], 102 | serialized_options=None, 103 | is_extendable=False, 104 | syntax='proto3', 105 | extension_ranges=[], 106 | oneofs=[ 107 | ], 108 | serialized_start=285, 109 | serialized_end=440, 110 | ) 111 | 112 | _GPUDETAILS = _descriptor.Descriptor( 113 | name='GpuDetails', 114 | full_name='nvidia.clara.platform.node_monitor.GpuDetails', 115 | filename=None, 116 | file=DESCRIPTOR, 117 | containing_type=None, 118 | fields=[ 119 | _descriptor.FieldDescriptor( 120 | name='device_id', full_name='nvidia.clara.platform.node_monitor.GpuDetails.device_id', index=0, 121 | number=1, type=5, cpp_type=1, label=1, 122 | has_default_value=False, default_value=0, 123 | message_type=None, enum_type=None, containing_type=None, 124 | is_extension=False, extension_scope=None, 125 | serialized_options=None, file=DESCRIPTOR), 126 | _descriptor.FieldDescriptor( 127 | name='data', full_name='nvidia.clara.platform.node_monitor.GpuDetails.data', index=1, 128 | number=2, type=11, cpp_type=10, label=1, 129 | has_default_value=False, default_value=None, 130 | message_type=None, enum_type=None, containing_type=None, 131 | is_extension=False, extension_scope=None, 132 | serialized_options=None, file=DESCRIPTOR), 133 | _descriptor.FieldDescriptor( 134 | name='timestamp', full_name='nvidia.clara.platform.node_monitor.GpuDetails.timestamp', index=2, 135 | number=3, type=11, cpp_type=10, label=1, 136 | has_default_value=False, default_value=None, 137 | message_type=None, enum_type=None, containing_type=None, 138 | is_extension=False, extension_scope=None, 139 | serialized_options=None, file=DESCRIPTOR), 140 | ], 141 | extensions=[ 142 | ], 143 | nested_types=[_GPUDETAILS_GPUMETRICS, ], 144 | enum_types=[ 145 | ], 146 | serialized_options=None, 147 | is_extendable=False, 148 | syntax='proto3', 149 | extension_ranges=[], 150 | oneofs=[ 151 | ], 152 | 
serialized_start=125, 153 | serialized_end=440, 154 | ) 155 | 156 | 157 | _MONITORGPUMETRICSREQUEST = _descriptor.Descriptor( 158 | name='MonitorGpuMetricsRequest', 159 | full_name='nvidia.clara.platform.node_monitor.MonitorGpuMetricsRequest', 160 | filename=None, 161 | file=DESCRIPTOR, 162 | containing_type=None, 163 | fields=[ 164 | _descriptor.FieldDescriptor( 165 | name='header', full_name='nvidia.clara.platform.node_monitor.MonitorGpuMetricsRequest.header', index=0, 166 | number=1, type=11, cpp_type=10, label=1, 167 | has_default_value=False, default_value=None, 168 | message_type=None, enum_type=None, containing_type=None, 169 | is_extension=False, extension_scope=None, 170 | serialized_options=None, file=DESCRIPTOR), 171 | ], 172 | extensions=[ 173 | ], 174 | nested_types=[], 175 | enum_types=[ 176 | ], 177 | serialized_options=None, 178 | is_extendable=False, 179 | syntax='proto3', 180 | extension_ranges=[], 181 | oneofs=[ 182 | ], 183 | serialized_start=442, 184 | serialized_end=522, 185 | ) 186 | 187 | 188 | _MONITORGPUMETRICSRESPONSE = _descriptor.Descriptor( 189 | name='MonitorGpuMetricsResponse', 190 | full_name='nvidia.clara.platform.node_monitor.MonitorGpuMetricsResponse', 191 | filename=None, 192 | file=DESCRIPTOR, 193 | containing_type=None, 194 | fields=[ 195 | _descriptor.FieldDescriptor( 196 | name='header', full_name='nvidia.clara.platform.node_monitor.MonitorGpuMetricsResponse.header', index=0, 197 | number=1, type=11, cpp_type=10, label=1, 198 | has_default_value=False, default_value=None, 199 | message_type=None, enum_type=None, containing_type=None, 200 | is_extension=False, extension_scope=None, 201 | serialized_options=None, file=DESCRIPTOR), 202 | _descriptor.FieldDescriptor( 203 | name='gpu_details', full_name='nvidia.clara.platform.node_monitor.MonitorGpuMetricsResponse.gpu_details', index=1, 204 | number=2, type=11, cpp_type=10, label=3, 205 | has_default_value=False, default_value=[], 206 | message_type=None, enum_type=None, containing_type=None, 207 | is_extension=False, extension_scope=None, 208 | serialized_options=None, file=DESCRIPTOR), 209 | ], 210 | extensions=[ 211 | ], 212 | nested_types=[], 213 | enum_types=[ 214 | ], 215 | serialized_options=None, 216 | is_extendable=False, 217 | syntax='proto3', 218 | extension_ranges=[], 219 | oneofs=[ 220 | ], 221 | serialized_start=525, 222 | serialized_end=676, 223 | ) 224 | 225 | _GPUDETAILS_GPUMETRICS.containing_type = _GPUDETAILS 226 | _GPUDETAILS.fields_by_name['data'].message_type = _GPUDETAILS_GPUMETRICS 227 | _GPUDETAILS.fields_by_name['timestamp'].message_type = nvidia_dot_clara_dot_platform_dot_common__pb2._TIMESTAMP 228 | _MONITORGPUMETRICSREQUEST.fields_by_name['header'].message_type = nvidia_dot_clara_dot_platform_dot_common__pb2._REQUESTHEADER 229 | _MONITORGPUMETRICSRESPONSE.fields_by_name['header'].message_type = nvidia_dot_clara_dot_platform_dot_common__pb2._RESPONSEHEADER 230 | _MONITORGPUMETRICSRESPONSE.fields_by_name['gpu_details'].message_type = _GPUDETAILS 231 | DESCRIPTOR.message_types_by_name['GpuDetails'] = _GPUDETAILS 232 | DESCRIPTOR.message_types_by_name['MonitorGpuMetricsRequest'] = _MONITORGPUMETRICSREQUEST 233 | DESCRIPTOR.message_types_by_name['MonitorGpuMetricsResponse'] = _MONITORGPUMETRICSRESPONSE 234 | _sym_db.RegisterFileDescriptor(DESCRIPTOR) 235 | 236 | GpuDetails = _reflection.GeneratedProtocolMessageType('GpuDetails', (_message.Message,), dict( 237 | 238 | GpuMetrics = _reflection.GeneratedProtocolMessageType('GpuMetrics', (_message.Message,), dict( 239 | DESCRIPTOR = 
_GPUDETAILS_GPUMETRICS, 240 | __module__ = 'nvidia.clara.platform.node_monitor.metrics_pb2' 241 | # @@protoc_insertion_point(class_scope:nvidia.clara.platform.node_monitor.GpuDetails.GpuMetrics) 242 | )) 243 | , 244 | DESCRIPTOR = _GPUDETAILS, 245 | __module__ = 'nvidia.clara.platform.node_monitor.metrics_pb2' 246 | # @@protoc_insertion_point(class_scope:nvidia.clara.platform.node_monitor.GpuDetails) 247 | )) 248 | _sym_db.RegisterMessage(GpuDetails) 249 | _sym_db.RegisterMessage(GpuDetails.GpuMetrics) 250 | 251 | MonitorGpuMetricsRequest = _reflection.GeneratedProtocolMessageType('MonitorGpuMetricsRequest', (_message.Message,), dict( 252 | DESCRIPTOR = _MONITORGPUMETRICSREQUEST, 253 | __module__ = 'nvidia.clara.platform.node_monitor.metrics_pb2' 254 | # @@protoc_insertion_point(class_scope:nvidia.clara.platform.node_monitor.MonitorGpuMetricsRequest) 255 | )) 256 | _sym_db.RegisterMessage(MonitorGpuMetricsRequest) 257 | 258 | MonitorGpuMetricsResponse = _reflection.GeneratedProtocolMessageType('MonitorGpuMetricsResponse', (_message.Message,), dict( 259 | DESCRIPTOR = _MONITORGPUMETRICSRESPONSE, 260 | __module__ = 'nvidia.clara.platform.node_monitor.metrics_pb2' 261 | # @@protoc_insertion_point(class_scope:nvidia.clara.platform.node_monitor.MonitorGpuMetricsResponse) 262 | )) 263 | _sym_db.RegisterMessage(MonitorGpuMetricsResponse) 264 | 265 | 266 | DESCRIPTOR._options = None 267 | 268 | _MONITOR = _descriptor.ServiceDescriptor( 269 | name='Monitor', 270 | full_name='nvidia.clara.platform.node_monitor.Monitor', 271 | file=DESCRIPTOR, 272 | index=0, 273 | serialized_options=None, 274 | serialized_start=679, 275 | serialized_end=830, 276 | methods=[ 277 | _descriptor.MethodDescriptor( 278 | name='GpuMetrics', 279 | full_name='nvidia.clara.platform.node_monitor.Monitor.GpuMetrics', 280 | index=0, 281 | containing_service=None, 282 | input_type=_MONITORGPUMETRICSREQUEST, 283 | output_type=_MONITORGPUMETRICSRESPONSE, 284 | serialized_options=None, 285 | ), 286 | ]) 287 | _sym_db.RegisterServiceDescriptor(_MONITOR) 288 | 289 | DESCRIPTOR.services_by_name['Monitor'] = _MONITOR 290 | 291 | # @@protoc_insertion_point(module_scope) 292 | -------------------------------------------------------------------------------- /nvidia_clara/job_types.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | from datetime import datetime 16 | from enum import Enum 17 | from typing import List, Mapping, TypeVar 18 | from nvidia_clara.grpc import common_pb2, jobs_pb2 19 | import nvidia_clara.payload_types as payload_types 20 | import nvidia_clara.pipeline_types as pipeline_types 21 | 22 | T = TypeVar('T') 23 | 24 | 25 | class JobPriority(Enum): 26 | """ 27 | Priority of a pipeline job. 28 | A job's priority affects the order it will be scheduled once "JobsClient.StartJob()" is called. 
29 | """ 30 | 31 | # The job priority is unknown. 32 | Unkown = 0 33 | 34 | # Lower than normal priority. 35 | # Lower priority jobs are queued like "Normal" and "Higher" priority jobs, but are scheduled less frequently. 36 | Lower = jobs_pb2.JOB_PRIORITY_LOWER 37 | 38 | # Normal, or default, priority. 39 | # Normal priority jobs are queued like "Lower" and "Higher" priority jobs. 40 | # Normal priority jobs are scheduled more frequently than lower priority jobs and less frequently then higher priority jobs. 41 | Normal = jobs_pb2.JOB_PRIORITY_NORMAL 42 | 43 | # Higher than normal priority. 44 | # Higher priority jobs are queued like "Lower" and "Normal"priority jobs, but are scheduled more frequently. 45 | Higher = jobs_pb2.JOB_PRIORITY_HIGHER 46 | 47 | # Immediate priority jobs are queued in separate queue which must emptied before any other priority jobs can be scheduled. 48 | Immediate = jobs_pb2.JOB_PRIORITY_IMMEDIATE 49 | 50 | # The maximum supported value for "JobPriority" 51 | Maximum = jobs_pb2.JOB_PRIORITY_IMMEDIATE 52 | 53 | # The minimum supported value for "JobPriority" 54 | Minimum = jobs_pb2.JOB_PRIORITY_LOWER 55 | 56 | 57 | class JobState(Enum): 58 | """ 59 | State of a pipeline job. 60 | Related to "JobStatus" 61 | """ 62 | 63 | # The job is unknown or in an unknown state. 64 | Unknown = 0 65 | 66 | # The job has been accepted and queued by the server, but has not yet started running. 67 | Pending = jobs_pb2.JOB_STATE_PENDING 68 | 69 | # The job is currently running. 70 | Running = jobs_pb2.JOB_STATE_RUNNING 71 | 72 | # The job has stopped runing. 73 | Stopped = jobs_pb2.JOB_STATE_STOPPED 74 | 75 | # Maximum supported value of "JobState" 76 | Maximum = Stopped 77 | 78 | # Minimum supported value of "JobState" 79 | Minimum = Pending 80 | 81 | 82 | class JobStatus(Enum): 83 | """ 84 | Status of pipeline job. 85 | Related to "JobState" 86 | """ 87 | # The job is unknown or the status of the job is unknown. 88 | Unknown = 0 89 | 90 | # The job has been canceled. 91 | Canceled = jobs_pb2.JOB_STATUS_CANCELED 92 | 93 | # The job has encountered a terminal error. 94 | Faulted = jobs_pb2.JOB_STATUS_FAULTED 95 | 96 | # The job is healthy. 97 | # If stopped, the job has completed successfully. 98 | Healthy = jobs_pb2.JOB_STATUS_HEALTHY 99 | 100 | # The job was evicted 101 | Evicted = jobs_pb2.JOB_STATUS_EVICTED 102 | 103 | # The job was terminated 104 | Terminated = jobs_pb2.JOB_STATUS_TERMINATED 105 | 106 | # Maximum supported value of "JobStatus" 107 | Maximum = Canceled 108 | 109 | # Minimum supported value of "JobStatus" 110 | Minimum = Healthy 111 | 112 | 113 | class JobOperatorStatus(Enum): 114 | """ 115 | Status of an operator in a job. 116 | """ 117 | # The operator is unknownor the status of the operator is unknown. 118 | Unknown = jobs_pb2.JOB_OPERATOR_STATUS_UNKNOWN 119 | 120 | # The operator has been accepted and queued by the server, but has not yet started running. 121 | Pending = jobs_pb2.JOB_OPERATOR_STATUS_PENDING 122 | 123 | # The operator is currently running. 124 | Running = jobs_pb2.JOB_OPERATOR_STATUS_RUNNING 125 | 126 | # The operator has completed successfully. 127 | Completed = jobs_pb2.JOB_OPERATOR_STATUS_COMPLETED 128 | 129 | # The operator has encountered an error. 130 | Faulted = jobs_pb2.JOB_OPERATOR_STATUS_FAULTED 131 | 132 | 133 | class JobId: 134 | """ 135 | Unique identifier for a Clara Pipeline Job. 136 | """ 137 | 138 | def __init__(self, value: str): 139 | """ 140 | Creates Unique Identifier Object for a Clara Pipeline Job. 
141 | """ 142 | if (value is None) or (value == ""): 143 | raise Exception("Job identifier value must be initialized.") 144 | 145 | self._value = value 146 | 147 | @property 148 | def value(self): 149 | return self._value 150 | 151 | def __eq__(self, other) -> bool: 152 | return self._value == other.value 153 | 154 | def __ne__(self, other) -> bool: 155 | return not (self == other) 156 | 157 | def __repr__(self): 158 | return "%s" % self._value 159 | 160 | def __str__(self): 161 | return "%s" % self._value 162 | 163 | def __hash__(self): 164 | return hash(self._value) 165 | 166 | def to_grpc_value(self): 167 | id = common_pb2.Identifier() 168 | id.value = self._value 169 | return id 170 | 171 | 172 | class JobToken: 173 | 174 | def __init__(self, job_id: JobId = None, job_state: JobState = None, job_status: JobStatus = None, 175 | job_priority: JobPriority = None): 176 | self._job_id = job_id 177 | self._job_state = job_state 178 | self._job_status = job_status 179 | self._job_priority = job_priority 180 | 181 | @property 182 | def job_id(self) -> JobId: 183 | """Unique identifier of the job.""" 184 | return self._job_id 185 | 186 | @job_id.setter 187 | def job_id(self, job_id: JobId): 188 | """Unique identifier of the job.""" 189 | self._job_id = job_id 190 | 191 | @property 192 | def job_state(self) -> JobState: 193 | """Current state of the job.""" 194 | return self._job_state 195 | 196 | @job_state.setter 197 | def job_state(self, job_state: JobState): 198 | """Current state of the job.""" 199 | self._job_state = job_state 200 | 201 | @property 202 | def job_status(self) -> JobStatus: 203 | """Current status of the job.""" 204 | return self._job_status 205 | 206 | @job_status.setter 207 | def job_status(self, job_status: JobStatus): 208 | """Current status of the job.""" 209 | self._job_status = job_status 210 | 211 | @property 212 | def job_priority(self) -> JobPriority: 213 | """Priority of the job.""" 214 | return self._job_priority 215 | 216 | @job_priority.setter 217 | def job_priority(self, job_priority: JobPriority): 218 | """Priority of the job.""" 219 | self._job_priority = job_priority 220 | 221 | 222 | class JobInfo(JobToken): 223 | 224 | def __init__(self, job_id: JobId = None, job_state: JobState = None, job_status: JobStatus = None, 225 | job_priority: JobPriority = None, date_created: datetime = None, date_started: datetime = None, 226 | date_stopped: datetime = None, name: str = None, payload_id: payload_types.PayloadId = None, 227 | pipeline_id: pipeline_types.PipelineId = None, metadata: Mapping[str, str] = None): 228 | super().__init__( 229 | job_id=job_id, 230 | job_state=job_state, 231 | job_status=job_status, 232 | job_priority=job_priority, 233 | ) 234 | if metadata is None: 235 | metadata = dict() 236 | 237 | self._date_created = date_created 238 | self._date_started = date_started 239 | self._date_stopped = date_stopped 240 | self._name = name 241 | self._payload_id = payload_id 242 | self._pipeline_id = pipeline_id 243 | self._metadata = metadata 244 | 245 | @property 246 | def date_created(self) -> datetime: 247 | return self._date_created 248 | 249 | @date_created.setter 250 | def date_created(self, date_created: datetime): 251 | self._date_created = date_created 252 | 253 | @property 254 | def date_started(self) -> datetime: 255 | return self._date_started 256 | 257 | @date_started.setter 258 | def date_started(self, date_started: datetime): 259 | self._date_started = date_started 260 | 261 | @property 262 | def date_stopped(self) -> datetime: 263 | return
self._date_stopped 264 | 265 | @date_stopped.setter 266 | def date_stopped(self, date_stopped: datetime): 267 | self._date_stopped = date_stopped 268 | 269 | @property 270 | def name(self) -> str: 271 | return self._name 272 | 273 | @name.setter 274 | def name(self, name: str): 275 | self._name = name 276 | 277 | @property 278 | def payload_id(self) -> payload_types.PayloadId: 279 | return self._payload_id 280 | 281 | @payload_id.setter 282 | def payload_id(self, payload_id: payload_types.PayloadId): 283 | self._payload_id = payload_id 284 | 285 | @property 286 | def pipeline_id(self) -> pipeline_types.PipelineId: 287 | return self._pipeline_id 288 | 289 | @pipeline_id.setter 290 | def pipeline_id(self, pipeline_id: pipeline_types.PipelineId): 291 | self._pipeline_id = pipeline_id 292 | 293 | @property 294 | def metadata(self) -> Mapping[str, str]: 295 | """ 296 | Metadata (set of key/value pairs) associated with the job 297 | """ 298 | return self._metadata 299 | 300 | @metadata.setter 301 | def metadata(self, metadata: Mapping[str, str]): 302 | """ 303 | Metadata (set of key/value pairs) associated with the job 304 | """ 305 | self._metadata = metadata 306 | 307 | 308 | class JobFilter: 309 | 310 | def __init__(self, completed_before: datetime = None, created_after: datetime = None, 311 | has_job_state: List[JobState] = None, has_job_status: List[JobStatus] = None, 312 | pipeline_ids: List[pipeline_types.PipelineId] = None): 313 | if has_job_state is None: 314 | has_job_state = [] 315 | if has_job_status is None: 316 | has_job_status = [] 317 | if pipeline_ids is None: 318 | pipeline_ids = [] 319 | 320 | self._completed_before = completed_before 321 | self._created_after = created_after 322 | self._has_job_state = has_job_state 323 | self._has_job_status = has_job_status 324 | self._pipeline_ids = pipeline_ids 325 | 326 | @property 327 | def completed_before(self) -> datetime: 328 | """When applied, only jobs completed before the supplied date will be returned.""" 329 | return self._completed_before 330 | 331 | @completed_before.setter 332 | def completed_before(self, completed_before: datetime): 333 | """When applied, only jobs completed before the supplied date will be returned.""" 334 | self._completed_before = completed_before 335 | 336 | @property 337 | def created_after(self) -> datetime: 338 | """When applied, only jobs created after the supplied date will be returned.""" 339 | return self._created_after 340 | 341 | @created_after.setter 342 | def created_after(self, created_after: datetime): 343 | """When applied, only jobs created after the supplied date will be returned.""" 344 | self._created_after = created_after 345 | 346 | @property 347 | def has_job_state(self) -> List[JobState]: 348 | """When applied, only jobs having a provided state value will be returned.""" 349 | return self._has_job_state 350 | 351 | @has_job_state.setter 352 | def has_job_state(self, has_job_state: List[JobState]): 353 | """When applied, only jobs having a provided state value will be returned.""" 354 | self._has_job_state = has_job_state 355 | 356 | @property 357 | def has_job_status(self) -> List[JobStatus]: 358 | """When applied, only jobs having a provided status value will be returned.""" 359 | return self._has_job_status 360 | 361 | @has_job_status.setter 362 | def has_job_status(self, has_job_status: List[JobStatus]): 363 | """When applied, only jobs having a provided status value will be returned.""" 364 | self._has_job_status = has_job_status 365 | 366 | @property 367 | def 
pipeline_ids(self) -> List[pipeline_types.PipelineId]: 368 | """When applied, only jobs with matching pipeline identifiers will be returned.""" 369 | return self._pipeline_ids 370 | 371 | @pipeline_ids.setter 372 | def pipeline_ids(self, pipeline_ids: List[pipeline_types.PipelineId]): 373 | """When applied, only jobs with matching pipeline identifiers will be returned.""" 374 | self._pipeline_ids = pipeline_ids 375 | 376 | 377 | class JobDetails(JobInfo): 378 | 379 | def __init__(self, job_id: JobId = None, job_state: JobState = None, job_status: JobStatus = None, 380 | job_priority: JobPriority = None, date_created: datetime = None, date_started: datetime = None, 381 | date_stopped: datetime = None, name: str = None, payload_id: payload_types.PayloadId = None, 382 | pipeline_id: pipeline_types.PipelineId = None, operator_details: Mapping[str, Mapping[str, T]] = None, 383 | messages: List[str] = None, metadata: Mapping[str, str] = None): 384 | if metadata is None: 385 | metadata = dict() 386 | 387 | super().__init__( 388 | job_id=job_id, 389 | job_state=job_state, 390 | job_status=job_status, 391 | job_priority=job_priority, 392 | date_created=date_created, 393 | date_started=date_started, 394 | date_stopped=date_stopped, 395 | name=name, 396 | payload_id=payload_id, 397 | pipeline_id=pipeline_id, 398 | metadata=metadata 399 | ) 400 | 401 | if messages is None: 402 | messages = [] 403 | if operator_details is None: 404 | operator_details = dict() 405 | 406 | self._messages = messages 407 | self._operator_details = operator_details 408 | 409 | @property 410 | def messages(self) -> List[str]: 411 | """List of messages reported by the job.""" 412 | return self._messages 413 | 414 | @messages.setter 415 | def messages(self, messages: List[str]): 416 | """List of messages reported by the job.""" 417 | self._messages = messages 418 | 419 | @property 420 | def operator_details(self) -> Mapping[str, Mapping[str, T]]: 421 | """Dictionary mapping operator names to operator details""" 422 | return self._operator_details 423 | 424 | @operator_details.setter 425 | def operator_details(self, operator_details: Mapping[str, Mapping[str, T]]): 426 | """Dictionary mapping operator names to operator details""" 427 | self._operator_details = operator_details 428 | -------------------------------------------------------------------------------- /tests/test_jobs_client.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 
14 | 15 | import datetime 16 | 17 | import nvidia_clara.grpc.common_pb2 as common_pb2 18 | import nvidia_clara.grpc.jobs_pb2 as jobs_pb2 19 | 20 | from nvidia_clara.base_client import BaseClient 21 | from nvidia_clara.jobs_client import JobsClient 22 | import nvidia_clara.pipeline_types as pipeline_types 23 | import nvidia_clara.job_types as job_types 24 | 25 | from tests.test_client_tools import run_client_test 26 | 27 | 28 | def run_job_client(stub, method_name, *args, **kwargs): 29 | with JobsClient(target='10.0.0.1:50051', stub=stub) as client: 30 | response = getattr(client, method_name)(*args, **kwargs) 31 | return response 32 | 33 | 34 | class MockClaraJobsServiceClient: 35 | stub_method_handlers = [] 36 | 37 | def __init__(self, channel, stub=None, request_header=None, logger=None): 38 | pass 39 | 40 | def __enter__(self): 41 | return self 42 | 43 | def __exit__(self, exc_type, exc_val, exc_tb): 44 | return False 45 | 46 | def create_job(self, *args, **kwargs): 47 | return run_client_test( 48 | 'Jobs', 49 | 'create_job', 50 | run_job_client, 51 | stub_method_handlers=MockClaraJobsServiceClient.stub_method_handlers, 52 | *args, **kwargs) 53 | 54 | def cancel_job(self, *args, **kwargs): 55 | return run_client_test( 56 | 'Jobs', 57 | 'cancel_job', 58 | run_job_client, 59 | stub_method_handlers=MockClaraJobsServiceClient.stub_method_handlers, 60 | *args, **kwargs) 61 | 62 | def get_status(self, *args, **kwargs): 63 | return run_client_test( 64 | 'Jobs', 65 | 'get_status', 66 | run_job_client, 67 | stub_method_handlers=MockClaraJobsServiceClient.stub_method_handlers, 68 | *args, **kwargs) 69 | 70 | def list_jobs(self, *args, **kwargs): 71 | return run_client_test( 72 | 'Jobs', 73 | 'list_jobs', 74 | run_job_client, 75 | stub_method_handlers=MockClaraJobsServiceClient.stub_method_handlers, 76 | *args, **kwargs) 77 | 78 | def start_job(self, *args, **kwargs): 79 | return run_client_test( 80 | 'Jobs', 81 | 'start_job', 82 | run_job_client, 83 | stub_method_handlers=MockClaraJobsServiceClient.stub_method_handlers, 84 | *args, **kwargs) 85 | 86 | def job_logs(self, *args, **kwargs): 87 | return run_client_test( 88 | 'Jobs', 89 | 'job_logs', 90 | run_job_client, 91 | stub_method_handlers=MockClaraJobsServiceClient.stub_method_handlers, 92 | *args, **kwargs) 93 | 94 | def close(self): 95 | pass 96 | 97 | 98 | def test_create_job(): 99 | requests = [ 100 | jobs_pb2.JobsCreateRequest( 101 | header=BaseClient.get_request_header(), 102 | name='test job', 103 | pipeline_id=common_pb2.Identifier( 104 | value='92656d79fa414db6b294069c0e9e6df5' 105 | ), 106 | priority=jobs_pb2.JOB_PRIORITY_NORMAL 107 | ) 108 | ] 109 | 110 | responses = [ 111 | jobs_pb2.JobsCreateResponse( 112 | header=common_pb2.ResponseHeader( 113 | code=0, 114 | messages=[]), 115 | job_id=common_pb2.Identifier( 116 | value='432b274a8f754968888807fe1eba237b' 117 | ), 118 | payload_id=common_pb2.Identifier( 119 | value='7ac5c691e13d4f45894a3a70d9925936' 120 | ) 121 | ) 122 | ] 123 | 124 | stub_method_handlers = [( 125 | 'Create', 126 | 'unary_unary', 127 | ( 128 | requests, 129 | responses 130 | ) 131 | )] 132 | 133 | MockClaraJobsServiceClient.stub_method_handlers = stub_method_handlers 134 | 135 | with MockClaraJobsServiceClient('localhost:50051') as client: 136 | job_info = client.create_job( 137 | job_name='test job', 138 | pipeline_id=pipeline_types.PipelineId('92656d79fa414db6b294069c0e9e6df5') 139 | ) 140 | 141 | print(job_info.job_id.value, job_info.payload_id.value) 142 | 143 | assert job_info.job_id.value == 
'432b274a8f754968888807fe1eba237b' 144 | assert job_info.payload_id.value == '7ac5c691e13d4f45894a3a70d9925936' 145 | 146 | 147 | def test_cancel_job(): 148 | requests = [ 149 | jobs_pb2.JobsCancelRequest( 150 | header=BaseClient.get_request_header(), 151 | job_id=common_pb2.Identifier( 152 | value='432b274a8f754968888807fe1eba237b' 153 | ) 154 | ) 155 | ] 156 | 157 | responses = [ 158 | jobs_pb2.JobsCancelResponse( 159 | header=common_pb2.ResponseHeader( 160 | code=0, 161 | messages=[]), 162 | job_id=common_pb2.Identifier( 163 | value='432b274a8f754968888807fe1eba237b' 164 | ), 165 | job_state=jobs_pb2.JOB_STATE_STOPPED, 166 | job_status=jobs_pb2.JOB_STATUS_CANCELED 167 | ) 168 | ] 169 | 170 | stub_method_handlers = [( 171 | 'Cancel', 172 | 'unary_unary', 173 | ( 174 | requests, 175 | responses 176 | ) 177 | )] 178 | 179 | MockClaraJobsServiceClient.stub_method_handlers = stub_method_handlers 180 | 181 | with MockClaraJobsServiceClient('10.0.0.1:50051') as client: 182 | job_token = client.cancel_job( 183 | job_id=job_types.JobId(value='432b274a8f754968888807fe1eba237b') 184 | ) 185 | 186 | print(job_token.job_id.value, job_token.job_state, job_token.job_status) 187 | 188 | assert job_token.job_id.value == '432b274a8f754968888807fe1eba237b' 189 | assert job_token.job_state == 3 190 | assert job_token.job_status == 3 191 | 192 | 193 | def test_get_status(): 194 | requests = [ 195 | jobs_pb2.JobsStatusRequest( 196 | header=BaseClient.get_request_header(), 197 | job_id=common_pb2.Identifier( 198 | value='432b274a8f754968888807fe1eba237b' 199 | ) 200 | ) 201 | ] 202 | 203 | fake_seconds_from_epoch = 63763345820 204 | 205 | responses = [ 206 | jobs_pb2.JobsStatusResponse( 207 | header=common_pb2.ResponseHeader( 208 | code=0, 209 | messages=[]), 210 | name="job_1", 211 | job_id=common_pb2.Identifier( 212 | value='432b274a8f754968888807fe1eba237b' 213 | ), 214 | pipeline_id=common_pb2.Identifier( 215 | value='92656d79fa414db6b294069c0e9e6df5' 216 | ), 217 | payload_id=common_pb2.Identifier( 218 | value='7ac5c691e13d4f45894a3a70d9925936' 219 | ), 220 | state=jobs_pb2.JOB_STATE_RUNNING, 221 | status=jobs_pb2.JOB_STATUS_HEALTHY, 222 | created=common_pb2.Timestamp(value=fake_seconds_from_epoch) 223 | ) 224 | ] 225 | 226 | stub_method_handlers = [( 227 | 'Status', 228 | 'unary_unary', 229 | ( 230 | requests, 231 | responses 232 | ) 233 | )] 234 | 235 | MockClaraJobsServiceClient.stub_method_handlers = stub_method_handlers 236 | 237 | with MockClaraJobsServiceClient('10.0.0.1:50051') as client: 238 | job_details = client.get_status( 239 | job_id=job_types.JobId(value='432b274a8f754968888807fe1eba237b') 240 | ) 241 | 242 | print(job_details.job_id.value, job_details.job_state, job_details.job_status) 243 | print(job_details.date_created) 244 | print(datetime.datetime.fromtimestamp(float(fake_seconds_from_epoch) - 62135596800)) 245 | 246 | assert job_details.name == "job_1" 247 | assert job_details.job_id.value == '432b274a8f754968888807fe1eba237b' 248 | assert job_details.pipeline_id.value == '92656d79fa414db6b294069c0e9e6df5' 249 | assert job_details.payload_id.value == '7ac5c691e13d4f45894a3a70d9925936' 250 | assert job_details.job_state == 2 251 | assert job_details.job_status == 1 252 | assert job_details.date_created == datetime.datetime.fromtimestamp( 253 | float(fake_seconds_from_epoch) - 62135596800).astimezone(datetime.timezone.utc) 254 | 255 | 256 | def test_list_jobs(): 257 | requests = [ 258 | jobs_pb2.JobsListRequest( 259 | header=BaseClient.get_request_header() 260 | ) 261 | ] 262 | 263 | 
responses = [ 264 | jobs_pb2.JobsListResponse( 265 | header=common_pb2.ResponseHeader( 266 | code=0, 267 | messages=[]), 268 | job_details=jobs_pb2.JobsListResponse.JobDetails( 269 | job_name="job_1", 270 | job_id=common_pb2.Identifier( 271 | value="432b274a8f754968888807fe1eba237b" 272 | ), 273 | payload_id=common_pb2.Identifier( 274 | value='532b274a8f754968888807fe1eba237b' 275 | ), 276 | pipeline_id=common_pb2.Identifier( 277 | value='932b274a8f754968888807fe1eba237b' 278 | ), 279 | created=common_pb2.Timestamp( 280 | value=63750823591 281 | ) 282 | ) 283 | ), 284 | jobs_pb2.JobsListResponse( 285 | header=common_pb2.ResponseHeader( 286 | code=0, 287 | messages=[]), 288 | job_details=jobs_pb2.JobsListResponse.JobDetails( 289 | job_name="job_2", 290 | job_id=common_pb2.Identifier( 291 | value='212b274a8f754968888807fe1eba237b' 292 | ), 293 | payload_id=common_pb2.Identifier( 294 | value='212b274a8f754968888807fe1eba237b' 295 | ), 296 | pipeline_id=common_pb2.Identifier( 297 | value='322b274a8f754968888807fe1eba237b' 298 | ), 299 | created=common_pb2.Timestamp( 300 | value=63750823591 301 | ) 302 | ) 303 | ) 304 | ] 305 | 306 | stub_method_handlers = [( 307 | 'List', 308 | 'unary_stream', 309 | ( 310 | requests, 311 | responses 312 | ) 313 | )] 314 | 315 | MockClaraJobsServiceClient.stub_method_handlers = stub_method_handlers 316 | 317 | with MockClaraJobsServiceClient('10.0.0.1:50051') as client: 318 | list_jobs = client.list_jobs() 319 | 320 | print("Length of list response: " + str(len(list_jobs))) 321 | 322 | assert len(list_jobs) == 2 323 | 324 | assert list_jobs[0].name == "job_1" 325 | assert list_jobs[0].job_id.value == "432b274a8f754968888807fe1eba237b" 326 | assert list_jobs[0].payload_id.value == "532b274a8f754968888807fe1eba237b" 327 | assert list_jobs[0].pipeline_id.value == "932b274a8f754968888807fe1eba237b" 328 | assert list_jobs[0].date_created == datetime.datetime(2021, 3, 8, 18, 6, 31, tzinfo=datetime.timezone.utc) 329 | 330 | assert list_jobs[1].name == "job_2" 331 | assert list_jobs[1].job_id.value == '212b274a8f754968888807fe1eba237b' 332 | assert list_jobs[1].payload_id.value == '212b274a8f754968888807fe1eba237b' 333 | assert list_jobs[1].pipeline_id.value == '322b274a8f754968888807fe1eba237b' 334 | assert list_jobs[1].date_created == datetime.datetime(2021, 3, 8, 18, 6, 31, tzinfo=datetime.timezone.utc) 335 | 336 | 337 | def test_start_job(): 338 | requests = [ 339 | jobs_pb2.JobsStartRequest( 340 | header=BaseClient.get_request_header(), 341 | job_id=common_pb2.Identifier( 342 | value='432b274a8f754968888807fe1eba237b' 343 | ) 344 | ) 345 | ] 346 | 347 | responses = [ 348 | jobs_pb2.JobsStartResponse( 349 | header=common_pb2.ResponseHeader( 350 | code=0, 351 | messages=[]), 352 | state=jobs_pb2.JOB_STATE_RUNNING, 353 | status=jobs_pb2.JOB_STATUS_HEALTHY, 354 | priority=jobs_pb2.JOB_PRIORITY_NORMAL 355 | ) 356 | ] 357 | 358 | stub_method_handlers = [( 359 | 'Start', 360 | 'unary_unary', 361 | ( 362 | requests, 363 | responses 364 | ) 365 | )] 366 | 367 | MockClaraJobsServiceClient.stub_method_handlers = stub_method_handlers 368 | 369 | with MockClaraJobsServiceClient('10.0.0.1:50051') as client: 370 | job_token = client.start_job( 371 | job_id=job_types.JobId(value='432b274a8f754968888807fe1eba237b') 372 | ) 373 | 374 | print(job_token.job_id.value, job_token.job_state, job_token.job_status) 375 | 376 | assert job_token.job_id.value == '432b274a8f754968888807fe1eba237b' 377 | assert job_token.job_state == 2 378 | assert job_token.job_status == 1 379 | 380 | 381 | 
def test_read_logs(): 382 | requests = [ 383 | jobs_pb2.JobsReadLogsRequest( 384 | header=BaseClient.get_request_header(), 385 | job_id=common_pb2.Identifier( 386 | value='432b274a8f754968888807fe1eba237b' 387 | ), 388 | operator_name="dicom-reader" 389 | ) 390 | ] 391 | 392 | responses = [ 393 | jobs_pb2.JobsReadLogsResponse( 394 | header=common_pb2.ResponseHeader( 395 | code=0, 396 | messages=[]), 397 | job_id=common_pb2.Identifier( 398 | value='432b274a8f754968888807fe1eba237b' 399 | ), 400 | operator_name="Dicom Reader", 401 | logs=["Log_String_0", "Log_String_1"] 402 | ), 403 | jobs_pb2.JobsReadLogsResponse( 404 | header=common_pb2.ResponseHeader( 405 | code=0, 406 | messages=[]), 407 | job_id=common_pb2.Identifier( 408 | value='432b274a8f754968888807fe1eba237b' 409 | ), 410 | operator_name="Dicom Reader", 411 | logs=["Log_String_2", "Log_String_3"] 412 | ) 413 | ] 414 | 415 | stub_method_handlers = [( 416 | 'ReadLogs', 417 | 'unary_stream', 418 | ( 419 | requests, 420 | responses 421 | ) 422 | )] 423 | 424 | MockClaraJobsServiceClient.stub_method_handlers = stub_method_handlers 425 | 426 | with MockClaraJobsServiceClient('10.0.0.1:50051') as client: 427 | job_logs = client.job_logs( 428 | job_id=job_types.JobId(value='432b274a8f754968888807fe1eba237b'), 429 | operator_name="dicom-reader" 430 | ) 431 | 432 | print(len(job_logs)) 433 | 434 | assert len(job_logs) == 4 435 | assert job_logs[0] == "Log_String_0" 436 | assert job_logs[1] == "Log_String_1" 437 | assert job_logs[2] == "Log_String_2" 438 | assert job_logs[3] == "Log_String_3" 439 | -------------------------------------------------------------------------------- /nvidia_clara/grpc/models_pb2_grpc.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | # -*- coding: utf-8 -*- 16 | # Generated by the protocol buffer compiler. DO NOT EDIT! 17 | # source: nvidia/clara/platform/clara.proto 18 | 19 | # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 20 | import grpc 21 | 22 | from nvidia_clara.grpc import models_pb2 as nvidia_dot_clara_dot_platform_dot_models__pb2 23 | 24 | 25 | class ModelsStub(object): 26 | # missing associated documentation comment in .proto file 27 | pass 28 | 29 | def __init__(self, channel): 30 | """Constructor. 31 | 32 | Args: 33 | channel: A grpc.Channel. 
34 | """ 35 | self.AddMetadata = channel.unary_unary( 36 | '/nvidia.clara.platform.Models/AddMetadata', 37 | request_serializer=nvidia_dot_clara_dot_platform_dot_models__pb2.ModelsAddMetadataRequest.SerializeToString, 38 | response_deserializer=nvidia_dot_clara_dot_platform_dot_models__pb2.ModelsAddMetadataResponse.FromString, 39 | ) 40 | self.CreateCatalog = channel.unary_unary( 41 | '/nvidia.clara.platform.Models/CreateCatalog', 42 | request_serializer=nvidia_dot_clara_dot_platform_dot_models__pb2.ModelsCreateCatalogRequest.SerializeToString, 43 | response_deserializer=nvidia_dot_clara_dot_platform_dot_models__pb2.ModelsCreateCatalogResponse.FromString, 44 | ) 45 | self.CreateInstance = channel.unary_unary( 46 | '/nvidia.clara.platform.Models/CreateInstance', 47 | request_serializer=nvidia_dot_clara_dot_platform_dot_models__pb2.ModelsCreateInstanceRequest.SerializeToString, 48 | response_deserializer=nvidia_dot_clara_dot_platform_dot_models__pb2.ModelsCreateInstanceResponse.FromString, 49 | ) 50 | self.DeleteCatalog = channel.unary_unary( 51 | '/nvidia.clara.platform.Models/DeleteCatalog', 52 | request_serializer=nvidia_dot_clara_dot_platform_dot_models__pb2.ModelsDeleteCatalogRequest.SerializeToString, 53 | response_deserializer=nvidia_dot_clara_dot_platform_dot_models__pb2.ModelsDeleteCatalogResponse.FromString, 54 | ) 55 | self.DeleteInstance = channel.unary_unary( 56 | '/nvidia.clara.platform.Models/DeleteInstance', 57 | request_serializer=nvidia_dot_clara_dot_platform_dot_models__pb2.ModelsDeleteInstanceRequest.SerializeToString, 58 | response_deserializer=nvidia_dot_clara_dot_platform_dot_models__pb2.ModelsDeleteInstanceResponse.FromString, 59 | ) 60 | self.DeleteModel = channel.unary_unary( 61 | '/nvidia.clara.platform.Models/DeleteModel', 62 | request_serializer=nvidia_dot_clara_dot_platform_dot_models__pb2.ModelsDeleteModelRequest.SerializeToString, 63 | response_deserializer=nvidia_dot_clara_dot_platform_dot_models__pb2.ModelsDeleteModelResponse.FromString, 64 | ) 65 | self.DownloadModel = channel.unary_stream( 66 | '/nvidia.clara.platform.Models/DownloadModel', 67 | request_serializer=nvidia_dot_clara_dot_platform_dot_models__pb2.ModelsDownloadModelRequest.SerializeToString, 68 | response_deserializer=nvidia_dot_clara_dot_platform_dot_models__pb2.ModelsDownloadModelResponse.FromString, 69 | ) 70 | self.ListCatalogs = channel.unary_stream( 71 | '/nvidia.clara.platform.Models/ListCatalogs', 72 | request_serializer=nvidia_dot_clara_dot_platform_dot_models__pb2.ModelsListCatalogsRequest.SerializeToString, 73 | response_deserializer=nvidia_dot_clara_dot_platform_dot_models__pb2.ModelsListCatalogsResponse.FromString, 74 | ) 75 | self.ListInstances = channel.unary_stream( 76 | '/nvidia.clara.platform.Models/ListInstances', 77 | request_serializer=nvidia_dot_clara_dot_platform_dot_models__pb2.ModelsListInstancesRequest.SerializeToString, 78 | response_deserializer=nvidia_dot_clara_dot_platform_dot_models__pb2.ModelsListInstancesResponse.FromString, 79 | ) 80 | self.ListModels = channel.unary_stream( 81 | '/nvidia.clara.platform.Models/ListModels', 82 | request_serializer=nvidia_dot_clara_dot_platform_dot_models__pb2.ModelsListModelsRequest.SerializeToString, 83 | response_deserializer=nvidia_dot_clara_dot_platform_dot_models__pb2.ModelsListModelsResponse.FromString, 84 | ) 85 | self.ReadCatalog = channel.unary_stream( 86 | '/nvidia.clara.platform.Models/ReadCatalog', 87 | request_serializer=nvidia_dot_clara_dot_platform_dot_models__pb2.ModelsReadCatalogRequest.SerializeToString, 88 | 
response_deserializer=nvidia_dot_clara_dot_platform_dot_models__pb2.ModelsReadCatalogResponse.FromString, 89 | ) 90 | self.ReadInstance = channel.unary_stream( 91 | '/nvidia.clara.platform.Models/ReadInstance', 92 | request_serializer=nvidia_dot_clara_dot_platform_dot_models__pb2.ModelsReadInstanceRequest.SerializeToString, 93 | response_deserializer=nvidia_dot_clara_dot_platform_dot_models__pb2.ModelsReadInstanceResponse.FromString, 94 | ) 95 | self.RemoveMetadata = channel.unary_unary( 96 | '/nvidia.clara.platform.Models/RemoveMetadata', 97 | request_serializer=nvidia_dot_clara_dot_platform_dot_models__pb2.ModelsRemoveMetadataRequest.SerializeToString, 98 | response_deserializer=nvidia_dot_clara_dot_platform_dot_models__pb2.ModelsRemoveMetadataResponse.FromString, 99 | ) 100 | self.UpdateCatalog = channel.stream_unary( 101 | '/nvidia.clara.platform.Models/UpdateCatalog', 102 | request_serializer=nvidia_dot_clara_dot_platform_dot_models__pb2.ModelsUpdateCatalogRequest.SerializeToString, 103 | response_deserializer=nvidia_dot_clara_dot_platform_dot_models__pb2.ModelsUpdateCatalogResponse.FromString, 104 | ) 105 | self.UpdateInstance = channel.stream_unary( 106 | '/nvidia.clara.platform.Models/UpdateInstance', 107 | request_serializer=nvidia_dot_clara_dot_platform_dot_models__pb2.ModelsUpdateInstanceRequest.SerializeToString, 108 | response_deserializer=nvidia_dot_clara_dot_platform_dot_models__pb2.ModelsUpdateInstanceResponse.FromString, 109 | ) 110 | self.UploadModel = channel.stream_unary( 111 | '/nvidia.clara.platform.Models/UploadModel', 112 | request_serializer=nvidia_dot_clara_dot_platform_dot_models__pb2.ModelsUploadModelRequest.SerializeToString, 113 | response_deserializer=nvidia_dot_clara_dot_platform_dot_models__pb2.ModelsUploadModelResponse.FromString, 114 | ) 115 | 116 | 117 | class ModelsServicer(object): 118 | # missing associated documentation comment in .proto file 119 | pass 120 | 121 | def AddMetadata(self, request, context): 122 | """Requests the addition of metadata to a model. 123 | """ 124 | context.set_code(grpc.StatusCode.UNIMPLEMENTED) 125 | context.set_details('Method not implemented!') 126 | raise NotImplementedError('Method not implemented!') 127 | 128 | def CreateCatalog(self, request, context): 129 | """Requests the creation of a model catalog from the model repository. 130 | """ 131 | context.set_code(grpc.StatusCode.UNIMPLEMENTED) 132 | context.set_details('Method not implemented!') 133 | raise NotImplementedError('Method not implemented!') 134 | 135 | def CreateInstance(self, request, context): 136 | """Requests the creation of a model catalog instance from the model repository. 137 | """ 138 | context.set_code(grpc.StatusCode.UNIMPLEMENTED) 139 | context.set_details('Method not implemented!') 140 | raise NotImplementedError('Method not implemented!') 141 | 142 | def DeleteCatalog(self, request, context): 143 | """Requests the deletion of a model catalog from the model repository. 144 | """ 145 | context.set_code(grpc.StatusCode.UNIMPLEMENTED) 146 | context.set_details('Method not implemented!') 147 | raise NotImplementedError('Method not implemented!') 148 | 149 | def DeleteInstance(self, request, context): 150 | """Requests the deletion of a model catalog instance from the model repository. 
151 | """ 152 | context.set_code(grpc.StatusCode.UNIMPLEMENTED) 153 | context.set_details('Method not implemented!') 154 | raise NotImplementedError('Method not implemented!') 155 | 156 | def DeleteModel(self, request, context): 157 | """Requests the deletion of a model from the model repository. 158 | """ 159 | context.set_code(grpc.StatusCode.UNIMPLEMENTED) 160 | context.set_details('Method not implemented!') 161 | raise NotImplementedError('Method not implemented!') 162 | 163 | def DownloadModel(self, request, context): 164 | """Requests the download of an existing model from the model repository. 165 | """ 166 | context.set_code(grpc.StatusCode.UNIMPLEMENTED) 167 | context.set_details('Method not implemented!') 168 | raise NotImplementedError('Method not implemented!') 169 | 170 | def ListCatalogs(self, request, context): 171 | """Requests the list of all existing catalogs from the model repository. 172 | """ 173 | context.set_code(grpc.StatusCode.UNIMPLEMENTED) 174 | context.set_details('Method not implemented!') 175 | raise NotImplementedError('Method not implemented!') 176 | 177 | def ListInstances(self, request, context): 178 | """Requests a list of all existing model catalog instances from the model repository. 179 | """ 180 | context.set_code(grpc.StatusCode.UNIMPLEMENTED) 181 | context.set_details('Method not implemented!') 182 | raise NotImplementedError('Method not implemented!') 183 | 184 | def ListModels(self, request, context): 185 | """Requests a list of available models from the model repository. 186 | """ 187 | context.set_code(grpc.StatusCode.UNIMPLEMENTED) 188 | context.set_details('Method not implemented!') 189 | raise NotImplementedError('Method not implemented!') 190 | 191 | def ReadCatalog(self, request, context): 192 | """Requests the contents of a model catalog from the model repository. 193 | """ 194 | context.set_code(grpc.StatusCode.UNIMPLEMENTED) 195 | context.set_details('Method not implemented!') 196 | raise NotImplementedError('Method not implemented!') 197 | 198 | def ReadInstance(self, request, context): 199 | """Requests the contents of a model catalog instance from the model repository. 200 | """ 201 | context.set_code(grpc.StatusCode.UNIMPLEMENTED) 202 | context.set_details('Method not implemented!') 203 | raise NotImplementedError('Method not implemented!') 204 | 205 | def RemoveMetadata(self, request, context): 206 | """Requests the removal of metadata from a model. 207 | """ 208 | context.set_code(grpc.StatusCode.UNIMPLEMENTED) 209 | context.set_details('Method not implemented!') 210 | raise NotImplementedError('Method not implemented!') 211 | 212 | def UpdateCatalog(self, request_iterator, context): 213 | """Requests the update of an existing model catalog in the model repository. 214 | """ 215 | context.set_code(grpc.StatusCode.UNIMPLEMENTED) 216 | context.set_details('Method not implemented!') 217 | raise NotImplementedError('Method not implemented!') 218 | 219 | def UpdateInstance(self, request_iterator, context): 220 | """Requests the update of an existing model catalog instance with a new set of models. 221 | """ 222 | context.set_code(grpc.StatusCode.UNIMPLEMENTED) 223 | context.set_details('Method not implemented!') 224 | raise NotImplementedError('Method not implemented!') 225 | 226 | def UploadModel(self, request_iterator, context): 227 | """Requests the uploads of a new model to model repository. 
228 | """ 229 | context.set_code(grpc.StatusCode.UNIMPLEMENTED) 230 | context.set_details('Method not implemented!') 231 | raise NotImplementedError('Method not implemented!') 232 | 233 | 234 | def add_ModelsServicer_to_server(servicer, server): 235 | rpc_method_handlers = { 236 | 'AddMetadata': grpc.unary_unary_rpc_method_handler( 237 | servicer.AddMetadata, 238 | request_deserializer=nvidia_dot_clara_dot_platform_dot_models__pb2.ModelsAddMetadataRequest.FromString, 239 | response_serializer=nvidia_dot_clara_dot_platform_dot_models__pb2.ModelsAddMetadataResponse.SerializeToString, 240 | ), 241 | 'CreateCatalog': grpc.unary_unary_rpc_method_handler( 242 | servicer.CreateCatalog, 243 | request_deserializer=nvidia_dot_clara_dot_platform_dot_models__pb2.ModelsCreateCatalogRequest.FromString, 244 | response_serializer=nvidia_dot_clara_dot_platform_dot_models__pb2.ModelsCreateCatalogResponse.SerializeToString, 245 | ), 246 | 'CreateInstance': grpc.unary_unary_rpc_method_handler( 247 | servicer.CreateInstance, 248 | request_deserializer=nvidia_dot_clara_dot_platform_dot_models__pb2.ModelsCreateInstanceRequest.FromString, 249 | response_serializer=nvidia_dot_clara_dot_platform_dot_models__pb2.ModelsCreateInstanceResponse.SerializeToString, 250 | ), 251 | 'DeleteCatalog': grpc.unary_unary_rpc_method_handler( 252 | servicer.DeleteCatalog, 253 | request_deserializer=nvidia_dot_clara_dot_platform_dot_models__pb2.ModelsDeleteCatalogRequest.FromString, 254 | response_serializer=nvidia_dot_clara_dot_platform_dot_models__pb2.ModelsDeleteCatalogResponse.SerializeToString, 255 | ), 256 | 'DeleteInstance': grpc.unary_unary_rpc_method_handler( 257 | servicer.DeleteInstance, 258 | request_deserializer=nvidia_dot_clara_dot_platform_dot_models__pb2.ModelsDeleteInstanceRequest.FromString, 259 | response_serializer=nvidia_dot_clara_dot_platform_dot_models__pb2.ModelsDeleteInstanceResponse.SerializeToString, 260 | ), 261 | 'DeleteModel': grpc.unary_unary_rpc_method_handler( 262 | servicer.DeleteModel, 263 | request_deserializer=nvidia_dot_clara_dot_platform_dot_models__pb2.ModelsDeleteModelRequest.FromString, 264 | response_serializer=nvidia_dot_clara_dot_platform_dot_models__pb2.ModelsDeleteModelResponse.SerializeToString, 265 | ), 266 | 'DownloadModel': grpc.unary_stream_rpc_method_handler( 267 | servicer.DownloadModel, 268 | request_deserializer=nvidia_dot_clara_dot_platform_dot_models__pb2.ModelsDownloadModelRequest.FromString, 269 | response_serializer=nvidia_dot_clara_dot_platform_dot_models__pb2.ModelsDownloadModelResponse.SerializeToString, 270 | ), 271 | 'ListCatalogs': grpc.unary_stream_rpc_method_handler( 272 | servicer.ListCatalogs, 273 | request_deserializer=nvidia_dot_clara_dot_platform_dot_models__pb2.ModelsListCatalogsRequest.FromString, 274 | response_serializer=nvidia_dot_clara_dot_platform_dot_models__pb2.ModelsListCatalogsResponse.SerializeToString, 275 | ), 276 | 'ListInstances': grpc.unary_stream_rpc_method_handler( 277 | servicer.ListInstances, 278 | request_deserializer=nvidia_dot_clara_dot_platform_dot_models__pb2.ModelsListInstancesRequest.FromString, 279 | response_serializer=nvidia_dot_clara_dot_platform_dot_models__pb2.ModelsListInstancesResponse.SerializeToString, 280 | ), 281 | 'ListModels': grpc.unary_stream_rpc_method_handler( 282 | servicer.ListModels, 283 | request_deserializer=nvidia_dot_clara_dot_platform_dot_models__pb2.ModelsListModelsRequest.FromString, 284 | response_serializer=nvidia_dot_clara_dot_platform_dot_models__pb2.ModelsListModelsResponse.SerializeToString, 285 | 
), 286 | 'ReadCatalog': grpc.unary_stream_rpc_method_handler( 287 | servicer.ReadCatalog, 288 | request_deserializer=nvidia_dot_clara_dot_platform_dot_models__pb2.ModelsReadCatalogRequest.FromString, 289 | response_serializer=nvidia_dot_clara_dot_platform_dot_models__pb2.ModelsReadCatalogResponse.SerializeToString, 290 | ), 291 | 'ReadInstance': grpc.unary_stream_rpc_method_handler( 292 | servicer.ReadInstance, 293 | request_deserializer=nvidia_dot_clara_dot_platform_dot_models__pb2.ModelsReadInstanceRequest.FromString, 294 | response_serializer=nvidia_dot_clara_dot_platform_dot_models__pb2.ModelsReadInstanceResponse.SerializeToString, 295 | ), 296 | 'RemoveMetadata': grpc.unary_unary_rpc_method_handler( 297 | servicer.RemoveMetadata, 298 | request_deserializer=nvidia_dot_clara_dot_platform_dot_models__pb2.ModelsRemoveMetadataRequest.FromString, 299 | response_serializer=nvidia_dot_clara_dot_platform_dot_models__pb2.ModelsRemoveMetadataResponse.SerializeToString, 300 | ), 301 | 'UpdateCatalog': grpc.stream_unary_rpc_method_handler( 302 | servicer.UpdateCatalog, 303 | request_deserializer=nvidia_dot_clara_dot_platform_dot_models__pb2.ModelsUpdateCatalogRequest.FromString, 304 | response_serializer=nvidia_dot_clara_dot_platform_dot_models__pb2.ModelsUpdateCatalogResponse.SerializeToString, 305 | ), 306 | 'UpdateInstance': grpc.stream_unary_rpc_method_handler( 307 | servicer.UpdateInstance, 308 | request_deserializer=nvidia_dot_clara_dot_platform_dot_models__pb2.ModelsUpdateInstanceRequest.FromString, 309 | response_serializer=nvidia_dot_clara_dot_platform_dot_models__pb2.ModelsUpdateInstanceResponse.SerializeToString, 310 | ), 311 | 'UploadModel': grpc.stream_unary_rpc_method_handler( 312 | servicer.UploadModel, 313 | request_deserializer=nvidia_dot_clara_dot_platform_dot_models__pb2.ModelsUploadModelRequest.FromString, 314 | response_serializer=nvidia_dot_clara_dot_platform_dot_models__pb2.ModelsUploadModelResponse.SerializeToString, 315 | ), 316 | } 317 | generic_handler = grpc.method_handlers_generic_handler( 318 | 'nvidia.clara.platform.Models', rpc_method_handlers) 319 | server.add_generic_rpc_handlers((generic_handler,)) 320 | --------------------------------------------------------------------------------
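The generated ModelsStub / ModelsServicer pair above follows the standard grpc-python pattern. As a minimal sketch of how these pieces are typically wired together (the port, executor size, and no-op servicer subclass below are placeholder assumptions, not part of this repository):

from concurrent import futures
import grpc

from nvidia_clara.grpc import models_pb2_grpc


class NoOpModelsServicer(models_pb2_grpc.ModelsServicer):
    # Placeholder servicer; a real implementation would override the RPC
    # methods defined above (AddMetadata, ListModels, UploadModel, ...).
    pass


# Server side: register the servicer and listen on an insecure port (placeholder).
server = grpc.server(futures.ThreadPoolExecutor(max_workers=4))
models_pb2_grpc.add_ModelsServicer_to_server(NoOpModelsServicer(), server)
server.add_insecure_port('[::]:50051')
server.start()

# Client side: the generated stub only needs a grpc.Channel.
channel = grpc.insecure_channel('localhost:50051')
stub = models_pb2_grpc.ModelsStub(channel)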
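Stepping back to the jobs API covered earlier in this section, the JobsClient exercised by tests/test_jobs_client.py supports an end-to-end workflow roughly as follows. This is a minimal sketch based only on the calls visible in those tests: the server address, job name, and pipeline identifier are placeholders, and it assumes JobsClient can be constructed with just a target when no stub is injected.

import nvidia_clara.job_types as job_types
import nvidia_clara.pipeline_types as pipeline_types
from nvidia_clara.jobs_client import JobsClient

with JobsClient(target='10.0.0.1:50051') as client:
    # Queue a job against an existing pipeline (identifier is a placeholder).
    job_info = client.create_job(
        job_name='example-job',
        pipeline_id=pipeline_types.PipelineId('92656d79fa414db6b294069c0e9e6df5')
    )

    # Start the queued job; the returned token carries state, status, and priority.
    job_token = client.start_job(job_id=job_info.job_id)

    # Poll for details such as name, state, status, and creation time.
    details = client.get_status(job_id=job_info.job_id)
    print(details.name, details.job_state, details.job_status, details.date_created)

    # Read per-operator logs ("dicom-reader" is the operator name used in the tests).
    for line in client.job_logs(job_id=job_info.job_id, operator_name='dicom-reader'):
        print(line)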